diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..20f6306 --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 150 +# mccabe +max-complexity = 10 +ignore = + # E203 # whitespace before : \ No newline at end of file diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml new file mode 100644 index 0000000..6ee2354 --- /dev/null +++ b/.github/workflows/pythonpackage.yml @@ -0,0 +1,36 @@ +name: Python package + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7, 3.8] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install poetry + make install + - name: Lint + run: | + make lint + - name: Test + run: | + make test + - name: Type checking + run: | + make typecheck \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b7f6eff --- /dev/null +++ b/.gitignore @@ -0,0 +1,132 @@ +data/kenlm/* +!data/kenlm/.gitkeep + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..cb421bb --- /dev/null +++ b/Makefile @@ -0,0 +1,68 @@ + +BASE_DIR := $(shell pwd) +POETRY_RUN := poetry run + +TRAIN_ARTIFACTS_DIR := ${BASE_DIR}/log/train_$(shell date +'%Y%m%d') +TRAIN_CONFIG_PATH := ${BASE_DIR}/config/train.yaml +EVALUATION_CONFIG_PATH := ${BASE_DIR}/config/evaluation.yaml +PREDICT_CONFIG_PATH := ${BASE_DIR}/config/predict.yaml +INTREPRET_CONFIG_PATH := ${BASE_DIR}/config/interpret.yaml + +# override configs (defaults No overrides) +OVERRIDES := + +TENSORBORD_LOG_DIR := ... 
+ +install: + poetry install + +notebook: + ${POETRY_RUN} jupyter-notebook + +train: + @echo training + mkdir -p ${TRAIN_ARTIFACTS_DIR} + ${POETRY_RUN} expats train ${TRAIN_CONFIG_PATH} ${TRAIN_ARTIFACTS_DIR} --overrides ${OVERRIDES} + +train-debug: + IS_DEBUG=true make train TRAIN_CONFIG_PATH=config/train_debug.yaml TRAIN_ARTIFACTS_DIR=log/debug + +evaluate: + @echo evaluation on pre-trained model + ${POETRY_RUN} expats evaluate ${EVALUATION_CONFIG_PATH} --overrides ${OVERRIDES} + +evaluate-debug: + IS_DEBUG=true make evaluate EVALUATION_CONFIG_PATH=config/evaluate_debug.yaml + +predict: + @echo evaluation on pre-trained model + ${POETRY_RUN} expats predict ${PREDICT_CONFIG_PATH} ${PREDICT_OUTPUT_PATH} --overrides ${OVERRIDES} + +predict-debug: + IS_DEBUG=true make predict PREDICT_CONFIG_PATH=config/predict_debug.yaml PREDICT_OUTPUT_PATH=log/debug_predict + +interpret: + @echo interpreting pre-trained model + ${POETRY_RUN} expats interpret ${INTREPRET_CONFIG_PATH} --overrides ${OVERRIDES} + +interpret-debug: + IS_DEBUG=true make interpret INTREPRET_CONFIG_PATH=config/interpret_debug.yaml + +train-then-evaluate: + $(eval ARTIFACT_PATH := ${BASE_DIR}/log/$(shell date +'%Y%m%d%H%M%S')) + make train TRAIN_CONFIG_PATH=${TRAIN_CONFIG_PATH} TRAIN_ARTIFACTS_DIR=${ARTIFACT_PATH} OVERRIDES='${TRAIN_OVERRIDES}' + make evaluate EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG_PATH} OVERRIDES='artifact_path=${ARTIFACT_PATH} ${EVALUATION_OVERRIDES}' + rm -rf ${ARTIFACT_PATH} + +tensorboard: + ${POETRY_RUN} tensorboard --logdir ${TENSORBORD_LOG_DIR} + +# CI +lint: + ${POETRY_RUN} flake8 --show-source --statistics ./expats ./tests + +test: + ${POETRY_RUN} pytest -rf --cov=./expats ./tests + +typecheck: + @echo currently not support to check types \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..8e3e5fa --- /dev/null +++ b/README.md @@ -0,0 +1,86 @@ +# EXPATS: A Toolkit for Explainable Automated Text Scoring + +![EXPATS: A Toolkit for 
Explainable Automated Text Scoring](overview.png) + +EXPATS is an open-source framework for automated text scoring (ATS) tasks, such as automated essay scoring and readability assessment. Users can develop and experiment with different ATS models quickly by using the toolkit's easy-to-use components, the configuration system, and the command-line interface. The toolkit also provides seamless integration with [the Language Interpretability Tool (LIT)](https://pair-code.github.io/lit/) so that one can interpret and visualize models and their predictions. + +## Requirements + +- [poetry](https://python-poetry.org/) + +## Usage + +1. Clone this repository. + +```bash +$ git clone git@github.com:octanove/expats.git +$ cd expats +``` + +2. Install Python dependencies via poetry, and launch an interactive shell + +```bash +$ poetry install +$ poetry shell +``` + +3. Prepare the dataset for your task + +We'll use ASAP-AES, a standard dataset for autoamted essay scoring. You can download the dataset from [the Kaggle page](https://www.kaggle.com/c/asap-aes). EXPATS supports a dataset reader for ASAP-AES by default. + +4. Write a config file + +In the config file, you specify the type of the task (`task`), the type of the profiler (`profiler`) and its hyperparmeters, and the dataset to use (`dataset`). An example config file for training a BERT-based regressor for ASAP-AES is shown below. + +```bash +$ cat config/asap_aes/train_bert.yaml +task: regression + +profiler: + type: TransformerRegressor + params: + trainer: + gpus: 1 + max_epochs: 80 + accumulate_grad_batches: 2 + network: + output_normalized: true + pretrained_model_name_or_path: bert-base-uncased + lr: 4e-5 + data_loader: + batch_size: 8 + val_ratio: 0.2 + max_length: null + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv +``` + +5. Train your model + +You can train the model by running the `expats train` command as shown below. 
+ +```bash +$ expats train config/asap_aes/train_bert.yaml artifacts +``` + +The result (e.g., log file, the model weights) is stored in the directory `artifacts`. + +6. Evalute your model + +You can evaluate your model by running: + +```bash +$ expats evaluate config/asap_aes/evaluate.yaml +``` + +You can also configure the evaluation settings by modifying the configuration file. + +7. Interpret your model + +You can launch the LIT server to interpret and visualize the trained model and its behavior: +```bash +$ expats interpret config/asap_aes/interpret.yaml +``` diff --git a/config/asap_aes/evaluate.yaml b/config/asap_aes/evaluate.yaml new file mode 100644 index 0000000..b1974fa --- /dev/null +++ b/config/asap_aes/evaluate.yaml @@ -0,0 +1,30 @@ + +# Please set your artifact path specified when training +artifact_path: + +dataset: + type: asap-aes + params: + path: /home/manabe_h/workspace/octanove/profiler/data/asap-aes/training_set_rel3.tsv.random11680_rest + prompt_id: + +output_convert: + regression_to_classification: + type: MinMaxDenormalizedRoundNearestInteger + params: + x_min: + x_max: + +metrics: + classification: + - type: MacroF1 + params: + - type: MicroF1 + params: + - type: Accuracy + params: + - type: QuadraticWeightedKappa + params: + regression: + - type: PearsonCorrelation + params: diff --git a/config/asap_aes/train_bert.yaml b/config/asap_aes/train_bert.yaml new file mode 100644 index 0000000..9c86d0d --- /dev/null +++ b/config/asap_aes/train_bert.yaml @@ -0,0 +1,22 @@ +task: regression + +profiler: + type: TransformerRegressor + params: + trainer: + gpus: 1 + max_epochs: 80 + accumulate_grad_batches: 2 + network: + output_normalized: true + pretrained_model_name_or_path: bert-base-uncased + lr: 4e-5 + data_loader: + batch_size: 8 + val_ratio: 0.2 + max_length: null + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv.random11680 \ No newline at end of file diff --git a/config/asap_aes/train_distilbert.yaml 
b/config/asap_aes/train_distilbert.yaml new file mode 100644 index 0000000..ae3ef46 --- /dev/null +++ b/config/asap_aes/train_distilbert.yaml @@ -0,0 +1,21 @@ +task: regression + +profiler: + type: TransformerRegressor + params: + trainer: + gpus: 1 + max_epochs: 80 + network: + output_normalized: true + pretrained_model_name_or_path: distilbert-base-uncased + lr: 4e-5 + data_loader: + batch_size: 16 + val_ratio: 0.2 + max_length: null + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv.random11680 diff --git a/config/asap_aes/train_rf.yaml b/config/asap_aes/train_rf.yaml new file mode 100644 index 0000000..aecac5e --- /dev/null +++ b/config/asap_aes/train_rf.yaml @@ -0,0 +1,25 @@ +task: regression + +profiler: + type: DocFeatureMLRegressor + params: + features: + - type: NumberOfTokenFeature + params: + - type: AverageTokenLengthFeature + params: + - type: UnigramLikelihoodFeature + params: + # TODO: please set your local path (recommend abs path) + path: data/tatoeba/eng_sentences.tsv.cut3.tokenize.unigram.tsv + classifier: + type: rf + params: + n_estimators: 100 + max_depth: 5 + random_state: 46 + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv.random11680 \ No newline at end of file diff --git a/config/evaluate.yaml b/config/evaluate.yaml new file mode 100644 index 0000000..0b22ca5 --- /dev/null +++ b/config/evaluate.yaml @@ -0,0 +1,28 @@ + +# Please set your artifact path specified when training +artifact_path: log/train_yyyymmdd + +dataset: + type: asap-aes + params: + # NOTE: do evaluation with "training" data + path: data/asap-aes/training_set_rel3.tsv + +output_convert: + classification_to_regression: + type: ToFloat + params: + +metrics: + classification: + - type: MacroF1 + params: + - type: MicroF1 + params: + - type: Accuracy + params: + - type: QuadraticWeightedKappa + params: + regression: + - type: PearsonCorrelation + params: diff --git a/config/interactively_interpret.yaml 
b/config/interactively_interpret.yaml new file mode 100644 index 0000000..f5e6eba --- /dev/null +++ b/config/interactively_interpret.yaml @@ -0,0 +1,8 @@ + +# Please set your artifact path specified when training +artifact_path: log/train_yyyymmdd + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv \ No newline at end of file diff --git a/config/predict.yaml b/config/predict.yaml new file mode 100644 index 0000000..4301423 --- /dev/null +++ b/config/predict.yaml @@ -0,0 +1,8 @@ + +# Please set your artifact path specified when training +artifact_path: log/train_yyyymmdd + +dataset: + type: line-by-line + params: + file_path: data/example/annotate_input.txt diff --git a/config/train.yaml b/config/train.yaml new file mode 100644 index 0000000..d3ebd39 --- /dev/null +++ b/config/train.yaml @@ -0,0 +1,25 @@ + +task: classification + +profiler: + type: DocFeatureMLClassifier + params: + features: + - type: NumberOfTokenFeature + params: + - type: AverageTokenLengthFeature + params: + - type: UnigramLikelihoodFeature + params: + path: data/example/dummy_unigram.tsv + classifier: + type: rf + params: + n_estimators: 50 + max_depth: 5 + random_state: 46 + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv \ No newline at end of file diff --git a/config/train_bert_classifier.yaml b/config/train_bert_classifier.yaml new file mode 100644 index 0000000..df59de6 --- /dev/null +++ b/config/train_bert_classifier.yaml @@ -0,0 +1,20 @@ + +task: classification + +profiler: + type: TransformerClassifier + params: + trainer: + gpus: null + max_epochs: 1 + network: + num_class: 10 + pretrained_model_name_or_path: bert-base-uncased + lr: 5e-5 + data_loader: + batch_size: 16 + +dataset: + type: asap-aes + params: + path: data/asap-aes/training_set_rel3.tsv \ No newline at end of file diff --git a/data/example/annotate_input.txt b/data/example/annotate_input.txt new file mode 100644 index 0000000..84011af --- /dev/null +++ 
b/data/example/annotate_input.txt @@ -0,0 +1,2 @@ +I recommend copying the code and distributing it with your decoder. However, please send improvements upstream. +They're pretty simple and are intended to be reimplemented in your build system. \ No newline at end of file diff --git a/data/example/dummy_unigram.tsv b/data/example/dummy_unigram.tsv new file mode 100644 index 0000000..3dd59b7 --- /dev/null +++ b/data/example/dummy_unigram.tsv @@ -0,0 +1,12 @@ +. 1201360 +to 392383 +I 381699 +the 277443 +that 258837 +n't 240872 +a 202189 +is 184184 +you 181809 +do 168408 +? 158657 +, 155421 \ No newline at end of file diff --git a/expats/__init__.py b/expats/__init__.py new file mode 100644 index 0000000..16e69f8 --- /dev/null +++ b/expats/__init__.py @@ -0,0 +1,2 @@ + +__version__ = '0.1.0' diff --git a/expats/cli.py b/expats/cli.py new file mode 100644 index 0000000..edad308 --- /dev/null +++ b/expats/cli.py @@ -0,0 +1,81 @@ + +import argparse + +from expats.common.config_util import load_from_file, merge_with_dotlist +from expats.common.log import init_setup_log, get_logger +from expats.task import train, evaluate, predict, interpret + + +logger = get_logger(__name__) + + +def main(): + parser = argparse.ArgumentParser("EXPATS: A toolkit for explainable automated text scoring") + sub_parsers = parser.add_subparsers() + + train_parser = sub_parsers.add_parser("train", help="Training profiler") + train_parser.add_argument("config_path", type=str, help="path to config yaml file") + train_parser.add_argument("artifact_path", type=str, help="path to training artifacts") + train_parser.add_argument("--overrides", type=str, nargs='*', help="overide configurations") + train_parser.set_defaults(mode="train") + + evaluate_parser = sub_parsers.add_parser("evaluate", help="Evaluation of trained profiler") + evaluate_parser.add_argument("config_path", type=str, help="path to config yaml file") + evaluate_parser.add_argument("--overrides", type=str, nargs='*', help="overide 
configurations") + evaluate_parser.set_defaults(mode="evaluate") + + predict_parser = sub_parsers.add_parser("predict", help="Run inference with trained profiler") + predict_parser.add_argument("config_path", type=str, help="path to config yaml file") + predict_parser.add_argument("output_path", type=str, help="path to prediction output file") + predict_parser.add_argument("--overrides", type=str, nargs='*', help="overide configurations") + predict_parser.set_defaults(mode="predict") + + interpret_parser = sub_parsers.add_parser("interpret", help="Launch LIT server to interpret trained profiler") + interpret_parser.add_argument("config_path", type=str, help="path to config yaml file") + interpret_parser.add_argument("--overrides", type=str, nargs='*', help="overide configurations") + interpret_parser.set_defaults(mode="interpret") + + args = parser.parse_args() + + if args.mode == "train": + init_setup_log(args.artifact_path) + logger.info("##### Training #####") + logger.info(f"args: {sorted(vars(args).items())}") + config = load_from_file(args.config_path) + if args.overrides: + config = merge_with_dotlist(config, args.overrides) + logger.info(f"Loaded config: {config}") + train(config, args.artifact_path) + elif args.mode == "evaluate": + init_setup_log() # FIXME: only stdout logging is better? + logger.info("##### Evaluation #####") + logger.info(f"args: {sorted(vars(args).items())}") + config = load_from_file(args.config_path) + if args.overrides: + config = merge_with_dotlist(config, args.overrides) + logger.info(f"Loaded config: {config}") + evaluate(config) + elif args.mode == "predict": + init_setup_log() # FIXME: only stdout logging is better? 
+ logger.info("##### Prediction #####") + logger.info(f"args: {sorted(vars(args).items())}") + config = load_from_file(args.config_path) + if args.overrides: + config = merge_with_dotlist(config, args.overrides) + logger.info(f"Loaded config: {config}") + predict(config, args.output_path) + elif args.mode == "interpret": + init_setup_log() # FIXME: only stdout logging is better? + logger.info("##### Interactive interpretation #####") + logger.info(f"args: {sorted(vars(args).items())}") + config = load_from_file(args.config_path) + if args.overrides: + config = merge_with_dotlist(config, args.overrides) + logger.info(f"Loaded config: {config}") + interpret(config) + + logger.info("Done") + + +if __name__ == "__main__": + main() diff --git a/expats/common/config_util.py b/expats/common/config_util.py new file mode 100644 index 0000000..fbf1a40 --- /dev/null +++ b/expats/common/config_util.py @@ -0,0 +1,44 @@ + +from typing import Any, Dict, List + +from omegaconf import OmegaConf +import yaml + + +def dump_to_file(dic: Dict[str, Any], path: str): + """dump dict-type configuration to file + + Args: + dic (Dict[str, Any]): dict-typed configuration + path (str): path to file to dump + """ + conf = OmegaConf.create(dic) + OmegaConf.save(conf, path) + + +def load_from_file(path: str) -> Dict[str, Any]: + """load configuration yaml file + + Args: + path (str): path to file to be loaded + + Returns: + Dict[str, Any]: loaded configurations + """ + config = OmegaConf.load(path) + return yaml.load(OmegaConf.to_yaml(config), Loader=yaml.FullLoader) + + +def merge_with_dotlist(dic: Dict[str, Any], dotlist: List[str]) -> Dict[str, Any]: + """overwrite configuration values in the dictionary + + Args: + doc (Dict[str, Any]): dict-typed configulations + dotlist (bool): dotlist to overwrite config params + + Returns: + Dict[str, Any]: overwitten configurations + """ + config = OmegaConf.create(dic) + config.merge_with_dotlist(dotlist) + return yaml.load(OmegaConf.to_yaml(config), 
Loader=yaml.FullLoader) diff --git a/expats/common/instantiate.py b/expats/common/instantiate.py new file mode 100644 index 0000000..bbf1ceb --- /dev/null +++ b/expats/common/instantiate.py @@ -0,0 +1,66 @@ + +from abc import ABCMeta +from collections import defaultdict +from typing import Dict, Optional, Union + +from pydantic import BaseModel + + +class BaseConfig(BaseModel): + """Basic configuration data class + """ + @classmethod + def from_dict(cls, dic: Dict) -> "BaseConfig": + return cls( + **dic + ) + + def to_dict(self) -> Dict: + return self.dict() + + +class ConfigFactoried(metaclass=ABCMeta): + """Basic class to be created via configuration data class. + This is inspired by OpenKiwi + """ + config_class: Optional[BaseConfig] = None + _subclasses: Dict[str, Dict[str, "ConfigFactoried"]] = defaultdict(dict) + + @classmethod + def register(cls, subcls): + cls._subclasses[cls.__name__][subcls.__name__] = subcls + return subcls + + @classmethod + def get_subclass(cls, subcls_name: str): + if cls.__name__ not in cls._subclasses: + raise KeyError( + f"{cls.__name__}'s subclass is not registered. Empty. Availables: f{dict(cls._subclasses)}" + ) + cls2subcls = cls._subclasses[cls.__name__] + subcls = cls2subcls.get(subcls_name, None) + if not subcls: + raise KeyError( + f"{subcls_name} is not registered in {cls.__name__}. Here is the list: {list(cls2subcls.keys())}" + ) + return subcls + + @classmethod + def create_from_factory(cls, subcls_name: str, params: Optional[Dict]): + _params = params if params is not None else {} + subcls = cls.get_subclass(subcls_name) + if subcls.config_class: + return subcls.create( + subcls.config_class.from_dict(_params) + ) + else: + return subcls.create(_params) + + @classmethod + def create(cls, params: Union[BaseConfig, Dict]): + """ + instantiate methods. 
if config_class is set, params should be BaseConfig else Dict + """ + # default implement + _params = params if type(params) == dict else params.to_dict() + return cls(**_params) diff --git a/expats/common/log.py b/expats/common/log.py new file mode 100644 index 0000000..193a4f8 --- /dev/null +++ b/expats/common/log.py @@ -0,0 +1,39 @@ + +from datetime import datetime +import logging +import os +from typing import Optional + +from expats.settings import SETTINGS + + +DEFAULT_LOG_DIR = os.path.join( + SETTINGS.home_root_path, f"log/{datetime.now().strftime('%Y%m%d_%H:%M')}" +) +LOG_FILENAME = "log.txt" + + +def init_setup_log(log_dir: Optional[str] = None): + """setup logging to be called only once at initial stage + + Args: + log_dir (Optional[str]): path tp logging directory. Defaults to None, which means DEFAULT_LOG_DIR. + """ + _log_dir = log_dir if log_dir else DEFAULT_LOG_DIR + if not os.path.exists(_log_dir): + os.makedirs(_log_dir) + + root_logger = logging.getLogger() + if SETTINGS.is_debug: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) + log_path = os.path.join(_log_dir, LOG_FILENAME) + file_handler = logging.FileHandler(log_path) + fmt = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - %(message)s") + file_handler.setFormatter(fmt) + root_logger.addHandler(file_handler) + + +def get_logger(name: Optional[str] = None): + return logging.getLogger(name) diff --git a/expats/common/serialization.py b/expats/common/serialization.py new file mode 100644 index 0000000..9984d6a --- /dev/null +++ b/expats/common/serialization.py @@ -0,0 +1,23 @@ + +from abc import ABCMeta, abstractclassmethod, abstractmethod + + +class Serializable(metaclass=ABCMeta): + + @abstractclassmethod + def load(cls, artifact_path: str) -> 'Serializable': + """ + load serialized objects in-memory. 
+ Args: + artifact_path: path to artifact (directory or file path) + """ + raise NotImplementedError() + + @abstractmethod + def save(self, artifact_path: str): + """ + dump objects into the storage. + Args: + artifact_path: path to artifact (directory or file path) + """ + raise NotImplementedError() diff --git a/expats/common/tensor.py b/expats/common/tensor.py new file mode 100644 index 0000000..1356136 --- /dev/null +++ b/expats/common/tensor.py @@ -0,0 +1,20 @@ + +from typing import Dict, List + +import numpy as np + + +def _extract_batch_length(preds: Dict[str, np.ndarray]) -> int: + """Extracts batch length of predictions.""" + batch_length = None + for key, value in preds.items(): + batch_length = batch_length or value.shape[0] + if value.shape[0] != batch_length: + raise ValueError(f"Batch length of predictions should be same. {key} has different batch length than others.") + return batch_length + + +def unbatch_preds(preds: Dict[str, np.ndarray]) -> List[Dict[str, np.ndarray]]: + """Unbatch predictions, as in estimator.predict(). + """ + return [{key: value[i] for key, value in preds.items()} for i in range(_extract_batch_length(preds))] diff --git a/expats/common/type.py b/expats/common/type.py new file mode 100644 index 0000000..4f4e9b6 --- /dev/null +++ b/expats/common/type.py @@ -0,0 +1,13 @@ + +from enum import Enum + + +class TaskType(str, Enum): + CLASSIFICATION = "classification" + REGRESSION = "regression" + + +# profiler task types. 
+SingleTextInput = str +ClassificationOutput = str +RegressionOutput = float diff --git a/expats/data/asap_aes.py b/expats/data/asap_aes.py new file mode 100644 index 0000000..67f70fa --- /dev/null +++ b/expats/data/asap_aes.py @@ -0,0 +1,132 @@ + +from dataclasses import dataclass +from typing import Optional + +import pandas as pd + +from expats.common.log import get_logger +from expats.data.dataset import Dataset +from expats.data.instance import Instance, LabeledTextInstance, ScoredTextInstance + + +logger = get_logger(__name__) + +# NOTE: different from https://github.com/nusnlp/nea/blob/becd233ccd4788fd307da77a41b4731d31c0fab9/nea/asap_reader.py#L14 +PROMPT_ID2SCORE_RANGE = { + -1: (0, 60), + 1: (2, 12), + 2: (1, 6), + 3: (0, 3), + 4: (0, 3), + 5: (0, 4), + 6: (0, 4), + 7: (2, 24), + 8: (10, 60), +} + + +@dataclass(frozen=True) +class ASAPAESInstance(Instance): + """Instance for asap-aes dataset + training tsv information is here + RangeIndex: 12976 entries, 0 to 12975 + Data columns (total 28 columns): + # Column Non-Null Count Dtype + --- ------ -------------- ----- + 0 essay_id 12976 non-null int64 + 1 essay_set 12976 non-null int64 + 2 essay 12976 non-null object + 3 rater1_domain1 12976 non-null int64 + 4 rater2_domain1 12976 non-null int64 + 5 rater3_domain1 128 non-null float64 + 6 domain1_score 12976 non-null int64 + 7 rater1_domain2 1800 non-null float64 + 8 rater2_domain2 1800 non-null float64 + 9 domain2_score 1800 non-null float64 + 10 rater1_trait1 2292 non-null float64 + 11 rater1_trait2 2292 non-null float64 + 12 rater1_trait3 2292 non-null float64 + 13 rater1_trait4 2292 non-null float64 + 14 rater1_trait5 723 non-null float64 + 15 rater1_trait6 723 non-null float64 + 16 rater2_trait1 2292 non-null float64 + 17 rater2_trait2 2292 non-null float64 + 18 rater2_trait3 2292 non-null float64 + 19 rater2_trait4 2292 non-null float64 + 20 rater2_trait5 723 non-null float64 + 21 rater2_trait6 723 non-null float64 + 22 rater3_trait1 128 non-null 
float64 + 23 rater3_trait2 128 non-null float64 + 24 rater3_trait3 128 non-null float64 + 25 rater3_trait4 128 non-null float64 + 26 rater3_trait5 128 non-null float64 + 27 rater3_trait6 128 non-null float64 + dtypes: float64(22), int64(5), object(1) + + And also attached some fields. + - domain1_score_normalized: normalized score of domain1_score + """ + essay_id: int + essay_set: int + essay: str + rater1_domain1: float + rater2_domain1: float + rater3_domain1: Optional[float] + domain1_score: float + rater1_domain2: Optional[float] + domain2_score: Optional[float] + domain1_score_normalized: float + # FIXME: support more field e.g) rater1_trait1 + + def to_labeled(self): + return LabeledTextInstance( + text=self.essay, + label=str(self.domain1_score) + ) + + def to_scored(self): + return ScoredTextInstance( + text=self.essay, + score=self.domain1_score_normalized + ) + + +def load_asap_aes(path: str, prompt_id: int = -1) -> Dataset[ASAPAESInstance]: + _df = pd.read_csv(path, sep="\t", encoding="ISO-8859-1") + if prompt_id not in PROMPT_ID2SCORE_RANGE: + raise ValueError(f"Unavailable prompt id {prompt_id}") + if prompt_id != -1: + _df = _df[_df["essay_set"] == prompt_id] + _min, _max = PROMPT_ID2SCORE_RANGE[prompt_id] + _df["domain1_score_normalized"] = _df["domain1_score"].apply( + lambda x: _assign_normalized_score(x, _min, _max) + ) + instances = [ + ASAPAESInstance( + essay_id=record["essay_id"], + essay_set=record["essay_set"], + essay=record["essay"], + rater1_domain1=record["rater1_domain1"], + rater2_domain1=record["rater2_domain1"], + rater3_domain1=record.get("rater3_domain1", None), + domain1_score=record["domain1_score"], + rater1_domain2=record.get("rater1_domain2", None), + domain2_score=record.get("domain2_score", None), + domain1_score_normalized=record["domain1_score_normalized"] + ) for record in _df.to_dict(orient='records') + ] + return Dataset(instances) + + +def _assign_normalized_score(x: float, x_min: float, x_max: float) -> float: + try: 
+ return _min_max_normalization(x, x_min, x_max) + except ValueError as e: + logger.warning(f"Fail to normalize. Force assign: {e}.") + return 0.0 if x < x_min else 1.0 + + +def _min_max_normalization(x: float, x_min: float, x_max: float) -> float: + if (x < x_min) or (x > x_max): + raise ValueError(f"Invalid setting to normalize: x={x}, x_min={x_min}, x_max={x_max}") + return (x - x_min) / (x_max - x_min) diff --git a/expats/data/cefr.py b/expats/data/cefr.py new file mode 100644 index 0000000..cd52e10 --- /dev/null +++ b/expats/data/cefr.py @@ -0,0 +1,104 @@ + +import csv +from dataclasses import dataclass +from glob import glob +import os +import sys + +from expats.data.dataset import Dataset +from expats.data.instance import LabeledTextInstance +from expats.nlp.parser import sentence_tokenize_en + + +# based on Table1 in https://www.mitpressjournals.org/doi/pdfplus/10.1162/tacl_a_00310 +GRADE2SCALE = { + "A1": 0, + "A2": 20, + "B1": 40, + "B2": 60, + "C1": 80, + "C2": 100 +} + + +@dataclass(frozen=True) +class CEFRGradedTextInstance(LabeledTextInstance): + text: str + label: str + + +def load_octanove_en_passages_data( + passages_path: str, + min_char_size: int = sys.maxsize, + flatten_sentence: bool = True +) -> Dataset[CEFRGradedTextInstance]: + """load english passage data whose instances are graded with CEFR + + Args: + passages_path (str): path to corpus e.g) /path/to/education-data/en/passages/ + min_char_size (int, optional): filter out each instances by number of charactors to reduce noise. 
+ flatten_sentence (bool, optional): each passage will be splited into sentence unit to augment data + + Returns: + Dataset[CEFRGradedTextInstance]: loaded dataset + """ + dir2grade_mapper = { + "a1": "A1", + "a2": "A2", + "b1": "B1", + "b2": "B2", + "c1": "C1", + "c2": "C2", + } + + instances = [] + for (dir_name, grade) in dir2grade_mapper.items(): + file_list = glob(os.path.join(passages_path, dir_name) + "/*") + assert len(file_list) > 0, f"Missing files in dir {os.path.join(passages_path, dir_name)}" + for file_path in file_list: + with open(file_path) as f: + text = f.read() + if flatten_sentence is False: # one text file as one instance + inst = CEFRGradedTextInstance(__clean_text(text), grade) + instances.append(inst) + else: # one each sentence in one text file as one instance + _insts = [ + CEFRGradedTextInstance(sentence, grade) + for sentence in sentence_tokenize_en(__clean_text(text)) + ] + instances.extend(_insts) + + # filter by token size + instances = [ + inst for inst in instances + if len(inst.text) >= min_char_size + ] + return Dataset(instances) + + +def __clean_text(text): + return text.replace("\n", " ") + + +def load_cefr_annotated_tsv_data(file_path: str) -> Dataset[CEFRGradedTextInstance]: + """Load a TSV file annotated with CEFR. 
+ We assume that each line of the file is of "[CEFR level] \t [text]" + + Args: + file_path (str): path to the TSV file + + Returns: + Dataset[CEFRGradedTextInstance]: loaded dataset + """ + + instances = [] + with open(file_path) as f: + reader = csv.reader(f, delimiter="\t", quotechar='"') + next(reader) # skip header + for row in reader: + label, text = row + if label.upper() not in GRADE2SCALE: + raise ValueError(f"Invalid CEFR label: {label}") + inst = CEFRGradedTextInstance(text, label.upper()) + instances.append(inst) + return Dataset(instances) diff --git a/expats/data/convert.py b/expats/data/convert.py new file mode 100644 index 0000000..f990483 --- /dev/null +++ b/expats/data/convert.py @@ -0,0 +1,78 @@ + +from abc import ABCMeta, abstractmethod +from typing import Dict, Generic, List, TypeVar + +import numpy as np + +from expats.common.instantiate import BaseConfig, ConfigFactoried +from expats.common.type import ClassificationOutput, RegressionOutput + +T = TypeVar("T") +U = TypeVar("U") + + +class Converter(Generic[T, U], metaclass=ABCMeta): + """Converting some data type into another one. 
def _min_max_denormalization(x_norm: float, x_min: float, x_max: float) -> float:
    """Map a min-max normalized value from [0, 1] back onto [x_min, x_max].

    Args:
        x_norm: normalized value, expected to lie in [0, 1]
        x_min: minimum of the original value range
        x_max: maximum of the original value range

    Returns:
        The denormalized value ``x_norm * (x_max - x_min) + x_min``

    Raises:
        ValueError: if x_norm falls outside [0, 1] or the range is inverted
    """
    if (x_norm > 1) or (x_norm < 0) or (x_min > x_max):
        # message fixed: this function denormalizes, it does not normalize
        raise ValueError(
            f"Invalid setting to denormalize: x_norm={x_norm}, x_min={x_min}, x_max={x_max}")
    return x_norm * (x_max - x_min) + x_min
@dataclass(frozen=True)
class Instance():
    """Basic Instance class.

    Immutable (frozen) base for all dataset instances. Subclasses override
    the conversion hooks below; a conversion that is not supported raises
    NotImplementedError by default.
    """

    def to_labeled(self) -> "LabeledTextInstance":
        raise NotImplementedError()

    def to_scored(self) -> "ScoredTextInstance":
        raise NotImplementedError()


@dataclass(frozen=True)
class SingleTextInstance(Instance):
    """A bare text instance without any annotation."""
    text: str


@dataclass(frozen=True)
class LabeledTextInstance(Instance):
    """A text paired with a discrete classification label."""
    text: str
    label: ClassificationOutput

    def to_labeled(self):
        # already labeled, so conversion is the identity
        return self


@dataclass(frozen=True)
class ScoredTextInstance(Instance):
    """A text paired with a continuous regression score."""
    text: str
    score: RegressionOutput

    def to_scored(self):
        # already scored, so conversion is the identity
        return self
class InstantiationError(InternalProfilerError):
    """
    Error raised when creating instance
    """


class ArtifactNotFoundError(InternalProfilerError):
    """Error raised when an expected artifact file cannot be found."""
    # NOTE(review): no raise site is visible in this chunk -- confirm usage.
    pass


class DeserializationError(InternalProfilerError):
    """Error raised when loading (deserializing) a saved artifact fails."""
    pass


class SerializationError(InternalProfilerError):
    """Error raised when saving (serializing) an artifact fails."""
    pass
@Feature.register
class AverageTokenLengthFeature(Feature[spacy.tokens.doc.Doc]):
    """Average number of characters per token of a parsed document."""

    def extract(self, _input):
        """Return the mean token length as a 1d ndarray.

        Fixes two defects of the previous implementation: it returned a bare
        float although Feature.extract's documented contract is a 1d ndarray
        (all sibling features return arrays, and callers np.hstack them),
        and it raised ZeroDivisionError on an empty document.
        """
        n_token = len(_input)
        if n_token == 0:
            # empty document: define the average length as 0 instead of crashing
            return np.array([0.0])
        return np.array([sum(len(token) for token in _input) / n_token])
class DatasetForLIT(lit_dataset.Dataset):
    """Abstract adapter exposing project datasets to the LIT tool.

    NOTE(review): the two concrete adapters below subclass
    lit_dataset.Dataset directly rather than this class -- confirm whether
    this base class is still needed.
    """
    def spec(self) -> Dict[str, lit_types.LitType]:
        raise NotImplementedError()


class TextClassificationLITDataset(lit_dataset.Dataset):
    """LIT dataset adapter for text classification examples.

    Each example is a dict with keys "sentence" (the input text) and
    "label" (one of the given label vocabulary).
    """
    def __init__(
        self,
        examples: List[Dict[str, Any]],
        labels: List[str]
    ):
        self._examples = examples
        self._labels = labels

    def spec(self):
        # LIT renders inputs and the label vocabulary from this spec
        return {
            "sentence": lit_types.TextSegment(),
            "label": lit_types.CategoryLabel(vocab=self._labels)
        }


class TextRegressionLITDataset(lit_dataset.Dataset):
    """LIT dataset adapter for text regression examples.

    Each example is a dict with keys "sentence" and "label" (a float score).
    """
    def __init__(
        self,
        examples: List[Dict[str, Any]],
    ):
        self._examples = examples

    def spec(self):
        return {
            "sentence": lit_types.TextSegment(),
            "label": lit_types.RegressionScore(),
        }
class LITModelForTextRegressor(lit_model.Model):
    """LIT model wrapper exposing a text regressor to the LIT UI."""

    def __init__(self, profier: TextClassifier):
        # NOTE(review): "profier" is presumably a typo for "profiler"; kept
        # as-is because renaming would break keyword callers. The annotation
        # also looks wrong for a regressor -- should likely be TextRegressor
        # (which would need importing into this module); confirm.
        self._profiler = profier

    def max_minibatch_size(self):
        # fixed batch size used by LIT when chunking prediction requests
        return 32

    def predict_minibatch(self, inputs: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Run interpretation-enabled prediction on a LIT minibatch.

        Each input dict carries "sentence" and "label"; each output dict
        must match output_spec() below.
        """
        texts = [input_["sentence"] for input_ in inputs]
        ys = [input_["label"] for input_ in inputs]
        _output = self._profiler.interprete_via_prediction(texts, ys)
        try:
            # project only the keys LIT expects, in output_spec() order
            output = [{
                "tokens": _output_per_inst["tokens"],
                "cls_emb": _output_per_inst["cls_emb"],
                "logits": _output_per_inst["logits"],
                "token_grad_sentence": _output_per_inst["token_grad_sentence"]
            } for _output_per_inst in _output]
            return output
        except KeyError as e:
            # the profiler's interpretation output is missing a required key
            raise KeyError(f"Output spec of interprete_via_prediction seems to be not fit. error={e}")

    def input_spec(self) -> lit_types.Spec:
        # "label" is optional so unlabeled examples can still be predicted
        return {
            "sentence": lit_types.TextSegment(),
            "label": lit_types.RegressionScore(required=False)
        }

    def output_spec(self) -> lit_types.Spec:
        return {
            "tokens": lit_types.Tokens(),
            "logits": lit_types.RegressionScore(),
            "cls_emb": lit_types.Embeddings(),
            "token_grad_sentence": lit_types.TokenGradients(align="tokens")
        }
def f1(gold_ys: List[int], pred_ys: List[int], average: Optional[str] = None) -> float:
    """F1 score of predictions against gold labels (thin sklearn wrapper)."""
    return f1_score(gold_ys, pred_ys, average=average)


def accuracy(gold_ys: List[int], pred_ys: List[int]) -> float:
    """Fraction of predictions that exactly match the gold labels."""
    return accuracy_score(gold_ys, pred_ys)


def cohen_kappa(gold_ys: List[int], pred_ys: List[int], weights: Optional[str] = None) -> float:
    """Cohen's kappa; weights="quadratic" yields quadratic weighted kappa."""
    return cohen_kappa_score(gold_ys, pred_ys, weights=weights)


# NOTE: annotation fixed -- scipy.stats.pearsonr returns a (statistic,
# p-value) pair, not a float; callers index [0] for the coefficient.
# Written as a string annotation so no new `typing.Tuple` import is needed.
def pearsonr(gold_ys: List[float], pred_ys: List[float]) -> "Tuple[float, float]":
    """Pearson correlation between gold and predicted values.

    Returns:
        (correlation coefficient, two-sided p-value) as returned by
        scipy.stats.pearsonr.
    """
    return scipy_pearsonr(gold_ys, pred_ys)
class Metric(Generic[T], metaclass=ABCMeta):
    """Metric class for quantitative evaluation.

    A metric folds a list of per-instance inputs (here, (gold, predicted)
    pairs) into a single float.
    """
    def __init__(self):
        pass

    @property
    def name(self) -> str:
        # the class name doubles as the metric's display name
        return self.__class__.__name__

    @abstractmethod
    def calculate(self, inputs: List[T]) -> float:
        raise NotImplementedError


class ClassificationMetric(ConfigFactoried, Metric[Tuple[str, str]]):
    """Metric over (gold_label, predicted_label) string pairs."""

    def calculate(self, inputs: List[Tuple[str, str]]) -> float:
        gold_ys, pred_ys = _split(inputs)
        # str to int: fit the encoder on both lists jointly so gold and
        # predicted labels share a single mapping
        le = LabelEncoder()
        le.fit(gold_ys + pred_ys)
        return self._calculate(le.transform(gold_ys), le.transform(pred_ys))

    def _calculate(self, gold_ys: List[int], pred_ys: List[int]) -> float:
        # implemented by the concrete metrics registered below
        raise NotImplementedError


@ClassificationMetric.register
class MacroF1(ClassificationMetric):
    """Macro-averaged F1 (unweighted mean of per-class F1)."""
    def _calculate(self, gold_ys: List[int], pred_ys: List[int]) -> float:
        return f1(gold_ys, pred_ys, "macro")


@ClassificationMetric.register
class MicroF1(ClassificationMetric):
    """Micro-averaged F1 (computed over pooled counts)."""
    def _calculate(self, gold_ys: List[int], pred_ys: List[int]) -> float:
        return f1(gold_ys, pred_ys, "micro")


@ClassificationMetric.register
class Accuracy(ClassificationMetric):
    """Exact-match accuracy."""
    def _calculate(self, gold_ys: List[int], pred_ys: List[int]) -> float:
        return accuracy(gold_ys, pred_ys)


@ClassificationMetric.register
class QuadraticWeightedKappa(ClassificationMetric):
    """Cohen's kappa with quadratic weights.

    NOTE(review): LabelEncoder assigns integers in lexicographic label
    order; quadratic weighting assumes that order reflects the ordinal
    scale of the labels -- verify for the label sets used.
    """
    def _calculate(self, gold_ys: List[int], pred_ys: List[int]) -> float:
        return cohen_kappa(gold_ys, pred_ys, weights="quadratic")


class RegressionMetric(ConfigFactoried, Metric[Tuple[float, float]]):
    """Metric over (gold_score, predicted_score) float pairs."""

    def calculate(self, inputs: List[Tuple[float, float]]) -> float:
        gold_ys, pred_ys = _split(inputs)
        return self._calculate(gold_ys, pred_ys)

    def _calculate(self, gold_ys: List[float], pred_ys: List[float]) -> float:
        raise NotImplementedError
@RegressionMetric.register
class PearsonCorrelation(RegressionMetric):
    """Pearson correlation coefficient between gold and predicted scores."""

    def _calculate(self, gold_ys: List[float], pred_ys: List[float]) -> float:
        # helper returns scipy's (coefficient, p-value); keep the coefficient
        return pearsonr(gold_ys, pred_ys)[0]


def _split(inputs: List[Tuple[U, U]]) -> Tuple[List[U], List[U]]:
    """Unzip a list of (gold, predicted) pairs into two parallel lists."""
    golds = [pair[0] for pair in inputs]
    preds = [pair[1] for pair in inputs]
    return (golds, preds)
class PLTrainerConfig(BaseConfig):
    """Subset of pytorch-lightning Trainer arguments exposed via config.

    Field names map 1:1 onto pl.Trainer keyword arguments.
    """
    accumulate_grad_batches: int = 1
    # NOTE(review): presumably pydantic gives this Optional field an
    # implicit None default (CPU-only) -- confirm against the pydantic
    # version in use.
    gpus: Optional[Union[int, List[int]]]
    max_epochs: int
    min_epochs: int = 1


def create_pltrainer(config: PLTrainerConfig) -> pl.Trainer:
    """Build a pytorch-lightning Trainer logging to TensorBoard under SAVE_DIR."""
    logger = TensorBoardLogger(SAVE_DIR)
    # forward every config field straight to the Trainer constructor
    return pl.Trainer(logger=logger, **config.to_dict())
class TransformerNetBase(NNModuleBase):
    """Shared plumbing for transformer-encoder heads.

    Subclasses implement _transformer_output2logit (pooling + head) and
    _calculate_loss; this base handles the encoder forward pass, the
    training/validation steps and the optimizer.
    """
    def __init__(
        self,
        params: BaseConfig,
        transformer: PreTrainedModel,
        linear: torch.nn.Linear,
    ):
        super().__init__(params=params)
        self._transformer = transformer
        self._linear = linear

    def forward(
        self,
        input_ids: torch.LongTensor,
        attention_mask: torch.LongTensor,
        token_type_ids: Optional[torch.LongTensor] = None
    ):
        """Encode the batch and return the head's logits."""
        output = self._forward_transformer(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            output_hidden_states=False
        )
        return self._transformer_output2logit(output)

    def _forward_transformer(
        self,
        input_ids: torch.LongTensor,
        attention_mask: torch.LongTensor,
        token_type_ids: Optional[torch.LongTensor] = None,
        output_hidden_states: bool = False
    ) -> ModelOutput:
        """Run the underlying transformer.

        token_type_ids is only forwarded when present, because some
        architectures do not accept that argument.
        """
        if token_type_ids is None:
            output = self._transformer(
                input_ids=input_ids,
                attention_mask=attention_mask,
                output_hidden_states=output_hidden_states
            )
        else:
            output = self._transformer(
                input_ids=input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
                output_hidden_states=output_hidden_states
            )
        return output

    def _transformer_output2logit(self, output: ModelOutput):
        # subclass hook: pool encoder output and apply the linear head
        raise NotImplementedError

    def _calculate_loss(self, logits: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        # subclass hook: task-specific loss (cross-entropy, MSE, ...)
        raise NotImplementedError

    def training_step(self, batch, batch_nb):
        """One optimization step; logs train_loss for the PL logger."""
        y_hat = self(batch["input_ids"], batch["attention_mask"], batch.get("token_type_ids", None))
        loss = self._calculate_loss(y_hat, batch["label"])
        self.log("train_loss", loss)
        return loss

    def validation_step(self, batch, batch_nb):
        """Validation mirror of training_step; logs val_loss, returns nothing."""
        y_hat = self(batch["input_ids"], batch["attention_mask"], batch.get("token_type_ids", None))
        loss = self._calculate_loss(y_hat, batch["label"])
        self.log("val_loss", loss)

    def configure_optimizers(self):
        # FIXME: support to freeze BERT params
        optimizer = AdamW(self.parameters(), lr=self._params.lr)
        return optimizer
class TransformerRegressorNet(TransformerNetBase):
    """Transformer encoder with a single-output regression head."""
    config_class = TransformerRegressorNetConfig

    def _transformer_output2logit(self, output):
        # (batchsize, num_token, hidden_size)
        h = output["last_hidden_state"]
        # (batchsize, hidden_size): [CLS]-position pooling
        h_cls = h[:, 0]
        logits = self._linear(h_cls)
        if self._params.output_normalized:
            # squash into (0, 1) when targets are min-max normalized
            logits = torch.sigmoid(logits)
        return logits

    def _calculate_loss(self, logits, target):
        # unsqueeze so target matches the (batchsize, 1) head output
        return F.mse_loss(logits, torch.unsqueeze(target.float(), 1))

    def forward_for_interpretation(
        self,
        input_ids: torch.LongTensor,
        attention_mask: torch.LongTensor,
        token_type_ids: Optional[torch.LongTensor] = None
    ):
        """Predict with gradients enabled and return tensors for LIT saliency."""
        with torch.set_grad_enabled(True):
            output = self._forward_transformer(
                input_ids=input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
                output_hidden_states=True
            )
            logits = self._transformer_output2logit(output)
            # NOTE(review): with batch size 1 squeeze() yields a 0-d tensor;
            # confirm downstream consumers handle that shape
            scalar_pred_for_gradients = torch.squeeze(logits)
            # NOTE: gradient with respect to hidden states in first layer
            # (batchsize, num_token, hidden_size)
            input_emb_grad = torch.autograd.grad(
                scalar_pred_for_gradients, output["hidden_states"][0],
                grad_outputs=torch.ones_like(scalar_pred_for_gradients),
            )[0]
            return {
                "logits": torch.squeeze(logits),
                "input_ids": input_ids,
                "ntok": torch.sum(attention_mask, dim=1),
                "cls_emb": output["last_hidden_state"][:, 0],
                "input_emb_grad": input_emb_grad
            }

    @classmethod
    def create(cls, params: Dict) -> "TransformerRegressorNet":
        """Build from a raw params dict: pretrained encoder + fresh 1-unit head."""
        params_ = TransformerRegressorNetConfig.from_dict(params)
        transformer = AutoModel.from_pretrained(params_.pretrained_model_name_or_path)
        linear = torch.nn.Linear(transformer.config.hidden_size, 1)
        return cls(
            params_,
            transformer,
            linear,
        )
def create_ml_classifier(_type: str, params: Dict[str, Any]):
    """Instantiate a scikit-learn classifier by short type name.

    Args:
        _type: "rf" (random forest) or "gb" (gradient boosting)
        params: keyword arguments forwarded to the sklearn constructor

    Raises:
        ValueError: for an unknown type name
    """
    if _type == "rf":
        estimator_cls = RandomForestClassifier
    elif _type == "gb":
        estimator_cls = GradientBoostingClassifier
    else:
        raise ValueError(f"Invalid type: {_type}")
    return estimator_cls(**params)


def create_ml_regressor(_type: str, params: Dict[str, Any]):
    """Instantiate a scikit-learn regressor by short type name.

    Args:
        _type: "rf" (random forest) or "gb" (gradient boosting)
        params: keyword arguments forwarded to the sklearn constructor

    Raises:
        ValueError: for an unknown type name
    """
    if _type == "rf":
        estimator_cls = RandomForestRegressor
    elif _type == "gb":
        estimator_cls = GradientBoostingRegressor
    else:
        raise ValueError(f"Invalid type: {_type}")
    return estimator_cls(**params)
class ProfilerBase(Generic[T, U], ConfigFactoried, Serializable):
    """Profiler class to solve some tasks.

    Generic over the input type T and the prediction type U. Subclasses
    implement predict_batch and the (de)serialization hooks; params are
    persisted alongside model artifacts as profiler.yaml.

    NOTE(review): "interprete_via_prediction" / "interpred_via_internal"
    look like typos for "interpret_...", but they are public API (called
    from the LIT integration), so renaming needs a coordinated change.
    """
    def __init__(self, params: Union[Dict, BaseConfig]):
        self._params = params

    def fit(self, inputs: List[T], ys: List[U]):
        # do nothing by default (rule-based / stateless profilers)
        logger.info("This profiler does not do anything when training.")

    def predict(self, input_: T) -> U:
        # single-instance convenience wrapper around predict_batch
        return self.predict_batch([input_])[0]

    def predict_batch(self, inputs: List[T]) -> List[U]:
        raise NotImplementedError()

    def interprete_via_prediction(self, inputs: List[T], ys: List[U]) -> List[Dict[str, Any]]:
        # interpret models based on actual predictions, e.g. saliency maps
        raise NotImplementedError(f"This profiler class {self.__class__.__name__} doesn't support predication-based interepretation")

    def interpred_via_internal(self) -> Dict[str, Any]:
        # interpret models based on training results, e.g. feature importance
        raise NotImplementedError(f"This profiler class {self.__class__.__name__} doesn't support internal interepretation")

    @classmethod
    def load(cls, artifact_path: str) -> "ProfilerBase":
        """Restore a profiler from a directory: params YAML + subclass state."""
        param_dict = load_from_file(_get_param_path(artifact_path))
        # subclasses without a config_class keep params as a plain dict
        params = param_dict if cls.config_class is None else cls.config_class.from_dict(param_dict)
        return cls._load_internal(artifact_path, params)

    @classmethod
    def _load_internal(cls, artifact_path: str, params: Union[Dict, BaseConfig]) -> "ProfilerBase":
        # subclass hook: restore everything except params
        raise NotImplementedError()

    def save(self, artifact_path: str):
        """Persist params as YAML, then delegate the rest to the subclass."""
        _param_dict = self._params if type(self._params) == dict else self._params.to_dict()
        dump_to_file(_param_dict, _get_param_path(artifact_path))
        self._save_internal(artifact_path)

    def _save_internal(self, artifact_path: str):
        # subclass hook: persist everything except params
        raise NotImplementedError()
TextProfiler(ProfilerBase[SingleTextInput, U]): + pass + + +class TextClassifier(TextProfiler[ClassificationOutput]): + pass + + +class TextRegressor(TextProfiler[RegressionOutput]): + pass + + +def _get_param_path(dir_path: str): + return os.path.join(dir_path, PARAMS_FILENAME) diff --git a/expats/profiler/doc_feature_profiler.py b/expats/profiler/doc_feature_profiler.py new file mode 100644 index 0000000..dc92f59 --- /dev/null +++ b/expats/profiler/doc_feature_profiler.py @@ -0,0 +1,130 @@ + +import os +import pickle +from typing import Any, List, Dict, TypeVar, Union + +import numpy as np +from sklearn.base import BaseEstimator +import spacy + +from expats.common.instantiate import BaseConfig +from expats.common.type import ClassificationOutput, RegressionOutput +from expats.feature.base import Feature +from expats.ml.sklearn import create_ml_classifier, create_ml_regressor +from expats.nlp.parser import create_spacy_parser +from expats.profiler.base import TextProfiler, TextClassifier, TextRegressor + + +T = TypeVar("T", bound=Union[ClassificationOutput, RegressionOutput]) +ESTIMATOR_FILENAME = "model.pkl" + + +class _Config(BaseConfig): + features: Any # FIXME: any to avoid: pydantic.error_wrappers.ValidationError: value is not a valid list + classifier: Dict + parser_name: str = "en_core_web_sm" + + +# FIXME: it's appropriate +class DocFeatureMLBase(TextProfiler[T]): + config_class = _Config + + # FIXME: 'fit' method seems not to be defined in BaseEstimator interaface. 
+ def __init__( + self, + params: _Config, + doc_features: List[Feature[spacy.tokens.doc.Doc]], + estimator: BaseEstimator, + parser: spacy.language.Language + ): + assert _is_valid_estimator(estimator), "estimator in args is not valid" + super().__init__(params) + self._doc_features = doc_features + self._estimator = estimator + self._parser = parser + + # FIXME: inefficient + def _transform_batch(self, inputs: List[str]) -> np.ndarray: + return np.vstack([self._transform(text) for text in inputs]) + + def _transform(self, text: str) -> np.ndarray: # return 1d array + doc = self._parser(text) + return np.hstack([ + feature.extract(doc) for feature in self._doc_features + ]) + + def _predict_batch_from_vector(self, xs: np.ndarray) -> np.ndarray: + return self._estimator.predict(xs) + + def predict_batch(self, inputs: List[str]) -> List[T]: + xs = self._transform_batch(inputs) + ys = self._predict_batch_from_vector(xs) + return ys.tolist() + + def fit(self, inputs: List[str], ys: List[T]): + xs = self._transform_batch(inputs) + self._estimator.fit(xs, ys) + + @classmethod + def create(cls, params: _Config): + doc_features = [ + Feature.create_from_factory(feat["type"], feat["params"]) + for feat in params.features + ] + estimator = cls._create_estimator(params.classifier) + parser = create_spacy_parser(params.parser_name) + return cls( + params, doc_features, estimator, parser + ) + + @classmethod + def _create_estimator(cls, estimator_config: Dict) -> BaseEstimator: + raise NotImplementedError() + + @classmethod + def _load_internal(cls, artifact_path: str, params: _Config) -> "DocFeatureMLBase": + doc_features = [ + Feature.create_from_factory(feat["type"], feat["params"]) + for feat in params.features + ] + with open(_get_estimator_path(artifact_path), "rb") as fw: + estimator = pickle.load(fw) + parser = create_spacy_parser(params.parser_name) + return cls(params, doc_features, estimator, parser) + + def _save_internal(self, artifact_path: str): + with 
open(_get_estimator_path(artifact_path), "wb") as fw: + pickle.dump(self._estimator, fw) + + +@TextClassifier.register +class DocFeatureMLClassifier(DocFeatureMLBase[ClassificationOutput]): + @classmethod + def _create_estimator(cls, estimator_config: Dict) -> BaseEstimator: + return create_ml_classifier( + estimator_config["type"], + estimator_config["params"] + ) + + +@TextRegressor.register +class DocFeatureMLRegressor(DocFeatureMLBase[RegressionOutput]): + @classmethod + def _create_estimator(cls, estimator_config: Dict) -> BaseEstimator: + return create_ml_regressor( + estimator_config["type"], + estimator_config["params"] + ) + + +def _is_valid_estimator(estimator: BaseEstimator): + _dir = dir(estimator) + if "fit" not in _dir: + return False + if "predict" not in _dir: + return False + return True + + +def _get_estimator_path(dir_path: str) -> str: + return os.path.join(dir_path, ESTIMATOR_FILENAME) diff --git a/expats/profiler/transformer.py b/expats/profiler/transformer.py new file mode 100644 index 0000000..a79b6d8 --- /dev/null +++ b/expats/profiler/transformer.py @@ -0,0 +1,303 @@ + +from copy import deepcopy +import os +import pickle +from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union + +from datasets import Dataset as HFDataset +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import LabelEncoder +import torch +from transformers import AutoTokenizer + +from expats.common.instantiate import BaseConfig +from expats.common.tensor import unbatch_preds +from expats.common.type import ClassificationOutput, RegressionOutput +from expats.data.helper import dict_to_hfdataset +from expats.ml.nn.transformer import ( + TransformerClassifierNetConfig, TransformerClassifierNet, TransformerRegressorNetConfig, TransformerRegressorNet +) +from expats.ml.nn.trainer import PLTrainerConfig, create_pltrainer +from expats.profiler.base import TextProfiler, TextClassifier, TextRegressor +from expats.settings import SETTINGS + 
T = TypeVar("T", bound=Union[ClassificationOutput, RegressionOutput])

NET_FILENAME = "model.pt"
# NOTE(review): "encorder" is a typo, but the filename is persisted inside saved artifacts; keep for compatibility
LABEL_ENCORDER_FILENAME = "label_encorder.pkl"
MAP_BATCHSIZE = 1000  # NOTE: it's default value in datasets https://github.com/huggingface/datasets/blob/441e2040ead59dc4b0c9f2f7998f57d4fa2fd99f/src/datasets/arrow_dataset.py#L1332 # noqa


class DataLoaderConfig(BaseConfig):
    batch_size: int = 32
    num_workers: int = 0


class _ClassifierConfig(BaseConfig):
    trainer: PLTrainerConfig
    network: TransformerClassifierNetConfig
    data_loader: DataLoaderConfig
    val_ratio: float = 0.0
    max_length: Optional[int] = None


class _RegressorConfig(BaseConfig):
    trainer: PLTrainerConfig
    network: TransformerRegressorNetConfig
    data_loader: DataLoaderConfig
    val_ratio: float = 0.0
    max_length: Optional[int] = None


class TransformerBase(TextProfiler[T]):
    """Common mini-batched prediction loop for transformer-based profilers."""

    def predict_batch(self, inputs: List[str]) -> List[T]:
        # NOTE(review): prediction mini-batch size is hard-coded; data_loader.batch_size is only used for training
        batchsize = 10
        results = []
        for i in range(0, len(inputs), batchsize):
            _batch = inputs[i:i + batchsize]
            results.extend(self._predict_batch_each(_batch))
        return results

    @classmethod
    def create(cls, params: BaseConfig):
        raise NotImplementedError

    @classmethod
    def _load_internal(cls, artifact_path: str, params: BaseConfig) -> "TransformerBase":
        # fixed: decorator was missing even though the method takes ``cls``
        # (subclasses override this as a @classmethod; the base must match)
        raise NotImplementedError

    def _save_internal(self, artifact_path: str):
        raise NotImplementedError


@TextClassifier.register
class TransformerClassifier(TransformerBase[ClassificationOutput]):
    """Text classifier backed by a fine-tunable transformer network."""
    config_class = _ClassifierConfig

    def __init__(
        self,
        params: _ClassifierConfig,
        net: TransformerClassifierNet,
        tokenizer: AutoTokenizer,
        label_encorder: LabelEncoder,
    ):
        super().__init__(params)
        self._net = net
        self._tokenizer = tokenizer
        self._label_encorder = label_encorder
        self._model_input_names = tokenizer.model_input_names

    def _predict_batch_each(self, inputs: List[str]) -> List[str]:
        # NOTE: copy tokenizer to avoid https://github.com/huggingface/transformers/issues/8453
        _dataset = _convert_to_torch_format_dataset(
            {"text": inputs}, "text", deepcopy(self._tokenizer), self._params.max_length, self._model_input_names
        )
        logits = self._net.forward(**{
            key: _dataset[key] for key in self._model_input_names
        })
        # decode argmax class indices back to the original string labels
        idxs = self._label_encorder.inverse_transform(logits.argmax(-1).tolist())
        return idxs.tolist()

    def fit(self, inputs: List[str], ys: List[str]):
        """Fit the label encoder, split train/val, and run the Lightning trainer."""
        _ys = self._label_encorder.fit_transform(ys)
        train_inputs, val_inputs, train_ys, val_ys = _train_test_split(
            inputs, _ys, self._params.val_ratio
        )
        train_dataset = _convert_to_torch_format_dataset(
            {"text": train_inputs, "label": train_ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        val_dataset = _convert_to_torch_format_dataset(
            {"text": val_inputs, "label": val_ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        trainer = create_pltrainer(self._params.trainer)
        trainer.fit(
            self._net,
            torch.utils.data.DataLoader(
                train_dataset,
                batch_size=self._params.data_loader.batch_size,
                num_workers=self._params.data_loader.num_workers,
                shuffle=True
            ),
            torch.utils.data.DataLoader(
                val_dataset,
                batch_size=self._params.data_loader.batch_size,
                num_workers=self._params.data_loader.num_workers,
                shuffle=False
            )
        )

    def interprete_via_prediction(self, inputs: List[str], ys: List[str]) -> List[Dict[str, Any]]:
        """Per-example gradient-based interpretation (tokens + input-embedding grads)."""
        _ys = self._label_encorder.transform(ys)
        _dataset = _convert_to_torch_format_dataset(
            {"text": inputs, "label": _ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        results = self._net.forward_for_interpretation(**{
            key: _dataset[key] for key in self._model_input_names
        })
        # to ndarray
        results = {k: v.detach().numpy() for k, v in results.items()}
        unbatched_results = unbatch_preds(results)
        unbatched_results = [
            {
                # truncate padding: "ntok" is the real token count for this example
                "tokens": self._tokenizer.convert_ids_to_tokens(res["input_ids"][:res["ntok"]]),
                "token_grad_sentence": res["input_emb_grad"][:res["ntok"]],
                **res
            }
            for res in unbatched_results
        ]
        return unbatched_results

    @classmethod
    def create(cls, params: _ClassifierConfig):
        net = TransformerClassifierNet.create(params.network.to_dict())
        tokenizer = AutoTokenizer.from_pretrained(params.network.pretrained_model_name_or_path)
        label_encorder = LabelEncoder()
        return cls(params, net, tokenizer, label_encorder)

    @classmethod
    def _load_internal(cls, artifact_path: str, params: _ClassifierConfig) -> "TransformerClassifier":
        net = TransformerClassifierNet.load(os.path.join(artifact_path, NET_FILENAME))
        with open(os.path.join(artifact_path, LABEL_ENCORDER_FILENAME), "rb") as f:
            label_encoder = pickle.load(f)
        tokenizer = AutoTokenizer.from_pretrained(params.network.pretrained_model_name_or_path)
        return cls(params, net, tokenizer, label_encoder)

    def _save_internal(self, artifact_path: str):
        self._net.save(os.path.join(artifact_path, NET_FILENAME))
        with open(os.path.join(artifact_path, LABEL_ENCORDER_FILENAME), "wb") as fw:
            pickle.dump(self._label_encorder, fw)


@TextRegressor.register
class TransformerRegressor(TransformerBase[RegressionOutput]):
    """Text regressor backed by a fine-tunable transformer network."""
    config_class = _RegressorConfig

    def __init__(
        self,
        params: _RegressorConfig,
        net: TransformerRegressorNet,
        tokenizer: AutoTokenizer,
    ):
        super().__init__(params)
        self._net = net
        self._tokenizer = tokenizer
        self._model_input_names = tokenizer.model_input_names

    def _predict_batch_each(self, inputs: List[str]) -> List[float]:
        _dataset = _convert_to_torch_format_dataset(
            {"text": inputs}, "text", deepcopy(self._tokenizer), self._params.max_length, self._model_input_names
        )
        logits = self._net.forward(**{
            key: _dataset[key] for key in self._model_input_names
        })
        # fixed: squeeze only the trailing feature dim. A bare squeeze() also dropped
        # the batch dim for a single-element batch, so tolist() returned a float and
        # predict_batch's results.extend(...) raised TypeError.
        return logits.squeeze(-1).tolist()

    def fit(self, inputs: List[str], ys: List[float]):
        """Split train/val and run the Lightning trainer on tokenized datasets."""
        train_inputs, val_inputs, train_ys, val_ys = _train_test_split(
            inputs, ys, self._params.val_ratio
        )
        train_dataset = _convert_to_torch_format_dataset(
            {"text": train_inputs, "label": train_ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        val_dataset = _convert_to_torch_format_dataset(
            {"text": val_inputs, "label": val_ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        trainer = create_pltrainer(self._params.trainer)
        trainer.fit(
            self._net,
            torch.utils.data.DataLoader(
                train_dataset,
                batch_size=self._params.data_loader.batch_size,
                num_workers=self._params.data_loader.num_workers,
                shuffle=True
            ),
            torch.utils.data.DataLoader(
                val_dataset,
                batch_size=self._params.data_loader.batch_size,
                num_workers=self._params.data_loader.num_workers,
                shuffle=False
            )
        )

    def interprete_via_prediction(self, inputs: List[str], ys: List[float]) -> List[Dict[str, Any]]:
        """Per-example gradient-based interpretation (tokens + input-embedding grads)."""
        _dataset = _convert_to_torch_format_dataset(
            {"text": inputs, "label": ys}, "text", deepcopy(self._tokenizer),
            self._params.max_length,
            self._model_input_names + ["label"]
        )
        results = self._net.forward_for_interpretation(**{
            key: _dataset[key] for key in self._model_input_names
        })
        # to ndarray
        results = {k: v.detach().numpy() for k, v in results.items()}
        unbatched_results = unbatch_preds(results)
        unbatched_results = [
            {
                "tokens": self._tokenizer.convert_ids_to_tokens(res["input_ids"][:res["ntok"]]),
                "token_grad_sentence": res["input_emb_grad"][:res["ntok"]],
                **res
            }
            for res in unbatched_results
        ]
        return unbatched_results

    @classmethod
    def create(cls, params: _RegressorConfig):
        net = TransformerRegressorNet.create(params.network.to_dict())
        tokenizer = AutoTokenizer.from_pretrained(params.network.pretrained_model_name_or_path)
        return cls(params, net, tokenizer)

    @classmethod
    def _load_internal(cls, artifact_path: str, params: _RegressorConfig) -> "TransformerRegressor":
        net = TransformerRegressorNet.load(os.path.join(artifact_path, NET_FILENAME))
        tokenizer = AutoTokenizer.from_pretrained(params.network.pretrained_model_name_or_path)
        return cls(params, net, tokenizer)

    def _save_internal(self, artifact_path: str):
        self._net.save(os.path.join(artifact_path, NET_FILENAME))


def _convert_to_torch_format_dataset(
    input_dict: Dict[str, Any],
    text_key: str,
    tokenizer: AutoTokenizer,
    max_length: Optional[int],
    columns: List[str]
) -> HFDataset:
    """Tokenize ``input_dict[text_key]`` and return a torch-formatted HF dataset exposing ``columns``."""
    if text_key not in input_dict:
        raise ValueError(f"Specified key({text_key}) must be contained in input_dict.")
    dataset = dict_to_hfdataset(input_dict)
    dataset = dataset.map(
        # NOTE: Padding to specific length.
        # https://huggingface.co/transformers/preprocessing.html#everything-you-always-wanted-to-know-about-padding-and-truncation # noqa
        lambda batch: tokenizer(batch[text_key], padding="max_length", truncation=True, max_length=max_length),
        batched=True, batch_size=MAP_BATCHSIZE
    )
    dataset.set_format('torch', columns=columns)
    return dataset


def _train_test_split(
    arr1: List[Any], arr2: List[Any], test_ratio: float = 0.0
) -> Tuple[List[Any], List[Any], List[Any], List[Any]]:
    """Split two parallel lists; ``test_ratio == 0`` returns empty test halves.

    Raises:
        ValueError: on length mismatch or a ratio outside [0, 1].
    """
    if len(arr1) != len(arr2):
        raise ValueError(f"Inconsistent length: {len(arr1)} != {len(arr2)}")
    if (test_ratio > 1.0) or (test_ratio < 0.0):
        raise ValueError(f"Invalid ratio: {test_ratio}")
    if test_ratio == 0:
        return arr1, [], arr2, []
    else:
        train_arr1, test_arr1, train_arr2, test_arr2 = train_test_split(
            arr1, arr2, test_size=test_ratio, random_state=SETTINGS.random_seed)
        return train_arr1, test_arr1, train_arr2, test_arr2


# ==== expats/settings.py ====

from dataclasses import dataclass
import os
+@dataclass(frozen=True) +class Settings(): + spacy_parser_name: str = "en_core_web_sm" + home_root_path: str = os.path.join(os.path.expanduser('~'), ".expats") + is_debug: bool = bool(os.environ.get("IS_DEBUG")) or False + random_seed: int = 46 + + +SETTINGS = Settings() diff --git a/expats/task.py b/expats/task.py new file mode 100644 index 0000000..2ddcbf1 --- /dev/null +++ b/expats/task.py @@ -0,0 +1,224 @@ + +import os +from typing import Dict, List, Tuple, Type, Union + +import pandas as pd + +from expats.common.config_util import dump_to_file, load_from_file +from expats.common.log import get_logger +from expats.common.type import TaskType +from expats.data.convert import ClassificationToRegression, RegressionToClassification +from expats.data.dataset import Dataset +from expats.data.factory import DatasetFactory +from expats.data.instance import LabeledTextInstance, ScoredTextInstance +from expats.metric.metric import Metric, ClassificationMetric, RegressionMetric +from expats.profiler.base import TextProfiler, TextClassifier, TextRegressor + + +TRAIN_CONFIG_FILENAME = "train_config.yaml" + +logger = get_logger(__name__) + + +def train(config: Dict, artifact_path: str): + task = TaskType(config["task"]) + + dataset_config = config["dataset"] + + logger.info("Loading dataset ...") + dataset = DatasetFactory.create_from_file( + dataset_config["type"], + dataset_config["params"] + ) + dataset = _convert_dataset(task, dataset) + logger.info(f"Dataset size: {len(dataset)}") + + xs, ys = extract_xys(task, dataset.to_dataframe()) + + profiler_config = config["profiler"] + + logger.info("Initializing model ...") + profiler_class = get_task_profiler_class(task) + profiler = profiler_class.create_from_factory(profiler_config["type"], profiler_config["params"]) + + logger.info("Start training") + profiler.fit(xs, ys) + + logger.info("Saving ...") + profiler.save(artifact_path) + dump_to_file(config, os.path.join(artifact_path, TRAIN_CONFIG_FILENAME)) + + +def 
predict(predict_config: Dict, output_file: str): + logger.info("Loading artifacts ...") + profiler, _ = load_artifacts(predict_config["artifact_path"]) + + dataset_config = predict_config["dataset"] + + logger.info("Loading dataset ...") + assert dataset_config["type"] == "line-by-line", "Only line-by-line dataset is available for predict" + dataset = DatasetFactory.create_from_file( + dataset_config["type"], + dataset_config["params"] + ) + logger.info(f"Dataset size: {len(dataset)}") + + xs = dataset.to_dataframe()["text"].to_list() + + logger.info("Making predictions ...") + pred_ys = profiler.predict_batch(xs) + + logger.info("Writing prediction results to file ...") + with open(output_file, "w") as fw: + for x, y in zip(xs, pred_ys): + print(f"{y}\t{x}", file=fw) + + +def evaluate(eval_config: Dict): + logger.info("Loading artifacts ...") + profiler, train_config = load_artifacts(eval_config["artifact_path"]) + train_task = TaskType(train_config["task"]) + + dataset_config = eval_config["dataset"] + + logger.info("Loading dataset ...") + dataset = DatasetFactory.create_from_file( + dataset_config["type"], + dataset_config["params"] + ) + dataset = _convert_dataset(train_task, dataset) + logger.info(f"Dataset size: {len(dataset)}") + + xs, gold_ys = extract_xys(train_task, dataset.to_dataframe()) + + logger.info("Making predictions ...") + pred_ys = profiler.predict_batch(xs) + + metrics_config = eval_config["metrics"] + metric_report: Dict[str, float] = {} + logger.info("Calcurating metrics ...") + for _target_task in metrics_config.keys(): + _target_task_type = TaskType(_target_task) + # convert model prediction/gold data for evaluation target task, not trainind task + _gold_ys, _pred_ys = get_target_task_prediction( + _target_task_type, + train_task, + gold_ys, + pred_ys, + eval_config["output_convert"] + ) + + for _metric_wise_config in metrics_config[_target_task]: + task_metric_class = get_task_metric_class(_target_task_type) + _metric = 
task_metric_class.create_from_factory( + _metric_wise_config["type"], + _metric_wise_config["params"] + ) + metric_report[_metric.name] = _metric.calculate( + [(g, p) for (g, p) in zip(_gold_ys, _pred_ys)] + ) + logger.info(f"Results: {metric_report}") + + +def interpret(interpret_config: Dict): + logger.info("Loading artifacts ...") + profiler, train_config = load_artifacts(interpret_config["artifact_path"]) + train_task = TaskType(train_config["task"]) + + dataset_config = interpret_config["dataset"] + + logger.info("Loading dataset") + dataset = DatasetFactory.create_from_file( + dataset_config["type"], + dataset_config["params"] + ) + dataset = _convert_dataset(train_task, dataset) + logger.info(f"Dataset size: {len(dataset)}") + + # FIXME: better handling for integrations + try: + from expats.integration.lit.server import launch + launch( + train_task, + profiler, + dataset + ) + except ImportError as e: + logger.error(f"Failed to import. Please check if dependencies are properly installed. 
error={str(e)}") + + +def get_task_metric_class( + task: TaskType +) -> Metric: + if task == TaskType.CLASSIFICATION: + return ClassificationMetric + elif task == TaskType.REGRESSION: + return RegressionMetric + else: + raise ValueError(f"Unsupported task({task}) for evaluation metrics") + + +def get_task_profiler_class( + task: TaskType +) -> Type[TextProfiler]: + if task == TaskType.CLASSIFICATION: + return TextClassifier + elif task == TaskType.REGRESSION: + return TextRegressor + else: + raise ValueError(f"Unsupported task({task}) for extracting x and y") + + +def get_target_task_prediction( + target_task, + train_task, + gold_ys, + pred_ys, + output_convert_config +): + if target_task == train_task: + return (gold_ys, pred_ys) + elif (target_task == TaskType.REGRESSION) and (train_task == TaskType.CLASSIFICATION): + # convert profiler prediction into Regression type + converter_config = output_convert_config["classification_to_regression"] + converter = ClassificationToRegression.create_from_factory( + converter_config["type"], + converter_config["params"] + ) + elif (target_task == TaskType.CLASSIFICATION) and (train_task == TaskType.REGRESSION): + converter_config = output_convert_config["regression_to_classification"] + converter = RegressionToClassification.create_from_factory( + converter_config["type"], + converter_config["params"] + ) + else: + raise ValueError(f"Unexpected combinations for target task({target_task}) and train tasks({train_task})") + + return (converter.convert(gold_ys), converter.convert(pred_ys)) + + +def load_artifacts(artifact_path: str) -> Tuple[TextProfiler, Dict]: + train_config = load_from_file(os.path.join(artifact_path, TRAIN_CONFIG_FILENAME)) + train_task_type = TaskType(train_config["task"]) + profiler_type = train_config["profiler"]["type"] + profiler_class = get_task_profiler_class(train_task_type) + profiler = profiler_class.get_subclass(profiler_type).load(artifact_path) + return (profiler, train_config) + + +def 
extract_xys(task: TaskType, dataset_df: pd.DataFrame) -> Tuple[List, List]: + if task == TaskType.CLASSIFICATION: + return (dataset_df["text"].tolist(), dataset_df["label"].tolist()) + elif task == TaskType.REGRESSION: + return (dataset_df["text"].tolist(), dataset_df["score"].tolist()) + else: + raise ValueError(f"Unsupported task({task}) for extracting x and y") + + +def _convert_dataset(task: TaskType, dataset: Dataset) -> Dataset[Union[LabeledTextInstance, ScoredTextInstance]]: + if task == TaskType.CLASSIFICATION: + return dataset.to_labeled() + elif task == TaskType.REGRESSION: + return dataset.to_scored() + else: + raise ValueError(f"Unsupported task({task}) for converting datset") diff --git a/log/unittest/.gitkeep b/log/unittest/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/overview.png b/overview.png new file mode 100644 index 0000000..290ba35 Binary files /dev/null and b/overview.png differ diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..5bbe617 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2524 @@ +[[package]] +category = "main" +description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." +name = "absl-py" +optional = false +python-versions = "*" +version = "0.12.0" + +[package.dependencies] +six = "*" + +[[package]] +category = "dev" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" +optional = false +python-versions = "*" +version = "1.4.4" + +[[package]] +category = "dev" +description = "Atomic file writes." 
+marker = "sys_platform == \"win32\"" +name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.4.0" + +[[package]] +category = "main" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.3.0" + +[package.extras] +dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] + +[[package]] +category = "dev" +description = "The uncompromising code formatter." +name = "black" +optional = false +python-versions = ">=3.6" +version = "19.10b0" + +[package.dependencies] +appdirs = "*" +attrs = ">=18.1.0" +click = ">=6.5" +pathspec = ">=0.6,<1" +regex = "*" +toml = ">=0.9.4" +typed-ast = ">=1.4.0" + +[package.extras] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] + +[[package]] +category = "main" +description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." +name = "blis" +optional = false +python-versions = "*" +version = "0.7.4" + +[package.dependencies] +numpy = ">=1.15.0" + +[[package]] +category = "main" +description = "Extensible memoizing collections and decorators" +name = "cachetools" +optional = false +python-versions = "~=3.5" +version = "4.2.1" + +[[package]] +category = "main" +description = "Super lightweight function registries for your library" +name = "catalogue" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +version = "1.0.0" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.20" + +[[package]] +category = "main" +description = "Python package for providing Mozilla's CA Bundle." 
+name = "certifi" +optional = false +python-versions = "*" +version = "2020.12.5" + +[[package]] +category = "main" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "4.0.0" + +[[package]] +category = "main" +description = "Composable command line interface toolkit" +name = "click" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "7.1.2" + +[[package]] +category = "dev" +description = "Cross-platform colored terminal text." +marker = "sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.4.4" + +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "5.5" + +[package.extras] +toml = ["toml"] + +[[package]] +category = "main" +description = "Composable style cycles" +name = "cycler" +optional = false +python-versions = "*" +version = "0.10.0" + +[package.dependencies] +six = "*" + +[[package]] +category = "main" +description = "Manage calls to calloc/free through Cython" +name = "cymem" +optional = false +python-versions = "*" +version = "2.0.5" + +[[package]] +category = "main" +description = "HuggingFace/Datasets is an open library of NLP datasets." 
+name = "datasets" +optional = false +python-versions = "*" +version = "1.5.0" + +[package.dependencies] +dill = "*" +fsspec = "*" +huggingface-hub = "<0.1.0" +multiprocess = "*" +numpy = ">=1.17" +pandas = "*" +pyarrow = ">=0.17.1" +requests = ">=2.19.0" +tqdm = ">=4.27,<4.50.0" +xxhash = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.extras] +apache-beam = ["apache-beam"] +benchmarks = ["numpy (1.18.5)", "tensorflow (2.3.0)", "torch (1.6.0)", "transformers (3.0.2)"] +dev = ["absl-py", "pytest", "pytest-xdist", "apache-beam (>=2.24.0)", "elasticsearch", "boto3 (1.16.43)", "botocore (1.19.43)", "faiss-cpu", "fsspec", "moto (1.3.16)", "rarfile (>=4.0)", "tensorflow (>=2.3)", "torch", "transformers", "bs4", "conllu", "langdetect", "lxml", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "tldextract", "zstandard", "bert-score (>=0.3.6)", "rouge-score", "sacrebleu", "scipy", "seqeval", "sklearn", "jiwer", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "s3fs (>=0.4.2)", "Werkzeug (>=1.0.1)", "wget (>=3.2)", "pytorch-nlp (0.5.0)", "pytorch-lightning", "fastBPE (0.1.0)", "fairseq", "black", "isort", "flake8 (3.7.9)"] +docs = ["recommonmark", "sphinx (3.1.2)", "sphinx-markdown-tables", "sphinx-rtd-theme (0.4.3)", "sphinx-copybutton", "fsspec"] +quality = ["black", "isort", "flake8 (3.7.9)"] +s3 = ["fsspec", "boto3 (1.16.43)", "botocore (1.19.43)"] +tensorflow = ["tensorflow (>=2.2.0)"] +tensorflow_gpu = ["tensorflow-gpu (>=2.2.0)"] +tests = ["absl-py", "pytest", "pytest-xdist", "apache-beam (>=2.24.0)", "elasticsearch", "boto3 (1.16.43)", "botocore (1.19.43)", "faiss-cpu", "fsspec", "moto (1.3.16)", "rarfile (>=4.0)", "tensorflow (>=2.3)", "torch", "transformers", "bs4", "conllu", "langdetect", "lxml", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "tldextract", "zstandard", "bert-score (>=0.3.6)", "rouge-score", "sacrebleu", "scipy", "seqeval", "sklearn", "jiwer", "toml 
(>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "s3fs (>=0.4.2)", "Werkzeug (>=1.0.1)", "wget (>=3.2)", "pytorch-nlp (0.5.0)", "pytorch-lightning", "fastBPE (0.1.0)", "fairseq"] +torch = ["torch"] + +[[package]] +category = "main" +description = "Decorators for Humans" +name = "decorator" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "4.4.2" + +[[package]] +category = "main" +description = "serialize all of python" +name = "dill" +optional = false +python-versions = ">=2.6, !=3.0.*" +version = "0.3.3" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +category = "main" +description = "English multi-task CNN trained on OntoNotes. Assigns context-specific token vectors, POS tags, dependency parse and named entities." +name = "en_core_web_sm" +optional = false +python-versions = "*" +version = "2.3.0" + +[package.dependencies] +spacy = ">=2.3.0,<2.4.0" + +[package.source] +reference = "" +type = "url" +url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-2.3.0/en_core_web_sm-2.3.0.tar.gz" +[[package]] +category = "main" +description = "A platform independent file lock." 
+name = "filelock" +optional = false +python-versions = "*" +version = "3.0.12" + +[[package]] +category = "dev" +description = "the modular source code checker: pep8 pyflakes and co" +name = "flake8" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "3.9.0" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[[package]] +category = "main" +description = "File-system specification" +name = "fsspec" +optional = false +python-versions = ">3.6" +version = "0.8.7" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +dask = ["dask", "distributed"] +dropbox = ["dropboxdrivefs", "requests", "dropbox"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +hdfs = ["pyarrow"] +http = ["requests", "aiohttp"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] + +[[package]] +category = "main" +description = "Clean single-source support for Python 3 and 2" +name = "future" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.18.2" + +[[package]] +category = "main" +description = "Google Authentication Library" +name = "google-auth" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" +version = "1.28.0" + +[package.dependencies] +cachetools = ">=2.0.0,<5.0" +pyasn1-modules = ">=0.2.1" +setuptools = ">=40.3.0" +six = ">=1.9.0" + +[package.dependencies.rsa] +python = ">=3.6" +version = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +pyopenssl = ["pyopenssl (>=20.0.0)"] + +[[package]] +category = "main" +description = "Google Authentication Library" +name = "google-auth-oauthlib" +optional = false +python-versions = ">=3.6" +version = "0.4.4" + 
+[package.dependencies] +google-auth = ">=1.0.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +category = "main" +description = "HTTP/2-based RPC framework" +name = "grpcio" +optional = false +python-versions = "*" +version = "1.36.1" + +[package.dependencies] +six = ">=1.5.2" + +[package.extras] +protobuf = ["grpcio-tools (>=1.36.1)"] + +[[package]] +category = "main" +description = "Client library to download and publish models on the huggingface.co hub" +name = "huggingface-hub" +optional = false +python-versions = ">=3.6.0" +version = "0.0.7" + +[package.dependencies] +filelock = "*" +requests = "*" +tqdm = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.extras] +all = ["pytest", "black (>=20.8b1)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] +dev = ["pytest", "black (>=20.8b1)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] +quality = ["black (>=20.8b1)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] +testing = ["pytest"] + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.10" + +[[package]] +category = "main" +description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." 
+name = "imageio" +optional = false +python-versions = ">=3.5" +version = "2.9.0" + +[package.dependencies] +numpy = "*" +pillow = "*" + +[package.extras] +ffmpeg = ["imageio-ffmpeg"] +fits = ["astropy"] +full = ["astropy", "gdal", "imageio-ffmpeg", "itk"] +gdal = ["gdal"] +itk = ["itk"] + +[[package]] +category = "main" +description = "Read metadata from Python packages" +marker = "python_version < \"3.8\"" +name = "importlib-metadata" +optional = false +python-versions = ">=3.6" +version = "3.10.0" + +[package.dependencies] +zipp = ">=0.5" + +[package.dependencies.typing-extensions] +python = "<3.8" +version = ">=3.6.4" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +category = "dev" +description = "iniconfig: brain-dead simple config-ini parsing" +name = "iniconfig" +optional = false +python-versions = "*" +version = "1.1.1" + +[[package]] +category = "main" +description = "Lightweight pipelining with Python functions" +name = "joblib" +optional = false +python-versions = ">=3.6" +version = "1.0.1" + +[[package]] +category = "main" +description = "A fast implementation of the Cassowary constraint solver" +name = "kiwisolver" +optional = false +python-versions = ">=3.6" +version = "1.3.1" + +[[package]] +category = "main" +description = "Local Interpretable Model-Agnostic Explanations for machine learning classifiers" +name = "lime" +optional = false +python-versions = ">=3.5" +version = "0.2.0.1" + +[package.dependencies] +matplotlib = "*" +numpy = "*" +scikit-image = ">=0.12" +scikit-learn = ">=0.18" +scipy = "*" +tqdm = "*" + +[package.extras] +dev = ["pytest", "flake8"] + +[[package]] +category = "main" +description = "Language Interpretability Tool." 
+name = "lit-nlp" +optional = false +python-versions = "*" +version = "0.2" + +[package.dependencies] +Werkzeug = "*" +absl-py = "*" +attrs = "*" +lime = "*" +numpy = "*" +pandas = "*" +sacrebleu = "*" +scikit-learn = "*" +scipy = "*" +umap-learn = "*" + +[[package]] +category = "main" +description = "lightweight wrapper around basic LLVM functionality" +name = "llvmlite" +optional = false +python-versions = ">=3.6" +version = "0.34.0" + +[[package]] +category = "main" +description = "Python implementation of Markdown." +name = "markdown" +optional = false +python-versions = ">=3.6" +version = "3.3.4" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +category = "main" +description = "Python plotting package" +name = "matplotlib" +optional = false +python-versions = ">=3.7" +version = "3.4.1" + +[package.dependencies] +cycler = ">=0.10" +kiwisolver = ">=1.0.1" +numpy = ">=1.16" +pillow = ">=6.2.0" +pyparsing = ">=2.2.1" +python-dateutil = ">=2.7" + +[[package]] +category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" +optional = false +python-versions = "*" +version = "0.6.1" + +[[package]] +category = "main" +description = "better multiprocessing and multithreading in python" +name = "multiprocess" +optional = false +python-versions = "*" +version = "0.70.11.1" + +[package.dependencies] +dill = ">=0.3.3" + +[[package]] +category = "main" +description = "Cython bindings for MurmurHash" +name = "murmurhash" +optional = false +python-versions = "*" +version = "1.0.5" + +[[package]] +category = "dev" +description = "Optional static typing for Python" +name = "mypy" +optional = false +python-versions = ">=3.5" +version = "0.790" + +[package.dependencies] +mypy-extensions = ">=0.4.3,<0.5.0" +typed-ast = ">=1.4.0,<1.5.0" +typing-extensions = ">=3.7.4" + +[package.extras] +dmypy = ["psutil (>=4.0)"] + +[[package]] +category = 
"dev" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +name = "mypy-extensions" +optional = false +python-versions = "*" +version = "0.4.3" + +[[package]] +category = "main" +description = "Python package for creating and manipulating graphs and networks" +name = "networkx" +optional = false +python-versions = ">=3.6" +version = "2.5" + +[package.dependencies] +decorator = ">=4.3.0" + +[package.extras] +all = ["numpy", "scipy", "pandas", "matplotlib", "pygraphviz", "pydot", "pyyaml", "lxml", "pytest"] +gdal = ["gdal"] +lxml = ["lxml"] +matplotlib = ["matplotlib"] +numpy = ["numpy"] +pandas = ["pandas"] +pydot = ["pydot"] +pygraphviz = ["pygraphviz"] +pytest = ["pytest"] +pyyaml = ["pyyaml"] +scipy = ["scipy"] + +[[package]] +category = "main" +description = "compiling Python code using LLVM" +name = "numba" +optional = false +python-versions = ">=3.6" +version = "0.51.2" + +[package.dependencies] +llvmlite = ">=0.34.0.dev0,<0.35" +numpy = ">=1.15" +setuptools = "*" + +[[package]] +category = "main" +description = "NumPy is the fundamental package for array computing with Python." 
+name = "numpy" +optional = false +python-versions = ">=3.7" +version = "1.20.2" + +[[package]] +category = "main" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +name = "oauthlib" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.1.0" + +[package.extras] +rsa = ["cryptography"] +signals = ["blinker"] +signedtoken = ["cryptography", "pyjwt (>=1.0.0)"] + +[[package]] +category = "main" +description = "A flexible configuration library" +name = "omegaconf" +optional = false +python-versions = ">=3.6" +version = "2.0.6" + +[package.dependencies] +PyYAML = ">=5.1" +typing-extensions = "*" + +[[package]] +category = "main" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.9" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] +category = "main" +description = "Powerful data structures for data analysis, time series, and statistics" +name = "pandas" +optional = false +python-versions = ">=3.6.1" +version = "1.1.5" + +[package.dependencies] +numpy = ">=1.15.4" +python-dateutil = ">=2.7.3" +pytz = ">=2017.2" + +[package.extras] +test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] + +[[package]] +category = "dev" +description = "Utility library for gitignore style pattern matching of file paths." 
+name = "pathspec" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.8.1" + +[[package]] +category = "main" +description = "Python Imaging Library (Fork)" +name = "pillow" +optional = false +python-versions = ">=3.6" +version = "8.1.2" + +[[package]] +category = "main" +description = "The smartest command line arguments parser in the world" +name = "plac" +optional = false +python-versions = "*" +version = "1.1.3" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.1" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +dev = ["pre-commit", "tox"] + +[[package]] +category = "main" +description = "Wraps the portalocker recipe for easy usage" +name = "portalocker" +optional = false +python-versions = "*" +version = "2.0.0" + +[package.dependencies] +pywin32 = "!=226" + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +tests = ["pytest (>=4.6.9)", "pytest-cov (>=2.8.1)", "sphinx (>=1.8.5)", "pytest-flake8 (>=1.0.5)"] + +[[package]] +category = "main" +description = "Cython hash table that trusts the keys are pre-hashed" +name = "preshed" +optional = false +python-versions = "*" +version = "3.0.5" + +[package.dependencies] +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=0.28.0,<1.1.0" + +[[package]] +category = "main" +description = "Protocol Buffers" +name = "protobuf" +optional = false +python-versions = "*" +version = "3.15.6" + +[package.dependencies] +six = ">=1.9" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.10.0" + +[[package]] +category = "main" +description = "Python library for Apache Arrow" +name = 
"pyarrow" +optional = false +python-versions = ">=3.6" +version = "3.0.0" + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +category = "main" +description = "ASN.1 types and codecs" +name = "pyasn1" +optional = false +python-versions = "*" +version = "0.4.8" + +[[package]] +category = "main" +description = "A collection of ASN.1-based protocols modules." +name = "pyasn1-modules" +optional = false +python-versions = "*" +version = "0.2.8" + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.5.0" + +[[package]] +category = "dev" +description = "Python style guide checker" +name = "pycodestyle" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.7.0" + +[[package]] +category = "main" +description = "Data validation and settings management using python 3.6 type hinting" +name = "pydantic" +optional = false +python-versions = ">=3.6.1" +version = "1.8.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +category = "dev" +description = "passive checker of Python programs" +name = "pyflakes" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.3.1" + +[[package]] +category = "main" +description = "Nearest Neighbor Descent" +name = "pynndescent" +optional = false +python-versions = "*" +version = "0.5.2" + +[package.dependencies] +joblib = ">=0.11" +llvmlite = ">=0.30" +numba = ">=0.51.2" +scikit-learn = ">=0.18" +scipy = ">=1.0" + +[[package]] +category = "main" +description = "Python parsing module" +name = "pyparsing" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=3.6" +version = "6.2.2" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=19.2.0" +colorama = 
"*" +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<1.0.0a1" +py = ">=1.8.2" +toml = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.11.1" + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +name = "python-dateutil" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +version = "2.8.1" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers. Scale your models. Write less boilerplate." 
+name = "pytorch-lightning" +optional = false +python-versions = ">=3.6" +version = "1.1.5" + +[package.dependencies] +PyYAML = ">=5.1" +future = ">=0.17.1" +numpy = ">=1.16.6" +tensorboard = ">=2.2.0" +torch = ">=1.3" +tqdm = ">=4.41.0" + +[package.dependencies.fsspec] +extras = ["http"] +version = ">=0.8.1" + +[package.extras] +all = ["matplotlib (>3.1)", "horovod (>=0.20.2)", "omegaconf (>=2.0.1)", "torchtext (>=0.3.1,<0.7)", "onnx (>=1.7.0)", "onnxruntime (>=1.3.0)", "hydra-core (>=1.0)", "neptune-client (>=0.4.109)", "comet-ml (>=3.1.12)", "mlflow (>=1.0.0)", "test-tube (>=0.7.5)", "wandb (>=0.8.21)", "coverage (>=5.0)", "codecov (>=2.1)", "pytest (>=5.0)", "flake8 (>=3.6)", "flake8-black", "check-manifest", "twine (3.2)", "scikit-learn (>=0.22.2)", "scikit-image (>=0.17.2)", "black (>=20.8b1)", "isort (>=5.6.4)", "mypy (>=0.720)", "pre-commit (>=1.0)", "cloudpickle (>=1.3)", "nltk (>=3.3)", "pandas", "torchvision (>=0.4.1)", "gym (>=0.17.0)"] +cpu = ["matplotlib (>3.1)", "omegaconf (>=2.0.1)", "torchtext (>=0.3.1,<0.7)", "onnx (>=1.7.0)", "onnxruntime (>=1.3.0)", "hydra-core (>=1.0)", "neptune-client (>=0.4.109)", "comet-ml (>=3.1.12)", "mlflow (>=1.0.0)", "test-tube (>=0.7.5)", "wandb (>=0.8.21)", "coverage (>=5.0)", "codecov (>=2.1)", "pytest (>=5.0)", "flake8 (>=3.6)", "flake8-black", "check-manifest", "twine (3.2)", "scikit-learn (>=0.22.2)", "scikit-image (>=0.17.2)", "black (>=20.8b1)", "isort (>=5.6.4)", "mypy (>=0.720)", "pre-commit (>=1.0)", "cloudpickle (>=1.3)", "nltk (>=3.3)", "pandas", "torchvision (>=0.4.1)", "gym (>=0.17.0)"] +cpu-extra = ["matplotlib (>3.1)", "omegaconf (>=2.0.1)", "torchtext (>=0.3.1,<0.7)", "onnx (>=1.7.0)", "onnxruntime (>=1.3.0)", "hydra-core (>=1.0)"] +dev = ["matplotlib (>3.1)", "horovod (>=0.20.2)", "omegaconf (>=2.0.1)", "torchtext (>=0.3.1,<0.7)", "onnx (>=1.7.0)", "onnxruntime (>=1.3.0)", "hydra-core (>=1.0)", "neptune-client (>=0.4.109)", "comet-ml (>=3.1.12)", "mlflow (>=1.0.0)", "test-tube (>=0.7.5)", "wandb 
(>=0.8.21)", "coverage (>=5.0)", "codecov (>=2.1)", "pytest (>=5.0)", "flake8 (>=3.6)", "flake8-black", "check-manifest", "twine (3.2)", "scikit-learn (>=0.22.2)", "scikit-image (>=0.17.2)", "black (>=20.8b1)", "isort (>=5.6.4)", "mypy (>=0.720)", "pre-commit (>=1.0)", "cloudpickle (>=1.3)", "nltk (>=3.3)", "pandas"] +examples = ["torchvision (>=0.4.1)", "gym (>=0.17.0)"] +extra = ["matplotlib (>3.1)", "horovod (>=0.20.2)", "omegaconf (>=2.0.1)", "torchtext (>=0.3.1,<0.7)", "onnx (>=1.7.0)", "onnxruntime (>=1.3.0)", "hydra-core (>=1.0)"] +loggers = ["neptune-client (>=0.4.109)", "comet-ml (>=3.1.12)", "mlflow (>=1.0.0)", "test-tube (>=0.7.5)", "wandb (>=0.8.21)"] +test = ["coverage (>=5.0)", "codecov (>=2.1)", "pytest (>=5.0)", "flake8 (>=3.6)", "flake8-black", "check-manifest", "twine (3.2)", "scikit-learn (>=0.22.2)", "scikit-image (>=0.17.2)", "black (>=20.8b1)", "isort (>=5.6.4)", "mypy (>=0.720)", "pre-commit (>=1.0)", "cloudpickle (>=1.3)", "nltk (>=3.3)", "pandas"] + +[[package]] +category = "main" +description = "World timezone definitions, modern and historical" +name = "pytz" +optional = false +python-versions = "*" +version = "2021.1" + +[[package]] +category = "main" +description = "PyWavelets, wavelet transform module" +name = "pywavelets" +optional = false +python-versions = ">=3.5" +version = "1.1.1" + +[package.dependencies] +numpy = ">=1.13.3" + +[[package]] +category = "main" +description = "Python for Window Extensions" +marker = "platform_system == \"Windows\"" +name = "pywin32" +optional = false +python-versions = "*" +version = "300" + +[[package]] +category = "main" +description = "YAML parser and emitter for Python" +name = "pyyaml" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +version = "5.4.1" + +[[package]] +category = "main" +description = "Alternative regular expression module, to replace re." 
+name = "regex" +optional = false +python-versions = "*" +version = "2021.3.17" + +[[package]] +category = "main" +description = "Python HTTP for Humans." +name = "requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.25.1" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<5" +idna = ">=2.5,<3" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] + +[[package]] +category = "main" +description = "OAuthlib authentication support for Requests." +name = "requests-oauthlib" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0" + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib (>=3.0.0)"] + +[[package]] +category = "main" +description = "Pure-Python RSA implementation" +marker = "python_version >= \"3.6\"" +name = "rsa" +optional = false +python-versions = ">=3.5, <4" +version = "4.7.2" + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +category = "main" +description = "Hassle-free computation of shareable, comparable, and reproducible BLEU, chrF, and TER scores" +name = "sacrebleu" +optional = false +python-versions = ">=3" +version = "1.5.1" + +[package.dependencies] +portalocker = "2.0.0" + +[package.extras] +ja = ["mecab-python3 (1.0.3)", "ipadic (>=1.0,<2.0)"] + +[[package]] +category = "main" +description = "SacreMoses" +name = "sacremoses" +optional = false +python-versions = "*" +version = "0.0.43" + +[package.dependencies] +click = "*" +joblib = "*" +regex = "*" +six = "*" +tqdm = "*" + +[[package]] +category = "main" +description = "Image processing in Python" +name = "scikit-image" +optional = false +python-versions = ">=3.7" +version = "0.18.1" + +[package.dependencies] +PyWavelets = ">=1.1.1" +imageio = ">=2.3.0" +matplotlib = ">=2.0.0,<3.0.0 || 
>3.0.0" +networkx = ">=2.0" +numpy = ">=1.16.5" +pillow = ">=4.3.0,<7.1.0 || >7.1.0,<7.1.1 || >7.1.1" +scipy = ">=1.0.1" +tifffile = ">=2019.7.26" + +[package.extras] +data = ["pooch (>=1.3.0)"] +docs = ["sphinx (>=1.8,<=2.4.4)", "sphinx-gallery (>=0.7.0,<0.8.0 || >0.8.0)", "numpydoc (>=1.0)", "sphinx-copybutton", "pytest-runner", "scikit-learn", "matplotlib (>=3.0.1)", "dask (>=0.15.0,<2.17.0 || >2.17.0)", "cloudpickle (>=0.2.1)", "pandas (>=0.23.0)", "seaborn (>=0.7.1)", "pooch (>=1.3.0)", "tifffile (>=2020.5.30)", "myst-parser", "ipywidgets", "plotly (>=4.10.0)"] +optional = ["simpleitk", "astropy (>=3.1.2)", "qtpy", "pyamg", "dask (>=1.0.0,<2.17.0 || >2.17.0)", "cloudpickle (>=0.2.1)", "pooch (>=1.3.0)"] +test = ["pytest (>=5.2.0)", "pytest-cov (>=2.7.0)", "pytest-localserver", "pytest-faulthandler", "flake8", "codecov", "pooch (>=1.3.0)"] + +[[package]] +category = "main" +description = "A set of python modules for machine learning and data mining" +name = "scikit-learn" +optional = false +python-versions = ">=3.6" +version = "0.23.2" + +[package.dependencies] +joblib = ">=0.11" +numpy = ">=1.13.3" +scipy = ">=0.19.1" +threadpoolctl = ">=2.0.0" + +[package.extras] +alldeps = ["numpy (>=1.13.3)", "scipy (>=0.19.1)"] + +[[package]] +category = "main" +description = "SciPy: Scientific Library for Python" +name = "scipy" +optional = false +python-versions = ">=3.7" +version = "1.6.1" + +[package.dependencies] +numpy = ">=1.16.5" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.15.0" + +[[package]] +category = "main" +description = "Industrial-strength Natural Language Processing (NLP) in Python" +name = "spacy" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "2.3.5" + +[package.dependencies] +blis = ">=0.4.0,<0.8.0" +catalogue = ">=0.0.7,<1.1.0" +cymem = ">=2.0.2,<2.1.0" 
+murmurhash = ">=0.28.0,<1.1.0" +numpy = ">=1.15.0" +plac = ">=0.9.6,<1.2.0" +preshed = ">=3.0.2,<3.1.0" +requests = ">=2.13.0,<3.0.0" +setuptools = "*" +srsly = ">=1.0.2,<1.1.0" +thinc = ">=7.4.1,<7.5.0" +tqdm = ">=4.38.0,<5.0.0" +wasabi = ">=0.4.0,<1.1.0" + +[package.extras] +cuda = ["cupy (>=5.0.0b4)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] +ja = ["sudachipy (>=0.4.5)", "sudachidict-core (>=20200330)"] +ko = ["natto-py (0.9.0)"] +lookups = ["spacy-lookups-data (>=0.3.2,<0.4.0)"] +th = ["pythainlp (>=2.0)"] + +[[package]] +category = "main" +description = "Modern high-performance serialization utilities for Python" +name = "srsly" +optional = false +python-versions = "*" +version = "1.0.5" + +[[package]] +category = "main" +description = "TensorBoard lets you watch Tensors Flow" +name = "tensorboard" +optional = false +python-versions = ">= 2.7, != 3.0.*, != 3.1.*" +version = "2.4.1" + +[package.dependencies] +absl-py = ">=0.4" +google-auth = ">=1.6.3,<2" +google-auth-oauthlib = ">=0.4.1,<0.5" +grpcio = ">=1.24.3" +markdown = ">=2.6.8" +numpy = ">=1.12.0" +protobuf = ">=3.6.0" +requests = ">=2.21.0,<3" +setuptools = ">=41.0.0" +six = ">=1.10.0" +tensorboard-plugin-wit = ">=1.6.0" +werkzeug = ">=0.11.15" + +[package.dependencies.wheel] +python = ">=3" +version = ">=0.26" + +[[package]] +category = "main" +description = "What-If Tool TensorBoard plugin." 
+name = "tensorboard-plugin-wit" +optional = false +python-versions = "*" +version = "1.8.0" + +[[package]] +category = "main" +description = "Practical Machine Learning for NLP" +name = "thinc" +optional = false +python-versions = "*" +version = "7.4.5" + +[package.dependencies] +blis = ">=0.4.0,<0.8.0" +catalogue = ">=0.0.7,<1.1.0" +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=0.28.0,<1.1.0" +numpy = ">=1.15.0" +plac = ">=0.9.6,<1.2.0" +preshed = ">=1.0.1,<3.1.0" +srsly = ">=0.0.6,<1.1.0" +tqdm = ">=4.10.0,<5.0.0" +wasabi = ">=0.0.9,<1.1.0" + +[package.extras] +cuda = ["cupy (>=5.0.0b4)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] + +[[package]] +category = "main" +description = "threadpoolctl" +name = "threadpoolctl" +optional = false +python-versions = ">=3.5" +version = "2.1.0" + +[[package]] +category = "main" +description = "Read and write TIFF files" +name = "tifffile" +optional = false +python-versions = ">=3.7" +version = "2021.3.31" + +[package.dependencies] +numpy = ">=1.15.1" + +[package.extras] +all = ["imagecodecs (>=2021.3.31)", "matplotlib (>=3.2)", "lxml"] + +[[package]] +category = "main" +description = "Fast and Customizable Tokenizers" +name = "tokenizers" +optional = false +python-versions = "*" +version = "0.10.1" + +[package.extras] +testing = ["pytest"] + +[[package]] +category = "dev" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.10.2" + +[[package]] +category = "main" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +name = "torch" +optional = false +python-versions = 
">=3.6.2" +version = "1.8.1" + +[package.dependencies] +numpy = "*" +typing-extensions = "*" + +[[package]] +category = "main" +description = "Fast, Extensible Progress Meter" +name = "tqdm" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "4.49.0" + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "argopt", "pydoc-markdown"] + +[[package]] +category = "main" +description = "State-of-the-art Natural Language Processing for TensorFlow 2.0 and PyTorch" +name = "transformers" +optional = false +python-versions = ">=3.6.0" +version = "4.3.2" + +[package.dependencies] +filelock = "*" +numpy = ">=1.17" +packaging = "*" +regex = "!=2019.12.17" +requests = "*" +sacremoses = "*" +tokenizers = ">=0.10.1,<0.11" +tqdm = ">=4.27" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.extras] +all = ["tensorflow (>=2.3)", "onnxconverter-common", "keras2onnx", "torch (>=1.0)", "jax (>=0.2.8)", "jaxlib (>=0.1.59)", "flax (>=0.2.2)", "sentencepiece (0.1.91)", "protobuf", "tokenizers (>=0.10.1,<0.11)"] +dev = ["tensorflow (>=2.3)", "onnxconverter-common", "keras2onnx", "torch (>=1.0)", "jax (>=0.2.8)", "jaxlib (>=0.1.59)", "flax (>=0.2.2)", "sentencepiece (0.1.91)", "protobuf", "tokenizers (>=0.10.1,<0.11)", "pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "faiss-cpu", "cookiecutter (1.7.2)", "soundfile", "black (>=20.8b1)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "recommonmark", "sphinx (3.2.1)", "sphinx-markdown-tables", "sphinx-rtd-theme (0.4.3)", "sphinx-copybutton", "scikit-learn"] +docs = ["recommonmark", "sphinx (3.2.1)", "sphinx-markdown-tables", "sphinx-rtd-theme (0.4.3)", "sphinx-copybutton"] +flax = ["jax (>=0.2.8)", "jaxlib (>=0.1.59)", "flax (>=0.2.2)"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)"] +modelcreation = ["cookiecutter 
(1.7.2)"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +quality = ["black (>=20.8b1)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] +retrieval = ["faiss-cpu", "datasets"] +sentencepiece = ["sentencepiece (0.1.91)", "protobuf"] +serving = ["pydantic", "uvicorn", "fastapi", "starlette"] +sklearn = ["scikit-learn"] +speech = ["soundfile"] +testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "faiss-cpu", "cookiecutter (1.7.2)", "soundfile"] +tf = ["tensorflow (>=2.3)", "onnxconverter-common", "keras2onnx"] +tf-cpu = ["tensorflow-cpu (>=2.3)", "onnxconverter-common", "keras2onnx"] +tokenizers = ["tokenizers (>=0.10.1,<0.11)"] +torch = ["torch (>=1.0)"] +torchhub = ["filelock", "importlib-metadata", "numpy (>=1.17)", "packaging", "protobuf", "regex (!=2019.12.17)", "requests", "sacremoses", "sentencepiece (0.1.91)", "torch (>=1.0)", "tokenizers (>=0.10.1,<0.11)", "tqdm (>=4.27)"] + +[[package]] +category = "dev" +description = "a fork of Python 2 and 3 ast modules with type comment support" +name = "typed-ast" +optional = false +python-versions = "*" +version = "1.4.2" + +[[package]] +category = "main" +description = "Backported and Experimental Type Hints for Python 3.5+" +name = "typing-extensions" +optional = false +python-versions = "*" +version = "3.7.4.3" + +[[package]] +category = "main" +description = "Uniform Manifold Approximation and Projection" +name = "umap-learn" +optional = false +python-versions = "*" +version = "0.5.1" + +[package.dependencies] +numba = ">=0.49" +numpy = ">=1.17" +pynndescent = ">=0.5" +scikit-learn = ">=0.22" +scipy = ">=1.0" + +[package.extras] +parametric_umap = ["tensorflow (>=2.1)"] +plot = ["pandas", "matplotlib", "datashader", "bokeh", "holoviews", "colorcet", "seaborn", "scikit-image"] + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+name = "urllib3" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "1.26.4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] + +[[package]] +category = "main" +description = "A lightweight console printing and formatting toolkit" +name = "wasabi" +optional = false +python-versions = "*" +version = "0.8.2" + +[[package]] +category = "main" +description = "The comprehensive WSGI web application library." +name = "werkzeug" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "1.0.1" + +[package.extras] +dev = ["pytest", "pytest-timeout", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] +watchdog = ["watchdog"] + +[[package]] +category = "main" +description = "A built-package format for Python" +marker = "python_version >= \"3\"" +name = "wheel" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "0.36.2" + +[package.extras] +test = ["pytest (>=3.0.0)", "pytest-cov"] + +[[package]] +category = "main" +description = "Python binding for xxHash" +name = "xxhash" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.0.0" + +[[package]] +category = "main" +description = "Backport of pathlib-compatible object wrapper for zip files" +marker = "python_version < \"3.8\"" +name = "zipp" +optional = false +python-versions = ">=3.6" +version = "3.4.1" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +content-hash = "e3553de504e15b755f0c3044a65266517058ce3ed79b4f8bb62e4fd9cdd46d2b" 
+python-versions = "^3.7" + +[metadata.files] +absl-py = [ + {file = "absl-py-0.12.0.tar.gz", hash = "sha256:b44f68984a5ceb2607d135a615999b93924c771238a63920d17d3387b0d229d5"}, + {file = "absl_py-0.12.0-py3-none-any.whl", hash = "sha256:afe94e3c751ff81aad55d33ab6e630390da32780110b5af72ae81ecff8418d9e"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, +] +black = [ + {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, + {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, +] +blis = [ + {file = "blis-0.7.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5b403deb2ad5515e1edb3c0867bccb5b974b461f24283d9219a3a761fd6dacc6"}, + {file = "blis-0.7.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9f9b829480c12fc834549306821e5c51cb28b216ca5f88c5b2cfedbeb9daf67d"}, + {file = "blis-0.7.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c2d8064217c326dd9a0dcbae294ffe8557263e2a00d76101ffa222b9c9d9c62d"}, + {file = "blis-0.7.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d717b5dea407aac89a646908e7d9849105abab9c88a539c120518c200f899f4e"}, + {file = "blis-0.7.4-cp37-cp37m-manylinux2014_x86_64.whl", 
hash = "sha256:5ecddc4c6daf80558154b091db0a9839bb15dbe65d2906a543a73b93fbce4f73"}, + {file = "blis-0.7.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6814991b3e3193db4f9b2417174c6f24b9c0197409d864fa7628583bd2df1f0f"}, + {file = "blis-0.7.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4222bbc7b9c47bc3cf6f36f2241862c1512ca7ebac3828267a2e05ef6c47fc54"}, + {file = "blis-0.7.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:445e4838b809e99677f5c0982fb9af320f0d91328fb28c8097e5f1173c4df9d6"}, + {file = "blis-0.7.4-cp38-cp38-win_amd64.whl", hash = "sha256:94890b2296f1449baa56aede46627ea7fc8de11c788f9c261ee38c2eb4a2cc7d"}, + {file = "blis-0.7.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:168fd7bd763ebe529aa25a066d3a6b89f4c3f492f6297f881df6942741b95787"}, + {file = "blis-0.7.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:5c1a2023f7d8431daa8d87d32f539bb23e1a009500c37f9eba0ac7b3f20f73eb"}, + {file = "blis-0.7.4-cp39-cp39-win_amd64.whl", hash = "sha256:78a8e0ee72a42c3b2f5b9114500a781119995f76fa6c21d4b02c6fb9c21df2cc"}, + {file = "blis-0.7.4.tar.gz", hash = "sha256:7daa615a97d4f28db0f332b710bfe1900b15d0c25841c6d727965e4fd91e09cf"}, +] +cachetools = [ + {file = "cachetools-4.2.1-py3-none-any.whl", hash = "sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2"}, + {file = "cachetools-4.2.1.tar.gz", hash = "sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9"}, +] +catalogue = [ + {file = "catalogue-1.0.0-py2.py3-none-any.whl", hash = "sha256:584d78e7f4c3c6e2fd498eb56dfc8ef1f4ff738480237de2ccd26cbe2cf47172"}, + {file = "catalogue-1.0.0.tar.gz", hash = "sha256:d74d1d856c6b36a37bf14aa6dbbc27d0582667b7ab979a6108e61a575e8723f5"}, +] +certifi = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +chardet = [ + {file = 
"chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = 
"coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +cycler = [ + {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, + {file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"}, +] +cymem = [ + {file = "cymem-2.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9d72d69f7a62a280199c3aa7bc550685c47d6d0689b2d299e6492253b86d2437"}, + {file = "cymem-2.0.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:8ea57e6923f40eb51012352161bb5707c14a5a5ce901ff72021e59df06221655"}, + {file = "cymem-2.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:4bd023c2477198b39b660c2a6b0242880649765ecee8461688a57fd4afd2bfc0"}, + {file = "cymem-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f0eb9b3d03623dcfc746cf8bff0663b0e347f4aea759965c8932087a0307ee9"}, + {file = "cymem-2.0.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:a440d63577fcdc9c528c9cc026b7b4f8648193bac462bc0596c9eac10f9fba62"}, + {file = "cymem-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:3d48902d7441645835fefc7832df49feb5362c7300d182475b63a01d25ae44ef"}, + {file = "cymem-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2167c9959fcd639b95d51fa5efaa7c61eef8d686cb75a25412a914f428ce980"}, + {file = "cymem-2.0.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:734d82d0d03c2ceb929bc1744c04dbe0a105e68a4947c8406056a36f86c41830"}, + {file = "cymem-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:01d3ea159f7a3f3192b1e800ed8207dac7586794d903a153198b9ea317f144bc"}, + {file = "cymem-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d307f7f6230d861a938837cae4b855226b6845a21c010242a15e9ce6853856cd"}, + {file = "cymem-2.0.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ce1e81c1d031f56b67bac2136e73b4512cbc794706cd570178972d54ba6115d8"}, + {file = "cymem-2.0.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:d19f68b90411e02ab33b1654118337f96f41c13a3cd00c4f44f7abed2bc712e7"}, + {file = "cymem-2.0.5.tar.gz", hash = "sha256:190e15d9cf2c3bde60ae37bddbae6568a36044dc4a326d84081a5fa08818eee0"}, +] +datasets = [ + {file = "datasets-1.5.0-py3-none-any.whl", hash = "sha256:a58555cbcd162f70374e94de628ec4db8e6b347d18a04e814306592ddbdf5689"}, + {file = "datasets-1.5.0.tar.gz", hash = "sha256:4548241548ab45a3a4259f55c9e4ea79c6b03b2445be4a5971494c1b71e35262"}, +] +decorator = [ + {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, + {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, +] +dill = [ + {file = "dill-0.3.3-py2.py3-none-any.whl", hash = "sha256:78370261be6ea49037ace8c17e0b7dd06d0393af6513cc23f9b222d9367ce389"}, + {file = "dill-0.3.3.zip", hash = "sha256:efb7f6cb65dba7087c1e111bb5390291ba3616741f96840bfc75792a1a9b5ded"}, +] +en_core_web_sm = [] +filelock = [ + {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, + {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, +] +flake8 = [ + {file = "flake8-3.9.0-py2.py3-none-any.whl", hash = "sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff"}, + {file = "flake8-3.9.0.tar.gz", hash = "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0"}, +] +fsspec = [ + {file = "fsspec-0.8.7-py3-none-any.whl", hash = "sha256:65dbf8244a3a3d23342109925f9f588c7551b2b01a5f47e555043b17e2b32d62"}, + {file = "fsspec-0.8.7.tar.gz", hash = "sha256:4b11557a90ac637089b10afa4c77adf42080c0696f6f2771c41ce92d73c41432"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +google-auth = [ + {file = "google-auth-1.28.0.tar.gz", hash = 
"sha256:9bd436d19ab047001a1340720d2b629eb96dd503258c524921ec2af3ee88a80e"}, + {file = "google_auth-1.28.0-py2.py3-none-any.whl", hash = "sha256:dcaba3aa9d4e0e96fd945bf25a86b6f878fcb05770b67adbeb50a63ca4d28a5e"}, +] +google-auth-oauthlib = [ + {file = "google-auth-oauthlib-0.4.4.tar.gz", hash = "sha256:09832c6e75032f93818edf1affe4746121d640c625a5bef9b5c96af676e98eee"}, + {file = "google_auth_oauthlib-0.4.4-py2.py3-none-any.whl", hash = "sha256:0e92aacacfb94978de3b7972cf4b0f204c3cd206f74ddd0dc0b31e91164e6317"}, +] +grpcio = [ + {file = "grpcio-1.36.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:e3a83c5db16f95daac1d96cf3c9018d765579b5a29bb336758d793028e729921"}, + {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c18739fecb90760b183bfcb4da1cf2c6bf57e38f7baa2c131d5f67d9a4c8365d"}, + {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f6efa62ca1fe02cd34ec35f53446f04a15fe2c886a4e825f5679936a573d2cbf"}, + {file = "grpcio-1.36.1-cp27-cp27m-win32.whl", hash = "sha256:9a18299827a70be0507f98a65393b1c7f6c004fe2ca995fe23ffac534dd187a7"}, + {file = "grpcio-1.36.1-cp27-cp27m-win_amd64.whl", hash = "sha256:8a89190de1985a54ef311650cf9687ffb81de038973fd32e452636ddae36b29f"}, + {file = "grpcio-1.36.1-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:3e75643d21db7d68acd541d3fec66faaa8061d12b511e101b529ff12a276bb9b"}, + {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c5204e05e18268dd6a1099ca6c106fd9d00bcae1e37d5a5186094c55044c941"}, + {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:24d4c2c5e540e666c52225953d6813afc8ccf9bf46db6a72edd4e8d606656248"}, + {file = "grpcio-1.36.1-cp35-cp35m-linux_armv7l.whl", hash = "sha256:4dc7295dc9673f7af22c1e38c2a2c24ecbd6773a4c5ed5a46ed38ad4dcf2bf6c"}, + {file = "grpcio-1.36.1-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:f241116d4bf1a8037ff87f16914b606390824e50902bdbfa2262e855fbf07fe5"}, + {file = 
"grpcio-1.36.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:1056b558acfd575d774644826df449e1402a03e456a3192fafb6b06d1069bf80"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:52ec563da45d06319224ebbda53501d25594de64ee1b2786e119ba4a2f1ce40c"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:7cbeac9bbe6a4a7fce4a89c892c249135dd9f5f5219ede157174c34a456188f0"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:2abaa9f0d83bd0b26f6d0d1fc4b97d73bde3ceac36ab857f70d3cabcf31c5c79"}, + {file = "grpcio-1.36.1-cp35-cp35m-win32.whl", hash = "sha256:02030e1afd3247f2b159df9dff959ec79dd4047b1c4dd4eec9e3d1642efbd504"}, + {file = "grpcio-1.36.1-cp35-cp35m-win_amd64.whl", hash = "sha256:eafafc7e040e36aa926edc731ab52c23465981888779ae64bfc8ad85888ed4f3"}, + {file = "grpcio-1.36.1-cp36-cp36m-linux_armv7l.whl", hash = "sha256:1030e74ddd0fa6e3bad7944f0c68cf1251b15bcd70641f0ad3858fdf2b8602a0"}, + {file = "grpcio-1.36.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:b003e24339030ed356f59505d1065b89e1f443ef41ce71ca9069be944c0d2e6b"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:76daa3c4d58fcf40f7969bdb4270335e96ee0382a050cadcd97d7332cd0251a3"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f591597bb25eae0094ead5a965555e911453e5f35fdbdaa83be11ef107865697"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:cbd82c479338fc1c0e5c3db09752b61fe47d40c6e38e4be8657153712fa76674"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:7e32bc01dfaa7a51c547379644ea619a2161d6969affdac3bbd173478d26673d"}, + {file = "grpcio-1.36.1-cp36-cp36m-win32.whl", hash = "sha256:5378189fb897567f4929f75ab67a3e0da4f8967806246cb9cfa1fa06bfbdb0d5"}, + {file = "grpcio-1.36.1-cp36-cp36m-win_amd64.whl", hash = "sha256:3a6295aa692806218e97bb687a71cd768450ed99e2acddc488f18d738edef463"}, + {file = 
"grpcio-1.36.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:6f6f8a8b57e40347d0bf32c2135037dae31d63d3b19007b4c426a11b76deaf65"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c05ed54b2a00df01e633bebec819b512bf0c60f8f5b3b36dd344dc673b02fea"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e1b9e906aa6f7577016e86ed7f3a69cae7dab4e41356584dc7980f76ea65035f"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a602d6b30760bbbb2fe776caaa914a0d404636cafc3f2322718bf8002d7b1e55"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:dee9971aef20fc09ed897420446c4d0926cd1d7630f343333288523ca5b44bb2"}, + {file = "grpcio-1.36.1-cp37-cp37m-win32.whl", hash = "sha256:ed16bfeda02268e75e038c58599d52afc7097d749916c079b26bc27a66900f7d"}, + {file = "grpcio-1.36.1-cp37-cp37m-win_amd64.whl", hash = "sha256:85a6035ae75ce964f78f19cf913938596ccf068b149fcd79f4371268bcb9aa7c"}, + {file = "grpcio-1.36.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6b30682180053eebc87802c2f249d2f59b430e1a18e8808575dde0d22a968b2c"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5e4920a8fb5d17b2c5ba980db0ac1c925bbee3e5d70e96da3ec4fb1c8600d68f"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f7740d9d9451f3663df11b241ac05cafc0efaa052d2fdca6640c4d3748eaf6e2"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:20b7c4c5513e1135a2261e56830c0e710f205fee92019b92fe132d7f16a5cfd8"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:216fbd2a488e74c3b96e240e4054c85c4c99102a439bc9f556936991643f43bc"}, + {file = "grpcio-1.36.1-cp38-cp38-win32.whl", hash = "sha256:7863c2a140e829b1f4c6d67bf0bf15e5321ac4766d0a295e2682970d9dd4b091"}, + {file = "grpcio-1.36.1-cp38-cp38-win_amd64.whl", hash = "sha256:f214076eb13da9e65c1aa9877b51fca03f51a82bd8691358e1a1edd9ff341330"}, + {file = 
"grpcio-1.36.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:ec753c022b39656f88409fbf9f2d3b28497e3f17aa678f884d78776b41ebe6bd"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:0648a6d5d7ddcd9c8462d7d961660ee024dad6b88152ee3a521819e611830edf"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:45ea10dd133a43b10c0b4326834107ebccfee25dab59b312b78e018c2d72a1f0"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:bab743cdac1d6d8326c65d1d091d0740b39966dfab06519f74a03b3d128b8454"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:09af8ceb91860086216edc6e5ea15f9beb2cf81687faa43b7c03216f5b73e244"}, + {file = "grpcio-1.36.1-cp39-cp39-win32.whl", hash = "sha256:f3f70505207ee1cee65f60a799fd8e06e07861409aa0d55d834825a79b40c297"}, + {file = "grpcio-1.36.1-cp39-cp39-win_amd64.whl", hash = "sha256:f22c11772eff25ba1ca536e760b8c34ba56f2a9d66b6842cb11770a8f61f879d"}, + {file = "grpcio-1.36.1.tar.gz", hash = "sha256:a66ea59b20f3669df0f0c6a3bd57b985e5b2d1dcf3e4c29819bb8dc232d0fd38"}, +] +huggingface-hub = [ + {file = "huggingface_hub-0.0.7-py3-none-any.whl", hash = "sha256:50316c627c353419d3c8e371ec99a6f3eefd86bfa74e3d87c4d088f33b51747f"}, + {file = "huggingface_hub-0.0.7.tar.gz", hash = "sha256:bc096d9ee27eced690fe90c13c55819965500e2956bc74d0fadc8aafb791b2f9"}, +] +idna = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] +imageio = [ + {file = "imageio-2.9.0-py3-none-any.whl", hash = "sha256:3604d751f03002e8e0e7650aa71d8d9148144a87daf17cb1f3228e80747f2e6b"}, + {file = "imageio-2.9.0.tar.gz", hash = "sha256:52ddbaeca2dccf53ba2d6dec5676ca7bc3b2403ef8b37f7da78b7654bb3e10f0"}, +] +importlib-metadata = [ + {file = "importlib_metadata-3.10.0-py3-none-any.whl", hash = 
"sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe"}, + {file = "importlib_metadata-3.10.0.tar.gz", hash = "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +joblib = [ + {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, + {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, +] +kiwisolver = [ + {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win32.whl", hash = "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"}, + {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"}, + {file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"}, + {file = 
"kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win32.whl", hash = "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"}, + {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, +] +lime = [ + {file = "lime-0.2.0.1.tar.gz", hash = "sha256:76960e4f055feb53e89b5022383bafc87b63f25bac6265984b0a333d1a57f781"}, +] +lit-nlp = [ + {file = "lit_nlp-0.2-py3-none-any.whl", hash = "sha256:1d28ed15f92c2ce917ce74c8366318c9ee105e1b0d2b1e755ec2c9a047f8a5db"}, +] +llvmlite = [ + {file = "llvmlite-0.34.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:11342e5ac320c953590bdd9d0dec8c52f4b5252c4c6335ba25f1e7b9f91f9325"}, + {file = "llvmlite-0.34.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5bdf0ce430adfaf938ced5844d12f80616eb8321b5b9edfc45ef84ada5c5242c"}, + 
{file = "llvmlite-0.34.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e08d9d2dc5a31636bfc6b516d2d7daba95632afa3419eb8730dc76a7951e9558"}, + {file = "llvmlite-0.34.0-cp36-cp36m-win32.whl", hash = "sha256:9ff1dcdad03be0cf953aca5fc8cffdca25ccee2ec9e8ec7e95571722cdc02d55"}, + {file = "llvmlite-0.34.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5acdc3c3c7ea0ef7a1a6b442272e05d695bc8492e5b07666135ed1cfbf4ab9d2"}, + {file = "llvmlite-0.34.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb96989bc57a1ccb131e7a0e061d07b68139b6f81a98912345d53d9239e231e1"}, + {file = "llvmlite-0.34.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6d3f81992f52a94077e7b9b16497029daf5b5eebb2cce56f3c8345bbc9c6308e"}, + {file = "llvmlite-0.34.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d841248d1c630426c93e3eb3f8c45bca0dab77c09faeb7553b1a500220e362ce"}, + {file = "llvmlite-0.34.0-cp37-cp37m-win32.whl", hash = "sha256:408b15ffec30696406e821c89da010f1bb1eb0aa572be4561c98eb2536d610ab"}, + {file = "llvmlite-0.34.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5d1f370bf150db7239204f09cf6a0603292ea28bac984e69b167e16fe160d803"}, + {file = "llvmlite-0.34.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132322bc084abf336c80dd106f9357978c8c085911fb656898d3be0d9ff057ea"}, + {file = "llvmlite-0.34.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:8f344102745fceba6eb5bf03c228bb290e9bc79157e9506a4a72878d636f9b3c"}, + {file = "llvmlite-0.34.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:05253f3f44fab0148276335b2c1b2c4a78143dfa78e6bafd7f937d6248f297cc"}, + {file = "llvmlite-0.34.0-cp38-cp38-win32.whl", hash = "sha256:28264f9e2b3df4135cbcfca5a91c5b0b31dd3fc02fa623b4bb13327f0cd4fc80"}, + {file = "llvmlite-0.34.0-cp38-cp38-win_amd64.whl", hash = "sha256:964f8f7a2184963cb3617d057c2382575953e488b7bb061b632ee014cfef110a"}, + {file = "llvmlite-0.34.0.tar.gz", hash = "sha256:f03ee0d19bca8f2fe922bb424a909d05c28411983b0c2bc58b020032a0d11f63"}, +] +markdown = [ + {file = 
"Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, + {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, +] +matplotlib = [ + {file = "matplotlib-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a54efd6fcad9cb3cd5ef2064b5a3eeb0b63c99f26c346bdcf66e7c98294d7cc"}, + {file = "matplotlib-3.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:86dc94e44403fa0f2b1dd76c9794d66a34e821361962fe7c4e078746362e3b14"}, + {file = "matplotlib-3.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:574306171b84cd6854c83dc87bc353cacc0f60184149fb00c9ea871eca8c1ecb"}, + {file = "matplotlib-3.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:84a10e462120aa7d9eb6186b50917ed5a6286ee61157bfc17c5b47987d1a9068"}, + {file = "matplotlib-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:81e6fe8b18ef5be67f40a1d4f07d5a4ed21d3878530193898449ddef7793952f"}, + {file = "matplotlib-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c45e7bf89ea33a2adaef34774df4e692c7436a18a48bcb0e47a53e698a39fa39"}, + {file = "matplotlib-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1f83a32e4b6045191f9d34e4dc68c0a17c870b57ef9cca518e516da591246e79"}, + {file = "matplotlib-3.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a18cc1ab4a35b845cf33b7880c979f5c609fd26c2d6e74ddfacb73dcc60dd956"}, + {file = "matplotlib-3.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ac2a30a09984c2719f112a574b6543ccb82d020fd1b23b4d55bf4759ba8dd8f5"}, + {file = "matplotlib-3.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a97781453ac79409ddf455fccf344860719d95142f9c334f2a8f3fff049ffec3"}, + {file = "matplotlib-3.4.1-cp38-cp38-win32.whl", hash = "sha256:2eee37340ca1b353e0a43a33da79d0cd4bcb087064a0c3c3d1329cdea8fbc6f3"}, + {file = "matplotlib-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:90dbc007f6389bcfd9ef4fe5d4c78c8d2efe4e0ebefd48b4f221cdfed5672be2"}, + {file = 
"matplotlib-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f16660edf9a8bcc0f766f51c9e1b9d2dc6ceff6bf636d2dbd8eb925d5832dfd"}, + {file = "matplotlib-3.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a989022f89cda417f82dbf65e0a830832afd8af743d05d1414fb49549287ff04"}, + {file = "matplotlib-3.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:be4430b33b25e127fc4ea239cc386389de420be4d63e71d5359c20b562951ce1"}, + {file = "matplotlib-3.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7561fd541477d41f3aa09457c434dd1f7604f3bd26d7858d52018f5dfe1c06d1"}, + {file = "matplotlib-3.4.1-cp39-cp39-win32.whl", hash = "sha256:9f374961a3996c2d1b41ba3145462c3708a89759e604112073ed6c8bdf9f622f"}, + {file = "matplotlib-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:53ceb12ef44f8982b45adc7a0889a7e2df1d758e8b360f460e435abe8a8cd658"}, + {file = "matplotlib-3.4.1.tar.gz", hash = "sha256:84d4c4f650f356678a5d658a43ca21a41fca13f9b8b00169c0b76e6a6a948908"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +multiprocess = [ + {file = "multiprocess-0.70.11.1-cp27-cp27m-macosx_10_8_x86_64.whl", hash = "sha256:8f0d0640642acc654fe2fb5cb529ebbe116468a1dd1544d484db6e79033767c8"}, + {file = "multiprocess-0.70.11.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:4b33a0111e341fad5e3c6bb6dd7f592596f2974cc5ecddee06b9a999bac4cbb0"}, + {file = "multiprocess-0.70.11.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0eab6e0e87acba9586e5d6869d21271cc865d72d74b7f6b30b6290dffca5caae"}, + {file = "multiprocess-0.70.11.1-cp27-cp27m-win32.whl", hash = "sha256:4d97020a50a18862fbb1f84d81914a2a28f2d78bc315de9a6699459682df2a67"}, + {file = "multiprocess-0.70.11.1-cp27-cp27m-win_amd64.whl", hash = "sha256:217e96638fbfd951a203b8dc17410839e4aea8aa3fb9cc393c37e491dcac2c65"}, + {file = 
"multiprocess-0.70.11.1-py35-none-any.whl", hash = "sha256:ebb92b67a61b901bfc277c4525e86afba24a60638d192b62f8c332933da995f4"}, + {file = "multiprocess-0.70.11.1-py36-none-any.whl", hash = "sha256:d8e87b086373fbd19c28659391e5b8888aadeaeb88f0e448e55502578bde4920"}, + {file = "multiprocess-0.70.11.1-py37-none-any.whl", hash = "sha256:164c77448e357ebee0dc6abc7ee8c823e40e295e629a5fc6d31725109a3a7ee9"}, + {file = "multiprocess-0.70.11.1-py38-none-any.whl", hash = "sha256:7761fed45cae123aa4b7bb918e77a5cfef6fd436c65bc87453e76bf2bdc3e29e"}, + {file = "multiprocess-0.70.11.1-py39-none-any.whl", hash = "sha256:ae026110257fc551fc949d96d69160768810d9019786c8c84c0c28d1f88fab67"}, + {file = "multiprocess-0.70.11.1.zip", hash = "sha256:9d5e417f3ebce4d027a3c900995840f167f316d9f73c0a7a1fbb4ac0116298d0"}, +] +murmurhash = [ + {file = "murmurhash-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ef8819d15973e0d6f69688bafc097a1fae081675c1de39807028869a1320b1a9"}, + {file = "murmurhash-1.0.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:76251513a2acad6c2e4b7aeffc5fcb807ee97a66cad5c2990557556555a6b7e9"}, + {file = "murmurhash-1.0.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:d58315961dc5a5e740f41f2ac5c3a0ebc61ef472f8afeb4db7eeb3b863243105"}, + {file = "murmurhash-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:23c56182822a1ed88e2a098ac56958dfec380696a9a943df203b9b41e4bcf5e4"}, + {file = "murmurhash-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:023391cfefe584ac544c1ea0936976c0119b17dd27bb8280652cef1704f76428"}, + {file = "murmurhash-1.0.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f00321998f0a6bad3fd068babf448a296d4b0b1f4dd424cab863ebe5ed54182f"}, + {file = "murmurhash-1.0.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:8381172e03c5f6f947005fb146a53c5e5a9e0d630be4a40cbf8838e9324bfe1c"}, + {file = "murmurhash-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fed7578fbaa6c301f27ed80834c1f7494ea7d335e269e98b9aee477cf0b3b487"}, + {file = 
"murmurhash-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d4c3a0242014cf4c84e9ea0ba3f13b48f02a3992de3da7b1116d11b816451195"}, + {file = "murmurhash-1.0.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:99e55488476a5f70e8d305fd31258f140e52f724f788bcc50c31ec846a2b3766"}, + {file = "murmurhash-1.0.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:b9292c532538cf47846ca81056cfeab08b877c35fe7521d6524aa92ddcd833e2"}, + {file = "murmurhash-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:fd17973fd4554715efd8d86b3e9200358e49e437fdb92a897ca127aced48b61c"}, + {file = "murmurhash-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:81474a45c4074637a6dfc8fea4cdebf091ab5aa781c2cfcb94c43b16030badd7"}, + {file = "murmurhash-1.0.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a9bd2312996e6e47605af305a1e5f091eba1bdd637cdd9986aec4885cb4c5530"}, + {file = "murmurhash-1.0.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:892749023da26420d194f37bfa30df1368aaac0149cfa3b2105db36b66549e37"}, + {file = "murmurhash-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:add366944eb8ec73013a4f36e166c5a4f0f7628ffe1746bc5fe031347489e5e8"}, + {file = "murmurhash-1.0.5.tar.gz", hash = "sha256:98ec9d727bd998a35385abd56b062cf0cca216725ea7ec5068604ab566f7e97f"}, +] +mypy = [ + {file = "mypy-0.790-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:bd03b3cf666bff8d710d633d1c56ab7facbdc204d567715cb3b9f85c6e94f669"}, + {file = "mypy-0.790-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:2170492030f6faa537647d29945786d297e4862765f0b4ac5930ff62e300d802"}, + {file = "mypy-0.790-cp35-cp35m-win_amd64.whl", hash = "sha256:e86bdace26c5fe9cf8cb735e7cedfe7850ad92b327ac5d797c656717d2ca66de"}, + {file = "mypy-0.790-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e97e9c13d67fbe524be17e4d8025d51a7dca38f90de2e462243ab8ed8a9178d1"}, + {file = "mypy-0.790-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0d34d6b122597d48a36d6c59e35341f410d4abfa771d96d04ae2c468dd201abc"}, + {file = 
"mypy-0.790-cp36-cp36m-win_amd64.whl", hash = "sha256:72060bf64f290fb629bd4a67c707a66fd88ca26e413a91384b18db3876e57ed7"}, + {file = "mypy-0.790-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eea260feb1830a627fb526d22fbb426b750d9f5a47b624e8d5e7e004359b219c"}, + {file = "mypy-0.790-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c614194e01c85bb2e551c421397e49afb2872c88b5830e3554f0519f9fb1c178"}, + {file = "mypy-0.790-cp37-cp37m-win_amd64.whl", hash = "sha256:0a0d102247c16ce93c97066443d11e2d36e6cc2a32d8ccc1f705268970479324"}, + {file = "mypy-0.790-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4e7bf7f1214826cf7333627cb2547c0db7e3078723227820d0a2490f117a01"}, + {file = "mypy-0.790-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:af4e9ff1834e565f1baa74ccf7ae2564ae38c8df2a85b057af1dbbc958eb6666"}, + {file = "mypy-0.790-cp38-cp38-win_amd64.whl", hash = "sha256:da56dedcd7cd502ccd3c5dddc656cb36113dd793ad466e894574125945653cea"}, + {file = "mypy-0.790-py3-none-any.whl", hash = "sha256:2842d4fbd1b12ab422346376aad03ff5d0805b706102e475e962370f874a5122"}, + {file = "mypy-0.790.tar.gz", hash = "sha256:2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +networkx = [ + {file = "networkx-2.5-py3-none-any.whl", hash = "sha256:8c5812e9f798d37c50570d15c4a69d5710a18d77bafc903ee9c5fba7454c616c"}, + {file = "networkx-2.5.tar.gz", hash = "sha256:7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602"}, +] +numba = [ + {file = "numba-0.51.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:af798310eeb318c56cdb83254abbe9a938cc0182d08671d7f9f032dc817e064d"}, + {file = "numba-0.51.2-cp36-cp36m-manylinux2014_i686.whl", hash = 
"sha256:93e18350f2094e7432321c1275730a3143b94af012fb609cc180fa376c44867f"}, + {file = "numba-0.51.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9e2bb1f129bfadd757ad7a9c18ab79c3ab25ce6d6a68e58565d6c52ad07b3566"}, + {file = "numba-0.51.2-cp36-cp36m-win32.whl", hash = "sha256:31cdf6b6d1301d5fb6c4fcb8b4c711ba5c9f60ba2fca008b550da9b56185367c"}, + {file = "numba-0.51.2-cp36-cp36m-win_amd64.whl", hash = "sha256:df6edca13c04a31fdb5addf5205199478a7da372712829157ef491e8a6e7031f"}, + {file = "numba-0.51.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a628122dacfcba9a3ea68a9e95578c6b6391016e34962c46550ea8e189e0412e"}, + {file = "numba-0.51.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:106736d5a8dab6bebce989d4ab1b3f169c264582598f172e6e5b736210d2e834"}, + {file = "numba-0.51.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:a12f16fdb4ca5edc94e2ef412e4e768c29217ef9b6fdfc237d064ebe30acfe14"}, + {file = "numba-0.51.2-cp37-cp37m-win32.whl", hash = "sha256:025b033fd31c44bba17802293c81270084b5454b5b055b8c10c394385c232f00"}, + {file = "numba-0.51.2-cp37-cp37m-win_amd64.whl", hash = "sha256:081788f584fa500339e9b74bf02e3c5029d408c114e555ada19cae0b92721416"}, + {file = "numba-0.51.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5416b584183fd599afda11b947b64f89450fcf26a9c15b408167f412b98a3a94"}, + {file = "numba-0.51.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:05da65dca2ac28a192c9d8f20e9e477eb1237205cfc4d131c414f5f8092c6639"}, + {file = "numba-0.51.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:aee435e3b7e465dd49971f8ea76aa414532a87736916cb399534e017334d1138"}, + {file = "numba-0.51.2-cp38-cp38-win32.whl", hash = "sha256:bbbe2432433b11d3fadab0226a84c1a81918cb905ba1aeb022249e8d2ba8856c"}, + {file = "numba-0.51.2-cp38-cp38-win_amd64.whl", hash = "sha256:259e7c15b24feec4a99fb41eb8c47b5ad49b544d1a5ad40ad0252ef531ba06fd"}, + {file = "numba-0.51.2.tar.gz", hash = "sha256:16bd59572114adbf5f600ea383880d7b2071ae45477e84a24994e089ea390768"}, +] +numpy = 
[ + {file = "numpy-1.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e9459f40244bb02b2f14f6af0cd0732791d72232bbb0dc4bab57ef88e75f6935"}, + {file = "numpy-1.20.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a8e6859913ec8eeef3dbe9aed3bf475347642d1cdd6217c30f28dee8903528e6"}, + {file = "numpy-1.20.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9cab23439eb1ebfed1aaec9cd42b7dc50fc96d5cd3147da348d9161f0501ada5"}, + {file = "numpy-1.20.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9c0fab855ae790ca74b27e55240fe4f2a36a364a3f1ebcfd1fb5ac4088f1cec3"}, + {file = "numpy-1.20.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:61d5b4cf73622e4d0c6b83408a16631b670fc045afd6540679aa35591a17fe6d"}, + {file = "numpy-1.20.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d15007f857d6995db15195217afdbddfcd203dfaa0ba6878a2f580eaf810ecd6"}, + {file = "numpy-1.20.2-cp37-cp37m-win32.whl", hash = "sha256:d76061ae5cab49b83a8cf3feacefc2053fac672728802ac137dd8c4123397677"}, + {file = "numpy-1.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bad70051de2c50b1a6259a6df1daaafe8c480ca98132da98976d8591c412e737"}, + {file = "numpy-1.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:719656636c48be22c23641859ff2419b27b6bdf844b36a2447cb39caceb00935"}, + {file = "numpy-1.20.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:aa046527c04688af680217fffac61eec2350ef3f3d7320c07fd33f5c6e7b4d5f"}, + {file = "numpy-1.20.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2428b109306075d89d21135bdd6b785f132a1f5a3260c371cee1fae427e12727"}, + {file = "numpy-1.20.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e8e4fbbb7e7634f263c5b0150a629342cc19b47c5eba8d1cd4363ab3455ab576"}, + {file = "numpy-1.20.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:edb1f041a9146dcf02cd7df7187db46ab524b9af2515f392f337c7cbbf5b52cd"}, + {file = "numpy-1.20.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:c73a7975d77f15f7f68dacfb2bca3d3f479f158313642e8ea9058eea06637931"}, + {file = 
"numpy-1.20.2-cp38-cp38-win32.whl", hash = "sha256:6c915ee7dba1071554e70a3664a839fbc033e1d6528199d4621eeaaa5487ccd2"}, + {file = "numpy-1.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:471c0571d0895c68da309dacee4e95a0811d0a9f9f532a48dc1bea5f3b7ad2b7"}, + {file = "numpy-1.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4703b9e937df83f5b6b7447ca5912b5f5f297aba45f91dbbbc63ff9278c7aa98"}, + {file = "numpy-1.20.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:abc81829c4039e7e4c30f7897938fa5d4916a09c2c7eb9b244b7a35ddc9656f4"}, + {file = "numpy-1.20.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:377751954da04d4a6950191b20539066b4e19e3b559d4695399c5e8e3e683bf6"}, + {file = "numpy-1.20.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:6e51e417d9ae2e7848314994e6fc3832c9d426abce9328cf7571eefceb43e6c9"}, + {file = "numpy-1.20.2-cp39-cp39-win32.whl", hash = "sha256:780ae5284cb770ade51d4b4a7dce4faa554eb1d88a56d0e8b9f35fca9b0270ff"}, + {file = "numpy-1.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:924dc3f83de20437de95a73516f36e09918e9c9c18d5eac520062c49191025fb"}, + {file = "numpy-1.20.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:97ce8b8ace7d3b9288d88177e66ee75480fb79b9cf745e91ecfe65d91a856042"}, + {file = "numpy-1.20.2.zip", hash = "sha256:878922bf5ad7550aa044aa9301d417e2d3ae50f0f577de92051d739ac6096cee"}, +] +oauthlib = [ + {file = "oauthlib-3.1.0-py2.py3-none-any.whl", hash = "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea"}, + {file = "oauthlib-3.1.0.tar.gz", hash = "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"}, +] +omegaconf = [ + {file = "omegaconf-2.0.6-py3-none-any.whl", hash = "sha256:9e349fd76819b95b47aa628edea1ff83fed5b25108608abdd6c7fdca188e302a"}, + {file = "omegaconf-2.0.6.tar.gz", hash = "sha256:92ca535a788d21651bf4c2eaf5c1ca4c7a8003b2dab4a87cbb09109784268806"}, +] +packaging = [ + {file = "packaging-20.9-py2.py3-none-any.whl", hash = 
"sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, +] +pandas = [ + {file = "pandas-1.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a780260afc88268a9d3ac3511d8f494fdcf637eece62fb9eb656a63d53eb7ca"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b61080750d19a0122469ab59b087380721d6b72a4e7d962e4d7e63e0c4504814"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0de3ddb414d30798cbf56e642d82cac30a80223ad6fe484d66c0ce01a84d6f2f"}, + {file = "pandas-1.1.5-cp36-cp36m-win32.whl", hash = "sha256:70865f96bb38fec46f7ebd66d4b5cfd0aa6b842073f298d621385ae3898d28b5"}, + {file = "pandas-1.1.5-cp36-cp36m-win_amd64.whl", hash = "sha256:19a2148a1d02791352e9fa637899a78e371a3516ac6da5c4edc718f60cbae648"}, + {file = "pandas-1.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26fa92d3ac743a149a31b21d6f4337b0594b6302ea5575b37af9ca9611e8981a"}, + {file = "pandas-1.1.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c16d59c15d946111d2716856dd5479221c9e4f2f5c7bc2d617f39d870031e086"}, + {file = "pandas-1.1.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3be7a7a0ca71a2640e81d9276f526bca63505850add10206d0da2e8a0a325dae"}, + {file = "pandas-1.1.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:573fba5b05bf2c69271a32e52399c8de599e4a15ab7cec47d3b9c904125ab788"}, + {file = "pandas-1.1.5-cp37-cp37m-win32.whl", hash = "sha256:21b5a2b033380adbdd36b3116faaf9a4663e375325831dac1b519a44f9e439bb"}, + {file = "pandas-1.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:24c7f8d4aee71bfa6401faeba367dd654f696a77151a8a28bc2013f7ced4af98"}, + {file = "pandas-1.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2860a97cbb25444ffc0088b457da0a79dc79f9c601238a3e0644312fcc14bf11"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5008374ebb990dad9ed48b0f5d0038124c73748f5384cc8c46904dace27082d9"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2c2f7c670ea4e60318e4b7e474d56447cf0c7d83b3c2a5405a0dbb2600b9c48e"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0a643bae4283a37732ddfcecab3f62dd082996021b980f580903f4e8e01b3c5b"}, + {file = "pandas-1.1.5-cp38-cp38-win32.whl", hash = "sha256:5447ea7af4005b0daf695a316a423b96374c9c73ffbd4533209c5ddc369e644b"}, + {file = "pandas-1.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:4c62e94d5d49db116bef1bd5c2486723a292d79409fc9abd51adf9e05329101d"}, + {file = "pandas-1.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:731568be71fba1e13cae212c362f3d2ca8932e83cb1b85e3f1b4dd77d019254a"}, + {file = "pandas-1.1.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c61c043aafb69329d0f961b19faa30b1dab709dd34c9388143fc55680059e55a"}, + {file = "pandas-1.1.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2b1c6cd28a0dfda75c7b5957363333f01d370936e4c6276b7b8e696dd500582a"}, + {file = "pandas-1.1.5-cp39-cp39-win32.whl", hash = "sha256:c94ff2780a1fd89f190390130d6d36173ca59fcfb3fe0ff596f9a56518191ccb"}, + {file = "pandas-1.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:edda9bacc3843dfbeebaf7a701763e68e741b08fccb889c003b0a52f0ee95782"}, + {file = "pandas-1.1.5.tar.gz", hash = "sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"}, +] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +pillow = [ + {file = "Pillow-8.1.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:5cf03b9534aca63b192856aa601c68d0764810857786ea5da652581f3a44c2b0"}, + {file = 
"Pillow-8.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f91b50ad88048d795c0ad004abbe1390aa1882073b1dca10bfd55d0b8cf18ec5"}, + {file = "Pillow-8.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5762ebb4436f46b566fc6351d67a9b5386b5e5de4e58fdaa18a1c83e0e20f1a8"}, + {file = "Pillow-8.1.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e2cd8ac157c1e5ae88b6dd790648ee5d2777e76f1e5c7d184eaddb2938594f34"}, + {file = "Pillow-8.1.2-cp36-cp36m-win32.whl", hash = "sha256:72027ebf682abc9bafd93b43edc44279f641e8996fb2945104471419113cfc71"}, + {file = "Pillow-8.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d1d6bca39bb6dd94fba23cdb3eeaea5e30c7717c5343004d900e2a63b132c341"}, + {file = "Pillow-8.1.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:90882c6f084ef68b71bba190209a734bf90abb82ab5e8f64444c71d5974008c6"}, + {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:89e4c757a91b8c55d97c91fa09c69b3677c227b942fa749e9a66eef602f59c28"}, + {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8c4e32218c764bc27fe49b7328195579581aa419920edcc321c4cb877c65258d"}, + {file = "Pillow-8.1.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a01da2c266d9868c4f91a9c6faf47a251f23b9a862dce81d2ff583135206f5be"}, + {file = "Pillow-8.1.2-cp37-cp37m-win32.whl", hash = "sha256:30d33a1a6400132e6f521640dd3f64578ac9bfb79a619416d7e8802b4ce1dd55"}, + {file = "Pillow-8.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:71b01ee69e7df527439d7752a2ce8fb89e19a32df484a308eca3e81f673d3a03"}, + {file = "Pillow-8.1.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:5a2d957eb4aba9d48170b8fe6538ec1fbc2119ffe6373782c03d8acad3323f2e"}, + {file = "Pillow-8.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:87f42c976f91ca2fc21a3293e25bd3cd895918597db1b95b93cbd949f7d019ce"}, + {file = "Pillow-8.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:15306d71a1e96d7e271fd2a0737038b5a92ca2978d2e38b6ced7966583e3d5af"}, + {file = 
"Pillow-8.1.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:71f31ee4df3d5e0b366dd362007740106d3210fb6a56ec4b581a5324ba254f06"}, + {file = "Pillow-8.1.2-cp38-cp38-win32.whl", hash = "sha256:98afcac3205d31ab6a10c5006b0cf040d0026a68ec051edd3517b776c1d78b09"}, + {file = "Pillow-8.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:328240f7dddf77783e72d5ed79899a6b48bc6681f8d1f6001f55933cb4905060"}, + {file = "Pillow-8.1.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:bead24c0ae3f1f6afcb915a057943ccf65fc755d11a1410a909c1fefb6c06ad1"}, + {file = "Pillow-8.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81b3716cc9744ffdf76b39afb6247eae754186838cedad0b0ac63b2571253fe6"}, + {file = "Pillow-8.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:63cd413ac52ee3f67057223d363f4f82ce966e64906aea046daf46695e3c8238"}, + {file = "Pillow-8.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8565355a29655b28fdc2c666fd9a3890fe5edc6639d128814fafecfae2d70910"}, + {file = "Pillow-8.1.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1940fc4d361f9cc7e558d6f56ff38d7351b53052fd7911f4b60cd7bc091ea3b1"}, + {file = "Pillow-8.1.2-cp39-cp39-win32.whl", hash = "sha256:46c2bcf8e1e75d154e78417b3e3c64e96def738c2a25435e74909e127a8cba5e"}, + {file = "Pillow-8.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:aeab4cd016e11e7aa5cfc49dcff8e51561fa64818a0be86efa82c7038e9369d0"}, + {file = "Pillow-8.1.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:74cd9aa648ed6dd25e572453eb09b08817a1e3d9f8d1bd4d8403d99e42ea790b"}, + {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:e5739ae63636a52b706a0facec77b2b58e485637e1638202556156e424a02dc2"}, + {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:903293320efe2466c1ab3509a33d6b866dc850cfd0c5d9cc92632014cec185fb"}, + {file = "Pillow-8.1.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5daba2b40782c1c5157a788ec4454067c6616f5a0c1b70e26ac326a880c2d328"}, + {file = 
"Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:1f93f2fe211f1ef75e6f589327f4d4f8545d5c8e826231b042b483d8383e8a7c"}, + {file = "Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:6efac40344d8f668b6c4533ae02a48d52fd852ef0654cc6f19f6ac146399c733"}, + {file = "Pillow-8.1.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:f36c3ff63d6fc509ce599a2f5b0d0732189eed653420e7294c039d342c6e204a"}, + {file = "Pillow-8.1.2.tar.gz", hash = "sha256:b07c660e014852d98a00a91adfbe25033898a9d90a8f39beb2437d22a203fc44"}, +] +plac = [ + {file = "plac-1.1.3-py2.py3-none-any.whl", hash = "sha256:487e553017d419f35add346c4c09707e52fa53f7e7181ce1098ca27620e9ceee"}, + {file = "plac-1.1.3.tar.gz", hash = "sha256:398cb947c60c4c25e275e1f1dadf027e7096858fb260b8ece3b33bcff90d985f"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +portalocker = [ + {file = "portalocker-2.0.0-py2.py3-none-any.whl", hash = "sha256:5d7bc386d785dfc87e763a29d7d5864556c62653379e3ff484c71e49425d1898"}, + {file = "portalocker-2.0.0.tar.gz", hash = "sha256:14487eed81aa914127edf0284e29c7ca8842c05bb33d96dc7e4bdb47282d26e4"}, +] +preshed = [ + {file = "preshed-3.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:572899224578d30f6a67fadecb3d62b824866b4d2b6bad73f71abf7585db1389"}, + {file = "preshed-3.0.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:67c11e384ce4c008bc487ba3a29bafdfe038b9a2546ccfe0fe2160480b356fed"}, + {file = "preshed-3.0.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:6e833f1632a1d0232bdc6df6c3542fb130ef044d8656b24576d9fd19e5f1e0d1"}, + {file = "preshed-3.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:1ce0846cb7ebb2ea913d44ec2e296098c285443ecdea80ddf02656bbef4deacb"}, + {file = "preshed-3.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:8a560850b8c53c1487ba51c2b0f5769535512b36d3b129ad5796b64653abe2f9"}, + {file = "preshed-3.0.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6f126bcc414a0304b54956f9dac2628a0f9bef1657d1b3a3837fc82b791aa2a1"}, + {file = "preshed-3.0.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:1bdededa7fd81f26a42bc9d11d542657c74746b7ea7fc2b2ca6d0ddbf1f93792"}, + {file = "preshed-3.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9ebf444f8487782c84d7b5acb1d7195e603155882fafc4697344199eeeafbe5f"}, + {file = "preshed-3.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a3adffde3126c2a0ab7d57cab1d605cb5f63da1ba88088ad3cf8debfd9aa4dc"}, + {file = "preshed-3.0.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:56b9603517bb2a364418163236d6a147a1d722ff7546cbe085e76e25ae118e89"}, + {file = "preshed-3.0.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:5e06a49477bd257eea02bf823b5d3e201d00a19d6976523a58da8606b2358481"}, + {file = "preshed-3.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:ca4a7681b643b8356e7dfdab9cf668b2b34bd07ef4b09ebed44c8aeb3b1626ee"}, + {file = "preshed-3.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:85074eebf90a858a6b68242f1ae265ca99e1af45bf9dafcb9a83d49b0815a2e1"}, + {file = "preshed-3.0.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:12cbe1e378b4f1c6b06f5e4130408befe916e55ea1616e6aa63c5cd0ccd9c927"}, + {file = "preshed-3.0.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:30f0c8ea85113d0565a1e3eb6222d00513ec39b56f3f9a2615e304575e65422e"}, + {file = "preshed-3.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:fb4d2e82add82d63b2c97802b759a58ff200d06b632e2edc48a9ced1e6472faf"}, + {file = "preshed-3.0.5.tar.gz", hash = "sha256:c6d3dba39ed5059aaf99767017b9568c75b2d0780c3481e204b1daecde00360e"}, +] +protobuf = [ + {file = "protobuf-3.15.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1771ef20e88759c4d81db213e89b7a1fc53937968e12af6603c658ee4bcbfa38"}, + {file = "protobuf-3.15.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:1a66261a402d05c8ad8c1fde8631837307bf8d7e7740a4f3941fc3277c2e1528"}, + {file = "protobuf-3.15.6-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:eac23a3e56175b710f3da9a9e8e2aa571891fbec60e0c5a06db1c7b1613b5cfd"}, + {file = "protobuf-3.15.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ec220d90eda8bb7a7a1434a8aed4fe26d7e648c1a051c2885f3f5725b6aa71a"}, + {file = "protobuf-3.15.6-cp35-cp35m-win32.whl", hash = "sha256:88d8f21d1ac205eedb6dea943f8204ed08201b081dba2a966ab5612788b9bb1e"}, + {file = "protobuf-3.15.6-cp35-cp35m-win_amd64.whl", hash = "sha256:eaada29bbf087dea7d8bce4d1d604fc768749e8809e9c295922accd7c8fce4d5"}, + {file = "protobuf-3.15.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:256c0b2e338c1f3228d3280707606fe5531fde85ab9d704cde6fdeb55112531f"}, + {file = "protobuf-3.15.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b9069e45b6e78412fba4a314ea38b4a478686060acf470d2b131b3a2c50484ec"}, + {file = "protobuf-3.15.6-cp36-cp36m-win32.whl", hash = "sha256:24f4697f57b8520c897a401b7f9a5ae45c369e22c572e305dfaf8053ecb49687"}, + {file = "protobuf-3.15.6-cp36-cp36m-win_amd64.whl", hash = "sha256:d9ed0955b794f1e5f367e27f8a8ff25501eabe34573f003f06639c366ca75f73"}, + {file = "protobuf-3.15.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:822ac7f87fc2fb9b24edd2db390538b60ef50256e421ca30d65250fad5a3d477"}, + {file = "protobuf-3.15.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:74ac159989e2b02d761188a2b6f4601ff5e494d9b9d863f5ad6e98e5e0c54328"}, + {file = "protobuf-3.15.6-cp37-cp37m-win32.whl", hash = "sha256:30fe4249a364576f9594180589c3f9c4771952014b5f77f0372923fc7bafbbe2"}, + {file = "protobuf-3.15.6-cp37-cp37m-win_amd64.whl", hash = "sha256:45a91fc6f9aa86d3effdeda6751882b02de628519ba06d7160daffde0c889ff8"}, + {file = "protobuf-3.15.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83c7c7534f050cb25383bb817159416601d1cc46c40bc5e851ec8bbddfc34a2f"}, + {file = "protobuf-3.15.6-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:9ec20a6ded7d0888e767ad029dbb126e604e18db744ac0a428cf746e040ccecd"}, + {file = "protobuf-3.15.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f2da2fcc4102b6c3b57f03c9d8d5e37c63f8bc74deaa6cb54e0cc4524a77247"}, + {file = "protobuf-3.15.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:70054ae1ce5dea7dec7357db931fcf487f40ea45b02cb719ee6af07eb1e906fb"}, + {file = "protobuf-3.15.6-py2.py3-none-any.whl", hash = "sha256:1655fc0ba7402560d749de13edbfca1ac45d1753d8f4e5292989f18f5a00c215"}, + {file = "protobuf-3.15.6.tar.gz", hash = "sha256:2b974519a2ae83aa1e31cff9018c70bbe0e303a46a598f982943c49ae1d4fcd3"}, +] +py = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] +pyarrow = [ + {file = "pyarrow-3.0.0-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:03e2435da817bc2b5d0fad6f2e53305eb36c24004ddfcb2b30e4217a1a80cf22"}, + {file = "pyarrow-3.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2be3a9eab4bfd00024dc3c83fa03de1c1d04a0f47ebaf3dc483cd100546eacbf"}, + {file = "pyarrow-3.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:a76031ef19d11db2fef79a97cc69997c97bea35aa07efbe042a177c7e3b1a390"}, + {file = "pyarrow-3.0.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:a07e286e81ceb20f8f0c45f69760d2ebc434fe83794d5f9b44f89fc2dc6dc24d"}, + {file = "pyarrow-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cfea99a01d844c3db5e25374a6cdcf3b5ba1698bfe95d41272c295a4581e884c"}, + {file = "pyarrow-3.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:d5666a7fa2668f3ff95df028c2072d59e8b17e73d682068e8505dafa2688f3cc"}, + {file = "pyarrow-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ea6574d1ae2d9bff7e6e1715f64c31bdc01b42387a5c78311a8ce9c09cfe135"}, + {file = "pyarrow-3.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:2d5c95eb04a3d2e786e097b53534893eade6c8b3faf10f53a06143384b4446b1"}, + {file = "pyarrow-3.0.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:31e6fc0868963aba4e6b8a3e218c9a5ff347bca870d622da0b3d58269d0c5398"}, + {file = "pyarrow-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:960a9b0fd599601ddac42f16d5acf049637ec08957359c6741d6eb2bf0dbae97"}, + {file = "pyarrow-3.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:2c3353d38d137f1158595b3b18dcef711f3d8fdb57cf7ae2d861d07235064bc1"}, + {file = "pyarrow-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72206cde1857d5420601feae75f53921cffab4326b42262a858c7b8be67982b7"}, + {file = "pyarrow-3.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:dec007a0f7adba86bd170252140ede01646b45c3a470d5862ce00d8e40cd29bd"}, + {file = "pyarrow-3.0.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:bf6684fe9e38f8ddb696e38901461eab783ec1d565974ebd5862270320b3e27f"}, + {file = "pyarrow-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3b46487c45faaea8d1a5aa65002e2832ae2e1c9e68ecb461cda4fa59891cf490"}, + {file = "pyarrow-3.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:978bbe8ec9090d1133a25f00f32ed92600f9d315fbfa29a17952bee01f0d7fe5"}, + {file = "pyarrow-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7a8903f2b8a80498725ef5d4a35cd7dd5a98b74e080d42692545e61a6cbfbe4"}, + {file = "pyarrow-3.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b1cf92df9f336f31706249e543dc0ffce3c67a78204ce540f1173c6c07dfafec"}, + {file = "pyarrow-3.0.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:b08c119cc2b9fcd1567797fedb245a2f4352a3084a22b7298272afe7cf7a4730"}, + {file = "pyarrow-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5faa2dc73444bdcf042f121383965a47362be1f946303d46e8fd80f8d26cd90c"}, + {file = "pyarrow-3.0.0.tar.gz", hash = "sha256:4bf8cc43e1db1e0517466209ee8e8f459d9b5e1b4074863317f2a965cf59889e"}, +] +pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = 
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] +pyasn1-modules = [ + {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, + {file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"}, + {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"}, + {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = 
"sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"}, + {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"}, + {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, + {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"}, + {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"}, + {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"}, + {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"}, + {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"}, + {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, + {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pydantic = [ + {file = "pydantic-1.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0c40162796fc8d0aa744875b60e4dc36834db9f2a25dbf9ba9664b1915a23850"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fff29fe54ec419338c522b908154a2efabeee4f483e48990f87e189661f31ce3"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:fbfb608febde1afd4743c6822c19060a8dbdd3eb30f98e36061ba4973308059e"}, + {file = 
"pydantic-1.8.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:eb8ccf12295113ce0de38f80b25f736d62f0a8d87c6b88aca645f168f9c78771"}, + {file = "pydantic-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:20d42f1be7c7acc352b3d09b0cf505a9fab9deb93125061b376fbe1f06a5459f"}, + {file = "pydantic-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dde4ca368e82791de97c2ec019681ffb437728090c0ff0c3852708cf923e0c7d"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3bbd023c981cbe26e6e21c8d2ce78485f85c2e77f7bab5ec15b7d2a1f491918f"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:830ef1a148012b640186bf4d9789a206c56071ff38f2460a32ae67ca21880eb8"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:fb77f7a7e111db1832ae3f8f44203691e15b1fa7e5a1cb9691d4e2659aee41c4"}, + {file = "pydantic-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3bcb9d7e1f9849a6bdbd027aabb3a06414abd6068cb3b21c49427956cce5038a"}, + {file = "pydantic-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2287ebff0018eec3cc69b1d09d4b7cebf277726fa1bd96b45806283c1d808683"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4bbc47cf7925c86a345d03b07086696ed916c7663cb76aa409edaa54546e53e2"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6388ef4ef1435364c8cc9a8192238aed030595e873d8462447ccef2e17387125"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dd4888b300769ecec194ca8f2699415f5f7760365ddbe243d4fd6581485fa5f0"}, + {file = "pydantic-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:8fbb677e4e89c8ab3d450df7b1d9caed23f254072e8597c33279460eeae59b99"}, + {file = "pydantic-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f2736d9a996b976cfdfe52455ad27462308c9d3d0ae21a2aa8b4cd1a78f47b9"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3114d74329873af0a0e8004627f5389f3bb27f956b965ddd3e355fe984a1789c"}, + {file = 
"pydantic-1.8.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:258576f2d997ee4573469633592e8b99aa13bda182fcc28e875f866016c8e07e"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c17a0b35c854049e67c68b48d55e026c84f35593c66d69b278b8b49e2484346f"}, + {file = "pydantic-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8bc082afef97c5fd3903d05c6f7bb3a6af9fc18631b4cc9fedeb4720efb0c58"}, + {file = "pydantic-1.8.1-py3-none-any.whl", hash = "sha256:e3f8790c47ac42549dc8b045a67b0ca371c7f66e73040d0197ce6172b385e520"}, + {file = "pydantic-1.8.1.tar.gz", hash = "sha256:26cf3cb2e68ec6c0cfcb6293e69fb3450c5fd1ace87f46b64f678b0d29eac4c3"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pynndescent = [ + {file = "pynndescent-0.5.2.tar.gz", hash = "sha256:d9fd22210b8d64368376ff392e876fb72fe3cda282396cfa6a59440ab6600771"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pytest = [ + {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, + {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, +] +pytest-cov = [ + {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = 
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +pytorch-lightning = [ + {file = "pytorch-lightning-1.1.5.tar.gz", hash = "sha256:213a4edef41a63b5e1f30475a47468f36c92d5444b26bed84936cb41a7ffefb8"}, + {file = "pytorch_lightning-1.1.5-py3-none-any.whl", hash = "sha256:053b7f947c70d7bc5d3c13a463d66cfb05ac7cef9103b5d63f5523ae66c53962"}, +] +pytz = [ + {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, + {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, +] +pywavelets = [ + {file = "PyWavelets-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:35959c041ec014648575085a97b498eafbbaa824f86f6e4a59bfdef8a3fe6308"}, + {file = "PyWavelets-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:55e39ec848ceec13c9fa1598253ae9dd5c31d09dfd48059462860d2b908fb224"}, + {file = "PyWavelets-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c06d2e340c7bf8b9ec71da2284beab8519a3908eab031f4ea126e8ccfc3fd567"}, + {file = "PyWavelets-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:be105382961745f88d8196bba5a69ee2c4455d87ad2a2e5d1eed6bd7fda4d3fd"}, + {file = "PyWavelets-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:076ca8907001fdfe4205484f719d12b4a0262dfe6652fa1cfc3c5c362d14dc84"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7947e51ca05489b85928af52a34fe67022ab5b81d4ae32a4109a99e883a0635e"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9e2528823ccf5a0a1d23262dfefe5034dce89cd84e4e124dc553dfcdf63ebb92"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:80b924edbc012ded8aa8b91cb2fd6207fb1a9a3a377beb4049b8a07445cec6f0"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:c2a799e79cee81a862216c47e5623c97b95f1abee8dd1f9eed736df23fb653fb"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:d510aef84d9852653d079c84f2f81a82d5d09815e625f35c95714e7364570ad4"}, + {file = "PyWavelets-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:889d4c5c5205a9c90118c1980df526857929841df33e4cd1ff1eff77c6817a65"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:68b5c33741d26c827074b3d8f0251de1c3019bb9567b8d303eb093c822ce28f1"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18a51b3f9416a2ae6e9a35c4af32cf520dd7895f2b69714f4aa2f4342fca47f9"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cfe79844526dd92e3ecc9490b5031fca5f8ab607e1e858feba232b1b788ff0ea"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:2f7429eeb5bf9c7068002d0d7f094ed654c77a70ce5e6198737fd68ab85f8311"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:720dbcdd3d91c6dfead79c80bf8b00a1d8aa4e5d551dc528c6d5151e4efc3403"}, + {file = "PyWavelets-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bc5e87b72371da87c9bebc68e54882aada9c3114e640de180f62d5da95749cd3"}, + {file = "PyWavelets-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:98b2669c5af842a70cfab33a7043fcb5e7535a690a00cd251b44c9be0be418e5"}, + {file = "PyWavelets-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e02a0558e0c2ac8b8bbe6a6ac18c136767ec56b96a321e0dfde2173adfa5a504"}, + {file = "PyWavelets-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6162dc0ae04669ea04b4b51420777b9ea2d30b0a9d02901b2a3b4d61d159c2e9"}, + {file = "PyWavelets-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:39c74740718e420d38c78ca4498568fa57976d78d5096277358e0fa9629a7aea"}, + {file = "PyWavelets-1.1.1-cp38-cp38-win32.whl", hash = "sha256:79f5b54f9dc353e5ee47f0c3f02bebd2c899d49780633aa771fed43fa20b3149"}, + {file = "PyWavelets-1.1.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:935ff247b8b78bdf77647fee962b1cc208c51a7b229db30b9ba5f6da3e675178"}, + {file = "PyWavelets-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6ebfefebb5c6494a3af41ad8c60248a95da267a24b79ed143723d4502b1fe4d7"}, + {file = "PyWavelets-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6bc78fb9c42a716309b4ace56f51965d8b5662c3ba19d4591749f31773db1125"}, + {file = "PyWavelets-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:411e17ca6ed8cf5e18a7ca5ee06a91c25800cc6c58c77986202abf98d749273a"}, + {file = "PyWavelets-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:83c5e3eb78ce111c2f0b45f46106cc697c3cb6c4e5f51308e1f81b512c70c8fb"}, + {file = "PyWavelets-1.1.1-cp39-cp39-win32.whl", hash = "sha256:2b634a54241c190ee989a4af87669d377b37c91bcc9cf0efe33c10ff847f7841"}, + {file = "PyWavelets-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:732bab78435c48be5d6bc75486ef629d7c8f112e07b313bf1f1a2220ab437277"}, + {file = "PyWavelets-1.1.1.tar.gz", hash = "sha256:1a64b40f6acb4ffbaccce0545d7fc641744f95351f62e4c6aaa40549326008c9"}, +] +pywin32 = [ + {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, + {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"}, + {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"}, + {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"}, + {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"}, + {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"}, + {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"}, + {file = 
"pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"}, + {file = "pywin32-300-cp39-cp39-win32.whl", hash = "sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"}, + {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"}, +] +pyyaml = [ + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, +] +regex = [ + {file = "regex-2021.3.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b97ec5d299c10d96617cc851b2e0f81ba5d9d6248413cd374ef7f3a8871ee4a6"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:cb4ee827857a5ad9b8ae34d3c8cc51151cb4a3fe082c12ec20ec73e63cc7c6f0"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:633497504e2a485a70a3268d4fc403fe3063a50a50eed1039083e9471ad0101c"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a59a2ee329b3de764b21495d78c92ab00b4ea79acef0f7ae8c1067f773570afa"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:f85d6f41e34f6a2d1607e312820971872944f1661a73d33e1e82d35ea3305e14"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4651f839dbde0816798e698626af6a2469eee6d9964824bb5386091255a1694f"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:39c44532d0e4f1639a89e52355b949573e1e2c5116106a395642cbbae0ff9bcd"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3d9a7e215e02bd7646a91fb8bcba30bc55fd42a719d6b35cf80e5bae31d9134e"}, + {file = "regex-2021.3.17-cp36-cp36m-win32.whl", hash = "sha256:159fac1a4731409c830d32913f13f68346d6b8e39650ed5d704a9ce2f9ef9cb3"}, + {file = "regex-2021.3.17-cp36-cp36m-win_amd64.whl", hash = "sha256:13f50969028e81765ed2a1c5fcfdc246c245cf8d47986d5172e82ab1a0c42ee5"}, + {file = "regex-2021.3.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9d8d286c53fe0cbc6d20bf3d583cabcd1499d89034524e3b94c93a5ab85ca90"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:201e2619a77b21a7780580ab7b5ce43835e242d3e20fef50f66a8df0542e437f"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d47d359545b0ccad29d572ecd52c9da945de7cd6cf9c0cfcb0269f76d3555689"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea2f41445852c660ba7c3ebf7d70b3779b20d9ca8ba54485a17740db49f46932"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:486a5f8e11e1f5bbfcad87f7c7745eb14796642323e7e1829a331f87a713daaa"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e25e0afe1cf0f62781a150c1454b2113785401ba285c745acf10c8ca8917df"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a2ee026f4156789df8644d23ef423e6194fad0bc53575534101bb1de5d67e8ce"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:4c0788010a93ace8a174d73e7c6c9d3e6e3b7ad99a453c8ee8c975ddd9965643"}, + {file = 
"regex-2021.3.17-cp37-cp37m-win32.whl", hash = "sha256:575a832e09d237ae5fedb825a7a5bc6a116090dd57d6417d4f3b75121c73e3be"}, + {file = "regex-2021.3.17-cp37-cp37m-win_amd64.whl", hash = "sha256:8e65e3e4c6feadf6770e2ad89ad3deb524bcb03d8dc679f381d0568c024e0deb"}, + {file = "regex-2021.3.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a0df9a0ad2aad49ea3c7f65edd2ffb3d5c59589b85992a6006354f6fb109bb18"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b98bc9db003f1079caf07b610377ed1ac2e2c11acc2bea4892e28cc5b509d8d5"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:808404898e9a765e4058bf3d7607d0629000e0a14a6782ccbb089296b76fa8fe"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5770a51180d85ea468234bc7987f5597803a4c3d7463e7323322fe4a1b181578"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:976a54d44fd043d958a69b18705a910a8376196c6b6ee5f2596ffc11bff4420d"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:63f3ca8451e5ff7133ffbec9eda641aeab2001be1a01878990f6c87e3c44b9d5"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bcd945175c29a672f13fce13a11893556cd440e37c1b643d6eeab1988c8b209c"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3d9356add82cff75413bec360c1eca3e58db4a9f5dafa1f19650958a81e3249d"}, + {file = "regex-2021.3.17-cp38-cp38-win32.whl", hash = "sha256:f5d0c921c99297354cecc5a416ee4280bd3f20fd81b9fb671ca6be71499c3fdf"}, + {file = "regex-2021.3.17-cp38-cp38-win_amd64.whl", hash = "sha256:14de88eda0976020528efc92d0a1f8830e2fb0de2ae6005a6fc4e062553031fa"}, + {file = "regex-2021.3.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c2e364491406b7888c2ad4428245fc56c327e34a5dfe58fd40df272b3c3dab3"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8bd4f91f3fb1c9b1380d6894bd5b4a519409135bec14c0c80151e58394a4e88a"}, + {file = 
"regex-2021.3.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:882f53afe31ef0425b405a3f601c0009b44206ea7f55ee1c606aad3cc213a52c"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:07ef35301b4484bce843831e7039a84e19d8d33b3f8b2f9aab86c376813d0139"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:360a01b5fa2ad35b3113ae0c07fb544ad180603fa3b1f074f52d98c1096fa15e"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:709f65bb2fa9825f09892617d01246002097f8f9b6dde8d1bb4083cf554701ba"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c66221e947d7207457f8b6f42b12f613b09efa9669f65a587a2a71f6a0e4d106"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c782da0e45aff131f0bed6e66fbcfa589ff2862fc719b83a88640daa01a5aff7"}, + {file = "regex-2021.3.17-cp39-cp39-win32.whl", hash = "sha256:dc9963aacb7da5177e40874585d7407c0f93fb9d7518ec58b86e562f633f36cd"}, + {file = "regex-2021.3.17-cp39-cp39-win_amd64.whl", hash = "sha256:a0d04128e005142260de3733591ddf476e4902c0c23c1af237d9acf3c96e1b38"}, + {file = "regex-2021.3.17.tar.gz", hash = "sha256:4b8a1fb724904139149a43e172850f35aa6ea97fb0545244dc0b805e0154ed68"}, +] +requests = [ + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +requests-oauthlib = [ + {file = "requests-oauthlib-1.3.0.tar.gz", hash = "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"}, + {file = "requests_oauthlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d"}, + {file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"}, +] +rsa = [ + {file = "rsa-4.7.2-py3-none-any.whl", 
hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2"}, + {file = "rsa-4.7.2.tar.gz", hash = "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9"}, +] +sacrebleu = [ + {file = "sacrebleu-1.5.1-py3-none-any.whl", hash = "sha256:fbe54fcc7f4e370c9ded0c7cb9bbdadabc020bcde9217daea043b4906b962fdc"}, + {file = "sacrebleu-1.5.1.tar.gz", hash = "sha256:e8184f0d2ec45dd7ef0883fbce3976b2b7cf581e04d0854ac5f7eb394318b84b"}, +] +sacremoses = [ + {file = "sacremoses-0.0.43.tar.gz", hash = "sha256:123c1bf2664351fb05e16f87d3786dbe44a050cfd7b85161c09ad9a63a8e2948"}, +] +scikit-image = [ + {file = "scikit-image-0.18.1.tar.gz", hash = "sha256:fbb618ca911867bce45574c1639618cdfb5d94e207432b19bc19563d80d2f171"}, + {file = "scikit_image-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1cd05c882ffb2a271a1f20b4afe937d63d55b8753c3d652f11495883a7800ebe"}, + {file = "scikit_image-0.18.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e972c628ad9ba52c298b032368e29af9bd5eeb81ce33bc2d9b039a81661c99c5"}, + {file = "scikit_image-0.18.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1256017c513e8e1b8b9da73e5fd1e605d0077bbbc8e5c8d6c2cab36400131c6c"}, + {file = "scikit_image-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:ec25e4110951d3a280421bb10dd510a082ba83d86e20d706294faf7899cdb3d5"}, + {file = "scikit_image-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2eea42706a25ae6e0cebaf1914e2ab1c04061b1f3c9966d76025d58a2e9188fc"}, + {file = "scikit_image-0.18.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:76446e2402e64d7dba78eeae8aa86e92a0cafe5b1c9e6235bd8d067471ed2788"}, + {file = "scikit_image-0.18.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d5ad4a9b4c9797d4c4c48f45fa224c5ebff22b9b0af636c3ecb8addbb66c21e6"}, + {file = "scikit_image-0.18.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:23f9178b21c752bfb4e4ea3a3fa0ff79bc5a401bc75ddb4661f2cebd1c2b0e24"}, + {file = "scikit_image-0.18.1-cp38-cp38-win32.whl", hash = 
"sha256:d746540cafe7776c6d05a0b40ec744bb8d33d1ddc51faba601d26c02593d8bcc"}, + {file = "scikit_image-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:30447af3f5b7c9491f2d3db5bc275493d1b91bf1dd16b67e2fd79a6bb95d8ee9"}, + {file = "scikit_image-0.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae6659b3a8bd4bba7e9dcbfd0064e443b32c7054bf09174749db896730fcf42e"}, + {file = "scikit_image-0.18.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c058770c6ad6e0fe6c30f59970c9c65fa740ff014d121d8c341664cd792cf49"}, + {file = "scikit_image-0.18.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c700336a7f96109c74154090c5e693693a8e3fa09ed6156a5996cdc9a3bb1534"}, + {file = "scikit_image-0.18.1-cp39-cp39-win32.whl", hash = "sha256:3515b890e771f99bbe1051a0dcfe0fc477da961da933c34f89808a0f1eeb7dc2"}, + {file = "scikit_image-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f602779258807d03e72c0a439cfb221f647e628be166fb3594397435f13c76b"}, +] +scikit-learn = [ + {file = "scikit-learn-0.23.2.tar.gz", hash = "sha256:20766f515e6cd6f954554387dfae705d93c7b544ec0e6c6a5d8e006f6f7ef480"}, + {file = "scikit_learn-0.23.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:98508723f44c61896a4e15894b2016762a55555fbf09365a0bb1870ecbd442de"}, + {file = "scikit_learn-0.23.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a64817b050efd50f9abcfd311870073e500ae11b299683a519fbb52d85e08d25"}, + {file = "scikit_learn-0.23.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:daf276c465c38ef736a79bd79fc80a249f746bcbcae50c40945428f7ece074f8"}, + {file = "scikit_learn-0.23.2-cp36-cp36m-win32.whl", hash = "sha256:cb3e76380312e1f86abd20340ab1d5b3cc46a26f6593d3c33c9ea3e4c7134028"}, + {file = "scikit_learn-0.23.2-cp36-cp36m-win_amd64.whl", hash = "sha256:0a127cc70990d4c15b1019680bfedc7fec6c23d14d3719fdf9b64b22d37cdeca"}, + {file = "scikit_learn-0.23.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa95c2f17d2f80534156215c87bee72b6aa314a7f8b8fe92a2d71f47280570d"}, + {file = 
"scikit_learn-0.23.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6c28a1d00aae7c3c9568f61aafeaad813f0f01c729bee4fd9479e2132b215c1d"}, + {file = "scikit_learn-0.23.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:da8e7c302003dd765d92a5616678e591f347460ac7b53e53d667be7dfe6d1b10"}, + {file = "scikit_learn-0.23.2-cp37-cp37m-win32.whl", hash = "sha256:d9a1ce5f099f29c7c33181cc4386660e0ba891b21a60dc036bf369e3a3ee3aec"}, + {file = "scikit_learn-0.23.2-cp37-cp37m-win_amd64.whl", hash = "sha256:914ac2b45a058d3f1338d7736200f7f3b094857758895f8667be8a81ff443b5b"}, + {file = "scikit_learn-0.23.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7671bbeddd7f4f9a6968f3b5442dac5f22bf1ba06709ef888cc9132ad354a9ab"}, + {file = "scikit_learn-0.23.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d0dcaa54263307075cb93d0bee3ceb02821093b1b3d25f66021987d305d01dce"}, + {file = "scikit_learn-0.23.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5ce7a8021c9defc2b75620571b350acc4a7d9763c25b7593621ef50f3bd019a2"}, + {file = "scikit_learn-0.23.2-cp38-cp38-win32.whl", hash = "sha256:0d39748e7c9669ba648acf40fb3ce96b8a07b240db6888563a7cb76e05e0d9cc"}, + {file = "scikit_learn-0.23.2-cp38-cp38-win_amd64.whl", hash = "sha256:1b8a391de95f6285a2f9adffb7db0892718950954b7149a70c783dc848f104ea"}, +] +scipy = [ + {file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"}, + {file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"}, + 
{file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"}, + {file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"}, + {file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"}, + {file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"}, + {file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"}, + {file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"}, + {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, + {file = "scipy-1.6.1.tar.gz", hash = "sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, +] +six = [ + {file = "six-1.15.0-py2.py3-none-any.whl", hash = 
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, + {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, +] +spacy = [ + {file = "spacy-2.3.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:faa728e56f7b8fe0a70c4bedc42611da23de86b783f6ad588a92c115f427b90c"}, + {file = "spacy-2.3.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:cecb9987a875620d0f185ff07dd04cd64d5097de48689e506256a27a46a644a1"}, + {file = "spacy-2.3.5-cp36-cp36m-win_amd64.whl", hash = "sha256:4e2e79ab7c2af2af8a91913d6d096dd2e6a5a422142cfb35b30c574f776b9fd7"}, + {file = "spacy-2.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ec9eebfae2a35e464d1c35aa2109422765967ba5b10fa9f11da8873801d2241a"}, + {file = "spacy-2.3.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:f153d8aa6104694389ef85c578ac1a3900b142f108248c7b9f5790d010fbe4ee"}, + {file = "spacy-2.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4b7c0c8ab94c6433f08633fef415a054d1f3345b205bcb064578c79f35192917"}, + {file = "spacy-2.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:118a92582b1054b5de7bc5ed763f47ee89388847ede1e0597c6df4b509643e14"}, + {file = "spacy-2.3.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:c7b3d7928d047e5abcd591f8cf6a1c508da16423d371b8a21332101cab46ff7c"}, + {file = "spacy-2.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:14bb12de0d03beb2d8309f194154db70fb364a0fae727e864c2b0228bf3438d8"}, + {file = "spacy-2.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e9496f5ea3d08f2b9fc3e326c2c8cc7886df0db982a41dca2521d3f22ca043e"}, + {file = "spacy-2.3.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:49f7818bd8a597887013fdaaea3263d8b6e99ca64db0933c32f0896158898209"}, + {file = "spacy-2.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:45497775e986d2790c7ee3625c565e3ef7e9ffa607d50230aa3382dd6d9b26e7"}, + {file = "spacy-2.3.5.tar.gz", hash = "sha256:315278ab60094643baecd866017c7d4cbd966efd2d517ad0e6c888edf7fa5aef"}, +] +srsly 
= [ + {file = "srsly-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a696e9c925e91f76ec53840c55483a4fbf76cb717424410a4f249d4805439038"}, + {file = "srsly-1.0.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:8fc4c0641537262e15c7b5b57edc47487b15ac47b696adcb81e0a770ef78e8f5"}, + {file = "srsly-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:11447f8e659e1f62f29302252fb057f179031457b36c83426027182f624fe565"}, + {file = "srsly-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a2746afccfd4f51f0793cccc2b6d5e8a564c962870feec5c77408244c1dbb3c5"}, + {file = "srsly-1.0.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:a1449da4195e30a3bd1fd3122e5b1a0c57703843c590643555c412fc87132aa0"}, + {file = "srsly-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:23c7205b8c1cac49a03521bee37f0afe3680d9f0ec18c75ab3ac39bd3e15272b"}, + {file = "srsly-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2615b8713dfe793ca57925076b0869385d56754816b1eaee5490a6827a1cb5c7"}, + {file = "srsly-1.0.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:b5b887328ac6e210842560fcf32a29c2a9c1ed38c6d47479cadc03d81940da8c"}, + {file = "srsly-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:fd5e1e01f5fd0f532a6f3977bb74facc42f1b7155402ee3d06c07a73e83e3c47"}, + {file = "srsly-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779ebfaa3cf1d5c0f1286ac1baf06af5f2a17bb103622992c71acc6ac20b2781"}, + {file = "srsly-1.0.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:4c43a1f28e555891a1e65650adea2c5d0f0fe4b3d63821de65c8357f32c3a11c"}, + {file = "srsly-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:334f29435099e644a8047b63d60b8386a98b5f7b4739f7efc86b46ca0200aa0e"}, + {file = "srsly-1.0.5.tar.gz", hash = "sha256:d3dd796372367c71946d0cd6f734e49db3d99dd13a57bdac937d9eb62689fc9e"}, +] +tensorboard = [ + {file = "tensorboard-2.4.1-py3-none-any.whl", hash = "sha256:7b8c53c396069b618f6f276ec94fc45d17e3282d668979216e5d30be472115e4"}, +] +tensorboard-plugin-wit = [ + {file = 
"tensorboard_plugin_wit-1.8.0-py3-none-any.whl", hash = "sha256:2a80d1c551d741e99b2f197bb915d8a133e24adb8da1732b840041860f91183a"}, +] +thinc = [ + {file = "thinc-7.4.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5774007b5c52501cab5e2970cadca84923b4c420fff06172f2d0c86531973ce8"}, + {file = "thinc-7.4.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:02b71ae5a0fa906a0aca968bd65589e0ab9fabd511e57be839774228b1509224"}, + {file = "thinc-7.4.5-cp36-cp36m-win_amd64.whl", hash = "sha256:8b647de79fe5f98cd327983bf0e27d006b48ad9694ceabdb9a3832b614ed1618"}, + {file = "thinc-7.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cce68c5ea54cd32cef661858363509afdedad047027e8cdf0dc4edec0c2cc010"}, + {file = "thinc-7.4.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:29a47ad0289dda0520b5af8538b30e8134553130200b83c34311feb71739968d"}, + {file = "thinc-7.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:24086aa0fb72f466782115d529574a825c89afa62eb817962b9339f61ab50e0d"}, + {file = "thinc-7.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d01ab1480d37ebefcac22d63ffe01916c9f025ae3dbdbe5824ac3ea5cce8e3fd"}, + {file = "thinc-7.4.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:309ec4cae81f4de2e4e4fbd0bcb52b10bef4b1a6352c6a9143f6a53d3b1060ef"}, + {file = "thinc-7.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:10bafe5ddce698180098345b9c55f762dc3456558be844d35d64175e511581b6"}, + {file = "thinc-7.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c43ed753aa70bc619e42e168be4926c8a47799af6121ff0727ba99b330afbb44"}, + {file = "thinc-7.4.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c408ab24b24e6368ce4b6ddebb579118042a22d3f2f2c4e19ca67e3eadc9ed33"}, + {file = "thinc-7.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:fae320de65af70786c1526ffc33b88f2da650d3106f5f9a06b37f0ac3944a44f"}, + {file = "thinc-7.4.5.tar.gz", hash = "sha256:5743fde41706252ec6ce4737c68d3505f7e1cc3d4431174a17149838d594f8cb"}, +] +threadpoolctl = [ + {file = 
"threadpoolctl-2.1.0-py3-none-any.whl", hash = "sha256:38b74ca20ff3bb42caca8b00055111d74159ee95c4370882bbff2b93d24da725"}, + {file = "threadpoolctl-2.1.0.tar.gz", hash = "sha256:ddc57c96a38beb63db45d6c159b5ab07b6bced12c45a1f07b2b92f272aebfa6b"}, +] +tifffile = [ + {file = "tifffile-2021.3.31-py3-none-any.whl", hash = "sha256:e0182c4f819688cad03788006512295875565127b7a7eeab0993304e2aa33c76"}, + {file = "tifffile-2021.3.31.tar.gz", hash = "sha256:3a966053e09a89317e6c9bdf99db4bf5c4d3d611ca8ac455024d7824ea5772b3"}, +] +tokenizers = [ + {file = "tokenizers-0.10.1-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:729954ba7f650a3965f1b3e481c57da1c372b9d3fe1d70b0a25c711543e48300"}, + {file = "tokenizers-0.10.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:cd045b3b79e04c0278c3d5f02ca2bbab2fe351fdb489e8441b231da12bea41de"}, + {file = "tokenizers-0.10.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:290eceb99780d5ef5de89968bb6a9bcf912d6ea79578107e9c8bc1386f490389"}, + {file = "tokenizers-0.10.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:e035deb888de33bf7bce3f004c9f815090f74498fe867d8a4ce4d33964db4068"}, + {file = "tokenizers-0.10.1-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:e380e87600430aa9e5b32757e3ffcfa25744ea14d010585d8ca817097bd1f4f7"}, + {file = "tokenizers-0.10.1-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:99eb29009bb23548edd98f01fabb6df24c99ce55456f1c0910204996c7211920"}, + {file = "tokenizers-0.10.1-cp35-cp35m-win32.whl", hash = "sha256:eae53cbdd4ce50d7fe37e9d4e49019023205065ceac6f373bb656038937e42ec"}, + {file = "tokenizers-0.10.1-cp35-cp35m-win_amd64.whl", hash = "sha256:adb5566b37fae9a01bd6ecd86a85e71f421df4ab481822b9dd6dae446d6f37d1"}, + {file = "tokenizers-0.10.1-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:8f5d2a1d6d4a502ead87af60aaec9fdd943e3f298dfa302fe139c0c357f553e5"}, + {file = "tokenizers-0.10.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:6f9f5ac1a1c67dec947e0a28905ea017a826926faa9a827eff1792012632a2f5"}, + {file = "tokenizers-0.10.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fecfcb95c23ce6ebb4834156e78cb6a4cf883b37102c8dd46bc8bd3b5244c3d5"}, + {file = "tokenizers-0.10.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:2ef2ac8e03b9cdc4b7d3be8a84dc411ce264abdb694cfc198e6ba9d22f5c1381"}, + {file = "tokenizers-0.10.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:3da95c8f9bea55e261376972d0d461bd6a4bd02913c5b3942c087801c8631b65"}, + {file = "tokenizers-0.10.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:a7a4c5c59f9896601467807a92b32eb64970b3210bed6d8f1e87c14f1da17496"}, + {file = "tokenizers-0.10.1-cp36-cp36m-win32.whl", hash = "sha256:9c4aedbd763dbf929a27073bb6e569bd454bf79c409f3f6920fa0f4db12abe51"}, + {file = "tokenizers-0.10.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6b70f446bf57c4447d7a192ffaa4f5a883fee99524d76c694025ae05942065a1"}, + {file = "tokenizers-0.10.1-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:917157a63c919c44888bf9d13f8aae43a8b409d1dba831caafd47b4aee0ea917"}, + {file = "tokenizers-0.10.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:eea64d50897234f2df0301c53412d6dc5810688ad73b75e661e53fe698d33e71"}, + {file = "tokenizers-0.10.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:4f42453c86dcabbc3c19c3ec166d693843ffa80543982e3150209e3ca274a322"}, + {file = "tokenizers-0.10.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:57c37a312ba56060129f2ad075f852e83bae205df1a5f34db50e6da2e0432805"}, + {file = "tokenizers-0.10.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:ed236e0ea8c3369cb3bb65a206c2b186e50a3779b03cfca1a92e0ce0b272caab"}, + {file = "tokenizers-0.10.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:dd399f58c26d18d2ee6ffd647c5ec9365c910bed9004e5ddaea5842ff533bcec"}, + {file = "tokenizers-0.10.1-cp37-cp37m-win32.whl", hash = "sha256:1c5aef76b8c9f1fed979a1943cfde2a1707a396a24dc3c09d510e06b3b8a3f3d"}, + {file = 
"tokenizers-0.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5dd957cfd1109e56ff895dfce14d1be16bd9399ce98999b60cf0e8e21243d34c"}, + {file = "tokenizers-0.10.1-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:d2bc2eab13002f06fb987be9d762654acf3c0909a7a8c4e822fe7ebaa7a2a09d"}, + {file = "tokenizers-0.10.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:95d59780e202abf43f150225dc1fd09ac67fb49cc8d93e437a27f3e4593fe64c"}, + {file = "tokenizers-0.10.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:07e321f46df4ecf8f7919b260f67d4df18151e50eddfaaa8ef7ba80ed473a79a"}, + {file = "tokenizers-0.10.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:60a2f5a33e7846817ce54a5ddd2f61108e95aaccf579caeaea0bba7b6a0a9222"}, + {file = "tokenizers-0.10.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d212515cb0c44ef884009169fb9b7c6a3efa540b3fa497620a0915624a7d21b6"}, + {file = "tokenizers-0.10.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:069e30baedbfb098390e03af1701c95c87d024055c82842e4b2d699a0d49d3db"}, + {file = "tokenizers-0.10.1-cp38-cp38-win32.whl", hash = "sha256:3a343b703bd4e174de3b126702418c7924bd68a95a25b43cbb590d1c2e7a4034"}, + {file = "tokenizers-0.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:62c21682a4a8dd8cc8eecf68d3a1c058b5c912f0b792a5e0507e0afd7524f56c"}, + {file = "tokenizers-0.10.1-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:eabea1bdf9d525ca316e72bfc94e41c8e9b42b8fe8e4cbd01d0a4f520e25276f"}, + {file = "tokenizers-0.10.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2739e9d44cfbd9b3e4339a279e7037c572757763bb09faa9727ae35c93f79623"}, + {file = "tokenizers-0.10.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c96b0c985ab6073c901767df3d77e96dc208ab391ad94d5a0b9b5211d6e1729c"}, + {file = "tokenizers-0.10.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:296c741df0ceec93946f030e6b94c6fa975f17823cdf0bdf0989654eee277b54"}, + {file = "tokenizers-0.10.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = 
"sha256:f405c9f0c0f77f9f6500b7c8587eacc122d1a0256ff55bcbef219ed94201b8f8"}, + {file = "tokenizers-0.10.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:bcb2c07cc052f45419a1aac1bfef96de69cd0677c53f901f5dcba5bfa4361575"}, + {file = "tokenizers-0.10.1-cp39-cp39-win32.whl", hash = "sha256:484939ddea9689454156f4d2ee96032ebefddff1dc174caafa9a28c919f3c85d"}, + {file = "tokenizers-0.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:3e7e526fafaae5d7360570be0ec6f1331e3e962bafa262f6a357ba6bd850f8df"}, + {file = "tokenizers-0.10.1.tar.gz", hash = "sha256:81c35b4bc9238c0b5d0af91a719e732a60ee0d87d8bf76615bfec8f3e3ba8f15"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +torch = [ + {file = "torch-1.8.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f23eeb1a48cc39209d986c418ad7e02227eee973da45c0c42d36b1aec72f4940"}, + {file = "torch-1.8.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4ace9c5bb94d5a7b9582cd089993201658466e9c59ff88bd4e9e08f6f072d1cf"}, + {file = "torch-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6ffa1e7ae079c7cb828712cb0cdaae5cc4fb87c16a607e6d14526b62c20bcc17"}, + {file = "torch-1.8.1-cp36-none-macosx_10_9_x86_64.whl", hash = "sha256:16f2630d9604c4ee28ea7d6e388e2264cd7bc6031c6ecd796bae3f56b5efa9a3"}, + {file = "torch-1.8.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:95b7bbbacc3f28fe438f418392ceeae146a01adc03b29d44917d55214ac234c9"}, + {file = "torch-1.8.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:55137feb2f5a0dc7aced5bba690dcdb7652054ad3452b09a2bbb59f02a11e9ff"}, + {file = "torch-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8ad2252bf09833dcf46a536a78544e349b8256a370e03a98627ebfb118d9555b"}, + {file = "torch-1.8.1-cp37-none-macosx_10_9_x86_64.whl", hash = 
"sha256:1388b30fbd262c1a053d6c9ace73bb0bd8f5871b4892b6f3e02d1d7bc9768563"}, + {file = "torch-1.8.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:e7ad1649adb7dc2a450e70a3e51240b84fa4746c69c8f98989ce0c254f9fba3a"}, + {file = "torch-1.8.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3e4190c04dfd89c59bad06d5fe451446643a65e6d2607cc989eb1001ee76e12f"}, + {file = "torch-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:5c2e9a33d44cdb93ebd739b127ffd7da786bf5f740539539195195b186a05f6c"}, + {file = "torch-1.8.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:c6ede2ae4dcd8214b63e047efabafa92493605205a947574cf358216ca4e440a"}, + {file = "torch-1.8.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:ce7d435426f3dd14f95710d779aa46e9cd5e077d512488e813f7589fdc024f78"}, + {file = "torch-1.8.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a50ea8ed900927fb30cadb63aa7a32fdd59c7d7abe5012348dfbe35a8355c083"}, + {file = "torch-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:dac4d10494e74f7e553c92d7263e19ea501742c4825ddd26c4decfa27be95981"}, + {file = "torch-1.8.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:225ee4238c019b28369c71977327deeeb2bd1c6b8557e6fcf631b8866bdc5447"}, +] +tqdm = [ + {file = "tqdm-4.49.0-py2.py3-none-any.whl", hash = "sha256:8f3c5815e3b5e20bc40463fa6b42a352178859692a68ffaa469706e6d38342a5"}, + {file = "tqdm-4.49.0.tar.gz", hash = "sha256:faf9c671bd3fad5ebaeee366949d969dca2b2be32c872a7092a1e1a9048d105b"}, +] +transformers = [ + {file = "transformers-4.3.2-py3-none-any.whl", hash = "sha256:f814f6d38d28013557ce942e0655f3de5ba13a02386c19500a21eb229b370f6d"}, + {file = "transformers-4.3.2.tar.gz", hash = "sha256:bfa8bd35ac8494bbd1bf22a2b96471fd5691430f356da3fed2c850cc0194c9a9"}, +] +typed-ast = [ + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = 
"sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = "typed_ast-1.4.2.tar.gz", hash = 
"sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, +] +typing-extensions = [ + {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, + {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, + {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, +] +umap-learn = [ + {file = "umap-learn-0.5.1.tar.gz", hash = "sha256:3e3e5e526109866012a9da79f423c922edc379c6cac9bf65ea08fbb9dd93ff3a"}, +] +urllib3 = [ + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, +] +wasabi = [ + {file = "wasabi-0.8.2-py3-none-any.whl", hash = "sha256:a493e09d86109ec6d9e70d040472f9facc44634d4ae6327182f94091ca73a490"}, + {file = "wasabi-0.8.2.tar.gz", hash = "sha256:b4a36aaa9ca3a151f0c558f269d442afbb3526f0160fd541acd8a0d5e5712054"}, +] +werkzeug = [ + {file = "Werkzeug-1.0.1-py2.py3-none-any.whl", hash = "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43"}, + {file = "Werkzeug-1.0.1.tar.gz", hash = "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"}, +] +wheel = [ + {file = "wheel-0.36.2-py2.py3-none-any.whl", hash = "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e"}, + {file = "wheel-0.36.2.tar.gz", hash = "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e"}, +] +xxhash = [ + {file = "xxhash-2.0.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:df8d1ebdef86bd5d772d81c91d5d111a5ee8e4b68b8fc6b6edfa5aa825dd2a3d"}, + {file = "xxhash-2.0.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f01c59f5bad2e46bb4235b71b36c56be353f08b6d514a3bd0deb9bf56e4b180a"}, + 
{file = "xxhash-2.0.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cb4feeb8881eb89b9ddd0fae797deb078ebdaad6b1ae6c185b9993d241ed365a"}, + {file = "xxhash-2.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:2912d7810bcf7e39b3929fb186fe46ff83b1bd4a3d6b7eba956d57fa1516ac0c"}, + {file = "xxhash-2.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:48b99c55fc643b32f5efca9c35fcaac6ea553958cf503e202c10eb62718e7a0e"}, + {file = "xxhash-2.0.0-cp27-cp27m-win32.whl", hash = "sha256:3221f1a5bc2ee1f150b84a0c4c7cddc7724aaa01460f3353cf63fd667d89f593"}, + {file = "xxhash-2.0.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cba4b6d174b524623ac8b64bda734601d574f95033f87ddf9c495c69a70135e8"}, + {file = "xxhash-2.0.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:b94f13f4f946500f3cc78f11da4ec4b340bd92c5200b5fe4e6aeac96064aa1fd"}, + {file = "xxhash-2.0.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:635b1d7fa85d215112f41d089bd113ac139f6a42769fcc49c73e779904160f7f"}, + {file = "xxhash-2.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:0f5f1b9ae8e2cf2ff606018769f7e46147df70291312f64e1b80d10482ca8c0b"}, + {file = "xxhash-2.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:8f90deec6567a38e1da29feff36973468691e309b2db8235e64936e61df77c43"}, + {file = "xxhash-2.0.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:8b7e930a60dfe7380e52466aa27941290dd575a5750c622158c86941797eaa1b"}, + {file = "xxhash-2.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:44b26872fd63f1eaf1ab527817aebbd455a3fdcbd56ff6df74fd42a6a137cff4"}, + {file = "xxhash-2.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3d25b540148f1ebf4852e4115f3f4819b585ecd36f121a1f388e8966d69d3a1c"}, + {file = "xxhash-2.0.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:bcd1e9f3ba8df23edefe1d0a886f16b4e27602acbd8575b39540fea26e1aa6d2"}, + {file = "xxhash-2.0.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fc03a399205268815742125b17d967afa9f23b08cdafe185e41368cf7ba9b278"}, + {file = 
"xxhash-2.0.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:bdbc195231c87d63b0503785d9c5264f4275a92da41d9f28fdf08fb321453356"}, + {file = "xxhash-2.0.0-cp35-cp35m-win32.whl", hash = "sha256:7291392bdb1d38c44557dfd3fcd4fd04c363a696dbfa7e6592700a31e4ff6657"}, + {file = "xxhash-2.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:e0fc170c3a00ca008d992c2e6324da3f1467b30044b5835d2feb27870645d38c"}, + {file = "xxhash-2.0.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:5b3c0c84187556d463626ceed85f0d735a5b8ea1678da3e858d3934f38f23915"}, + {file = "xxhash-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2f0ca6673fcbae988389576a779c00a62a28718a18ddc7b2e5b32d7fb30c6f98"}, + {file = "xxhash-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d1859d54837af16ae2a7975477e619793ac698a374d909f533e317c3b384b223"}, + {file = "xxhash-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9d0311fcd78dabe04ab3b4034659628b00ac220e77e37648f73aebbf4cb13680"}, + {file = "xxhash-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:0ecea927fd3df8f3f3a1d6e5bc85838eb44a69ea2f4c9263dfd0f68c4e17e483"}, + {file = "xxhash-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4167f22b037e128820f7642ecc1fbf1b4b4956346093a2e75081bee82b9cfb7e"}, + {file = "xxhash-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:85c5de6c56335b75beef2cba713f95a1b62422be5e27dad30b5083419c6839c4"}, + {file = "xxhash-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:ade1c356acd0b0454a3d3cf42442afe7ad0f46fc944ea1e84720b3858bfdb772"}, + {file = "xxhash-2.0.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:fca7d0fb6fde33d1ac5f97298f44e711e5fe1b4587832864be8c6545cb072a54"}, + {file = "xxhash-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e296b0dee072a54c40c04f09ca35bb9902bb74b54f0fffeafabfc937b3ec85f9"}, + {file = "xxhash-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:02476c5cef803cfd1350662b1e543e47ad64bd5f7f792033d94d590f9674da11"}, + {file = 
"xxhash-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:28c1f0bb6dadc11162d1f2e203d7a12d38b511b87fbb5ffa729594fd456f48e6"}, + {file = "xxhash-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:922ae5b1efa1f9a9cc959f7197113a623ad110853622e990433242a9d8d00d5c"}, + {file = "xxhash-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:7709bc8a5e30c74b07203553f33232531e7739458f72204908cedb08a00bd546"}, + {file = "xxhash-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:fb3c9760598009b1d8bbe57785e278aeb956efb7372d8f9b0bb43cd46f420dff"}, + {file = "xxhash-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3f29f6d455388cc415fe52c0f63f442aaea674cee35a2252d8d4dc8d640938c6"}, + {file = "xxhash-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf360465dc3d24b1501b799c85815c82ddcfc0ffbcba0232968f3a7cd64306fc"}, + {file = "xxhash-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5d2edbb50025a67f061d09d381c54c7d0948c1572f6c9bd15ee238a303d368d9"}, + {file = "xxhash-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7943ede91d8aedfcacb7178b2d881b7498145590206ff61c3e84dc66e6a51d6a"}, + {file = "xxhash-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:b5c2edb8b0a2acc5bdac984b3177711f206463b970aa03087221771c2b0d8f1d"}, + {file = "xxhash-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:16e4b7d508bb49b6fc84bf077f2f7f51263b5618cc61f33a64ed43786ec2c6cf"}, + {file = "xxhash-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:80903d4ce7337921bbc8e5ac695b45691b43c0a00b21964c76e19ea21b9108ea"}, + {file = "xxhash-2.0.0-cp38-cp38-win32.whl", hash = "sha256:e37b25182e969212d5aec60a8da7d1e6a960dbffdb9ba4c63e2240de3605c184"}, + {file = "xxhash-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabee25186b6649bbf6ff258f23941339902374786f8317b0422144ddaa505df"}, + {file = "xxhash-2.0.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:be93004b832717234a7d2f47dc555428ab1e8712f99cad7d212cebe0e27d3d48"}, + {file = 
"xxhash-2.0.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1b86f49b36c25ebdbd1b5539d428a37d9051ad49eb576a3edd964a8770bc8f3a"}, + {file = "xxhash-2.0.0-pp27-pypy_73-win32.whl", hash = "sha256:bde4d39997de901d0a66ebd631b34f9cf106676fec0878f36b7baf630cb3965a"}, + {file = "xxhash-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:99b5412a3eddb1aa9aaf36cdbf93be4eca99ad83ff8c692672fdeedc7fb597de"}, + {file = "xxhash-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33c4832e689f429539d70baf69162b41dfbabc7f31ca542b5b772cb8a55e7a79"}, + {file = "xxhash-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:82034c9ed54db20f051133cba01de959b5208fe2900e67ebb4c9631f1fd523fd"}, + {file = "xxhash-2.0.0.tar.gz", hash = "sha256:58ca818554c1476fa1456f6cd4b87002e2294f09baf0f81e5a2a4968e62c423c"}, +] +zipp = [ + {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..db3ea1b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,33 @@ +[tool.poetry] +name = "expats" +version = "0.1.0" +description = "EXPlainable Automated Text Scorer" +authors = ["mana-ysh "] + +[tool.poetry.dependencies] +python = "^3.7" +scikit-learn = "^0.23.2" +spacy = "^2.3.2" +pandas = "^1.1.0" +en_core_web_sm = { url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-2.3.0/en_core_web_sm-2.3.0.tar.gz" } +torch = "^1.6.0" +pydantic = "^1.6.1" +transformers = "==4.3.2" +datasets = "^1.1.2" +pytorch-lightning = "^1.0.6" +omegaconf = "^2.0.5" +lit-nlp = "^0.2" + +[tool.poetry.dev-dependencies] +black = "^19.10b0" +pytest = "^6.0.1" +flake8 = "^3.8.3" +pytest-cov = "^2.10.0" +mypy = "^0.790" + +[tool.poetry.scripts] +expats = 'expats.cli:main' + +[build-system] +requires = ["poetry>=0.12"] +build-backend = 
"poetry.masonry.api" \ No newline at end of file diff --git a/scripts/do_asap_experiment.sh b/scripts/do_asap_experiment.sh new file mode 100644 index 0000000..12b4792 --- /dev/null +++ b/scripts/do_asap_experiment.sh @@ -0,0 +1,53 @@ +#/bin/bash + +TRAIN_CONFIG=$1 +EVALUATION_CONFIG=$2 + +echo "\n----- prompt 1 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=1' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=1 output_convert.regression_to_classification.params.x_min=2 output_convert.regression_to_classification.params.x_max=12' +echo "\n----- prompt 2 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=2' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=2 output_convert.regression_to_classification.params.x_min=1 output_convert.regression_to_classification.params.x_max=6' +echo "\n----- prompt 3 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=3' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=3 output_convert.regression_to_classification.params.x_min=0 output_convert.regression_to_classification.params.x_max=3' +echo "\n----- prompt 4 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=4' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=4 output_convert.regression_to_classification.params.x_min=0 output_convert.regression_to_classification.params.x_max=3' +echo "\n----- prompt 5 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=5' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + 
EVALUATION_OVERRIDES='dataset.params.prompt_id=5 output_convert.regression_to_classification.params.x_min=0 output_convert.regression_to_classification.params.x_max=4' +echo "\n----- prompt 6 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=6' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=6 output_convert.regression_to_classification.params.x_min=0 output_convert.regression_to_classification.params.x_max=4' +echo "\n----- prompt 7 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=7' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=7 output_convert.regression_to_classification.params.x_min=2 output_convert.regression_to_classification.params.x_max=24' +echo "\n----- prompt 8 -----\n" +make train-then-evaluate \ + TRAIN_CONFIG_PATH=${TRAIN_CONFIG} \ + TRAIN_OVERRIDES='dataset.params.prompt_id=8' \ + EVALUATION_CONFIG_PATH=${EVALUATION_CONFIG} \ + EVALUATION_OVERRIDES='dataset.params.prompt_id=8 output_convert.regression_to_classification.params.x_min=10 output_convert.regression_to_classification.params.x_max=60' diff --git a/tests/common/test_config_util.py b/tests/common/test_config_util.py new file mode 100644 index 0000000..ce7c85e --- /dev/null +++ b/tests/common/test_config_util.py @@ -0,0 +1,21 @@ + +import pytest + +from expats.common.config_util import dump_to_file, load_from_file, merge_with_dotlist + + +@pytest.mark.parametrize( + "orig_dic, dotlist, expected_dic", + [ + ({"a": 1, "b": 3}, None, {"a": 1, "b": 3}), + ({"a": 1, "b": 3}, ["b=10"], {"a": 1, "b": 10}), + ({"a": 1, "b": 3}, ["c=10"], {"a": 1, "b": 3, "c": 10}), + ] +) +def test_save_and_load_and_merge(tmp_path, orig_dic, dotlist, expected_dic): + path = str(tmp_path / "test_save_and_load") + dump_to_file(orig_dic, path) + actual_dic = load_from_file(path) + 
if dotlist: + actual_dic = merge_with_dotlist(actual_dic, dotlist) + assert actual_dic == expected_dic diff --git a/tests/common/test_instantiate.py b/tests/common/test_instantiate.py new file mode 100644 index 0000000..0dd692c --- /dev/null +++ b/tests/common/test_instantiate.py @@ -0,0 +1,21 @@ + +import pytest + +from expats.common.instantiate import ConfigFactoried + + +class MockBase(ConfigFactoried): + pass + + +@MockBase.register +class MockImpl(MockBase): + pass + + +def test_create(): + mock_impl_instance = MockBase.create_from_factory("MockImpl", {}) + assert type(mock_impl_instance) == MockImpl + + with pytest.raises(KeyError): + MockBase.create_from_factory("NonExistImpl", {}) diff --git a/tests/data/test_convert.py b/tests/data/test_convert.py new file mode 100644 index 0000000..cca77fa --- /dev/null +++ b/tests/data/test_convert.py @@ -0,0 +1,28 @@ + +import pytest + +from expats.data.convert import RoundNearestInteger, MinMaxDenormalizedRoundNearestInteger + + +@pytest.mark.parametrize( + "inputs, expected_outputs", + [ + ([2.3, 0.1, -1.7, 3.5], ["2", "0", "-2", "4"]), + ] +) +def test_round_nearest_integer(inputs, expected_outputs): + converter = RoundNearestInteger() + assert converter.convert(inputs) == expected_outputs + + +@pytest.mark.parametrize( + "x_min, x_max, inputs, expected_outputs", + [ + (2, 10, [0.75, 0, 1], ["8", "2", "10"]), + ] +) +def test_min_max_denormalized_round_nearest_integer( + x_min, x_max, inputs, expected_outputs +): + converter = MinMaxDenormalizedRoundNearestInteger(x_min, x_max) + assert converter.convert(inputs) == expected_outputs diff --git a/tests/e2e/test_pipelines.py b/tests/e2e/test_pipelines.py new file mode 100644 index 0000000..f8fbcfb --- /dev/null +++ b/tests/e2e/test_pipelines.py @@ -0,0 +1,80 @@ +from contextlib import contextmanager +import os +import requests +import shutil +import subprocess +import time +from typing import Optional + +import pytest + +from expats.task import train, evaluate, predict 
+from expats.common.config_util import load_from_file + +TRAIN_ARTIFACT_PATH = "log/unittest" +PREDICT_OUTPUT_PATH = "log/unittest.output" +CONFIG_FIXTURE_DIR = "tests/fixtures/config/" + + +@contextmanager +def start_interpret_server(interpret_config_path: str): + proc = subprocess.Popen([ + "poetry", "run", "expats", "interpret", f"{interpret_config_path}" + ]) + try: + yield proc + finally: + proc.kill() + + +@pytest.mark.parametrize( + "train_yaml_filename, evaluate_yaml_filename, predict_yaml_filename, interpret_yaml_filename", + [ + ("train.yaml", "evaluate.yaml", "predict.yaml", None), + ("train_cefr.yaml", "evaluate_cefr.yaml", "predict.yaml", None), + ("train_bert_classifier.yaml", "evaluate.yaml", "predict.yaml", "interpret.yaml"), + ("train_bert_regressor.yaml", "evaluate.yaml", "predict.yaml", "interpret.yaml"), + ("train_distilbert_regressor.yaml", "evaluate.yaml", "predict.yaml", "interpret.yaml") + ] +) +def test_e2e_pipeline( + train_yaml_filename: str, + evaluate_yaml_filename: str, + predict_yaml_filename: str, + interpret_yaml_filename: Optional[str] +): + # clean in advance + if os.path.exists(TRAIN_ARTIFACT_PATH): + shutil.rmtree(TRAIN_ARTIFACT_PATH) + os.mkdir(TRAIN_ARTIFACT_PATH) # FIXME + if os.path.exists(PREDICT_OUTPUT_PATH): + os.remove(PREDICT_OUTPUT_PATH) + + train_config_path = os.path.join(CONFIG_FIXTURE_DIR, train_yaml_filename) + train_config = load_from_file(train_config_path) + train(train_config, TRAIN_ARTIFACT_PATH) + + eval_config_path = os.path.join(CONFIG_FIXTURE_DIR, evaluate_yaml_filename) + eval_config = load_from_file(eval_config_path) + evaluate(eval_config) + + predict_config_path = os.path.join(CONFIG_FIXTURE_DIR, predict_yaml_filename) + predict_config = load_from_file(predict_config_path) + predict(predict_config, PREDICT_OUTPUT_PATH) + + if interpret_yaml_filename is not None: + interpret_config_path = os.path.join(CONFIG_FIXTURE_DIR, interpret_yaml_filename) + # FIXME: better test + with 
start_interpret_server(interpret_config_path): + max_retries = 10 + second_per_request = 10 + n_fail = 0 + for i in range(max_retries): + time.sleep(second_per_request) + try: + response = requests.get("http://localhost:5432") + break + except requests.exceptions.ConnectionError: + n_fail += 1 + assert n_fail < max_retries, "Fail to connect" + assert response.status_code == 200 diff --git a/tests/feature/test_text_basics.py b/tests/feature/test_text_basics.py new file mode 100644 index 0000000..4d7d9da --- /dev/null +++ b/tests/feature/test_text_basics.py @@ -0,0 +1,53 @@ +from typing import List + +import numpy as np +import spacy +import pytest + +from expats.feature.text_basics import ( + NumberOfTokenFeature, + AverageTokenLengthFeature, + UnigramLikelihoodFeature, +) + + +def _create_spacy_doc(words: List[str]) -> spacy.tokens.doc.Doc: + return spacy.tokens.doc.Doc(spacy.vocab.Vocab(), words=words) + + +@pytest.mark.parametrize( + "words, expected_value", + [ + (["i", "am", "here"], 3), + ] +) +def test_number_of_token_feature(words, expected_value): + doc = _create_spacy_doc(words) + feature = NumberOfTokenFeature() + np.testing.assert_array_equal(feature.extract(doc), np.array([expected_value])) + + +@pytest.mark.parametrize( + "words, expected_value", + [ + (["i", "am", "here"], 7 / 3), + (["a", "ab", "b"], 4 / 3) + ] +) +def test_average_token_length_feature(words, expected_value): + doc = _create_spacy_doc(words) + feature = AverageTokenLengthFeature() + np.testing.assert_array_equal(feature.extract(doc), np.array([expected_value])) + + +@pytest.mark.parametrize( + "words, word2freq, expected_value", + [ + (["i", "am"], {"i": 4, "am": 3, "is": 2}, (np.log(4 / 9) + np.log(3 / 9)) / 2), + (["i", "are"], {"i": 4, "am": 3, "is": 2}, (np.log(4 / 9) + np.log(1 / 9)) / 2), # NOTE: OOV case + ] +) +def test_unigram_likelihood_feature(words, word2freq, expected_value): + doc = _create_spacy_doc(words) + feature = UnigramLikelihoodFeature(word2freq) + 
np.testing.assert_array_equal(feature.extract(doc), np.array([expected_value])) diff --git a/tests/fixtures/config/evaluate.yaml b/tests/fixtures/config/evaluate.yaml new file mode 100644 index 0000000..3662b01 --- /dev/null +++ b/tests/fixtures/config/evaluate.yaml @@ -0,0 +1,29 @@ + +artifact_path: log/unittest + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv + +output_convert: + classification_to_regression: + type: ToFloat + params: + regression_to_classification: + type: RoundNearestInteger + params: + +metrics: + classification: + - type: MacroF1 + params: + - type: MicroF1 + params: + - type: Accuracy + params: + - type: QuadraticWeightedKappa + params: + regression: + - type: PearsonCorrelation + params: \ No newline at end of file diff --git a/tests/fixtures/config/evaluate_cefr.yaml b/tests/fixtures/config/evaluate_cefr.yaml new file mode 100644 index 0000000..2e02d34 --- /dev/null +++ b/tests/fixtures/config/evaluate_cefr.yaml @@ -0,0 +1,33 @@ + +artifact_path: log/unittest + +dataset: + type: cefr-tsv + params: + file_path: tests/fixtures/data/cefr.tsv + +output_convert: + classification_to_regression: + type: PredifinedNumerizer + params: + mapper: + 'A1': 0.0 + 'A2': 0.2 + 'B1': 0.4 + 'B2': 0.6 + 'C1': 0.8 + 'C2': 1.0 + +metrics: + classification: + - type: MacroF1 + params: + - type: MicroF1 + params: + - type: Accuracy + params: + - type: QuadraticWeightedKappa + params: + regression: + - type: PearsonCorrelation + params: \ No newline at end of file diff --git a/tests/fixtures/config/interpret.yaml b/tests/fixtures/config/interpret.yaml new file mode 100644 index 0000000..db26a0c --- /dev/null +++ b/tests/fixtures/config/interpret.yaml @@ -0,0 +1,7 @@ + +artifact_path: log/unittest + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv \ No newline at end of file diff --git a/tests/fixtures/config/predict.yaml 
b/tests/fixtures/config/predict.yaml new file mode 100644 index 0000000..7183ce2 --- /dev/null +++ b/tests/fixtures/config/predict.yaml @@ -0,0 +1,7 @@ + +artifact_path: log/unittest + +dataset: + type: line-by-line + params: + file_path: tests/fixtures/data/line_by_line.txt \ No newline at end of file diff --git a/tests/fixtures/config/train.yaml b/tests/fixtures/config/train.yaml new file mode 100644 index 0000000..53dbbe6 --- /dev/null +++ b/tests/fixtures/config/train.yaml @@ -0,0 +1,25 @@ + +task: classification + +profiler: + type: DocFeatureMLClassifier + params: + features: + - type: NumberOfTokenFeature + params: + - type: AverageTokenLengthFeature + params: + - type: UnigramLikelihoodFeature + params: + path: tests/fixtures/data/word2freq.tsv + classifier: + type: rf + params: + n_estimators: 3 + max_depth: 2 + random_state: 46 + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv diff --git a/tests/fixtures/config/train_bert_classifier.yaml b/tests/fixtures/config/train_bert_classifier.yaml new file mode 100644 index 0000000..a849233 --- /dev/null +++ b/tests/fixtures/config/train_bert_classifier.yaml @@ -0,0 +1,21 @@ + +task: classification + +profiler: + type: TransformerClassifier + params: + trainer: + gpus: null + max_epochs: 1 + network: + num_class: 10 + pretrained_model_name_or_path: bert-base-uncased + lr: 5e-4 + data_loader: + batch_size: 16 + val_ratio: 0.2 + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv \ No newline at end of file diff --git a/tests/fixtures/config/train_bert_regressor.yaml b/tests/fixtures/config/train_bert_regressor.yaml new file mode 100644 index 0000000..345f73e --- /dev/null +++ b/tests/fixtures/config/train_bert_regressor.yaml @@ -0,0 +1,21 @@ + +task: regression + +profiler: + type: TransformerRegressor + params: + trainer: + gpus: null + max_epochs: 1 + network: + pretrained_model_name_or_path: 
bert-base-uncased + lr: 5e-4 + output_normalized: true + data_loader: + batch_size: 16 + val_ratio: 0.2 + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv \ No newline at end of file diff --git a/tests/fixtures/config/train_cefr.yaml b/tests/fixtures/config/train_cefr.yaml new file mode 100644 index 0000000..a6332fd --- /dev/null +++ b/tests/fixtures/config/train_cefr.yaml @@ -0,0 +1,21 @@ + +task: classification + +profiler: + type: TransformerClassifier + params: + trainer: + gpus: null + max_epochs: 1 + network: + num_class: 10 + pretrained_model_name_or_path: bert-base-uncased + lr: 5e-4 + data_loader: + batch_size: 16 + val_ratio: 0.2 + +dataset: + type: cefr-tsv + params: + file_path: tests/fixtures/data/cefr.tsv \ No newline at end of file diff --git a/tests/fixtures/config/train_distilbert_regressor.yaml b/tests/fixtures/config/train_distilbert_regressor.yaml new file mode 100644 index 0000000..11bcca2 --- /dev/null +++ b/tests/fixtures/config/train_distilbert_regressor.yaml @@ -0,0 +1,21 @@ + +task: regression + +profiler: + type: TransformerRegressor + params: + trainer: + gpus: null + max_epochs: 1 + network: + pretrained_model_name_or_path: distilbert-base-uncased + lr: 5e-4 + output_normalized: true + data_loader: + batch_size: 16 + val_ratio: 0.2 + +dataset: + type: asap-aes + params: + path: tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv \ No newline at end of file diff --git a/tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv b/tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv new file mode 100644 index 0000000..25fcd9b --- /dev/null +++ b/tests/fixtures/data/asap_aes_training_set_rel3_exmaple.tsv @@ -0,0 +1,4 @@ +essay_id essay_set essay rater1_domain1 rater2_domain1 rater3_domain1 domain1_score rater1_domain2 rater2_domain2 domain2_score rater1_trait1 rater1_trait2 rater1_trait3 rater1_trait4 rater1_trait5 rater1_trait6 rater2_trait1 rater2_trait2 rater2_trait3 
rater2_trait4 rater2_trait5 rater2_trait6 rater3_trait1 rater3_trait2 rater3_trait3 rater3_trait4 rater3_trait5 rater3_trait6 +1 1 "I'm here" 4 4 8 +5 1 "Hi, this is test file." 4 4 8 +9 1 "testtest" 4 5 9 diff --git a/tests/fixtures/data/cefr.tsv b/tests/fixtures/data/cefr.tsv new file mode 100644 index 0000000..1a5e0ff --- /dev/null +++ b/tests/fixtures/data/cefr.tsv @@ -0,0 +1,3 @@ +A1 test +A2 foo bar +C1 this is example diff --git a/tests/fixtures/data/line_by_line.txt b/tests/fixtures/data/line_by_line.txt new file mode 100644 index 0000000..d0f0282 --- /dev/null +++ b/tests/fixtures/data/line_by_line.txt @@ -0,0 +1,2 @@ +Hi, this is test file. +This example looks like... \ No newline at end of file diff --git a/tests/fixtures/data/word2freq.tsv b/tests/fixtures/data/word2freq.tsv new file mode 100644 index 0000000..abee876 --- /dev/null +++ b/tests/fixtures/data/word2freq.tsv @@ -0,0 +1,12 @@ +. 1201360 +to 392383 +I 381699 +the 277443 +that 258837 +n't 240872 +a 202189 +is 184184 +you 181809 +do 168408 +? 
158657 +, 155421 diff --git a/tests/metric/test_metric.py b/tests/metric/test_metric.py new file mode 100644 index 0000000..232e0cd --- /dev/null +++ b/tests/metric/test_metric.py @@ -0,0 +1,30 @@ + +import pytest + +from expats.metric.metric import ClassificationMetric, RegressionMetric + + +@pytest.mark.parametrize( + "name, inputs, expected", + [ + ("Accuracy", [("0", "0"), ("1", "2"), ("2", "1"), ("0", "0"), ("1", "0"), ("2", "1")], 2 / 6), + ("MacroF1", [("0", "0"), ("1", "2"), ("2", "1"), ("0", "0"), ("1", "0"), ("2", "1")], 0.26666666), + ("MicroF1", [("0", "0"), ("1", "2"), ("2", "1"), ("0", "0"), ("1", "0"), ("2", "1")], 0.33333333), + ] +) +def test_classification_metric(name, inputs, expected): + metric = ClassificationMetric.create_from_factory(name, None) + actual = metric.calculate(inputs) + assert actual == pytest.approx(expected) + + +@pytest.mark.parametrize( + "name, inputs, expected", + [ + ("PearsonCorrelation", [(1, 10), (2, 9), (3, 2.5), (4, 6), (5, 4)], -0.74261065), + ] +) +def test_regression_metric(name, inputs, expected): + metric = RegressionMetric.create_from_factory(name, None) + actual = metric.calculate(inputs) + assert actual == pytest.approx(expected) diff --git a/tests/nlp/test_parser.py b/tests/nlp/test_parser.py new file mode 100644 index 0000000..18d015f --- /dev/null +++ b/tests/nlp/test_parser.py @@ -0,0 +1,20 @@ + +import pytest + +from expats.nlp.parser import create_spacy_parser, sentence_tokenize_en + + +def test_create_spacy_parser(): + create_spacy_parser("en_core_web_sm") + with pytest.raises(OSError): + create_spacy_parser("not_found_parser") + + +@pytest.mark.parametrize( + "text, expected_sents", + [ + ("i am here. you are also here.", ["i am here.", "you are also here."]), + ] +) +def test_sentence_tokenize_en(text, expected_sents): + assert sentence_tokenize_en(text) == expected_sents