diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index bfc34d9..95f4324 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: [3.8]
     steps:
       - uses: actions/checkout@v2
       - name: Set up Python ${{ matrix.python-version }}
@@ -17,32 +17,36 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Install poetry
+        run: |
+          python -m pip install --upgrade pip
+          curl -sSL https://install.python-poetry.org | python - --version 1.2.2
+          echo "${HOME}/.local/bin" >> $GITHUB_PATH
+
       - name: Install dependencies
         run: |
-          curl -sSL https://install.python-poetry.org | python - --version 1.2.1
-          $HOME/.local/bin/poetry install --no-root
+          poetry install --no-root
 
       - name: Run tests
         run: |
-          $HOME/.local/bin/poetry run pytest
+          poetry run pytest
 
       - name: Build wheels
         run: |
-          $HOME/.local/bin/poetry version $(git tag --points-at HEAD)
-          $HOME/.local/bin/poetry build
+          poetry version $(git tag --points-at HEAD)
+          poetry build
 
       - name: Test install package
         run: |
-          mkdir test_install
-          cd test_install
-          $HOME/.local/bin/poetry init
-          $HOME/.local/bin/poetry add ../dist/$(ls dist/*.whl)
+          poetry new test-install
+          cd test-install
+          poetry add ../dist/$(ls ../dist/*.whl)
 
-          $HOME/.local/bin/poetry run python -c "import datastream"
+          poetry run python -c "import datastream"
 
       - name: Upload
         env:
           USERNAME: __token__
           PASSWORD: ${{ secrets.PYPI_TOKEN }}
         run: |
-          $HOME/.local/bin/poetry publish --username=$USERNAME --password=$PASSWORD
+          poetry publish --username=$USERNAME --password=$PASSWORD
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c32d526..6d6f704 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -24,18 +24,23 @@ jobs:
             ${{ runner.os }}-pip-
             ${{ runner.os }}-
 
+      - name: Install poetry
+        run: |
+          python -m pip install --upgrade pip
+          curl -sSL https://install.python-poetry.org | python - --version 1.2.2
+          echo "${HOME}/.local/bin" >> $GITHUB_PATH
+
       - name: Install dependencies
         run: |
-          curl -sSL https://install.python-poetry.org | python - --version 1.2.1
-          $HOME/.local/bin/poetry install install
+          poetry install
 
       - name: Run tests
         run: |
-          $HOME/.local/bin/poetry install run pytest
+          poetry run pytest
 
       - name: Build wheels
         run: |
-          $HOME/.local/bin/poetry install build
+          poetry build
 
   build-docs:
     runs-on: ubuntu-latest
diff --git a/README.rst b/README.rst
index 1e6787c..1dfd402 100644
--- a/README.rst
+++ b/README.rst
@@ -110,9 +110,3 @@ Install from source
 ===================
 
 .. pip install -e .
-
-To patch the code locally for `Python 3.6` run `patch-python3.6.sh`.
-
-.. code-block:: bash
-
-    $ ./patch-python3.6.sh
diff --git a/datastream/__init__.py b/datastream/__init__.py
index 46609bf..d4f136e 100644
--- a/datastream/__init__.py
+++ b/datastream/__init__.py
@@ -2,7 +2,8 @@
 from datastream.datastream import Datastream
 from pkg_resources import get_distribution, DistributionNotFound
 
+
 try:
-    __version__ = get_distribution('pytorch-datastream').version
+    __version__ = get_distribution("pytorch-datastream").version
 except DistributionNotFound:
     pass
diff --git a/datastream/dataset.py b/datastream/dataset.py
index 626c1d2..7de0200 100644
--- a/datastream/dataset.py
+++ b/datastream/dataset.py
@@ -1,7 +1,15 @@
 from __future__ import annotations
 from pydantic import BaseModel
 from typing import (
-    Tuple, Callable, Union, List, TypeVar, Generic, Dict, Optional, Iterable
+    Tuple,
+    Callable,
+    Union,
+    List,
+    TypeVar,
+    Generic,
+    Dict,
+    Optional,
+    Iterable,
 )
 from pathlib import Path
 from functools import lru_cache
@@ -14,12 +22,12 @@
 from datastream import tools
 
 
-T = TypeVar('T')
-R = TypeVar('R')
+T = TypeVar("T")
+R = TypeVar("R")
 
 
 class Dataset(BaseModel, Generic[T]):
-    '''
+    """
     A ``Dataset[T]`` is a mapping that allows pipelining of functions in a
     readable syntax returning an example of type ``T``.
@@ -39,7 +47,7 @@ class Dataset(BaseModel, Generic[T]):
     ...     )
     >>> dataset[2]
     ('banana', 28)
-    '''
+    """
 
     dataframe: Optional[pd.DataFrame]
     length: int
@@ -51,25 +59,22 @@ class Config:
 
     @staticmethod
     def from_subscriptable(subscriptable) -> Dataset:
-        '''
+        """
         Create ``Dataset`` based on subscriptable i.e. implements
         ``__getitem__`` and ``__len__``. Should only be used for simple
         examples as a ``Dataset`` created with this method does not support
         methods that require a source dataframe like
         :func:`Dataset.split` and :func:`Dataset.subset`.
-        '''
+        """
 
-        return (
-            Dataset.from_dataframe(
-                pd.DataFrame(dict(index=range(len(subscriptable))))
-            )
-            .map(lambda row: subscriptable[row['index']])
-        )
+        return Dataset.from_dataframe(
+            pd.DataFrame(dict(index=range(len(subscriptable))))
+        ).map(lambda row: subscriptable[row["index"]])
 
     @staticmethod
     def from_dataframe(dataframe: pd.DataFrame) -> Dataset[pd.Series]:
-        '''
+        """
         Create ``Dataset`` based on ``pandas.DataFrame``.
         :func:`Dataset.__getitem__` will return a row from the dataframe and
         :func:`Dataset.map` should be given a function that takes a row from
@@ -82,7 +87,7 @@ def from_dataframe(dataframe: pd.DataFrame) -> Dataset[pd.Series]:
         ...     .map(lambda row: row['number'] + 1)
         ... )[-1]
         4
-        '''
+        """
         return Dataset(
             dataframe=dataframe,
             length=len(dataframe),
@@ -91,7 +96,7 @@ def from_dataframe(dataframe: pd.DataFrame) -> Dataset[pd.Series]:
 
     @staticmethod
     def from_paths(paths: Iterable[str, Path], pattern: str) -> Dataset[pd.Series]:
-        '''
+        """
         Create ``Dataset`` from paths using regex pattern that extracts
         information from the path itself.
         :func:`Dataset.__getitem__` will return a row from the dataframe and
@@ -104,20 +109,17 @@ def from_paths(paths: Iterable[str, Path], pattern: str) -> Dataset[pd.Series]:
         ...     .map(lambda row: row["class_name"])
         ... )[-1]
         'damage'
-        '''
+        """
         paths = list(paths)
         return Dataset.from_dataframe(
-            pd.Series(paths)
-            .astype(str)
-            .str.extract(pattern)
-            .assign(path=paths)
+            pd.Series(paths).astype(str).str.extract(pattern).assign(path=paths)
         )
 
     def __getitem__(
-        self: Dataset[T],
-        select: Union[int, slice, Iterable, Callable[[pd.DataFrame], Iterable[int]]]
+        self: Dataset[T],
+        select: Union[int, slice, Iterable, Callable[[pd.DataFrame], Iterable[int]]],
     ) -> Union[T, Dataset[T]]:
-        '''Get selection from the ``Dataset[T]``'''
+        """Get selection from the ``Dataset[T]``"""
         if np.issubdtype(type(select), np.integer):
             return self.get_item(self.dataframe, select)
         else:
@@ -128,10 +130,12 @@ def __len__(self):
         return self.length
 
     def __str__(self):
-        return str('\n'.join(
-            [str(self[index]) for index in range(min(3, len(self)))]
-            + (['...'] if len(self) > 3 else [])
-        ))
+        return str(
+            "\n".join(
+                [str(self[index]) for index in range(min(3, len(self)))]
+                + (["..."] if len(self) > 3 else [])
+            )
+        )
 
     def __repr__(self):
         return str(self)
@@ -154,10 +158,8 @@ def replace(self, **kwargs):
         new_dict.update(**kwargs)
         return type(self)(**new_dict)
 
-    def map(
-        self: Dataset[T], function: Callable[[T], R]
-    ) -> Dataset[R]:
-        '''
+    def map(self: Dataset[T], function: Callable[[T], R]) -> Dataset[R]:
+        """
         Creates a new dataset with the function added to the dataset pipeline.
 
         >>> (
@@ -165,7 +167,7 @@ def map(
         ...     .map(lambda number: number + 1)
         ... )[-1]
         4
-        '''
+        """
 
         def composed_fn(dataframe, index):
             item = self.get_item(dataframe, index)
@@ -174,16 +176,20 @@ def composed_fn(dataframe, index):
             except Exception as e:
                 item_text = textwrap.shorten(str(item), width=79)
 
-                raise Exception('\n'.join([
-                    repr(e),
-                    '',
-                    'Above exception originated from',
-                    f'module: {inspect.getmodule(function)}',
-                    'from mapped function:',
-                    inspect.getsource(function),
-                    'for item:',
-                    item_text,
-                ])).with_traceback(e.__traceback__)
+                raise Exception(
+                    "\n".join(
+                        [
+                            repr(e),
+                            "",
+                            "Above exception originated from",
+                            f"module: {inspect.getmodule(function)}",
+                            "from mapped function:",
+                            inspect.getsource(function),
+                            "for item:",
+                            item_text,
+                        ]
+                    )
+                ).with_traceback(e.__traceback__)
 
         return Dataset(
             dataframe=self.dataframe,
@@ -191,10 +197,8 @@ def composed_fn(dataframe, index):
             get_item=composed_fn,
         )
 
-    def starmap(
-        self: Dataset[T], function: Callable[Union[..., R]]
-    ) -> Dataset[R]:
-        '''
+    def starmap(self: Dataset[T], function: Callable[Union[..., R]]) -> Dataset[R]:
+        """
         Creates a new dataset with the function added to the dataset pipeline.
         The dataset's pipeline should return an iterable that will be
         expanded as \\*args to the mapped function.
@@ -205,15 +209,13 @@ def starmap(
         ...     .starmap(lambda number, plus_one: number + plus_one)
         ... )[-1]
         7
-        '''
+        """
         return self.map(tools.star(function))
 
     def subset(
-        self, mask_fn: Callable[
-            [pd.DataFrame], Union[pd.Series, np.array, List[bool]]
-        ]
+        self, mask_fn: Callable[[pd.DataFrame], Union[pd.Series, np.array, List[bool]]]
     ) -> Dataset[T]:
-        '''
+        """
         Select a subset of the dataset using a function that receives the
         source dataframe as input and is expected to return a boolean mask.
@@ -228,7 +230,7 @@ def subset(
         ...     .subset(lambda dataframe: dataframe['number'] <= 2)
         ... )[-1]
         2
-        '''
+        """
         dataframe = self.dataframe[mask_fn(self.dataframe)]
         return self.replace(dataframe=dataframe, length=len(dataframe))
@@ -241,7 +243,7 @@ def split(
         frozen: Optional[bool] = False,
         seed: Optional[int] = None,
     ) -> Dict[str, Dataset[T]]:
-        '''
+        """
         Split dataset into multiple parts. Optionally you can chose to
         stratify on a column in the source dataframe or save the split to a
         json file. If you are sure that the split strategy will not change
         then you can
@@ -270,7 +272,7 @@ def split(
         80
         >>> split_datasets['test'][0]
         3
-        '''
+        """
         if filepath is not None:
             filepath = Path(filepath)
             filepath.parent.mkdir(parents=True, exist_ok=True)
@@ -298,7 +300,7 @@ def with_columns(
     def with_columns(
         self: Dataset[T], **kwargs: Callable[pd.Dataframe, pd.Series]
     ) -> Dataset[T]:
-        '''
+        """
         Append new column(s) to the :attr:`.Dataset.dataframe` by passing the
         new column names as keywords with functions that take the
         :attr:`.Dataset.dataframe` as input and return :func:`pandas.Series`.
@@ -309,9 +311,9 @@ def with_columns(
         ...     .map(lambda row: row['twice'])
         ... )[-1]
         6
-        '''
+        """
         if len(set(kwargs.keys()) & set(self.dataframe.columns)) >= 1:
-            raise ValueError('Should not replace existing columns')
+            raise ValueError("Should not replace existing columns")
 
         dataframe = self.dataframe.assign(**kwargs)
         return Dataset(
@@ -321,7 +323,7 @@ def with_columns(
         )
 
     def zip_index(self: Dataset[T]) -> Dataset[Tuple[T, int]]:
-        '''
+        """
         Zip the output with its index. The output of the pipeline will be
         a tuple ``(output, index)``.
 
@@ -330,7 +332,7 @@ def zip_index(self: Dataset[T]) -> Dataset[Tuple[T, int]]:
         ...     .zip_index()
         ... )[0]
         (4, 0)
-        '''
+        """
         return Dataset(
             dataframe=self.dataframe,
             length=self.length,
@@ -352,6 +354,7 @@ def from_concat(index):
             inner_index = index - cumulative_lengths[dataset_index - 1]
             return dataset_index, inner_index
+
         return from_concat
 
     @staticmethod
@@ -365,18 +368,19 @@ def to_concat(dataset_index, inner_index):
             index = inner_index + cumulative_lengths[dataset_index - 1]
             return index
+
         return to_concat
 
     @staticmethod
     def concat(datasets: List[Dataset]) -> Dataset[R]:
-        '''
+        """
         Concatenate multiple datasets together so that they behave like a
         single dataset.
 
         Consider using :func:`Datastream.merge` if you have multiple data
         sources instead as it allows you to control the number of samples
         from each source in the training batches.
-        '''
+        """
         from_concat_mapping = Dataset.create_from_concat_mapping(datasets)
 
         if any([dataset.dataframe is None for dataset in datasets]):
@@ -391,24 +395,21 @@ def get_item(dataframe, index):
                 get_item=get_item,
             )
         else:
-            dataset_column = (
-                '__concat__'
-                + ''.join([random.choice(string.ascii_lowercase) for _ in range(8)])
+            dataset_column = "__concat__" + "".join(
+                [random.choice(string.ascii_lowercase) for _ in range(8)]
             )
             dataframes = [dataset.dataframe for dataset in datasets]
 
             for dataframe in dataframes:
                 for col in dataframe.columns:
-                    if (
-                        dataframe[col].dtype == int
-                        and any([col not in other.columns for other in dataframes])
+                    if dataframe[col].dtype == int and any(
+                        [col not in other.columns for other in dataframes]
                     ):
                         dataframe[col] = dataframe[col].astype(object)
 
             new_dataframe = pd.concat(dataframes)
             new_dataframe[dataset_column] = [
-                from_concat_mapping(index)[0]
-                for index in range(len(new_dataframe))
+                from_concat_mapping(index)[0] for index in range(len(new_dataframe))
             ]
 
             def get_item(dataframe, index):
@@ -436,6 +437,7 @@ def from_combine(index):
                 )
             ]
         )
+
         return from_combine
 
     @staticmethod
@@ -443,29 +445,30 @@ def create_to_combine_mapping(datasets):
         cumprod_lengths = np.cumprod(list(map(len, datasets)))
 
         def to_concat(inner_indices):
-            return inner_indices[0] + sum([
-                inner_index * cumprod_lengths[i]
-                for i, inner_index in enumerate(inner_indices[1:])
-            ])
+            return inner_indices[0] + sum(
+                [
+                    inner_index * cumprod_lengths[i]
+                    for i, inner_index in enumerate(inner_indices[1:])
+                ]
+            )
+
         return to_concat
 
     @staticmethod
     def combine(datasets: List[Dataset]) -> Dataset[Tuple]:
-        '''
+        """
         Zip multiple datasets together so that all combinations of examples
         are possible (i.e. the product) creating tuples like
         ``(example1, example2, ...)``.
 
         The created dataset will not have a dataframe because combined
         datasets are often very long and it is expensive to enumerate them.
-        '''
+        """
         from_combine_mapping = Dataset.create_from_combine_mapping(datasets)
 
         def get_item(dataframe, index):
             indices = from_combine_mapping(index)
-            return tuple([
-                dataset[index] for dataset, index in zip(datasets, indices)
-            ])
+            return tuple([dataset[index] for dataset, index in zip(datasets, indices)])
 
         return Dataset(
             dataframe=None,
@@ -475,7 +478,7 @@ def get_item(dataframe, index):
 
     @staticmethod
     def zip(datasets: List[Dataset]) -> Dataset[Tuple]:
-        '''
+        """
         Zip multiple datasets together so that examples with matching indices
         create tuples like ``(example1, example2, ...)``.
 
@@ -492,7 +495,7 @@ def zip(datasets: List[Dataset]) -> Dataset[Tuple]:
        ...     Dataset.from_subscriptable([4, 5, 6, 7]),
        ... ])[-1]
        (3, 6)
-        '''
+        """
         length = min(map(len, datasets))
         return (
             Dataset.from_dataframe(
@@ -503,26 +506,26 @@ def zip(datasets: List[Dataset]) -> Dataset[Tuple]:
                 ],
                 axis=1,
                 keys=[
-                    f'dataset{dataset_index}'
+                    f"dataset{dataset_index}"
                     for dataset_index in range(len(datasets))
                 ],
             ).assign(_index=list(range(length)))
         )
-        .map(lambda row: row['_index'].iloc[0])
-        .map(lambda index: tuple(
-            dataset[index] for dataset in datasets
-        ))
+        .map(lambda row: row["_index"].iloc[0])
+        .map(lambda index: tuple(dataset[index] for dataset in datasets))
     )
 
     def cache(
         self,
         key_column: str,
     ):
-        '''Cache intermediate step in-memory based on key column.'''
-        key_mapping = dict(zip(
-            self.dataframe[key_column],
-            range(len(self)),
-        ))
+        """Cache intermediate step in-memory based on key column."""
+        key_mapping = dict(
+            zip(
+                self.dataframe[key_column],
+                range(len(self)),
+            )
+        )
 
         @lru_cache(maxsize=None)
         def only_key(key):
@@ -551,7 +554,7 @@ def test_subscript():
 
     for dataset in (
         Dataset.from_subscriptable(number_list),
-        Dataset.from_dataframe(number_df).map(lambda row: row['number'])
+        Dataset.from_dataframe(number_df).map(lambda row: row["number"]),
     ):
         assert dataset[-1] == number_list[-1]
@@ -570,61 +573,62 @@ def test_subset():
     assert dataset[0] == numbers[1]
 
     dataframe = pd.DataFrame(dict(number=numbers))
-    dataset = (
-        Dataset.from_dataframe(dataframe)
-        .subset(lambda df: df['number'] >= 12)
-    )
-    assert dataset[0]['number'] == numbers[2]
+    dataset = Dataset.from_dataframe(dataframe).subset(lambda df: df["number"] >= 12)
+    assert dataset[0]["number"] == numbers[2]
 
 
 def test_with_columns():
     from pytest import raises
 
     with raises(ValueError):
-        dataset = (
-            Dataset.from_dataframe(pd.DataFrame(dict(
-                key=np.arange(100),
-            )))
-            .with_columns(key=lambda df: df['key'] * 2)
-        )
+        dataset = Dataset.from_dataframe(
+            pd.DataFrame(
+                dict(
+                    key=np.arange(100),
+                )
+            )
+        ).with_columns(key=lambda df: df["key"] * 2)
 
 
 def test_concat_dataset():
-    dataset = Dataset.concat([
-        Dataset.from_subscriptable(list(range(5))),
-        Dataset.from_subscriptable(list(range(4))),
-    ])
+    dataset = Dataset.concat(
+        [
+            Dataset.from_subscriptable(list(range(5))),
+            Dataset.from_subscriptable(list(range(4))),
+        ]
+    )
 
     assert dataset[6] == 1
 
 
 def test_concat_heterogenous_datasets():
     dataset1 = Dataset.from_dataframe(
-        pd.DataFrame(dict(a=[1], b=['a'])).set_index('a'),
+        pd.DataFrame(dict(a=[1], b=["a"])).set_index("a"),
     )
     dataset2 = Dataset.from_dataframe(
-        pd.DataFrame(dict(a=[1], b=[1], c=[2])).set_index('a'),
-    )
-    dataset = (
-        Dataset.concat([dataset1, dataset2])
-        .map(lambda row: row['b'])
+        pd.DataFrame(dict(a=[1], b=[1], c=[2])).set_index("a"),
     )
+    dataset = Dataset.concat([dataset1, dataset2]).map(lambda row: row["b"])
 
-    assert list(dataset) == ['a', 1]
+    assert list(dataset) == ["a", 1]
 
-    dataset_other_functions = Dataset.concat([
-        dataset1.map(lambda row: row['b']),
-        dataset2.map(lambda row: row['c']),
-    ])
+    dataset_other_functions = Dataset.concat(
+        [
+            dataset1.map(lambda row: row["b"]),
+            dataset2.map(lambda row: row["c"]),
+        ]
+    )
 
-    assert list(dataset_other_functions) == ['a', 2]
+    assert list(dataset_other_functions) == ["a", 2]
 
 
 def test_zip_dataset():
-    dataset = Dataset.zip([
-        Dataset.from_subscriptable(list(range(5))),
-        Dataset.from_subscriptable(list(range(4))),
-    ])
+    dataset = Dataset.zip(
+        [
+            Dataset.from_subscriptable(list(range(5))),
+            Dataset.from_subscriptable(list(range(4))),
+        ]
+    )
 
     assert dataset[3] == (3, 3)
@@ -657,13 +661,17 @@ def test_combine_dataset():
 
 
 def test_split_dataset():
-    dataset = Dataset.from_dataframe(pd.DataFrame(dict(
-        index=np.arange(100),
-        number=np.random.randn(100),
-        stratify=np.concatenate([np.ones(50), np.zeros(50)]),
-    ))).map(tuple)
+    dataset = Dataset.from_dataframe(
+        pd.DataFrame(
+            dict(
+                index=np.arange(100),
+                number=np.random.randn(100),
+                stratify=np.concatenate([np.ones(50), np.zeros(50)]),
+            )
+        )
+    ).map(tuple)
 
-    filepath = Path('test_split_dataset.json')
+    filepath = Path("test_split_dataset.json")
     proportions = dict(
         gradient=0.7,
         early_stopping=0.15,
@@ -671,30 +679,30 @@ def test_split_dataset():
     )
 
     kwargs = dict(
-        key_column='index',
+        key_column="index",
         proportions=proportions,
         filepath=filepath,
-        stratify_column='stratify',
+        stratify_column="stratify",
     )
 
     split_datasets1 = dataset.split(**kwargs)
     split_datasets2 = dataset.split(**kwargs)
     split_datasets3 = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=proportions,
-        stratify_column='stratify',
+        stratify_column="stratify",
         seed=100,
     )
     split_datasets4 = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=proportions,
-        stratify_column='stratify',
+        stratify_column="stratify",
         seed=100,
     )
     split_datasets5 = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=proportions,
-        stratify_column='stratify',
+        stratify_column="stratify",
         seed=800,
     )
     filepath.unlink()
@@ -706,12 +714,16 @@ def test_split_dataset():
 
 
 def test_group_split_dataset():
-    dataset = Dataset.from_dataframe(pd.DataFrame(dict(
-        group=np.arange(100) // 4,
-        number=np.random.randn(100),
-    ))).map(tuple)
+    dataset = Dataset.from_dataframe(
+        pd.DataFrame(
+            dict(
+                group=np.arange(100) // 4,
+                number=np.random.randn(100),
+            )
+        )
+    ).map(tuple)
 
-    filepath = Path('test_split_dataset.json')
+    filepath = Path("test_split_dataset.json")
     proportions = dict(
         gradient=0.7,
         early_stopping=0.15,
@@ -719,7 +731,7 @@ def test_group_split_dataset():
     )
 
     kwargs = dict(
-        key_column='group',
+        key_column="group",
         proportions=proportions,
         filepath=filepath,
     )
@@ -727,17 +739,17 @@ def test_group_split_dataset():
     split_datasets1 = dataset.split(**kwargs)
     split_datasets2 = dataset.split(**kwargs)
     split_datasets3 = dataset.split(
-        key_column='group',
+        key_column="group",
         proportions=proportions,
         seed=100,
     )
     split_datasets4 = dataset.split(
-        key_column='group',
+        key_column="group",
         proportions=proportions,
         seed=100,
     )
     split_datasets5 = dataset.split(
-        key_column='group',
+        key_column="group",
         proportions=proportions,
         seed=800,
     )
@@ -753,80 +765,93 @@ def test_group_split_dataset():
 def test_missing_stratify_column():
     from pytest import raises
 
-    dataset = Dataset.from_dataframe(pd.DataFrame(dict(
-        index=np.arange(100),
-        number=np.random.randn(100),
-    ))).map(tuple)
+    dataset = Dataset.from_dataframe(
+        pd.DataFrame(
+            dict(
+                index=np.arange(100),
+                number=np.random.randn(100),
+            )
+        )
+    ).map(tuple)
 
     with raises(KeyError):
         dataset.split(
-            key_column='index',
+            key_column="index",
             proportions=dict(train=0.8, test=0.2),
-            stratify_column='should_fail',
+            stratify_column="should_fail",
         )
 
 
 def test_split_proportions():
-    dataset = Dataset.from_dataframe(pd.DataFrame(dict(
-        index=np.arange(100),
-        number=np.random.randn(100),
-        stratify=np.arange(100) // 10,
-    ))).map(tuple)
+    dataset = Dataset.from_dataframe(
+        pd.DataFrame(
+            dict(
+                index=np.arange(100),
+                number=np.random.randn(100),
+                stratify=np.arange(100) // 10,
+            )
+        )
+    ).map(tuple)
 
     splits = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=dict(train=0.8, test=0.2),
-        stratify_column='stratify',
+        stratify_column="stratify",
     )
 
-    assert len(splits['train']) == 80
-    assert len(splits['test']) == 20
+    assert len(splits["train"]) == 80
+    assert len(splits["test"]) == 20
 
 
 def test_with_columns_split():
     dataset = (
-        Dataset.from_dataframe(pd.DataFrame(dict(
-            index=np.arange(100),
-            number=np.arange(100),
-        )))
+        Dataset.from_dataframe(
+            pd.DataFrame(
+                dict(
+                    index=np.arange(100),
+                    number=np.arange(100),
+                )
+            )
+        )
         .map(tuple)
-        .with_columns(split=lambda df: df['index'] * 2)
+        .with_columns(split=lambda df: df["index"] * 2)
     )
 
     splits = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=dict(train=0.8, test=0.2),
     )
 
-    assert splits['train'][0][0] * 2 == splits['train'][0][2]
+    assert splits["train"][0][0] * 2 == splits["train"][0][2]
 
 
 def test_split_filepath():
-    dataset = (
-        Dataset.from_dataframe(pd.DataFrame(dict(
-            index=np.arange(100),
-            number=np.random.randn(100),
-            stratify=np.arange(100) // 10,
-        )))
-        .map(tuple)
-    )
+    dataset = Dataset.from_dataframe(
+        pd.DataFrame(
+            dict(
+                index=np.arange(100),
+                number=np.random.randn(100),
+                stratify=np.arange(100) // 10,
+            )
+        )
+    ).map(tuple)
 
-    filepath = Path('tmp_test_split.json')
+    filepath = Path("tmp_test_split.json")
 
     splits1 = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=dict(train=0.8, test=0.2),
         filepath=filepath,
     )
 
     splits2 = dataset.split(
-        key_column='index',
+        key_column="index",
         proportions=dict(train=0.8, test=0.2),
         filepath=filepath,
     )
 
-    assert splits1['train'][0] == splits2['train'][0]
-    assert splits1['test'][0] == splits2['test'][0]
+    assert splits1["train"][0] == splits2["train"][0]
+    assert splits1["test"][0] == splits2["test"][0]
 
     filepath.unlink()
@@ -835,48 +860,44 @@ def test_update_stratified_split():
 
     for _ in range(5):
-        dataset = (
-            Dataset.from_dataframe(pd.DataFrame(dict(
-                index=np.arange(100),
-                number=np.random.randn(100),
-                stratify1=np.random.randint(0, 10, 100),
-                stratify2=np.random.randint(0, 10, 100),
-            )))
-            .map(tuple)
-        )
+        dataset = Dataset.from_dataframe(
+            pd.DataFrame(
+                dict(
+                    index=np.arange(100),
+                    number=np.random.randn(100),
+                    stratify1=np.random.randint(0, 10, 100),
+                    stratify2=np.random.randint(0, 10, 100),
+                )
+            )
+        ).map(tuple)
 
-        filepath = Path('tmp_test_split.json')
+        filepath = Path("tmp_test_split.json")
 
-        splits1 = (
-            dataset
-            .subset(lambda df: df['index'] < 50)
-            .split(
-                key_column='index',
-                proportions=dict(train=0.8, test=0.2),
-                filepath=filepath,
-                stratify_column='stratify1',
-            )
+        splits1 = dataset.subset(lambda df: df["index"] < 50).split(
+            key_column="index",
+            proportions=dict(train=0.8, test=0.2),
+            filepath=filepath,
+            stratify_column="stratify1",
         )
 
-        splits2 = (
-            dataset
-            .split(
-                key_column='index',
-                proportions=dict(train=0.8, test=0.2),
-                filepath=filepath,
-                stratify_column='stratify2',
-            )
+        splits2 = dataset.split(
+            key_column="index",
+            proportions=dict(train=0.8, test=0.2),
+            filepath=filepath,
+            stratify_column="stratify2",
        )
 
         assert (
-            splits1['train'].dataframe['index']
-            .isin(splits2['train'].dataframe['index'])
+            splits1["train"]
+            .dataframe["index"]
+            .isin(splits2["train"].dataframe["index"])
             .all()
         )
         assert (
-            splits1['test'].dataframe['index']
-            .isin(splits2['test'].dataframe['index'])
+            splits1["test"]
+            .dataframe["index"]
+            .isin(splits2["test"].dataframe["index"])
             .all()
         )
@@ -885,15 +906,15 @@ def test_update_stratified_split():
 
 def test_concat_missing_columns():
     dataset1 = Dataset.from_dataframe(
-        pd.DataFrame(dict(a=[1, 2, 3], b=['a', 'b', 'c']))
+        pd.DataFrame(dict(a=[1, 2, 3], b=["a", "b", "c"]))
     )
     dataset2 = Dataset.from_dataframe(
         pd.DataFrame(dict(c=[True, False], d=[[1, 2], [3, 4]]))
     )
     concatenated = Dataset.concat([dataset1, dataset2])
 
-    assert type(concatenated[0]['a']) == int
-    assert type(concatenated[-1]['a']) == float
-    assert type(concatenated[0]['b']) == str
-    assert type(concatenated[-1]['c']) == bool
-    assert type(concatenated[-1]['d']) == list
+    assert type(concatenated[0]["a"]) == int
+    assert type(concatenated[-1]["a"]) == float
+    assert type(concatenated[0]["b"]) == str
+    assert type(concatenated[-1]["c"]) == bool
+    assert type(concatenated[-1]["d"]) == list
diff --git a/datastream/datastream.py b/datastream/datastream.py
index c57895c..5739ee4 100644
--- a/datastream/datastream.py
+++ b/datastream/datastream.py
@@ -24,12 +24,12 @@
 )
 
 
-T = TypeVar('T')
-R = TypeVar('R')
+T = TypeVar("T")
+R = TypeVar("R")
 
 
 class Datastream(BaseModel, Generic[T]):
-    '''
+    """
     ``Datastream[T]`` combines a ``Dataset[T]`` and a sampler into a stream
     of examples.
@@ -44,7 +44,7 @@ class Datastream(BaseModel, Generic[T]):
     ...     )
     >>> len(next(iter(data_loader)))
     16
-    '''
+    """
 
     dataset: Dataset[T]
     sampler: Optional[torch.utils.data.Sampler]
@@ -53,21 +53,13 @@ class Config:
         arbitrary_types_allowed = True
         allow_mutation = False
 
-    def __init__(
-        self,
-        dataset: Dataset[T],
-        sampler: torch.utils.data.Sampler = None
-    ):
+    def __init__(self, dataset: Dataset[T], sampler: torch.utils.data.Sampler = None):
         if len(dataset) == 0:
-            raise ValueError('Cannot create datastream from empty dataset')
+            raise ValueError("Cannot create datastream from empty dataset")
 
         super().__init__(
             dataset=dataset,
-            sampler=(
-                StandardSampler(len(dataset))
-                if sampler is None
-                else sampler
-            )
+            sampler=(StandardSampler(len(dataset)) if sampler is None else sampler),
         )
 
     def __len__(self):
@@ -77,11 +69,10 @@ def __iter__(self):
         return map(self.dataset.__getitem__, iter(self.sampler))
 
     @staticmethod
-    def merge(datastreams_and_ns: Tuple[Union[
-        Datastream[T],
-        Tuple[Datastream[T], int]
-    ], ...]) -> Datastream[T]:
-        '''
+    def merge(
+        datastreams_and_ns: Tuple[Union[Datastream[T], Tuple[Datastream[T], int]], ...]
+    ) -> Datastream[T]:
+        """
         Creates a merged datastream where samples are drawn one at a time from
         each underlying datastream (also known as "interleave").
@@ -98,70 +89,69 @@ def merge(
         ... ])
         >>> list(merged_datastream)
         [1, 2, 3, 3, 1, 2, 3, 3]
-        '''
+        """
         datastreams_and_ns = [
-            x if type(x) is tuple else (x, 1)
-            for x in datastreams_and_ns
+            x if type(x) is tuple else (x, 1) for x in datastreams_and_ns
         ]
 
         return Datastream(
-            Dataset.concat([
-                datastream.dataset for datastream, n in datastreams_and_ns
-            ]),
-            MergeSampler(*zip(*[
-                (datastream.sampler, datastream.dataset, n)
-                for (datastream, n) in datastreams_and_ns
-            ])),
+            Dataset.concat(
+                [datastream.dataset for datastream, n in datastreams_and_ns]
+            ),
+            MergeSampler(
+                *zip(
+                    *[
+                        (datastream.sampler, datastream.dataset, n)
+                        for (datastream, n) in datastreams_and_ns
+                    ]
+                )
+            ),
         )
 
     @staticmethod
     def zip(datastreams: List[Datastream]) -> Datastream[Tuple]:
-        '''
+        """
         Zip multiple datastreams together so that all combinations of examples
         are possible (i.e. the product) creating tuples like
         ``(example1, example2, ...)``.
 
         The samples are drawn independently from each underlying datastream.
-        '''
+        """
         return Datastream(
-            Dataset.combine([
-                datastream.dataset for datastream in datastreams
-            ]),
-            ZipSampler(*zip(*[
-                (datastream.sampler, datastream.dataset)
-                for datastream in datastreams
-            ])),
+            Dataset.combine([datastream.dataset for datastream in datastreams]),
+            ZipSampler(
+                *zip(
+                    *[
+                        (datastream.sampler, datastream.dataset)
+                        for datastream in datastreams
+                    ]
+                )
+            ),
        )
 
-    def map(
-        self: Datastream[T], function: Callable[[T], R]
-    ) -> Datastream[R]:
-        '''
+    def map(self: Datastream[T], function: Callable[[T], R]) -> Datastream[R]:
+        """
         Creates a new Datastream with a new mapped dataset. See
         :func:`Dataset.map` for details.
-        '''
+        """
         return Datastream(
             self.dataset.map(function),
             self.sampler,
         )
 
-    def starmap(
-        self: Datastream[T], function: Callable[[...], R]
-    ) -> Datastream[R]:
-        '''
+    def starmap(self: Datastream[T], function: Callable[[...], R]) -> Datastream[R]:
+        """
         Creates a new Datastream with a new starmapped dataset. See
         :func:`Dataset.starmap` for details.
-        '''
+        """
         return Datastream(
             self.dataset.starmap(function),
             self.sampler,
         )
 
     def data_loader(
-        self,
-        n_batches_per_epoch: int = None,
-        **kwargs
+        self, n_batches_per_epoch: int = None, **kwargs
     ) -> torch.utils.data.DataLoader:
-        '''
+        """
         Get ``torch.utils.data.DataLoader`` for use in pytorch pipeline.
 
         The argument ``n_batches_per_epoch`` overrides the underlying length
@@ -174,21 +164,19 @@ def data_loader(
         ...     )
         >>> list(data_loader)[0]
         tensor([5, 5, 5, 5, 5])
-        '''
+        """
         if n_batches_per_epoch is None:
             sampler = self.sampler
         else:
             sampler = RepeatSampler(
                 self.sampler,
-                n_batches_per_epoch * kwargs['batch_size'],
+                n_batches_per_epoch * kwargs["batch_size"],
             )
 
-        return torch.utils.data.DataLoader(
-            self.dataset, sampler=sampler, **kwargs
-        )
+        return torch.utils.data.DataLoader(self.dataset, sampler=sampler, **kwargs)
 
     def zip_index(self: Datastream[T]) -> Datastream[Tuple[T, int]]:
-        '''
+        """
         Zip the output with its underlying `Dataset` index. The output of the
         pipeline will be a tuple ``(output, index)``
@@ -196,29 +184,29 @@ def zip_index(self: Datastream[T]) -> Datastream[Tuple[T, int]]:
         training since that requires the index of the example.
 
         See :func:`Dataset.zip_index` for more details.
-        '''
+        """
         return Datastream(
             self.dataset.zip_index(),
             self.sampler,
         )
 
     def weight(self, index: int) -> float:
-        '''Get sample weight for specific example.'''
+        """Get sample weight for specific example."""
         return self.sampler.weight(index)
 
     def update_weights_(self, function: Callable[[np.array], np.array]):
-        '''Update all sample weights by function **in-place**.'''
+        """Update all sample weights by function **in-place**."""
         self.sampler.update_weights_(function)
 
     def update_example_weight_(self, weight: Union[List, float], index: int):
-        '''Update sample weight for specific example **in-place**.'''
+        """Update sample weight for specific example **in-place**."""
         self.sampler.update_example_weight_(weight, index)
 
     def sample_proportion(
         self: Datastream[T],
         proportion: float,
     ) -> Datastream[T]:
-        '''
+        """
         Create new ``Datastream[T]`` with changed proportion. This changes
         the numbers of drawn samples before restarting sampling with new
         weights and allowing sample replacement.
@@ -227,7 +215,7 @@ def sample_proportion(
         default is to sample without replacement with proportion 1.0 which
         will cause the weighting scheme to only affect the order in which the
         samples are drawn.
-        '''
+        """
         return Datastream(
             self.dataset,
             self.sampler.sample_proportion(proportion),
@@ -237,24 +225,24 @@ def take(
         self: Datastream[T],
         n_samples: PositiveInt,
     ) -> Datastream[T]:
-        '''
+        """
         Like :func:`Datastream.sample_proportion` but specify the number of
         samples instead of a proportion.
-        '''
+        """
         if n_samples < 1:
-            raise ValueError('n_samples must be greater than or equal to 1')
+            raise ValueError("n_samples must be greater than or equal to 1")
         return self.sample_proportion(n_samples / len(self))
 
     def state_dict(self) -> Dict:
-        '''Get state of datastream. Useful for checkpointing sample weights.'''
+        """Get state of datastream. Useful for checkpointing sample weights."""
         return dict(sampler=self.sampler.state_dict())
 
     def load_state_dict(self, state_dict: Dict):
-        '''Load saved state from :func:`Datastream.state_dict`.'''
-        return self.sampler.load_state_dict(state_dict['sampler'])
+        """Load saved state from :func:`Datastream.state_dict`."""
+        return self.sampler.load_state_dict(state_dict["sampler"])
 
     def multi_sample(self: Datastream[T], n: int) -> Datastream[T]:
-        '''
+        """
         Split datastream into clones with different sample weights and then
         merge them. The weights when accessed will be a sequence of multiple
         weights.
@@ -280,7 +268,7 @@ def multi_sample(self: Datastream[T], n: int) -> Datastream[T]:
 
             for index in indices:
                 datastream.update_weight(index, predicted_classes)
-        '''
+        """
         return Datastream(
             self.dataset,
             MultiSampler.from_number(n, self.dataset),
@@ -290,7 +278,7 @@ def cache(
         self,
         key_column: str,
     ):
-        '''Cache dataset in-memory. See :func:`Dataset.cache` for details.'''
+        """Cache dataset in-memory. See :func:`Dataset.cache` for details."""
         return Datastream(
             self.dataset.cache(key_column),
             self.sampler,
@@ -299,7 +287,7 @@ def cache(
 
 
 def test_infinite():
-    datastream = Datastream(Dataset.from_subscriptable(list('abc')))
+    datastream = Datastream(Dataset.from_subscriptable(list("abc")))
     it = iter(datastream.data_loader(batch_size=8, n_batches_per_epoch=10))
     for _ in range(10):
         batch = next(it)
@@ -307,7 +295,7 @@ def test_infinite():
 
 
 def test_iter():
-    datastream = Datastream(Dataset.from_subscriptable(list('abc')))
+    datastream = Datastream(Dataset.from_subscriptable(list("abc")))
     assert len(list(datastream)) == 3
@@ -321,10 +309,12 @@ def test_empty():
 
 
 def test_datastream_merge():
-    datastream = Datastream.merge([
-        Datastream(Dataset.from_subscriptable(list('abc'))),
-        Datastream(Dataset.from_subscriptable(list('def'))),
-    ])
+    datastream = Datastream.merge(
+        [
+            Datastream(Dataset.from_subscriptable(list("abc"))),
+            Datastream(Dataset.from_subscriptable(list("def"))),
+        ]
+    )
 
     it = iter(datastream.sampler)
     for _ in range(2):
@@ -334,11 +324,7 @@ def test_datastream_merge():
     for _ in range(10):
         batch = next(it)
 
-    assert (
-        len(list(
-            datastream.data_loader(batch_size=1)
-        )) == len(datastream)
-    )
+    assert len(list(datastream.data_loader(batch_size=1))) == len(datastream)
 
 
 def test_datastream_zip():
@@ -361,23 +347,21 @@ def test_datastream_zip():
     assert batch[1][0] == 3 and batch[1][1] == 4 and batch[1][2] == 5
     assert batch[2][0] == 6 and batch[2][1] == 7 and batch[2][2] == 6
 
-    assert (
-        len(list(
-            zipped_datastream.data_loader(batch_size=1)
-        )) == len(zipped_datastream)
+    assert len(list(zipped_datastream.data_loader(batch_size=1))) == len(
+        zipped_datastream
     )
 
 
 def test_datastream_merge_zip_merge():
-    '''
+    """
     Repeating because it only sometimes recreated an error that occured
     when using mixup/mixmatch
-    '''
+    """
 
     def RandomDatastream():
-        return Datastream(Dataset.from_subscriptable(
-            list(range(np.random.randint(1, 10)))
-        ))
+        return Datastream(
+            Dataset.from_subscriptable(list(range(np.random.randint(1, 10))))
+        )
 
     def MergedDatastream():
         return Datastream.merge([RandomDatastream(), RandomDatastream()])
@@ -386,15 +370,15 @@ def ZippedMergedDatastream():
         return Datastream.zip([MergedDatastream(), MergedDatastream()])
 
     for attempt in range(10):
-        print('attempt:', attempt)
-        datastream = Datastream.merge([
-            (ZippedMergedDatastream(), 1),
-            (ZippedMergedDatastream(), 5),
-        ])
-
-        it = iter(datastream.data_loader(
-            batch_size=16, n_batches_per_epoch=10
-        ))
+        print("attempt:", attempt)
+        datastream = Datastream.merge(
+            [
+                (ZippedMergedDatastream(), 1),
+                (ZippedMergedDatastream(), 5),
+            ]
+        )
+
+        it = iter(datastream.data_loader(batch_size=16, n_batches_per_epoch=10))
         for _ in range(10):
             print(next(it))
@@ -405,10 +389,12 @@ def test_datastream_simple_weights():
     datastream = (
         Datastream(dataset)
         .zip_index()
-        .starmap(lambda integer, index: dict(
-            integer=integer,
-            index=index,
-        ))
+        .starmap(
+            lambda integer, index: dict(
+                integer=integer,
+                index=index,
+            )
+        )
         .sample_proportion(0.5)
     )
@@ -421,10 +407,8 @@ def test_datastream_simple_weights():
     assert len(samples) == 2
 
     for sample in samples:
-        if sample['index'] in removed_indices:
-            raise AssertionError(
-                'Samples with 0 weight were drawn from the dataset'
-            )
+        if sample["index"] in removed_indices:
+            raise AssertionError("Samples with 0 weight were drawn from the dataset")
 
 
 def test_merge_datastream_weights():
@@ -436,15 +420,14 @@ def test_merge_datastream_weights():
     ]
 
     datastream = (
-        Datastream.merge([
-            Datastream(dataset)
-            for dataset in datasets
-        ])
+        Datastream.merge([Datastream(dataset) for dataset in datasets])
         .zip_index()
-        .starmap(lambda integer, index: dict(
-            integer=integer,
-            index=index,
-        ))
+        .starmap(
+            lambda integer, index: dict(
+                integer=integer,
+                index=index,
+            )
+        )
         .sample_proportion(0.5)
     )
@@ -463,20 +446,15 @@ def test_multi_sample():
     n_multi_sample = 2
 
     datastream = (
-        Datastream(
-            Dataset.from_subscriptable(data)
-        )
-        .map(lambda number: number ** 2)
+        Datastream(Dataset.from_subscriptable(data))
+        .map(lambda number: number**2)
         .multi_sample(n_multi_sample)
         .sample_proportion(0.5)
         .zip_index()
-        .starmap(lambda number, index: (number ** 0.5, index))
+        .starmap(lambda number, index: (number**0.5, index))
     )
 
-    output = [
-        (number, index)
-        for number, index in datastream.data_loader(batch_size=1)
-    ]
+    output = [(number, index) for number, index in datastream.data_loader(batch_size=1)]
     assert len(output) == len(data) * n_multi_sample
 
     print(output)
@@ -487,8 +465,7 @@ def test_multi_sample():
         datastream.update_example_weight_(index, 0)
 
     output2 = [
-        (number, index)
-        for number, index in datastream.data_loader(batch_size=1)
+        (number, index) for number, index in datastream.data_loader(batch_size=1)
     ]
     assert len(output2) == len(data) * n_multi_sample
@@ -501,16 +478,18 @@ def test_take():
 
     import pytest
 
-    datastream = Datastream(Dataset.from_subscriptable(list('abc'))).take(2)
+    datastream = Datastream(Dataset.from_subscriptable(list("abc"))).take(2)
     assert len(list(datastream.data_loader(batch_size=1))) == 2
 
     with pytest.raises(ValueError):
-        Datastream(Dataset.from_subscriptable(list('abc'))).take(0)
+        Datastream(Dataset.from_subscriptable(list("abc"))).take(0)
 
-    datastream = Datastream.merge([
-        Datastream(Dataset.from_subscriptable(list('abc'))),
-        Datastream(Dataset.from_subscriptable(list('d'))),
-    ])
+    datastream = Datastream.merge(
+        [
+            Datastream(Dataset.from_subscriptable(list("abc"))),
+            Datastream(Dataset.from_subscriptable(list("d"))),
+        ]
+    )
     assert len(list(datastream.take(2).data_loader(batch_size=1))) == 2
@@ -518,54 +497,68 @@ def test_sequential_sampler():
     from datastream.samplers import SequentialSampler
 
-    dataset = Dataset.from_subscriptable(list('abc'))
+    dataset = Dataset.from_subscriptable(list("abc"))
 
     datastream = Datastream(dataset, SequentialSampler(len(dataset))).take(2)
     assert len(list(datastream.data_loader(batch_size=1))) == 2
 
     datastream = Datastream(dataset, SequentialSampler(len(dataset)))
     it = iter(datastream.data_loader(batch_size=6, n_batches_per_epoch=10))
-    assert next(it) == ['a', 'b', 'c', 'a', 'b', 'c']
+    assert next(it) == ["a", "b", "c", "a", "b", "c"]
 
 
 def test_concat_merge():
-    dataset = Dataset.concat([
-        Dataset.from_subscriptable([1, 2]),
-        Dataset.from_subscriptable([1, 3, 5]),
-    ])
+    dataset = Dataset.concat(
+        [
+            Dataset.from_subscriptable([1, 2]),
+            Dataset.from_subscriptable([1, 3, 5]),
+        ]
+    )
 
-    datastream = Datastream.merge([
-        Datastream(dataset),
-        Datastream(dataset.subset(
-            lambda df: [index < 3 for index in range(len(df))]
-        )),
-    ])
+    datastream = Datastream.merge(
+        [
+            Datastream(dataset),
+            Datastream(
+                dataset.subset(lambda df: [index < 3 for index in range(len(df))])
+            ),
+        ]
+    )
 
-    assert len(dataset.subset(
-        lambda df: [index < 3 for index in range(len(df))]
-    )) == 3
+    assert len(dataset.subset(lambda df: [index < 3 for index in range(len(df))])) == 3
     assert len(list(datastream)) == 6
 
 
 def test_combine_concat_merge():
-    dataset = Dataset.concat([
-        Dataset.zip([
-            Dataset.from_subscriptable([1]),
-            Dataset.from_subscriptable([2]),
-        ]),
-        Dataset.combine([
-            Dataset.from_subscriptable([3, 3]),
-            Dataset.from_subscriptable([4, 4, 4]),
-        ]),
-    ])
-
-    datastream = Datastream.merge([
-        Datastream(dataset),
-        Datastream(Dataset.zip([
-            Dataset.from_subscriptable([5]),
-            Dataset.from_subscriptable([6]),
-        ])),
-    ])
+    dataset = Dataset.concat(
+        [
+            Dataset.zip(
+                [
+                    Dataset.from_subscriptable([1]),
+                    Dataset.from_subscriptable([2]),
+                ]
+            ),
+            Dataset.combine(
+                [
+                    Dataset.from_subscriptable([3, 3]),
+                    Dataset.from_subscriptable([4, 4, 4]),
+                ]
+            ),
+        ]
+    )
+
+    datastream = Datastream.merge(
+        [
+            Datastream(dataset),
+            Datastream(
+                Dataset.zip(
+                    [
+                        Dataset.from_subscriptable([5]),
+                        Dataset.from_subscriptable([6]),
+                    ]
+                )
+            ),
+        ]
+    )
 
     assert len(list(datastream)) == 2
@@ -573,14 +566,14 @@ def test_last_batch():
     from datastream.samplers import SequentialSampler
 
-    datastream = Datastream(
-        Dataset.from_subscriptable(list('abc'))
-    )
+    datastream = Datastream(Dataset.from_subscriptable(list("abc")))
     assert list(map(len, datastream.data_loader(batch_size=4))) == [3]
-    assert list(map(len, datastream.data_loader(batch_size=4, n_batches_per_epoch=2))) == [4, 4]
+    assert list(
+        map(len, datastream.data_loader(batch_size=4, n_batches_per_epoch=2))
+    ) == [4, 4]
 
     datastream = Datastream(
-        Dataset.from_subscriptable(list('abc')),
+        Dataset.from_subscriptable(list("abc")),
         SequentialSampler(3),
     )
     assert list(map(len, datastream.data_loader(batch_size=2))) == [2, 1]
diff --git a/datastream/samplers/merge_sampler.py b/datastream/samplers/merge_sampler.py
index 1bd5e0d..0dbc448 100644
--- a/datastream/samplers/merge_sampler.py
+++ b/datastream/samplers/merge_sampler.py
@@ -28,9 +28,7 @@ def __init__(self, samplers, datasets, ns):
             ns=ns,
             length=MergeSampler.merged_samplers_length(samplers, ns),
             from_mapping=Dataset.create_from_concat_mapping(datasets),
-            merged_samplers=MergeSampler.merge_samplers(
-                samplers, datasets, ns
-            ),
+            merged_samplers=MergeSampler.merge_samplers(samplers, datasets, ns),
         )
 
     def __len__(self):
@@ -41,10 +39,7 @@ def __iter__(self):
 
     @staticmethod
     def merged_samplers_length(samplers, ns):
-        return (
-            min([len(sampler) / n for sampler, n in zip(samplers, ns)])
-            * sum(ns)
-        )
+        return min([len(sampler) / n for sampler, n in zip(samplers, ns)]) * sum(ns)
 
     @staticmethod
     def merge_samplers(samplers, datasets, ns):
@@ -54,13 +49,18 @@ def batch(iterable, n):
             while True:
                 yield [next(iterable) for _ in range(n)]
 
-        index_batch = zip(*[
-            batch(map(
-                partial(to_mapping, dataset_index),
-                repeat_map_chain(iter, sampler),
-            ), n)
-            for dataset_index, (sampler, n) in enumerate(zip(samplers, ns))
-        ])
+        index_batch = zip(
+            *[
+                batch(
+                    map(
+                        partial(to_mapping, dataset_index),
+                        repeat_map_chain(iter, sampler),
+                    ),
+                    n,
+                )
+                for dataset_index, (sampler, n) in enumerate(zip(samplers, ns))
+            ]
+        )
 
         return chain.from_iterable(chain.from_iterable(index_batch))
@@ -74,25 +74,18 @@ def update_weights_(self, function):
 
     def update_example_weight_(self, weight, index):
         dataset_index, inner_index = self.from_mapping(index)
-        self.samplers[dataset_index].update_example_weight_(
-            weight, inner_index
-        )
+        self.samplers[dataset_index].update_example_weight_(weight, inner_index)
 
     def sample_proportion(self, proportion):
         return MergeSampler(
-            [
-                sampler.sample_proportion(proportion)
-                for sampler in self.samplers
-            ],
+            [sampler.sample_proportion(proportion) for sampler in self.samplers],
             self.datasets,
             self.ns,
         )
 
     def state_dict(self):
-        return dict(
-            samplers=[sampler.state_dict() for sampler in self.samplers]
-        )
+        return dict(samplers=[sampler.state_dict() for sampler in self.samplers])
 
     def load_state_dict(self, state_dict):
-        for sampler, state_dict in zip(self.samplers, state_dict['samplers']):
+        for sampler, state_dict in zip(self.samplers, state_dict["samplers"]):
             sampler.load_state_dict(state_dict)
diff --git a/datastream/samplers/multi_sampler.py b/datastream/samplers/multi_sampler.py
index 40a60c1..c048736 100644
--- a/datastream/samplers/multi_sampler.py
+++ b/datastream/samplers/multi_sampler.py
@@ -28,7 +28,7 @@ def __init__(self, samplers, dataset):
             merged_samplers=MultiSampler.merge_samplers(
                 samplers,
                 [1 for _ in samplers],
-            )
+            ),
         )
 
     @staticmethod
@@ -50,10 +50,12 @@ def batch(iterable, n):
             while True:
                 yield [next(iterable) for _ in range(n)]
 
-        index_batch = zip(*[
-            batch(repeat_map_chain(iter, sampler), n)
-            for sampler, n in zip(samplers, ns)
-        ])
+        index_batch = zip(
+            *[
+                batch(repeat_map_chain(iter, sampler), n)
+                for sampler, n in zip(samplers, ns)
+            ]
+        )
 
         return chain.from_iterable(chain.from_iterable(index_batch))
@@ -66,24 +68,17 @@ def update_weights_(self, function):
 
     def update_example_weight_(self, weights, index):
         for sampler, weight in zip(self.samplers, weights):
-            sampler.update_example_weight_(
-                weight, index
-            )
+            sampler.update_example_weight_(weight, index)
 
     def sample_proportion(self, proportion):
         return MultiSampler(
-            [
-                sampler.sample_proportion(proportion)
-                for sampler in self.samplers
-            ],
-            self.dataset
+            [sampler.sample_proportion(proportion) for sampler in self.samplers],
+            self.dataset,
        )
 
     def state_dict(self):
-        return dict(
-            samplers=[sampler.state_dict() for sampler in self.samplers]
-        )
+        return dict(samplers=[sampler.state_dict() for sampler in self.samplers])
 
     def load_state_dict(self, state_dict):
-        for sampler, state_dict in zip(self.samplers, state_dict['samplers']):
+        for sampler, state_dict in zip(self.samplers, state_dict["samplers"]):
             sampler.load_state_dict(state_dict)
diff --git a/datastream/samplers/repeat_sampler.py b/datastream/samplers/repeat_sampler.py
index 2ec92ce..523c922 100644
--- a/datastream/samplers/repeat_sampler.py
+++ b/datastream/samplers/repeat_sampler.py
@@ -14,16 +14,16 @@ class Config:
         arbitrary_types_allowed = True
 
     def __init__(self, sampler, length, epoch_bound=False):
-        '''
+        """
         Wrapper that repeats and limits length of sampling based on
         epoch length and batch size
-        '''
+        """
         BaseModel.__init__(
             self,
             sampler=sampler,
             length=length,
             epoch_bound=epoch_bound,
-            queue=iter(sampler)
+            queue=iter(sampler),
         )
 
     def __iter__(self):
diff --git a/datastream/samplers/sequential_sampler.py b/datastream/samplers/sequential_sampler.py
index 9a0ca33..9850b04 100644
--- a/datastream/samplers/sequential_sampler.py
+++ b/datastream/samplers/sequential_sampler.py
@@ -12,8 +12,7 @@ class Config:
 
     def __init__(self, length):
         BaseModel.__init__(
-            self,
-            sampler=torch.utils.data.SequentialSampler(torch.ones(length))
+            self, sampler=torch.utils.data.SequentialSampler(torch.ones(length))
         )
 
     def __len__(self):
@@ -23,7 +22,4 @@ def __iter__(self):
         return iter(self.sampler)
 
     def sample_proportion(self, proportion):
-        return SequentialSampler(min(
-            len(self),
-            int(len(self) * proportion)
-        ))
+        return SequentialSampler(min(len(self), int(len(self) * proportion)))
diff --git a/datastream/samplers/standard_sampler.py b/datastream/samplers/standard_sampler.py
index 113366d..1d6ed77 100644
--- a/datastream/samplers/standard_sampler.py
+++ b/datastream/samplers/standard_sampler.py
@@ -21,7 +21,7 @@ def __init__(self, length, proportion=1.0, replacement=False):
                 torch.ones(length).double(),
                 num_samples=int(max(1, min(length, length * proportion))),
                 replacement=replacement,
-            )
+            ),
         )
 
     def __len__(self):
@@ -41,7 +41,7 @@ def update_weights_(self, function):
         self.sampler.weights[:] = function(self.sampler.weights)
 
     def update_example_weight_(self, weight, index):
-        if hasattr(weight, 'item'):
+        if hasattr(weight, "item"):
             weight = weight.item()
 
         self.sampler.weights[index] = weight
@@ -59,4 +59,4 @@ def state_dict(self):
         return dict(weights=self.sampler.weights)
 
     def load_state_dict(self, state_dict):
-        self.sampler.weights[:] = state_dict['weights']
+        self.sampler.weights[:] = state_dict["weights"]
diff --git a/datastream/samplers/zip_sampler.py b/datastream/samplers/zip_sampler.py
index ccb6ea8..51faa78 100644
--- a/datastream/samplers/zip_sampler.py
+++ b/datastream/samplers/zip_sampler.py
@@ -50,9 +50,7 @@ def zip_samplers(samplers, datasets):
     def weight(self, index):
         return [
             sampler.weight(inner_index)
-            for sampler, inner_index in zip(
-                self.samplers, self.from_mapping(index)
-            )
+            for sampler, inner_index in zip(self.samplers, self.from_mapping(index))
         ]
 
     def update_weights_(self, function):
@@ -61,24 +59,17 @@ def update_weights_(self, function):
 
     def update_example_weight_(self, weights, index):
         inner_indices = self.from_mapping(index)
-        for sampler, weight, inner_index in zip(
-            self.samplers, weights, inner_indices
-        ):
-            sampler.update_example_weight_(
-                weight, inner_index
-            )
+        for sampler, weight, inner_index in zip(self.samplers, weights, inner_indices):
+            sampler.update_example_weight_(weight, inner_index)
 
     def sample_proportion(self, proportion):
-        return ZipSampler([
-            sampler.sample_proportion(proportion)
-            for sampler in self.samplers
-        ])
+        return ZipSampler(
+            [sampler.sample_proportion(proportion) for sampler in self.samplers]
+        )
 
     def state_dict(self):
-        return dict(
-            samplers=[sampler.state_dict() for sampler in self.samplers]
-        )
+        return dict(samplers=[sampler.state_dict() for sampler in self.samplers])
 
     def load_state_dict(self, state_dict):
-        for sampler, state_dict in zip(self.samplers, state_dict['samplers']):
+        for sampler, state_dict in zip(self.samplers, state_dict["samplers"]):
             sampler.load_state_dict(state_dict)
diff --git a/datastream/tools/numpy_seed.py b/datastream/tools/numpy_seed.py
index 738bedc..d389cfe 100644
--- a/datastream/tools/numpy_seed.py
+++ b/datastream/tools/numpy_seed.py
@@ -3,7 +3,8 @@
 
 
 def numpy_seed(seed):
-    '''Function decorator that sets a temporary numpy seed during execution'''
+    """Function decorator that sets a temporary numpy seed during execution"""
+
     def decorator(fn):
         @wraps(fn)
         def seeded_function(*args, **kwargs):
@@ -12,12 +13,13 @@ def seeded_function(*args, **kwargs):
             output = fn(*args, **kwargs)
             np.random.set_state(random_state)
             return output
+
         return seeded_function
+
     return decorator
 
 
 def test_numpy_seed():
-
     def get_random_uniform(min, max):
         return np.random.random() * (max - min) + min
@@ -25,12 +27,10 @@ def get_random_uniform(min, max):
     numpy_seed(1)(get_random_uniform)(-1, 1)
     assert np.all(random_state[1] == np.random.get_state()[1])
 
-    assert (
-        numpy_seed(1)(get_random_uniform)(-1, 1) ==
-        numpy_seed(1)(get_random_uniform)(-1, 1)
-    )
+    assert numpy_seed(1)(get_random_uniform)(-1, 1) == numpy_seed(1)(
+        get_random_uniform
+    )(-1, 1)
 
-    assert (
-        numpy_seed(1)(get_random_uniform)(-1, 1) !=
-        numpy_seed(None)(get_random_uniform)(-1, 1)
-    )
+    assert numpy_seed(1)(get_random_uniform)(-1, 1) != numpy_seed(None)(
+        get_random_uniform
+    )(-1, 1)
diff --git a/datastream/tools/split_dataframes.py b/datastream/tools/split_dataframes.py
index ac05935..4a5d667 100644
--- a/datastream/tools/split_dataframes.py
+++ b/datastream/tools/split_dataframes.py
@@ -14,45 +14,50 @@ def split_dataframes(
     filepath: Optional[Path] = None,
     frozen: Optional[bool] = False,
 ):
-    '''
+    """
     Split and save result. Add new examples and continue from the old split.
As new examples come in it can handle: - Changing test size - Adapt after removing examples from dataset - Adapt to new stratification - ''' + """ if abs(sum(proportions.values()) - 1.0) >= 1e-5: - raise ValueError(' '.join([ - 'Expected sum of proportions to be 1.', - f'Proportions were {tuple(proportions.values())}', - ])) + raise ValueError( + " ".join( + [ + "Expected sum of proportions to be 1.", + f"Proportions were {tuple(proportions.values())}", + ] + ) + ) if filepath is not None and filepath.exists(): split = json.loads(filepath.read_text()) if set(proportions.keys()) != set(split.keys()): - raise ValueError(' '.join([ - 'Expected split names in split file to be the same as the', - 'keys in proportions', - ])) + raise ValueError( + " ".join( + [ + "Expected split names in split file to be the same as the", + "keys in proportions", + ] + ) + ) else: - split = { - split_name: list() - for split_name in proportions.keys() - } + split = {split_name: list() for split_name in proportions.keys()} key_dataframe = pd.DataFrame({key_column: np.sort(dataframe[key_column].unique())}) if frozen: if sum(map(len, split.values())) == 0: - raise ValueError('Frozen split is empty') + raise ValueError("Frozen split is empty") n_unassigned = (~key_dataframe[key_column].isin(sum(split.values(), []))).sum() if n_unassigned > 0: warnings.warn( ( - f'Found {n_unassigned} unassigned examples when splitting the dataset.' - ' The split is frozen so they will will be discarded' + f"Found {n_unassigned} unassigned examples when splitting the dataset." + " The split is frozen so they will will be discarded" ), UserWarning, ) @@ -120,23 +125,23 @@ def n_target_split(keys, proportion): def selected(k, unassigned): - return np.random.choice( - unassigned, size=k, replace=False - ).tolist() + return np.random.choice(unassigned, size=k, replace=False).tolist() def mock_dataframe(): - return pd.DataFrame(dict( - index=np.arange(100), - number=np.random.randn(100), - )) + return pd.DataFrame( + dict( + index=np.arange(100), + number=np.random.randn(100), + ) + ) def test_standard(): - split_file = Path('test_standard.json') + split_file = Path("test_standard.json") split_dataframes_ = split_dataframes( mock_dataframe(), - key_column='index', + key_column="index", proportions=dict( gradient=0.8, early_stopping=0.1, @@ -151,18 +156,17 @@ def test_standard(): def test_group_split_dataframe(): - dataframe = mock_dataframe().assign(group=lambda df: df['index'] // 4) + dataframe = mock_dataframe().assign(group=lambda df: df["index"] // 4) split_dataframes_ = split_dataframes( dataframe, - key_column='group', + key_column="group", proportions=dict( train=0.8, compare=0.2, ), ) - group_overlap = ( - set(split_dataframes_['train'].group) - .intersection(split_dataframes_['compare'].group) + group_overlap = set(split_dataframes_["train"].group).intersection( + split_dataframes_["compare"].group ) assert len(group_overlap) == 0 assert tuple(map(len, split_dataframes_.values())) == (80, 20) @@ -171,11 +175,11 @@ def test_group_split_dataframe(): def test_validate_proportions(): from pytest import raises - split_file = Path('test_validate_proportions.json') + split_file = Path("test_validate_proportions.json") with raises(ValueError): split_dataframes( mock_dataframe(), - key_column='index', + key_column="index", proportions=dict(train=0.4, test=0.4), filepath=split_file, ) @@ -184,11 +188,11 @@ def test_validate_proportions(): def test_missing_key_column(): from pytest import raises - split_file = 
Path('test_missing_key_column.json') + split_file = Path("test_missing_key_column.json") with raises(KeyError): split_dataframes( mock_dataframe(), - key_column='should_fail', + key_column="should_fail", proportions=dict(train=0.8, test=0.2), filepath=split_file, ) @@ -198,10 +202,10 @@ def test_missing_key_column(): def test_no_split(): - '''we do not need to support this''' + """we do not need to support this""" split_dataframes( mock_dataframe(), - key_column='index', + key_column="index", proportions=dict(all=1.0), ) @@ -209,7 +213,7 @@ def test_no_split(): def test_split_empty(): split_dataframes_ = split_dataframes( mock_dataframe().iloc[:0], - key_column='index', + key_column="index", proportions=dict(train=0.8, test=0.2), ) for df in split_dataframes_.values(): @@ -219,20 +223,20 @@ def test_split_empty(): def test_split_single_row(): split_dataframes_ = split_dataframes( mock_dataframe().iloc[:1], - key_column='index', + key_column="index", proportions=dict(train=0.9999, test=0.0001), ) - assert len(split_dataframes_['train']) == 1 - assert len(split_dataframes_['test']) == 0 + assert len(split_dataframes_["train"]) == 1 + assert len(split_dataframes_["test"]) == 0 def test_changed_split_names(): from pytest import raises - split_file = Path('test_changed_split_names.json') + split_file = Path("test_changed_split_names.json") split_dataframes( mock_dataframe(), - key_column='index', + key_column="index", proportions=dict(train=0.8, test=0.2), filepath=split_file, ) @@ -240,7 +244,7 @@ def test_changed_split_names(): with raises(ValueError): split_dataframes( mock_dataframe(), - key_column='index', + key_column="index", proportions=dict(should_fail=0.8, test=0.2), filepath=split_file, ) @@ -255,15 +259,15 @@ def test_frozen(): with raises(ValueError): split_dataframes( dataframe, - key_column='index', + key_column="index", proportions=dict(train=0.8, test=0.2), frozen=True, ) - split_file = Path('test_frozen.json') + split_file = Path("test_frozen.json") split_dataframes( dataframe, - key_column='index', + key_column="index", proportions=dict(train=0.8, test=0.2), filepath=split_file, ) diff --git a/datastream/tools/star.py b/datastream/tools/star.py index 056dc67..7d4c8aa 100644 --- a/datastream/tools/star.py +++ b/datastream/tools/star.py @@ -2,8 +2,10 @@ def star(fn): - '''Wrap function to expand input to arguments''' + """Wrap function to expand input to arguments""" + @wraps(fn) def wrapper(args): return fn(*args) + return wrapper diff --git a/datastream/tools/starcompose.py b/datastream/tools/starcompose.py index 6371c5a..c41a706 100644 --- a/datastream/tools/starcompose.py +++ b/datastream/tools/starcompose.py @@ -1,11 +1,9 @@ - - def starcompose(*transforms): - ''' + """ left compose functions together and expand tuples to args Use starcompose.debug for verbose output when debugging - ''' + """ # TODO: consider doing starcompose with inner function calls rather than # a loop @@ -16,24 +14,28 @@ def _compose(*x): else: x = t(x) return x + return _compose def starcompose_debug(*transforms): - ''' + """ verbose starcompose for debugging - ''' - print('starcompose debug') + """ + print("starcompose debug") + def _compose(*x): for index, t in enumerate(transforms): - print(f'{index}:, fn={t}, x={x}') + print(f"{index}:, fn={t}, x={x}") if type(x) is tuple: x = t(*x) else: x = t(x) return x + return _compose + starcompose.debug = starcompose_debug @@ -42,11 +44,11 @@ def test_starcompose(): test = starcompose(lambda x, y: x + y) if test(3, 5) != 8: - raise Exception('Two args 
diff --git a/datastream/tools/star.py index 056dc67..7d4c8aa 100644 --- a/datastream/tools/star.py +++ b/datastream/tools/star.py @@ -2,8 +2,10 @@ def star(fn): - '''Wrap function to expand input to arguments''' + """Wrap function to expand input to arguments""" + @wraps(fn) def wrapper(args): return fn(*args) + return wrapper diff --git a/datastream/tools/starcompose.py index 6371c5a..c41a706 100644 --- a/datastream/tools/starcompose.py +++ b/datastream/tools/starcompose.py @@ -1,11 +1,9 @@ - - def starcompose(*transforms): - ''' + """ left compose functions together and expand tuples to args Use starcompose.debug for verbose output when debugging - ''' + """ # TODO: consider doing starcompose with inner function calls rather than # a loop @@ -16,24 +14,28 @@ def _compose(*x): else: x = t(x) return x + return _compose def starcompose_debug(*transforms): - ''' + """ verbose starcompose for debugging - ''' - print('starcompose debug') + """ + print("starcompose debug") + def _compose(*x): for index, t in enumerate(transforms): - print(f'{index}:, fn={t}, x={x}') + print(f"{index}: fn={t}, x={x}") if type(x) is tuple: x = t(*x) else: x = t(x) return x + return _compose + starcompose.debug = starcompose_debug @@ -42,11 +44,11 @@ def test_starcompose(): test = starcompose(lambda x, y: x + y) if test(3, 5) != 8: - raise Exception('Two args inputs failed') + raise Exception("Two args inputs failed") test = starcompose(lambda x: sum(x)) if test((3, 5)) != 8: - raise Exception('Tuple input failed') + raise Exception("Tuple input failed") test = starcompose( lambda x: (x, x), @@ -54,4 +56,4 @@ lambda x: x * 2, ) if test(10) != 40: - raise Exception('Expanded tuple for inner function failed') + raise Exception("Expanded tuple for inner function failed")
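# A minimal sketch of the two helpers above, assuming both are re-exported from
# datastream.tools: star wraps a function so that a single tuple argument is
# expanded into positional arguments, and starcompose left-composes transforms,
# expanding intermediate tuples the same way.
from datastream.tools import star, starcompose  # assumed re-export

add = star(lambda x, y: x + y)
assert add((3, 5)) == 8  # one tuple in, two positional arguments inside

pipeline = starcompose(
    lambda x: (x, x),    # returns a tuple ...
    lambda x, y: x + y,  # ... which starcompose expands into two arguments
    lambda x: x * 2,
)
assert pipeline(10) == 40  # 10 -> (10, 10) -> 20 -> 40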
diff --git a/datastream/tools/stratified_split.py index 6113f3d..ecc47ba 100644 --- a/datastream/tools/stratified_split.py +++ b/datastream/tools/stratified_split.py @@ -13,19 +13,14 @@ def stratified_split( seed: Optional[int] = None, frozen: Optional[bool] = False, ): - if ( - stratify_column is not None - and any(dataset.dataframe[key_column].duplicated()) - ): + if stratify_column is not None and any(dataset.dataframe[key_column].duplicated()): # mathematically impossible in the general case warnings.warn( - 'Trying to do stratified split with non-unique key column' - ' - cannot guarantee correct splitting of key values.' + "Trying to do stratified split with non-unique key column" + " - cannot guarantee correct splitting of key values." ) strata = { - stratum_value: dataset.subset( - lambda df: df[stratify_column] == stratum_value - ) + stratum_value: dataset.subset(lambda df: df[stratify_column] == stratum_value) for stratum_value in dataset.dataframe[stratify_column].unique() } split_strata = [ diff --git a/datastream/tools/verify_split.py index 2969d56..638dfba 100644 --- a/datastream/tools/verify_split.py +++ b/datastream/tools/verify_split.py @@ -5,7 +5,7 @@ @validate_arguments def verify_split(old_path: Path, new_path: Path): - ''' + """ Verify that no keys from an old split are present in a different new split. .. highlight:: python @@ -16,7 +16,7 @@ def verify_split(old_path: Path, new_path: Path): "path/to/new/split.json", ) - ''' + """ for old_split_name, old_split in json.loads(old_path.read_text()).items(): for new_split_name, new_split in json.loads(new_path.read_text()).items(): if ( @@ -26,8 +26,13 @@ def verify_split(old_path: Path, new_path: Path): raise ValueError( f'Some keys from old split "{old_split_name}"' f' are present in new split "{new_split_name}":\n' - + str("\n".join( - [str(old_split[index]) for index in range(min(10, len(old_split)))] - + (["..."] if len(old_split) > 10 else []) - )) + + str( + "\n".join( + [ + str(old_split[index]) + for index in range(min(10, len(old_split))) + ] + + (["..."] if len(old_split) > 10 else []) + ) + ) )
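# A minimal sketch of the contract verify_split enforces: a key may not move to
# a different split when a split file is regenerated. Split files map split
# names to lists of keys; the file names and contents here are illustrative,
# and the re-export from datastream.tools is an assumption.
import json
from pathlib import Path

from datastream.tools import verify_split  # assumed re-export

Path("old_split.json").write_text(json.dumps(dict(train=[1, 2, 3], test=[4])))
Path("new_split.json").write_text(json.dumps(dict(train=[1, 2], test=[3, 4])))

# Key 3 moved from "train" to "test", so this raises ValueError and lists keys
# from the offending old split (at most ten, then "...").
verify_split("old_split.json", "new_split.json")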
diff --git a/docs/source/requirements.txt index 2a4446d..486aab0 100644 --- a/docs/source/requirements.txt +++ b/docs/source/requirements.txt @@ -23,18 +23,18 @@ lazy-object-proxy==1.4.3 MarkupSafe==1.1.1 mccabe==0.6.1 more-itertools==8.3.0 -numpy==1.18.5 +numpy==1.23.4 packaging==20.4 pandas==1.1.5 pkginfo==1.5.0.1 pluggy==0.13.1 -py==1.10.0 +py==1.11.0 pycparser==2.20 pydantic==1.8.2 Pygments==2.7.4 pylint==2.5.3 pyparsing==2.4.7 -pyspark==3.0.3 +pyspark==3.3.0 pytest==5.4.3 python-dateutil==2.8.1 pytz==2020.1 @@ -58,12 +58,12 @@ sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.4 toml==0.10.1 -torch==1.8.1 +torch==1.12.1 tqdm==4.46.1 twine==3.1.1 typing-extensions==3.10.0.0 urllib3==1.26.5 -waitress==1.4.4 +waitress==2.1.2 wcwidth==0.2.4 webencodings==0.5.1 WebOb==1.8.6 diff --git a/poetry.lock index d316bcf..6717252 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,20 +1,22 @@ [[package]] name = "astroid" -version = "2.9.0" +version = "2.12.12" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false -python-versions = "~=3.6" +python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = ">=1.11,<1.14" +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] [[package]] name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" description = "Atomic file writes." category = "dev" optional = false @@ -22,25 +24,69 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.2.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" + +[package.extras] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "black" +version = "22.10.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "dill" +version = "0.3.6" +description = "serialize all of python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] [[package]] name = "flake8" @@ -51,28 +97,10 @@ optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" -[[package]] -name = "importlib-metadata" -version = "4.8.2" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - [[package]] name = "iniconfig" version = "1.1.1" @@ -90,18 +118,18 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "lazy-object-proxy" -version = "1.6.0" +version = "1.7.1" description = "A fast and thorough lazy object proxy." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.6" [[package]] name = "mccabe" @@ -111,13 +139,21 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "numpy" -version = "1.21.1" +version = "1.23.4" description = "NumPy is the fundamental package for array computing with Python." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [[package]] name = "packaging" @@ -132,30 +168,41 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandas" -version = "1.1.5" +version = "1.5.1" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8" [package.dependencies] -numpy = ">=1.15.4" -python-dateutil = ">=2.7.3" -pytz = ">=2017.2" +numpy = [ + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, +] +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" [package.extras] -test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pathspec" +version = "0.10.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" [[package]] name = "platformdirs" -version = "2.4.0" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] @@ -166,9 +213,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -191,14 +235,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.8.2" -description = "Data validation and settings management using python 3.6 type hinting" +version = "1.10.2" +description = "Data validation and settings management using python type hints" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -214,28 +258,34 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pylint" -version = "2.12.2" +version = "2.15.5" description = "python code static checker" category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.9.0,<2.10" -colorama = {version = "*", markers = "sys_platform == \"win32\""} +astroid = ">=2.12.12,<=2.14.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = ">=0.2" isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" +mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" -toml = ">=0.9.2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pyparsing" -version = "3.0.6" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing 
module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] @@ -252,7 +302,6 @@ python-versions = ">=3.6" atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -275,7 +324,7 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2021.3" +version = "2022.5" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -297,83 +346,105 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomlkit" +version = "0.11.5" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + [[package]] name = "torch" -version = "1.10.0" +version = "1.12.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" category = "main" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" [package.dependencies] typing-extensions = "*" -[[package]] -name = "typed-ast" -version = "1.5.1" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "wrapt" -version = "1.13.3" +version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -[[package]] -name = "zipp" -version = "3.6.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] - [metadata] lock-version = "1.1" -python-versions = "^3.7" -content-hash = "7e29af1eccd6b0d1c398d353128c5fb13e85acd55ac70fa0fbc943f1f02494be" +python-versions = "^3.8" +content-hash = "5666571ca26f093ff80da9c09423e0b6ed6ae8f403b546a073c53cedeccfa006" [metadata.files] astroid = [ - {file = "astroid-2.9.0-py3-none-any.whl", hash = "sha256:776ca0b748b4ad69c00bfe0fff38fa2d21c338e12c84aa9715ee0d473c422778"}, - {file = "astroid-2.9.0.tar.gz", hash = "sha256:5939cf55de24b92bda00345d4d0659d01b3c7dafb5055165c330bc7c568ba273"}, + {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"}, + {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"}, ] atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +black = [ + {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, + {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, + {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, + {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, + {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, + {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, + {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, + {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, + {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, + {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, + {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, + {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, + {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, + {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, + {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, + {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, + {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, + {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, + {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, + {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, + {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +dill = [ + {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, + {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, ] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.8.2-py3-none-any.whl", hash = "sha256:53ccfd5c134223e497627b9815d5030edf77d2ed573922f7a0b8f8bb81a1c100"}, - {file = "importlib_metadata-4.8.2.tar.gz", hash = "sha256:75bdec14c397f528724c1bfd9709d660b33a4d2e77387a3358f20b848bb5e5fb"}, -] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = 
"iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -383,96 +454,122 @@ isort = [ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, + {file = 
"lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, + {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] numpy = [ - {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, - {file = 
"numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, - {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, - {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, - {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, - {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, - {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, - {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, - {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, - {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, + {file = "numpy-1.23.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d79ada05005f6f4f337d3bb9de8a7774f259341c70bc88047a1f7b96a4bcb2"}, + {file = "numpy-1.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:926db372bc4ac1edf81cfb6c59e2a881606b409ddc0d0920b988174b2e2a767f"}, + {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c237129f0e732885c9a6076a537e974160482eab8f10db6292e92154d4c67d71"}, + {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8365b942f9c1a7d0f0dc974747d99dd0a0cdfc5949a33119caf05cb314682d3"}, + {file = "numpy-1.23.4-cp310-cp310-win32.whl", hash = "sha256:2341f4ab6dba0834b685cce16dad5f9b6606ea8a00e6da154f5dbded70fdc4dd"}, + {file = "numpy-1.23.4-cp310-cp310-win_amd64.whl", hash = "sha256:d331afac87c92373826af83d2b2b435f57b17a5c74e6268b79355b970626e329"}, + {file = "numpy-1.23.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:488a66cb667359534bc70028d653ba1cf307bae88eab5929cd707c761ff037db"}, + {file = "numpy-1.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce03305dd694c4873b9429274fd41fc7eb4e0e4dea07e0af97a933b079a5814f"}, + {file = "numpy-1.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8981d9b5619569899666170c7c9748920f4a5005bf79c72c07d08c8a035757b0"}, + {file = "numpy-1.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a70a7d3ce4c0e9284e92285cba91a4a3f5214d87ee0e95928f3614a256a1488"}, + {file = "numpy-1.23.4-cp311-cp311-win32.whl", hash = "sha256:5e13030f8793e9ee42f9c7d5777465a560eb78fa7e11b1c053427f2ccab90c79"}, + {file = "numpy-1.23.4-cp311-cp311-win_amd64.whl", hash = "sha256:7607b598217745cc40f751da38ffd03512d33ec06f3523fb0b5f82e09f6f676d"}, + {file = "numpy-1.23.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ab46e4e7ec63c8a5e6dbf5c1b9e1c92ba23a7ebecc86c336cb7bf3bd2fb10e5"}, + {file = "numpy-1.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8aae2fb3180940011b4862b2dd3756616841c53db9734b27bb93813cd79fce6"}, + {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c053d7557a8f022ec823196d242464b6955a7e7e5015b719e76003f63f82d0f"}, + {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0882323e0ca4245eb0a3d0a74f88ce581cc33aedcfa396e415e5bba7bf05f68"}, + {file = "numpy-1.23.4-cp38-cp38-win32.whl", hash = "sha256:dada341ebb79619fe00a291185bba370c9803b1e1d7051610e01ed809ef3a4ba"}, + {file = "numpy-1.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:0fe563fc8ed9dc4474cbf70742673fc4391d70f4363f917599a7fa99f042d5a8"}, + {file = "numpy-1.23.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c67b833dbccefe97cdd3f52798d430b9d3430396af7cdb2a0c32954c3ef73894"}, + {file = "numpy-1.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f76025acc8e2114bb664294a07ede0727aa75d63a06d2fae96bf29a81747e4a7"}, + {file = "numpy-1.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ac457b63ec8ded85d85c1e17d85efd3c2b0967ca39560b307a35a6703a4735"}, + {file = "numpy-1.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95de7dc7dc47a312f6feddd3da2500826defdccbc41608d0031276a24181a2c0"}, + {file = "numpy-1.23.4-cp39-cp39-win32.whl", hash = 
"sha256:f2f390aa4da44454db40a1f0201401f9036e8d578a25f01a6e237cea238337ef"}, + {file = "numpy-1.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:f260da502d7441a45695199b4e7fd8ca87db659ba1c78f2bbf31f934fe76ae0e"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61be02e3bf810b60ab74e81d6d0d36246dbfb644a462458bb53b595791251911"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296d17aed51161dbad3c67ed6d164e51fcd18dbcd5dd4f9d0a9c6055dce30810"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4d52914c88b4930dafb6c48ba5115a96cbab40f45740239d9f4159c4ba779962"}, + {file = "numpy-1.23.4.tar.gz", hash = "sha256:ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pandas = [ - {file = "pandas-1.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a780260afc88268a9d3ac3511d8f494fdcf637eece62fb9eb656a63d53eb7ca"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b61080750d19a0122469ab59b087380721d6b72a4e7d962e4d7e63e0c4504814"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0de3ddb414d30798cbf56e642d82cac30a80223ad6fe484d66c0ce01a84d6f2f"}, - {file = "pandas-1.1.5-cp36-cp36m-win32.whl", hash = "sha256:70865f96bb38fec46f7ebd66d4b5cfd0aa6b842073f298d621385ae3898d28b5"}, - {file = "pandas-1.1.5-cp36-cp36m-win_amd64.whl", hash = "sha256:19a2148a1d02791352e9fa637899a78e371a3516ac6da5c4edc718f60cbae648"}, - {file = "pandas-1.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26fa92d3ac743a149a31b21d6f4337b0594b6302ea5575b37af9ca9611e8981a"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c16d59c15d946111d2716856dd5479221c9e4f2f5c7bc2d617f39d870031e086"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3be7a7a0ca71a2640e81d9276f526bca63505850add10206d0da2e8a0a325dae"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:573fba5b05bf2c69271a32e52399c8de599e4a15ab7cec47d3b9c904125ab788"}, - {file = "pandas-1.1.5-cp37-cp37m-win32.whl", hash = "sha256:21b5a2b033380adbdd36b3116faaf9a4663e375325831dac1b519a44f9e439bb"}, - {file = "pandas-1.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:24c7f8d4aee71bfa6401faeba367dd654f696a77151a8a28bc2013f7ced4af98"}, - {file = "pandas-1.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2860a97cbb25444ffc0088b457da0a79dc79f9c601238a3e0644312fcc14bf11"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5008374ebb990dad9ed48b0f5d0038124c73748f5384cc8c46904dace27082d9"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2c2f7c670ea4e60318e4b7e474d56447cf0c7d83b3c2a5405a0dbb2600b9c48e"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0a643bae4283a37732ddfcecab3f62dd082996021b980f580903f4e8e01b3c5b"}, - {file = "pandas-1.1.5-cp38-cp38-win32.whl", hash = "sha256:5447ea7af4005b0daf695a316a423b96374c9c73ffbd4533209c5ddc369e644b"}, - {file = "pandas-1.1.5-cp38-cp38-win_amd64.whl", hash = 
"sha256:4c62e94d5d49db116bef1bd5c2486723a292d79409fc9abd51adf9e05329101d"}, - {file = "pandas-1.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:731568be71fba1e13cae212c362f3d2ca8932e83cb1b85e3f1b4dd77d019254a"}, - {file = "pandas-1.1.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c61c043aafb69329d0f961b19faa30b1dab709dd34c9388143fc55680059e55a"}, - {file = "pandas-1.1.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2b1c6cd28a0dfda75c7b5957363333f01d370936e4c6276b7b8e696dd500582a"}, - {file = "pandas-1.1.5-cp39-cp39-win32.whl", hash = "sha256:c94ff2780a1fd89f190390130d6d36173ca59fcfb3fe0ff596f9a56518191ccb"}, - {file = "pandas-1.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:edda9bacc3843dfbeebaf7a701763e68e741b08fccb889c003b0a52f0ee95782"}, - {file = "pandas-1.1.5.tar.gz", hash = "sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"}, + {file = "pandas-1.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a78e05ec09731c5b3bd7a9805927ea631fe6f6cb06f0e7c63191a9a778d52b4"}, + {file = "pandas-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5b0c970e2215572197b42f1cff58a908d734503ea54b326412c70d4692256391"}, + {file = "pandas-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f340331a3f411910adfb4bbe46c2ed5872d9e473a783d7f14ecf49bc0869c594"}, + {file = "pandas-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8c709f4700573deb2036d240d140934df7e852520f4a584b2a8d5443b71f54d"}, + {file = "pandas-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32e3d9f65606b3f6e76555bfd1d0b68d94aff0929d82010b791b6254bf5a4b96"}, + {file = "pandas-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a52419d9ba5906db516109660b114faf791136c94c1a636ed6b29cbfff9187ee"}, + {file = "pandas-1.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:66a1ad667b56e679e06ba73bb88c7309b3f48a4c279bd3afea29f65a766e9036"}, + {file = "pandas-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36aa1f8f680d7584e9b572c3203b20d22d697c31b71189322f16811d4ecfecd3"}, + {file = "pandas-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bcf1a82b770b8f8c1e495b19a20d8296f875a796c4fe6e91da5ef107f18c5ecb"}, + {file = "pandas-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c25e5c16ee5c0feb6cf9d982b869eec94a22ddfda9aa2fbed00842cbb697624"}, + {file = "pandas-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:932d2d7d3cab44cfa275601c982f30c2d874722ef6396bb539e41e4dc4618ed4"}, + {file = "pandas-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:eb7e8cf2cf11a2580088009b43de84cabbf6f5dae94ceb489f28dba01a17cb77"}, + {file = "pandas-1.5.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cb2a9cf1150302d69bb99861c5cddc9c25aceacb0a4ef5299785d0f5389a3209"}, + {file = "pandas-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:81f0674fa50b38b6793cd84fae5d67f58f74c2d974d2cb4e476d26eee33343d0"}, + {file = "pandas-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17da7035d9e6f9ea9cdc3a513161f8739b8f8489d31dc932bc5a29a27243f93d"}, + {file = "pandas-1.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:669c8605dba6c798c1863157aefde959c1796671ffb342b80fcb80a4c0bc4c26"}, + {file = "pandas-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683779e5728ac9138406c59a11e09cd98c7d2c12f0a5fc2b9c5eecdbb4a00075"}, + {file = "pandas-1.5.1-cp38-cp38-win32.whl", hash = 
"sha256:ddf46b940ef815af4e542697eaf071f0531449407a7607dd731bf23d156e20a7"}, + {file = "pandas-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:db45b94885000981522fb92349e6b76f5aee0924cc5315881239c7859883117d"}, + {file = "pandas-1.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:927e59c694e039c75d7023465d311277a1fc29ed7236b5746e9dddf180393113"}, + {file = "pandas-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e675f8fe9aa6c418dc8d3aac0087b5294c1a4527f1eacf9fe5ea671685285454"}, + {file = "pandas-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04e51b01d5192499390c0015630975f57836cc95c7411415b499b599b05c0c96"}, + {file = "pandas-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cee0c74e93ed4f9d39007e439debcaadc519d7ea5c0afc3d590a3a7b2edf060"}, + {file = "pandas-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b156a971bc451c68c9e1f97567c94fd44155f073e3bceb1b0d195fd98ed12048"}, + {file = "pandas-1.5.1-cp39-cp39-win32.whl", hash = "sha256:05c527c64ee02a47a24031c880ee0ded05af0623163494173204c5b72ddce658"}, + {file = "pandas-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bb391659a747cf4f181a227c3e64b6d197100d53da98dcd766cc158bdd9ec68"}, + {file = "pandas-1.5.1.tar.gz", hash = "sha256:249cec5f2a5b22096440bd85c33106b6102e0672204abd2d5c014106459804ee"}, +] +pathspec = [ + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -487,40 +584,54 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"}, - {file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"}, - {file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = 
"sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"}, - {file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"}, - {file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"}, - {file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"}, - {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"}, - {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"}, - {file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"}, - {file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"}, - {file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"}, - {file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"}, - {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + 
{file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pylint = [ - {file = "pylint-2.12.2-py3-none-any.whl", hash = "sha256:daabda3f7ed9d1c60f52d563b1b854632fd90035bcf01443e234d3dc794e3b74"}, - {file = "pylint-2.12.2.tar.gz", hash = "sha256:9d945a73640e1fec07ee34b42f5669b770c759acd536ec7b16d7e4b87a9c9ff9"}, + {file = "pylint-2.15.5-py3-none-any.whl", hash = "sha256:c2108037eb074334d9e874dc3c783752cc03d0796c88c9a9af282d0f161a1004"}, + {file = "pylint-2.15.5.tar.gz", hash = "sha256:3b120505e5af1d06a5ad76b55d8660d44bf0f2fc3c59c2bdd94e39188ee3a4df"}, ] pyparsing = [ - {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, - {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, @@ -531,8 +642,8 @@ python-dateutil = [ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, + {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, + {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -542,103 +653,103 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tomlkit = [ + {file = "tomlkit-0.11.5-py3-none-any.whl", hash = "sha256:f2ef9da9cef846ee027947dc99a45d6b68a63b0ebc21944649505bf2e8bc5fe7"}, + {file = "tomlkit-0.11.5.tar.gz", hash = "sha256:571854ebbb5eac89abcb4a2e47d7ea27b89bf29e09c35395da6f03dd4ae23d1c"}, +] torch = [ - 
{file = "torch-1.10.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:56022b0ce94c54e95a2f63fc5a1494feb1fc3d5c7a9b35a62944651d03edef05"}, - {file = "torch-1.10.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:13e1ffab502aa32d6841a018771b47028d02dbbc685c5b79cfd61db5464dae4e"}, - {file = "torch-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3c0a942e0df104c80b0eedc30d2a19cdc3d28601bc6e280bf24b2e6255016d3b"}, - {file = "torch-1.10.0-cp36-none-macosx_10_9_x86_64.whl", hash = "sha256:eea16c01af1980ba709c00e8d5e6c09bedb5b30f9fa2085f6a52a78d7dc4e125"}, - {file = "torch-1.10.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b812e8d40d7037748da40bb695bd849e7b2e7faad4cd06df53d2cc4531926fda"}, - {file = "torch-1.10.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:034df0b20603bfc81325094586647302891b9b20be7e36f152c7dd6af00deac1"}, - {file = "torch-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67fc509e207b8e7330f2e76e77800950317d31d035a4d19593db991962afead4"}, - {file = "torch-1.10.0-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:4499055547087d7ef7e8a754f09c2c4f1470297ae3e5490363dba66c75501b21"}, - {file = "torch-1.10.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ab0cf330714c8f79a837c04784a7a5658b014cf5a4ca527e7b710155ae519cdf"}, - {file = "torch-1.10.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e01ba5946267014abfdb30248bcdbd457aaa20cff749febe7fc191e5ae096af4"}, - {file = "torch-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:9013002adcb42bac05dcdbf0a03dd9f6bb5d7ab8b9817041c1176a014870786b"}, - {file = "torch-1.10.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:aef7afb62e9b174b4e0e5e1e4a42e3bab3b8490a668d666f62f7d4517559fbf2"}, - {file = "torch-1.10.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d82e68302c9b5c76ed585e04d61be0ca2184f70cb8ffeba8610570609ad5d7c9"}, - {file = "torch-1.10.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e5822200bf80a1495ad98a2bb41803eeba4a85ce373e35fc65765f7f888f5374"}, - {file = "torch-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:ca2c88fa4376e2648785029ab108e6e7abd784eb6535fc6036004b9254f9f7c1"}, - {file = "torch-1.10.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:d6ef87470b44df9970e84542547d5ba7720bb89616602441df555a39b124e2bc"}, -] -typed-ast = [ - {file = "typed_ast-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d8314c92414ce7481eee7ad42b353943679cf6f30237b5ecbf7d835519e1212"}, - {file = "typed_ast-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b53ae5de5500529c76225d18eeb060efbcec90ad5e030713fe8dab0fb4531631"}, - {file = "typed_ast-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:24058827d8f5d633f97223f5148a7d22628099a3d2efe06654ce872f46f07cdb"}, - {file = "typed_ast-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a6d495c1ef572519a7bac9534dbf6d94c40e5b6a608ef41136133377bba4aa08"}, - {file = "typed_ast-1.5.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:de4ecae89c7d8b56169473e08f6bfd2df7f95015591f43126e4ea7865928677e"}, - {file = "typed_ast-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:256115a5bc7ea9e665c6314ed6671ee2c08ca380f9d5f130bd4d2c1f5848d695"}, - {file = "typed_ast-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:7c42707ab981b6cf4b73490c16e9d17fcd5227039720ca14abe415d39a173a30"}, - {file = "typed_ast-1.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:71dcda943a471d826ea930dd449ac7e76db7be778fcd722deb63642bab32ea3f"}, - {file = 
"typed_ast-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4f30a2bcd8e68adbb791ce1567fdb897357506f7ea6716f6bbdd3053ac4d9471"}, - {file = "typed_ast-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca9e8300d8ba0b66d140820cf463438c8e7b4cdc6fd710c059bfcfb1531d03fb"}, - {file = "typed_ast-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9caaf2b440efb39ecbc45e2fabde809cbe56272719131a6318fd9bf08b58e2cb"}, - {file = "typed_ast-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9bcad65d66d594bffab8575f39420fe0ee96f66e23c4d927ebb4e24354ec1af"}, - {file = "typed_ast-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:591bc04e507595887160ed7aa8d6785867fb86c5793911be79ccede61ae96f4d"}, - {file = "typed_ast-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:a80d84f535642420dd17e16ae25bb46c7f4c16ee231105e7f3eb43976a89670a"}, - {file = "typed_ast-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:38cf5c642fa808300bae1281460d4f9b7617cf864d4e383054a5ef336e344d32"}, - {file = "typed_ast-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b6ab14c56bc9c7e3c30228a0a0b54b915b1579613f6e463ba6f4eb1382e7fd4"}, - {file = "typed_ast-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2b8d7007f6280e36fa42652df47087ac7b0a7d7f09f9468f07792ba646aac2d"}, - {file = "typed_ast-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:b6d17f37f6edd879141e64a5db17b67488cfeffeedad8c5cec0392305e9bc775"}, - {file = "typed_ast-1.5.1.tar.gz", hash = "sha256:484137cab8ecf47e137260daa20bafbba5f4e3ec7fda1c1e69ab299b75fa81c5"}, + {file = "torch-1.12.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:9c038662db894a23e49e385df13d47b2a777ffd56d9bcd5b832593fab0a7e286"}, + {file = "torch-1.12.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:4e1b9c14cf13fd2ab8d769529050629a0e68a6fc5cb8e84b4a3cc1dd8c4fe541"}, + {file = "torch-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:e9c8f4a311ac29fc7e8e955cfb7733deb5dbe1bdaabf5d4af2765695824b7e0d"}, + {file = "torch-1.12.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:976c3f997cea38ee91a0dd3c3a42322785414748d1761ef926b789dfa97c6134"}, + {file = "torch-1.12.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:68104e4715a55c4bb29a85c6a8d57d820e0757da363be1ba680fa8cc5be17b52"}, + {file = "torch-1.12.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:743784ccea0dc8f2a3fe6a536bec8c4763bd82c1352f314937cb4008d4805de1"}, + {file = "torch-1.12.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b5dbcca369800ce99ba7ae6dee3466607a66958afca3b740690d88168752abcf"}, + {file = "torch-1.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f3b52a634e62821e747e872084ab32fbcb01b7fa7dbb7471b6218279f02a178a"}, + {file = "torch-1.12.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:8a34a2fbbaa07c921e1b203f59d3d6e00ed379f2b384445773bd14e328a5b6c8"}, + {file = "torch-1.12.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:42f639501928caabb9d1d55ddd17f07cd694de146686c24489ab8c615c2871f2"}, + {file = "torch-1.12.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0b44601ec56f7dd44ad8afc00846051162ef9c26a8579dda0a02194327f2d55e"}, + {file = "torch-1.12.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cd26d8c5640c3a28c526d41ccdca14cf1cbca0d0f2e14e8263a7ac17194ab1d2"}, + {file = "torch-1.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:42e115dab26f60c29e298559dbec88444175528b729ae994ec4c65d56fe267dd"}, 
+ {file = "torch-1.12.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:a8320ba9ad87e80ca5a6a016e46ada4d1ba0c54626e135d99b2129a4541c509d"}, + {file = "torch-1.12.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:03e31c37711db2cd201e02de5826de875529e45a55631d317aadce2f1ed45aa8"}, + {file = "torch-1.12.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:9b356aea223772cd754edb4d9ecf2a025909b8615a7668ac7d5130f86e7ec421"}, + {file = "torch-1.12.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:6cf6f54b43c0c30335428195589bd00e764a6d27f3b9ba637aaa8c11aaf93073"}, + {file = "torch-1.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:f00c721f489089dc6364a01fd84906348fe02243d0af737f944fddb36003400d"}, + {file = "torch-1.12.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:bfec2843daa654f04fda23ba823af03e7b6f7650a873cdb726752d0e3718dada"}, + {file = "torch-1.12.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:69fe2cae7c39ccadd65a123793d30e0db881f1c1927945519c5c17323131437e"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] wrapt = [ - {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, - {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, - {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, - {file = 
"wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, - {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, - {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, - {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, - {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, - {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, - {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, - {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, - {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, - {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, - {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, - {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, - {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, - {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, - {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, - {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, - {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, - {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, - {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, - {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, -] -zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = 
"sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] diff --git a/pyproject.toml b/pyproject.toml index 6012610..a391f8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,10 +2,10 @@ name = "pytorch-datastream" version = "0.0.0" description = "Simple dataset to dataloader library for pytorch" -authors = ["Aiwizo"] +authors = ["NextML"] license = "Apache-2.0" readme = "README.rst" -repository = "https://github.com/Aiwizo/pytorch-datastream" +repository = "https://github.com/nextml-code/pytorch-datastream" documentation = "https://pytorch-datastream.readthedocs.io" keywords = [ "pytorch", @@ -34,16 +34,17 @@ packages = [ ] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" torch = "^1.4.0" numpy = "^1.17.0" pandas = "^1.0.5" pydantic = "^1.5.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] pylint = "^2.6.0" flake8 = "^3.8.4" pytest = "^6.1.2" +black = "^22.10.0" [build-system] requires = ["poetry-core>=1.0.0"]