From 88d604926690c30793a804a58be939f0e39a69f2 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Thu, 22 Jun 2023 18:25:02 +0200 Subject: [PATCH 1/7] Add support for alignment of contrast targets --- inseq/attr/feat/attribution_utils.py | 59 ++++++----------- inseq/attr/feat/feature_attribution.py | 36 +++++----- inseq/attr/step_functions.py | 78 ++++++++++++++++++---- inseq/data/__init__.py | 10 ++- inseq/data/attribution.py | 2 + inseq/data/batch.py | 24 +++++++ inseq/models/attribution_model.py | 81 ++++++++++++++++++----- inseq/models/decoder_only.py | 26 ++++---- inseq/models/encoder_decoder.py | 35 +++++----- inseq/models/huggingface_model.py | 8 +++ inseq/utils/__init__.py | 2 + inseq/utils/errors.py | 6 ++ tests/attr/feat/test_attribution_utils.py | 2 +- 13 files changed, 258 insertions(+), 111 deletions(-) diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 432dd95d..9bb3b0a5 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -4,7 +4,7 @@ import torch -from ...utils import extract_signature_args +from ...utils import MissingAlignmentsError, extract_signature_args from ...utils.typing import ( OneOrMoreAttributionSequences, OneOrMoreIdSequences, @@ -13,7 +13,7 @@ TextInput, TokenWithId, ) -from ..step_functions import STEP_SCORES_MAP +from ..step_functions import get_step_scores_args if TYPE_CHECKING: from ...models import AttributionModel @@ -87,36 +87,36 @@ def check_attribute_positions( return attr_pos_start, attr_pos_end -def get_step_scores( - score_identifier: str = "probability", - step_scores_args: Dict[str, Any] = {}, -) -> SingleScorePerStepTensor: - """Returns step scores for the target tokens in the batch.""" - if score_identifier not in STEP_SCORES_MAP: - raise AttributeError( - f"Step score {score_identifier} not found. Available step scores are: " - f"{', '.join(list(STEP_SCORES_MAP.keys()))}. Use the inseq.register_step_function" - "function to register a custom step score." - ) - return STEP_SCORES_MAP[score_identifier](**step_scores_args) - - def join_token_ids( tokens: OneOrMoreTokenSequences, ids: OneOrMoreIdSequences, contrast_tokens: Optional[OneOrMoreTokenSequences] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> List[TokenWithId]: """Joins tokens and ids into a list of TokenWithId objects.""" if contrast_tokens is None: contrast_tokens = tokens + # 1:1 alignment between target and contrast tokens + if contrast_targets_alignments is None: + contrast_targets_alignments = [[(idx, idx) for idx, _ in enumerate(seq)] for seq in tokens] sequences = [] - for target_tokens_seq, contrast_target_tokens_seq, input_ids_seq in zip(tokens, contrast_tokens, ids): + for target_tokens_seq, contrast_target_tokens_seq, input_ids_seq, alignments_seq in zip( + tokens, contrast_tokens, ids, contrast_targets_alignments + ): curr_seq = [] - for token, contrast_token, idx in zip(target_tokens_seq, contrast_target_tokens_seq, input_ids_seq): - if token != contrast_token: - curr_seq.append(TokenWithId(f"{contrast_token} → {token}", -1)) + for pos_idx, (token, token_idx) in enumerate(zip(target_tokens_seq, input_ids_seq)): + # Find all alignment pairs for the current original target + aligned_idxs = [c_idx for idx, c_idx in alignments_seq if idx == pos_idx] + if not aligned_idxs: + raise MissingAlignmentsError( + f"No alignment found for token at index {pos_idx}: {token} ({token_idx}). 
" + "Please provide alignment pairs that cover all original target tokens." + ) + contrast_position = min(aligned_idxs) + if token != contrast_target_tokens_seq[contrast_position]: + curr_seq.append(TokenWithId(f"{contrast_target_tokens_seq[contrast_position]} → {token}", -1)) else: - curr_seq.append(TokenWithId(token, idx)) + curr_seq.append(TokenWithId(token, token_idx)) sequences.append(curr_seq) return sequences @@ -135,22 +135,7 @@ def extract_args( extra_attributed_fn_args, attributed_fn_unused_args = extract_signature_args( kwargs, attributed_fn, exclude_args=default_args, return_remaining=True ) - extra_step_scores_args = {} - for step_score in step_scores: - if step_score not in STEP_SCORES_MAP: - raise AttributeError( - f"Step score {step_score} not found. Available step scores are: " - f"{', '.join(list(STEP_SCORES_MAP.keys()))}. Use the inseq.register_step_function" - "function to register a custom step score." - ) - extra_step_scores_args.update( - **extract_signature_args( - kwargs, - STEP_SCORES_MAP[step_score], - exclude_args=default_args, - return_remaining=False, - ) - ) + extra_step_scores_args = get_step_scores_args(step_scores, kwargs, default_args) step_scores_unused_args = {k: v for k, v in kwargs.items() if k not in extra_step_scores_args} unused_args = { k: v diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index 8da1eb48..75aa2ad9 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -44,7 +44,12 @@ ) from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor from ..attribution_decorators import batched, set_hook, unset_hook -from .attribution_utils import check_attribute_positions, get_source_target_attributions, get_step_scores, tok2string +from ..step_functions import get_step_scores +from .attribution_utils import ( + check_attribute_positions, + get_source_target_attributions, + tok2string, +) if TYPE_CHECKING: from ...models import AttributionModel @@ -301,13 +306,15 @@ def attribute( logger.debug("=" * 30 + f"\nfull batch: {batch}\n" + "=" * 30) # Sources are empty for decoder-only models sequences = self.attribution_model.formatter.get_text_sequences(self.attribution_model, batch) - contrast_targets = attributed_fn_args.get("contrast_targets", None) - contrast_targets = [contrast_targets] if isinstance(contrast_targets, str) else contrast_targets - target_tokens_with_ids = self.attribution_model.tokenize_with_ids( - sequences.targets, - as_targets=True, - skip_special_tokens=False, - contrast_inputs=contrast_targets, + contrast_batch, contrast_targets_alignments = self.attribution_model.formatter.get_contrast_options_from_args( + attribution_model=self.attribution_model, + args=attributed_fn_args, + target_tokens=batch.target_tokens, + ) + target_tokens_with_ids = self.attribution_model.get_token_with_ids( + batch, + contrast_batch=contrast_batch, + contrast_targets_alignments=contrast_targets_alignments, ) # Manages front padding for decoder-only models, using 0 as lower bound # when attr_pos_start exceeds target length. 
@@ -320,10 +327,6 @@ def attribute( ) for idx in range(len(target_tokens_with_ids)) ] - if self.attribution_model.is_encoder_decoder: - iter_pos_end = min(attr_pos_end + 1, batch.max_generation_length) - else: - iter_pos_end = attr_pos_end pbar = get_progress_bar( sequences=sequences, target_lengths=targets_lengths, @@ -339,7 +342,7 @@ def attribute( start = datetime.now() # Attribution loop for generation - for step in range(attr_pos_start, iter_pos_end): + for step in range(attr_pos_start, attr_pos_end): tgt_ids, tgt_mask = batch.get_step_target(step, with_attention=True) step_output = self.filtered_attribute_step( batch[:step], @@ -359,15 +362,16 @@ def attribute( batch[:step], self.attribution_model.convert_ids_to_tokens(tgt_ids.unsqueeze(1), skip_special_tokens=False), tgt_ids.detach().to("cpu"), - attributed_fn_args, + contrast_batch=contrast_batch, + contrast_targets_alignments=contrast_targets_alignments, ) attribution_outputs.append(step_output) if pretty_progress: tgt_tokens = batch.target_tokens skipped_prefixes = tok2string(self.attribution_model, tgt_tokens, end=attr_pos_start) attributed_sentences = tok2string(self.attribution_model, tgt_tokens, attr_pos_start, step + 1) - unattributed_suffixes = tok2string(self.attribution_model, tgt_tokens, step + 1, iter_pos_end) - skipped_suffixes = tok2string(self.attribution_model, tgt_tokens, start=iter_pos_end) + unattributed_suffixes = tok2string(self.attribution_model, tgt_tokens, step + 1, attr_pos_end) + skipped_suffixes = tok2string(self.attribution_model, tgt_tokens, start=attr_pos_end) update_progress_bar( pbar, skipped_prefixes, diff --git a/inseq/attr/step_functions.py b/inseq/attr/step_functions.py index e1c5ab81..e942299e 100644 --- a/inseq/attr/step_functions.py +++ b/inseq/attr/step_functions.py @@ -1,14 +1,15 @@ import logging from inspect import getfullargspec -from typing import TYPE_CHECKING, Dict, List, Optional, Protocol, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Protocol, Tuple, Union import torch from torch.nn.functional import kl_div, log_softmax from transformers import AutoModelForCausalLM, AutoModelForSeq2SeqLM from transformers.modeling_outputs import ModelOutput -from ..data import DecoderOnlyBatch, FeatureAttributionInput, get_batch_from_inputs +from ..data import FeatureAttributionInput, slice_batch_from_position from ..data.aggregation_functions import DEFAULT_ATTRIBUTION_AGGREGATE_DICT +from ..utils import extract_signature_args from ..utils.typing import EmbeddingsTensor, IdsTensor, SingleScorePerStepTensor, TargetIdsTensor if TYPE_CHECKING: @@ -101,6 +102,7 @@ def _get_contrast_output( contrast_target_prefixes: Optional[FeatureAttributionInput] = None, contrast_sources: Optional[FeatureAttributionInput] = None, contrast_targets: Optional[FeatureAttributionInput] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, return_contrastive_target_ids: bool = False, ) -> ModelOutput: """Utility function to return the output of the model for given contrastive inputs. @@ -115,24 +117,25 @@ def _get_contrast_output( contrast_targets (:obj:`str` or :obj:`list(str)`): Contrastive target text(s) to be compared to the original target text. If not specified, the original target text is used as contrastive target (will result in same output unless ``contrast_sources`` or ``contrast_target_prefixes`` are specified). Defaults to :obj:`None`. 
+ contrast_targets_alignments (:obj:`list(tuple(int, int))`, `optional`): A list of tuples of indices, where the + first element is the index of the original target token and the second element is the index of the + contrastive target token, used only if :obj:`contrast_targets` is specified. If an explicit alignment is + not specified, the alignment of the original and contrastive target texts is assumed to be 1:1 for all + available tokens. Defaults to :obj:`None`. return_contrastive_target_ids (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to return the contrastive target ids as well as the model output. Defaults to :obj:`False`. """ c_tgt_ids = None if contrast_targets: - c_batch = DecoderOnlyBatch.from_batch( - get_batch_from_inputs( - attribution_model=attribution_model, - inputs=contrast_targets, - as_targets=attribution_model.is_encoder_decoder, - ) + c_batch, contrast_targets_alignments = attribution_model.formatter.get_contrast_options_from_args( + attribution_model=attribution_model, + args={"contrast_targets": contrast_targets, "contrast_targets_alignments": contrast_targets_alignments}, + target_tokens=torch.zeros(decoder_input_ids.size(0), decoder_input_ids.size(1) + 1).long().tolist(), ) curr_prefix_len = decoder_input_ids.size(1) - - # We select the next contrastive token as target and truncate contrastive ids - # and their attention map to the current generation step. - c_tgt_ids = c_batch.target_ids[:, curr_prefix_len] - c_batch = c_batch[:curr_prefix_len].to(attribution_model.device) + if len(contrast_targets_alignments) > 0 and isinstance(contrast_targets_alignments[0], list): + contrast_targets_alignments = contrast_targets_alignments[0] + c_batch, c_tgt_ids = slice_batch_from_position(c_batch, curr_prefix_len, contrast_targets_alignments) if decoder_input_ids.size(0) != c_batch.target_ids.size(0): raise ValueError( @@ -194,6 +197,7 @@ def contrast_prob_fn( contrast_target_prefixes: Optional[FeatureAttributionInput] = None, contrast_sources: Optional[FeatureAttributionInput] = None, contrast_targets: Optional[FeatureAttributionInput] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, **kwargs, ): """Returns the probability of a generation target given contrastive context or target prediction alternative. @@ -211,6 +215,11 @@ def contrast_prob_fn( contrast_targets (:obj:`str` or :obj:`list(str)`): Contrastive target text(s) to be compared to the original target text. If not specified, the original target text is used as contrastive target (will result in same output unless ``contrast_sources`` or ``contrast_target_prefixes`` are specified). Defaults to :obj:`None`. + contrast_targets_alignments (:obj:`list(tuple(int, int))`, `optional`): A list of tuples of indices, where the + first element is the index of the original target token and the second element is the index of the + contrastive target token, used only if :obj:`contrast_targets` is specified. If an explicit alignment is + not specified, the alignment of the original and contrastive target texts is assumed to be 1:1 for all + available tokens. Defaults to :obj:`None`. 
""" kwargs.pop("forward_output", None) c_output, c_tgt_ids = _get_contrast_output( @@ -218,6 +227,7 @@ def contrast_prob_fn( contrast_sources=contrast_sources, contrast_target_prefixes=contrast_target_prefixes, contrast_targets=contrast_targets, + contrast_targets_alignments=contrast_targets_alignments, return_contrastive_target_ids=True, **kwargs, ) @@ -317,6 +327,7 @@ def contrast_prob_diff_fn( contrast_target_prefixes: Optional[FeatureAttributionInput] = None, contrast_sources: Optional[FeatureAttributionInput] = None, contrast_targets: Optional[FeatureAttributionInput] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, **kwargs, ): """Returns the difference between next step probability for a candidate generation target vs. a contrastive @@ -336,6 +347,11 @@ def contrast_prob_diff_fn( contrast_targets (:obj:`str` or :obj:`list(str)`): Contrastive target text(s) to be compared to the original target text. If not specified, the original target text is used as contrastive target (will result in same output unless ``contrast_sources`` or ``contrast_target_prefixes`` are specified). Defaults to :obj:`None`. + contrast_targets_alignments (:obj:`list(tuple(int, int))`, `optional`): A list of tuples of indices, where the + first element is the index of the original target token and the second element is the index of the + contrastive target token, used only if :obj:`contrast_targets` is specified. If an explicit alignment is + not specified, the alignment of the original and contrastive target texts is assumed to be 1:1 for all + available tokens. Defaults to :obj:`None`. """ model_probs = probability_fn(attribution_model, forward_output, target_ids) contrast_probs = contrast_prob_fn( @@ -344,6 +360,7 @@ def contrast_prob_diff_fn( contrast_sources=contrast_sources, contrast_target_prefixes=contrast_target_prefixes, contrast_targets=contrast_targets, + contrast_targets_alignments=contrast_targets_alignments, **kwargs, ) # Return the prob difference as target for attribution @@ -418,6 +435,41 @@ def mc_dropout_prob_avg_fn( } +def check_is_step_function(identifier: str) -> None: + if identifier not in STEP_SCORES_MAP: + raise AttributeError( + f"Step score {identifier} not found. Available step scores are: " + f"{', '.join(list(STEP_SCORES_MAP.keys()))}. Use the inseq.register_step_function" + "function to register a custom step score." + ) + + +def get_step_scores( + score_identifier: str = "probability", + step_scores_args: Dict[str, Any] = {}, +) -> SingleScorePerStepTensor: + """Returns step scores for the target tokens in the batch.""" + check_is_step_function(score_identifier) + return STEP_SCORES_MAP[score_identifier](**step_scores_args) + + +def get_step_scores_args( + score_identifiers: List[str], kwargs: Dict[str, Any], default_args: Dict[str, Any] +) -> Dict[str, Any]: + step_scores_args = {} + for step_score in score_identifiers: + check_is_step_function(step_score) + step_scores_args.update( + **extract_signature_args( + kwargs, + STEP_SCORES_MAP[step_score], + exclude_args=default_args, + return_remaining=False, + ) + ) + return step_scores_args + + def list_step_functions() -> List[str]: """Lists identifiers for all available step scores. 
One or more step scores identifiers can be passed to the :meth:`~inseq.models.AttributionModel.attribute` method either to compute scores while attributing (``step_scores`` diff --git a/inseq/data/__init__.py b/inseq/data/__init__.py index 768e96b7..16860a0d 100644 --- a/inseq/data/__init__.py +++ b/inseq/data/__init__.py @@ -22,7 +22,14 @@ MultiDimensionalFeatureAttributionStepOutput, get_batch_from_inputs, ) -from .batch import Batch, BatchEmbedding, BatchEncoding, DecoderOnlyBatch, EncoderDecoderBatch +from .batch import ( + Batch, + BatchEmbedding, + BatchEncoding, + DecoderOnlyBatch, + EncoderDecoderBatch, + slice_batch_from_position, +) from .viz import show_attributions __all__ = [ @@ -54,4 +61,5 @@ "MultiDimensionalFeatureAttributionStepOutput", "get_batch_from_inputs", "list_aggregators", + "slice_batch_from_position", ] diff --git a/inseq/data/attribution.py b/inseq/data/attribution.py index 9ba0f397..bc053d1c 100644 --- a/inseq/data/attribution.py +++ b/inseq/data/attribution.py @@ -164,6 +164,8 @@ def from_step_attributions( ] if tokenized_target_sentences is None: tokenized_target_sentences = targets + if has_bos_token: + tokenized_target_sentences = [tok_seq[1:] for tok_seq in tokenized_target_sentences] if attr_pos_end is None: attr_pos_end = max([len(t) for t in tokenized_target_sentences]) pos_start = [ diff --git a/inseq/data/batch.py b/inseq/data/batch.py index be6f9c1c..ec5d4d14 100644 --- a/inseq/data/batch.py +++ b/inseq/data/batch.py @@ -1,6 +1,7 @@ from dataclasses import dataclass from typing import List, Optional, Tuple, Union +from ..utils import MissingAlignmentsError from ..utils.typing import EmbeddingsTensor, ExpandedTargetIdsTensor, IdsTensor, OneOrMoreTokenSequences from .data_utils import TensorWrapper @@ -229,3 +230,26 @@ def from_batch(self, batch: Batch) -> "DecoderOnlyBatch": encoding=batch.encoding, embedding=batch.embedding, ) + + +def slice_batch_from_position( + batch: DecoderOnlyBatch, curr_position: int, alignments: Optional[List[Tuple[int, int]]] = None +) -> Tuple[DecoderOnlyBatch, IdsTensor]: + truncate_idx = curr_position + if alignments: + if len(alignments) > 0 and isinstance(alignments[0], list): + alignments = alignments[0] + # Find all alignment pairs for the current original target + aligned_idxs = [c_idx for idx, c_idx in alignments if idx == curr_position] + + if not aligned_idxs: + raise MissingAlignmentsError( + f"No alignment found for original target token at index {curr_position}. " + "Please provide alignment pairs that cover all original target tokens." 
+ ) + # Select the minimum index to identify the next target token + truncate_idx = min(aligned_idxs) + # We select the target token and truncate the batch up to the selected index + tgt_ids = batch.target_ids[:, truncate_idx] + batch = batch[:truncate_idx] + return batch, tgt_ids diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index e8d5f6f6..8452377a 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -14,6 +14,7 @@ FeatureAttributionInput, FeatureAttributionOutput, FeatureAttributionStepOutput, + get_batch_from_inputs, ) from ..utils import ( MissingAttributionMethodError, @@ -91,7 +92,8 @@ def enrich_step_output( batch: Union[DecoderOnlyBatch, EncoderDecoderBatch], target_tokens: OneOrMoreTokenSequences, target_ids: TargetIdsTensor, - attributed_fn_args: Dict[str, Any] = {}, + contrast_batch: Optional[DecoderOnlyBatch] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> FeatureAttributionStepOutput: r"""Enriches the attribution output with token information, producing the finished :class:`~inseq.data.FeatureAttributionStepOutput` object. @@ -140,6 +142,54 @@ def get_text_sequences( ) -> TextSequences: raise NotImplementedError() + @staticmethod + def get_contrast_options_from_args( + attribution_model: "AttributionModel", args: Dict[str, Any], target_tokens: List[List[str]] + ) -> Tuple[DecoderOnlyBatch, Optional[List[List[Tuple[int, int]]]]]: + contrast_targets = args.get("contrast_targets", None) + contrast_targets_alignments = args.get("contrast_targets_alignments", None) + contrast_targets = [contrast_targets] if isinstance(contrast_targets, str) else contrast_targets + contrast_batch = None + adjusted_alignments = None + if contrast_targets is not None: + contrast_batch = DecoderOnlyBatch.from_batch( + get_batch_from_inputs( + attribution_model=attribution_model, + inputs=contrast_targets, + as_targets=attribution_model.is_encoder_decoder, + ) + ) + if isinstance(contrast_targets_alignments, list) and len(contrast_targets_alignments) > 0: + if isinstance(contrast_targets_alignments[0], tuple): + contrast_targets_alignments = [contrast_targets_alignments] + if not isinstance(contrast_targets_alignments[0], list): + raise ValueError("Invalid contrast_targets_alignments were provided.") + else: + contrast_targets_alignments = None + + if contrast_targets_alignments is None: + adjusted_alignments = [[(idx, idx) for idx, _ in enumerate(seq)] for seq in target_tokens] + else: + # Sort alignments + contrast_targets_alignments = [ + sorted(seq, key=lambda x: (x[0], x[1])) for seq in contrast_targets_alignments + ] + + # Filling alignments with missing tokens + # Assuming 1:1 mapping to cover all tokens from the original sequence + adjusted_alignments = [] + for seq_idx, seq in enumerate(target_tokens): + adjusted_seq_alignments = [] + for pair_idx, _ in enumerate(seq): + match_pairs = [x for x in contrast_targets_alignments[seq_idx] if x[0] == pair_idx] + if not match_pairs: + adjusted_seq_alignments.append((pair_idx, pair_idx)) + else: + adjusted_seq_alignments.append(match_pairs[0]) + adjusted_alignments.append(adjusted_seq_alignments) + + return contrast_batch, adjusted_alignments + class AttributionModel(ABC, torch.nn.Module): """Base class for all attribution models. 
@@ -421,23 +471,20 @@ def embed(self, inputs: Union[TextInput, IdsTensor], as_targets: bool = False): inputs = batch.input_ids return self.embed_ids(inputs, as_targets=as_targets) - def tokenize_with_ids( + def get_token_with_ids( self, - inputs: TextInput, - as_targets: bool = False, - skip_special_tokens: bool = True, - contrast_inputs: Optional[TextInput] = None, + batch: Union[EncoderDecoderBatch, DecoderOnlyBatch], + contrast_batch: Optional[DecoderOnlyBatch] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> List[List[TokenWithId]]: - tokenized_sentences = self.convert_string_to_tokens( - inputs, as_targets=as_targets, skip_special_tokens=skip_special_tokens - ) - ids_sentences = self.convert_tokens_to_ids(tokenized_sentences) - if contrast_inputs is not None: - contrast_tokenized_sentences = self.convert_string_to_tokens( - contrast_inputs, as_targets=as_targets, skip_special_tokens=skip_special_tokens + if contrast_batch is not None: + return join_token_ids( + batch.target_tokens, + batch.target_ids.tolist(), + contrast_batch.target_tokens, + contrast_targets_alignments, ) - return join_token_ids(tokenized_sentences, ids_sentences, contrast_tokenized_sentences) - return join_token_ids(tokenized_sentences, ids_sentences) + return join_token_ids(batch.target_tokens, batch.target_ids.tolist()) # Framework-specific methods @@ -466,6 +513,10 @@ def encode( ) -> BatchEncoding: pass + @abstractmethod + def decode(self, ids: IdsTensor, **kwargs) -> List[str]: + pass + @abstractmethod def embed_ids(self, ids: IdsTensor, as_targets: bool = False) -> EmbeddingsTensor: pass diff --git a/inseq/models/decoder_only.py b/inseq/models/decoder_only.py index df7e8126..3e7ff21f 100644 --- a/inseq/models/decoder_only.py +++ b/inseq/models/decoder_only.py @@ -12,6 +12,7 @@ FeatureAttributionInput, FeatureAttributionStepOutput, get_batch_from_inputs, + slice_batch_from_position, ) from ..utils.typing import ( AttributionForwardInputs, @@ -93,7 +94,8 @@ def enrich_step_output( batch: DecoderOnlyBatch, target_tokens: OneOrMoreTokenSequences, target_ids: TargetIdsTensor, - attributed_fn_args: Dict[str, Any] = {}, + contrast_batch: Optional[DecoderOnlyBatch] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> FeatureAttributionStepOutput: r"""Enriches the attribution output with token information, producing the finished :class:`~inseq.data.FeatureAttributionStepOutput` object. 
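For context, a hedged caller-side sketch of the argument added in this patch (model name, texts, and token indices are placeholders, not taken from the patch): contrast_targets_alignments tells the contrastive step functions which contrastive token each original target token is compared against, with uncovered positions aligned 1:1.

import inseq

# Illustrative usage only: the model, texts, and indices below are assumptions.
model = inseq.load_model("gpt2", "saliency")
out = model.attribute(
    "Hello world, today is",
    "Hello world, today is a good day.",
    attributed_fn="contrast_prob_diff",
    contrast_targets="Hello world, today is a very good day.",
    # (original_idx, contrast_idx) pairs for the positions where the two targets
    # diverge in length; positions without an explicit pair are aligned 1:1.
    contrast_targets_alignments=[(6, 7), (7, 8), (8, 9)],
)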
@@ -111,19 +113,19 @@ def enrich_step_output( if target_ids.ndim == 0: target_ids = target_ids.unsqueeze(0) step_output.source = None - if "contrast_targets" in attributed_fn_args: - contrast_batch = get_batch_from_inputs( - attribution_model=attribution_model, - inputs=attributed_fn_args["contrast_targets"], - as_targets=attribution_model.is_encoder_decoder, - ) + if contrast_batch is not None: offset = len(batch.input_tokens[0]) - contrast_prefix_tokens = [seq[:offset] for seq in contrast_batch.encoding.input_tokens] - contrast_target_tokens = [[seq[offset]] for seq in contrast_batch.encoding.input_tokens] + contrast_batch, contrast_target_ids = slice_batch_from_position( + contrast_batch, offset, contrast_targets_alignments + ) step_output.target = join_token_ids( - target_tokens, contrast_target_tokens, [[idx] for idx in target_ids.tolist()] + tokens=target_tokens, + ids=attribution_model.convert_ids_to_tokens(contrast_target_ids), + contrast_tokens=attribution_model.convert_ids_to_tokens( + contrast_target_ids[None, ...], skip_special_tokens=False + ), ) - step_output.prefix = join_token_ids(batch.target_tokens, contrast_prefix_tokens, batch.target_ids.tolist()) + step_output.prefix = join_token_ids(tokens=batch.target_tokens, ids=batch.target_ids.tolist()) else: step_output.target = join_token_ids(target_tokens, [[idx] for idx in target_ids.tolist()]) step_output.prefix = join_token_ids(batch.target_tokens, batch.target_ids.tolist()) @@ -193,7 +195,7 @@ def formatted_forward_input_wrapper( def get_text_sequences(attribution_model: "DecoderOnlyAttributionModel", batch: DecoderOnlyBatch) -> TextSequences: return TextSequences( sources=None, - targets=attribution_model.convert_tokens_to_string(batch.input_tokens, as_targets=True), + targets=attribution_model.decode(batch.target_ids), ) diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index 93869d30..6db9fa1b 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -7,10 +7,12 @@ Batch, BatchEmbedding, BatchEncoding, + DecoderOnlyBatch, EncoderDecoderBatch, FeatureAttributionInput, FeatureAttributionStepOutput, get_batch_from_inputs, + slice_batch_from_position, ) from ..utils.typing import ( AttributionForwardInputs, @@ -133,7 +135,8 @@ def enrich_step_output( batch: EncoderDecoderBatch, target_tokens: OneOrMoreTokenSequences, target_ids: TargetIdsTensor, - attributed_fn_args: Dict[str, Any] = {}, + contrast_batch: Optional[DecoderOnlyBatch] = None, + contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> FeatureAttributionStepOutput: r"""Enriches the attribution output with token information, producing the finished :class:`~inseq.data.FeatureAttributionStepOutput` object. 
@@ -151,21 +154,19 @@ def enrich_step_output( if target_ids.ndim == 0: target_ids = target_ids.unsqueeze(0) step_output.source = join_token_ids(batch.sources.input_tokens, batch.sources.input_ids.tolist()) - if "contrast_targets" in attributed_fn_args: - contrast_batch = get_batch_from_inputs( - attribution_model=attribution_model, - inputs=attributed_fn_args["contrast_targets"], - as_targets=attribution_model.is_encoder_decoder, - ) + if contrast_batch is not None: offset = len(batch.targets.input_tokens[0]) - contrast_prefix_tokens = [seq[:offset] for seq in contrast_batch.encoding.input_tokens] - contrast_target_tokens = [[seq[offset]] for seq in contrast_batch.encoding.input_tokens] - step_output.target = join_token_ids( - target_tokens, contrast_target_tokens, [[idx] for idx in target_ids.tolist()] + contrast_batch, contrast_target_ids = slice_batch_from_position( + contrast_batch, offset, contrast_targets_alignments ) - step_output.prefix = join_token_ids( - batch.targets.input_tokens, contrast_prefix_tokens, batch.targets.input_ids.tolist() + step_output.target = join_token_ids( + tokens=target_tokens, + ids=[[idx] for idx in target_ids.tolist()], + contrast_tokens=attribution_model.convert_ids_to_tokens( + contrast_target_ids[None, ...], skip_special_tokens=False + ), ) + step_output.prefix = join_token_ids(tokens=batch.target_tokens, ids=batch.target_ids.tolist()) else: step_output.target = join_token_ids(target_tokens, [[idx] for idx in target_ids.tolist()]) step_output.prefix = join_token_ids(batch.targets.input_tokens, batch.targets.input_ids.tolist()) @@ -244,10 +245,12 @@ def formatted_forward_input_wrapper( return formatted_forward_input_wrapper @staticmethod - def get_text_sequences(self, batch: EncoderDecoderBatch) -> TextSequences: + def get_text_sequences( + attribution_model: "EncoderDecoderAttributionModel", batch: EncoderDecoderBatch + ) -> TextSequences: return TextSequences( - sources=self.convert_tokens_to_string(batch.sources.input_tokens), - targets=self.convert_tokens_to_string(batch.targets.input_tokens, as_targets=True), + sources=attribution_model.convert_tokens_to_string(batch.sources.input_tokens), + targets=attribution_model.decode(batch.targets.input_ids), ) diff --git a/inseq/models/huggingface_model.py b/inseq/models/huggingface_model.py index 43779d98..2fbc3535 100644 --- a/inseq/models/huggingface_model.py +++ b/inseq/models/huggingface_model.py @@ -290,6 +290,13 @@ def encode( baseline_ids=baseline_ids, ) + def decode( + self, + ids: Union[List[int], List[List[int]], IdsTensor], + skip_special_tokens: bool = True, + ) -> List[str]: + return self.tokenizer.batch_decode(ids, skip_special_tokens=skip_special_tokens) + def embed_ids(self, ids: IdsTensor, as_targets: bool = False) -> EmbeddingsTensor: if as_targets and not self.is_encoder_decoder: raise ValueError("Decoder-only models should use tokenization as source only.") @@ -343,6 +350,7 @@ def convert_string_to_tokens( ids = self.tokenizer( text=text if not as_targets else None, text_target=text if as_targets else None, + add_special_tokens=not skip_special_tokens, )["input_ids"] return self.tokenizer.convert_ids_to_tokens(ids, skip_special_tokens) return [self.convert_string_to_tokens(t, skip_special_tokens, as_targets) for t in text] diff --git a/inseq/utils/__init__.py b/inseq/utils/__init__.py index 8b72ad54..4daada05 100644 --- a/inseq/utils/__init__.py +++ b/inseq/utils/__init__.py @@ -3,6 +3,7 @@ from .errors import ( InseqDeprecationWarning, LengthMismatchError, + MissingAlignmentsError, 
MissingAttributionMethodError, UnknownAttributionMethodError, ) @@ -57,6 +58,7 @@ "LengthMismatchError", "MissingAttributionMethodError", "UnknownAttributionMethodError", + "MissingAlignmentsError", "cache_results", "optional", "pad", diff --git a/inseq/utils/errors.py b/inseq/utils/errors.py index 7fe1c8c6..905e0ae6 100644 --- a/inseq/utils/errors.py +++ b/inseq/utils/errors.py @@ -52,3 +52,9 @@ class LengthMismatchError(Exception): """Raised when lengths do not match.""" pass + + +class MissingAlignmentsError(Exception): + """Raised when lengths do not match.""" + + pass diff --git a/tests/attr/feat/test_attribution_utils.py b/tests/attr/feat/test_attribution_utils.py index 262c7690..3e341514 100644 --- a/tests/attr/feat/test_attribution_utils.py +++ b/tests/attr/feat/test_attribution_utils.py @@ -2,7 +2,7 @@ import torch import inseq -from inseq.attr.feat.attribution_utils import get_step_scores +from inseq.attr.step_functions import get_step_scores from ...inference_commons import get_example_batches From c974f9ecf550321e8c272ce6a74d9b3e3cc0ba7d Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Fri, 23 Jun 2023 12:45:36 +0200 Subject: [PATCH 2/7] Fix attribution positions --- inseq/attr/feat/feature_attribution.py | 6 +++++- inseq/data/attribution.py | 3 +++ tests/models/test_huggingface_model.py | 8 ++++---- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index 75aa2ad9..55ac2e03 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -327,6 +327,10 @@ def attribute( ) for idx in range(len(target_tokens_with_ids)) ] + if self.attribution_model.is_encoder_decoder: + iter_pos_end = min(attr_pos_end + 1, batch.max_generation_length) + else: + iter_pos_end = attr_pos_end pbar = get_progress_bar( sequences=sequences, target_lengths=targets_lengths, @@ -342,7 +346,7 @@ def attribute( start = datetime.now() # Attribution loop for generation - for step in range(attr_pos_start, attr_pos_end): + for step in range(attr_pos_start, iter_pos_end): tgt_ids, tgt_mask = batch.get_step_target(step, with_attention=True) step_output = self.filtered_attribute_step( batch[:step], diff --git a/inseq/data/attribution.py b/inseq/data/attribution.py index bc053d1c..24065c0f 100644 --- a/inseq/data/attribution.py +++ b/inseq/data/attribution.py @@ -166,6 +166,9 @@ def from_step_attributions( tokenized_target_sentences = targets if has_bos_token: tokenized_target_sentences = [tok_seq[1:] for tok_seq in tokenized_target_sentences] + tokenized_target_sentences = [ + drop_padding(tokenized_target_sentences[seq_id], pad_id) for seq_id in range(num_sequences) + ] if attr_pos_end is None: attr_pos_end = max([len(t) for t in tokenized_target_sentences]) pos_start = [ diff --git a/tests/models/test_huggingface_model.py b/tests/models/test_huggingface_model.py index f9474333..7e52b9f0 100644 --- a/tests/models/test_huggingface_model.py +++ b/tests/models/test_huggingface_model.py @@ -280,12 +280,12 @@ def test_attribute_decoder(saliency_gpt2_model): assert ex1.target_attributions.shape[1] == ex1.attr_pos_end - ex1.attr_pos_start assert ex1.target_attributions.shape[0] == ex1.attr_pos_end # Empty attributions outputs have start and end set to seq length - assert ex2.attr_pos_start == 8 - assert ex2.attr_pos_end == 13 + assert ex2.attr_pos_start == 17 + assert ex2.attr_pos_end == 22 assert ex2.target_attributions.shape[1] == ex2.attr_pos_end - ex2.attr_pos_start assert 
ex2.target_attributions.shape[0] == ex2.attr_pos_end - assert ex3.attr_pos_start == 12 - assert ex3.attr_pos_end == 17 + assert ex3.attr_pos_start == 17 + assert ex3.attr_pos_end == 22 assert ex3.target_attributions.shape[1] == ex3.attr_pos_end - ex3.attr_pos_start assert ex3.target_attributions.shape[0] == ex3.attr_pos_end assert out.info["attr_pos_start"] == 17 From 63a8cf2a595a89cc125d17e6232ac98cd52d5968 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Fri, 23 Jun 2023 12:58:11 +0200 Subject: [PATCH 3/7] Update deps --- poetry.lock | 1027 +++++++++++++++++++++++------------------- requirements-dev.txt | 62 +-- requirements.txt | 29 +- 3 files changed, 601 insertions(+), 517 deletions(-) diff --git a/poetry.lock b/poetry.lock index 74850e61..8f311a06 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,14 +2,14 @@ [[package]] name = "accelerate" -version = "0.19.0" +version = "0.20.3" description = "Accelerate" category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "accelerate-0.19.0-py3-none-any.whl", hash = "sha256:2866b0bf9fff08f51e6384c95fa96725838b70f1988d1cce42e56b820d8a91dd"}, - {file = "accelerate-0.19.0.tar.gz", hash = "sha256:84920226b9e642e453ef37593ee55b956b08d8200dea4087c546c34e26157e76"}, + {file = "accelerate-0.20.3-py3-none-any.whl", hash = "sha256:147183e7a2215f7bd45a7af3b986a963daa8a61fa58b0912b9473049e011ad15"}, + {file = "accelerate-0.20.3.tar.gz", hash = "sha256:79a896978c20dac270083d42bf033f4c9a80dcdd6b946f1ca92d8d6d0f0f5ba9"}, ] [package.dependencies] @@ -605,63 +605,72 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = 
"coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = 
"coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file 
= "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file 
= "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -696,14 +705,14 @@ files = [ [[package]] name = "datasets" -version = "2.12.0" +version = "2.13.1" description = "HuggingFace community-driven open-source library of datasets" category = "main" optional = true python-versions = ">=3.7.0" files = [ - {file = "datasets-2.12.0-py3-none-any.whl", hash = "sha256:0a23bdf1fc28d82dd496375289d72f7917d149a95062ab2647cf621d67ed74ca"}, - {file = "datasets-2.12.0.tar.gz", hash = "sha256:faf164c18a41bea51df3f369e872f8be5b84c12ea5f6393c3896f56038af1ea3"}, + {file = "datasets-2.13.1-py3-none-any.whl", hash = "sha256:844d8dbc1759e0b6b8e5063af019dc95d6af07ea075002b03323a280bf8d53f6"}, + {file = "datasets-2.13.1.tar.gz", hash = "sha256:bacb7750b1a434417312b4281a55225a3f7e0163abdd12a2a3e2d700310d5221"}, ] [package.dependencies] @@ -718,7 +727,6 @@ pandas = "*" pyarrow = ">=8.0.0" pyyaml = ">=5.1" requests = ">=2.19.0" -responses = "<0.19" tqdm = ">=4.62.1" xxhash = "*" @@ -726,15 +734,15 @@ xxhash = "*" apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] audio = ["librosa", "soundfile (>=0.12.1)"] benchmarks = ["numpy (==1.18.5)", "protobuf (==3.20.3)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "transformers (==3.0.2)"] -dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile 
(>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] docs = ["s3fs"] jax = ["jax (>=0.2.8,!=0.3.2,<=0.3.25)", "jaxlib (>=0.1.65,<=0.3.25)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] +metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"] s3 = ["s3fs"] tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] torch = ["torch"] vision = ["Pillow (>=6.2.1)"] @@ -869,30 +877,62 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.2" description = "A platform independent file lock." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "fonttools" -version = "4.39.4" +version = "4.40.0" description = "Tools to manipulate font files" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.39.4-py3-none-any.whl", hash = "sha256:106caf6167c4597556b31a8d9175a3fdc0356fdcd70ab19973c3b0d4c893c461"}, - {file = "fonttools-4.39.4.zip", hash = "sha256:dba8d7cdb8e2bac1b3da28c5ed5960de09e59a2fe7e63bb73f5a59e57b0430d2"}, + {file = "fonttools-4.40.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b802dcbf9bcff74672f292b2466f6589ab8736ce4dcf36f48eb994c2847c4b30"}, + {file = "fonttools-4.40.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f6e3fa3da923063c286320e728ba2270e49c73386e3a711aa680f4b0747d692"}, + {file = "fonttools-4.40.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdf60f8a5c6bcce7d024a33f7e4bc7921f5b74e8ea13bccd204f2c8b86f3470"}, + {file = "fonttools-4.40.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91784e21a1a085fac07c6a407564f4a77feb471b5954c9ee55a4f9165151f6c1"}, + {file = "fonttools-4.40.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05171f3c546f64d78569f10adc0de72561882352cac39ec7439af12304d8d8c0"}, + {file = "fonttools-4.40.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7449e5e306f3a930a8944c85d0cbc8429cba13503372a1a40f23124d6fb09b58"}, + {file = "fonttools-4.40.0-cp310-cp310-win32.whl", hash = "sha256:bae8c13abbc2511e9a855d2142c0ab01178dd66b1a665798f357da0d06253e0d"}, + {file = "fonttools-4.40.0-cp310-cp310-win_amd64.whl", hash = "sha256:425b74a608427499b0e45e433c34ddc350820b6f25b7c8761963a08145157a66"}, + {file = "fonttools-4.40.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:00ab569b2a3e591e00425023ade87e8fef90380c1dde61be7691cb524ca5f743"}, + {file = "fonttools-4.40.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:18ea64ac43e94c9e0c23d7a9475f1026be0e25b10dda8f236fc956188761df97"}, + {file = "fonttools-4.40.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:022c4a16b412293e7f1ce21b8bab7a6f9d12c4ffdf171fdc67122baddb973069"}, + {file = "fonttools-4.40.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530c5d35109f3e0cea2535742d6a3bc99c0786cf0cbd7bb2dc9212387f0d908c"}, + {file = "fonttools-4.40.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:5e00334c66f4e83535384cb5339526d01d02d77f142c23b2f97bd6a4f585497a"}, + {file = "fonttools-4.40.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb52c10fda31159c22c7ed85074e05f8b97da8773ea461706c273e31bcbea836"}, + {file = "fonttools-4.40.0-cp311-cp311-win32.whl", hash = "sha256:6a8d71b9a5c884c72741868e845c0e563c5d83dcaf10bb0ceeec3b4b2eb14c67"}, + {file = "fonttools-4.40.0-cp311-cp311-win_amd64.whl", hash = "sha256:15abb3d055c1b2dff9ce376b6c3db10777cb74b37b52b78f61657634fd348a0d"}, + {file = "fonttools-4.40.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14037c31138fbd21847ad5e5441dfdde003e0a8f3feb5812a1a21fd1c255ffbd"}, + {file = "fonttools-4.40.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:94c915f6716589f78bc00fbc14c5b8de65cfd11ee335d32504f1ef234524cb24"}, + {file = "fonttools-4.40.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37467cee0f32cada2ec08bc16c9c31f9b53ea54b2f5604bf25a1246b5f50593a"}, + {file = "fonttools-4.40.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56d4d85f5374b45b08d2f928517d1e313ea71b4847240398decd0ab3ebbca885"}, + {file = "fonttools-4.40.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8c4305b171b61040b1ee75d18f9baafe58bd3b798d1670078efe2c92436bfb63"}, + {file = "fonttools-4.40.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a954b90d1473c85a22ecf305761d9fd89da93bbd31dae86e7dea436ad2cb5dc9"}, + {file = "fonttools-4.40.0-cp38-cp38-win32.whl", hash = "sha256:1bc4c5b147be8dbc5df9cc8ac5e93ee914ad030fe2a201cc8f02f499db71011d"}, + {file = "fonttools-4.40.0-cp38-cp38-win_amd64.whl", hash = "sha256:8a917828dbfdb1cbe50cf40eeae6fbf9c41aef9e535649ed8f4982b2ef65c091"}, + {file = "fonttools-4.40.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:882983279bf39afe4e945109772c2ffad2be2c90983d6559af8b75c19845a80a"}, + {file = "fonttools-4.40.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c55f1b4109dbc3aeb496677b3e636d55ef46dc078c2a5e3f3db4e90f1c6d2907"}, + {file = "fonttools-4.40.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec468c022d09f1817c691cf884feb1030ef6f1e93e3ea6831b0d8144c06480d1"}, + {file = "fonttools-4.40.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d5adf4ba114f028fc3f5317a221fd8b0f4ef7a2e5524a2b1e0fd891b093791a"}, + {file = "fonttools-4.40.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa83b3f151bc63970f39b2b42a06097c5a22fd7ed9f7ba008e618de4503d3895"}, + {file = "fonttools-4.40.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97d95b8301b62bdece1af943b88bcb3680fd385f88346a4a899ee145913b414a"}, + {file = "fonttools-4.40.0-cp39-cp39-win32.whl", hash = "sha256:1a003608400dd1cca3e089e8c94973c6b51a4fb1ef00ff6d7641617b9242e637"}, + {file = "fonttools-4.40.0-cp39-cp39-win_amd64.whl", hash = "sha256:7961575221e3da0841c75da53833272c520000d76f7f71274dbf43370f8a1065"}, + {file = "fonttools-4.40.0-py3-none-any.whl", hash = "sha256:200729d12461e2038700d31f0d49ad5a7b55855dec7525074979a06b46f88505"}, + {file = "fonttools-4.40.0.tar.gz", hash = "sha256:337b6e83d7ee73c40ea62407f2ce03b07c3459e213b6f332b94a69923b9e1cb9"}, ] [package.extras] @@ -995,14 +1035,14 @@ files = [ [[package]] name = "fsspec" -version = "2023.5.0" +version = "2023.6.0" description = "File-system specification" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.5.0-py3-none-any.whl", hash = "sha256:51a4ad01a5bb66fcc58036e288c0d53d3975a0df2a5dc59a93b59bade0391f2a"}, - {file = 
"fsspec-2023.5.0.tar.gz", hash = "sha256:b3b56e00fb93ea321bc9e5d9cf6f8522a0198b20eb24e02774d329e9c6fb84ce"}, + {file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"}, + {file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"}, ] [package.dependencies] @@ -1083,14 +1123,14 @@ gitdb = ">=4.0.1,<5" [[package]] name = "huggingface-hub" -version = "0.14.1" +version = "0.15.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "huggingface_hub-0.14.1-py3-none-any.whl", hash = "sha256:9fc619170d800ff3793ad37c9757c255c8783051e1b5b00501205eb43ccc4f27"}, - {file = "huggingface_hub-0.14.1.tar.gz", hash = "sha256:9ab899af8e10922eac65e290d60ab956882ab0bf643e3d990b1394b6b47b7fbc"}, + {file = "huggingface_hub-0.15.1-py3-none-any.whl", hash = "sha256:05b0fb0abbf1f625dfee864648ac3049fe225ac4371c7bafaca0c2d3a2f83445"}, + {file = "huggingface_hub-0.15.1.tar.gz", hash = "sha256:a61b7d1a7769fe10119e730277c72ab99d95c48d86a3d6da3e9f3d0f632a4081"}, ] [package.dependencies] @@ -1103,13 +1143,13 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pytest", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pytest", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] quality = ["black (>=23.1,<24.0)", "mypy (==0.982)", "ruff (>=0.0.241)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "gradio", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "gradio", "jedi", "numpy", "pytest", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] @@ -1154,14 +1194,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = 
"importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -1170,7 +1210,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -1186,14 +1226,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.23.1" +version = "6.23.2" description = "IPython Kernel for Jupyter" category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "ipykernel-6.23.1-py3-none-any.whl", hash = "sha256:77aeffab056c21d16f1edccdc9e5ccbf7d96eb401bd6703610a21be8b068aadc"}, - {file = "ipykernel-6.23.1.tar.gz", hash = "sha256:1aba0ae8453e15e9bc6b24e497ef6840114afcdb832ae597f32137fa19d42a6f"}, + {file = "ipykernel-6.23.2-py3-none-any.whl", hash = "sha256:7ccb6e2d32fd958c21453db494c914f3474908a2fdefd99ab548a5375b548d1f"}, + {file = "ipykernel-6.23.2.tar.gz", hash = "sha256:fcfb67c5b504aa1bfcda1c5b3716636239e0f7b9290958f1c558c79b4c0e7ed5"}, ] [package.dependencies] @@ -1332,14 +1372,14 @@ files = [ [[package]] name = "jupyter-client" -version = "8.2.0" +version = "8.3.0" description = "Jupyter protocol implementation and client libraries" category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.2.0-py3-none-any.whl", hash = "sha256:b18219aa695d39e2ad570533e0d71fb7881d35a873051054a84ee2a17c4b7389"}, - {file = "jupyter_client-8.2.0.tar.gz", hash = "sha256:9fe233834edd0e6c0aa5f05ca2ab4bdea1842bfd2d8a932878212fc5301ddaf0"}, + {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, + {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, ] [package.dependencies] @@ -1356,14 +1396,14 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.3.0" +version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"}, - {file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"}, + {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, + {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, ] [package.dependencies] @@ -1467,62 +1507,62 @@ files = [ [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -2006,37 +2046,37 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandas" -version = "2.0.1" +version = "2.0.2" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "pandas-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70a996a1d2432dadedbb638fe7d921c88b0cc4dd90374eab51bb33dc6c0c2a12"}, - {file = "pandas-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:909a72b52175590debbf1d0c9e3e6bce2f1833c80c76d80bd1aa09188be768e5"}, - {file = "pandas-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe7914d8ddb2d54b900cec264c090b88d141a1eed605c9539a187dbc2547f022"}, - {file = "pandas-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a514ae436b23a92366fbad8365807fc0eed15ca219690b3445dcfa33597a5cc"}, - {file = "pandas-2.0.1-cp310-cp310-win32.whl", hash = "sha256:12bd6618e3cc737c5200ecabbbb5eaba8ab645a4b0db508ceeb4004bb10b060e"}, - {file = "pandas-2.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:2b6fe5f7ce1cba0e74188c8473c9091ead9b293ef0a6794939f8cc7947057abd"}, - {file = "pandas-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:00959a04a1d7bbc63d75a768540fb20ecc9e65fd80744c930e23768345a362a7"}, - {file = "pandas-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af2449e9e984dfad39276b885271ba31c5e0204ffd9f21f287a245980b0e4091"}, - {file = "pandas-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910df06feaf9935d05247db6de452f6d59820e432c18a2919a92ffcd98f8f79b"}, - {file = "pandas-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0067f2419f933101bdc6001bcea1d50812afbd367b30943417d67fbb99678"}, - {file = "pandas-2.0.1-cp311-cp311-win32.whl", hash = "sha256:7b8395d335b08bc8b050590da264f94a439b4770ff16bb51798527f1dd840388"}, - {file = "pandas-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:8db5a644d184a38e6ed40feeb12d410d7fcc36648443defe4707022da127fc35"}, - {file = "pandas-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7bbf173d364130334e0159a9a034f573e8b44a05320995127cf676b85fd8ce86"}, - {file = "pandas-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c0853d487b6c868bf107a4b270a823746175b1932093b537b9b76c639fc6f7e"}, - {file = "pandas-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25e23a03f7ad7211ffa30cb181c3e5f6d96a8e4cb22898af462a7333f8a74eb"}, - {file = "pandas-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e09a53a4fe8d6ae2149959a2d02e1ef2f4d2ceb285ac48f74b79798507e468b4"}, - {file = "pandas-2.0.1-cp38-cp38-win32.whl", hash = "sha256:a2564629b3a47b6aa303e024e3d84e850d36746f7e804347f64229f8c87416ea"}, - {file = "pandas-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:03e677c6bc9cfb7f93a8b617d44f6091613a5671ef2944818469be7b42114a00"}, - {file = "pandas-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d099ecaa5b9e977b55cd43cf842ec13b14afa1cfa51b7e1179d90b38c53ce6a"}, - {file = "pandas-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a37ee35a3eb6ce523b2c064af6286c45ea1c7ff882d46e10d0945dbda7572753"}, - {file = "pandas-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:320b180d125c3842c5da5889183b9a43da4ebba375ab2ef938f57bf267a3c684"}, - {file = "pandas-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18d22cb9043b6c6804529810f492ab09d638ddf625c5dea8529239607295cb59"}, - {file = "pandas-2.0.1-cp39-cp39-win32.whl", hash = "sha256:90d1d365d77d287063c5e339f49b27bd99ef06d10a8843cf00b1a49326d492c1"}, - {file = "pandas-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:99f7192d8b0e6daf8e0d0fd93baa40056684e4b4aaaef9ea78dff34168e1f2f0"}, - {file = "pandas-2.0.1.tar.gz", hash = "sha256:19b8e5270da32b41ebf12f0e7165efa7024492e9513fb46fb631c5022ae5709d"}, + {file = "pandas-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ebb9f1c22ddb828e7fd017ea265a59d80461d5a79154b49a4207bd17514d122"}, + {file = "pandas-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eb09a242184092f424b2edd06eb2b99d06dc07eeddff9929e8667d4ed44e181"}, + {file = "pandas-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7319b6e68de14e6209460f72a8d1ef13c09fb3d3ef6c37c1e65b35d50b5c145"}, + {file = "pandas-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd46bde7309088481b1cf9c58e3f0e204b9ff9e3244f441accd220dd3365ce7c"}, + {file = "pandas-2.0.2-cp310-cp310-win32.whl", hash = 
"sha256:51a93d422fbb1bd04b67639ba4b5368dffc26923f3ea32a275d2cc450f1d1c86"}, + {file = "pandas-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:66d00300f188fa5de73f92d5725ced162488f6dc6ad4cecfe4144ca29debe3b8"}, + {file = "pandas-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02755de164da6827764ceb3bbc5f64b35cb12394b1024fdf88704d0fa06e0e2f"}, + {file = "pandas-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0a1e0576611641acde15c2322228d138258f236d14b749ad9af498ab69089e2d"}, + {file = "pandas-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6b5f14cd24a2ed06e14255ff40fe2ea0cfaef79a8dd68069b7ace74bd6acbba"}, + {file = "pandas-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50e451932b3011b61d2961b4185382c92cc8c6ee4658dcd4f320687bb2d000ee"}, + {file = "pandas-2.0.2-cp311-cp311-win32.whl", hash = "sha256:7b21cb72958fc49ad757685db1919021d99650d7aaba676576c9e88d3889d456"}, + {file = "pandas-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:c4af689352c4fe3d75b2834933ee9d0ccdbf5d7a8a7264f0ce9524e877820c08"}, + {file = "pandas-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69167693cb8f9b3fc060956a5d0a0a8dbfed5f980d9fd2c306fb5b9c855c814c"}, + {file = "pandas-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:30a89d0fec4263ccbf96f68592fd668939481854d2ff9da709d32a047689393b"}, + {file = "pandas-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a18e5c72b989ff0f7197707ceddc99828320d0ca22ab50dd1b9e37db45b010c0"}, + {file = "pandas-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7376e13d28eb16752c398ca1d36ccfe52bf7e887067af9a0474de6331dd948d2"}, + {file = "pandas-2.0.2-cp38-cp38-win32.whl", hash = "sha256:6d6d10c2142d11d40d6e6c0a190b1f89f525bcf85564707e31b0a39e3b398e08"}, + {file = "pandas-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:e69140bc2d29a8556f55445c15f5794490852af3de0f609a24003ef174528b79"}, + {file = "pandas-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b42b120458636a981077cfcfa8568c031b3e8709701315e2bfa866324a83efa8"}, + {file = "pandas-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f908a77cbeef9bbd646bd4b81214cbef9ac3dda4181d5092a4aa9797d1bc7774"}, + {file = "pandas-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713f2f70abcdade1ddd68fc91577cb090b3544b07ceba78a12f799355a13ee44"}, + {file = "pandas-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf3f0c361a4270185baa89ec7ab92ecaa355fe783791457077473f974f654df5"}, + {file = "pandas-2.0.2-cp39-cp39-win32.whl", hash = "sha256:598e9020d85a8cdbaa1815eb325a91cfff2bb2b23c1442549b8a3668e36f0f77"}, + {file = "pandas-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:77550c8909ebc23e56a89f91b40ad01b50c42cfbfab49b3393694a50549295ea"}, + {file = "pandas-2.0.2.tar.gz", hash = "sha256:dd5476b6c3fe410ee95926873f377b856dbc4e81a9c605a0dc05aaccc6a7c6c6"}, ] [package.dependencies] @@ -2233,30 +2273,30 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, + {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] @@ -2318,34 +2358,34 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "3.20.2" +version = "3.20.3" description = "Protocol Buffers" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-3.20.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559"}, - {file = "protobuf-3.20.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804"}, - {file = "protobuf-3.20.2-cp310-cp310-win32.whl", hash = "sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c"}, - {file = "protobuf-3.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f"}, - {file = "protobuf-3.20.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0"}, - {file = "protobuf-3.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d"}, - {file = "protobuf-3.20.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b"}, - {file = "protobuf-3.20.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359"}, - {file = "protobuf-3.20.2-cp37-cp37m-win32.whl", hash = "sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe"}, - {file = "protobuf-3.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334"}, - {file = 
"protobuf-3.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a"}, - {file = "protobuf-3.20.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0"}, - {file = "protobuf-3.20.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978"}, - {file = "protobuf-3.20.2-cp38-cp38-win32.whl", hash = "sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151"}, - {file = "protobuf-3.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3"}, - {file = "protobuf-3.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d"}, - {file = "protobuf-3.20.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb"}, - {file = "protobuf-3.20.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3"}, - {file = "protobuf-3.20.2-cp39-cp39-win32.whl", hash = "sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1"}, - {file = "protobuf-3.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422"}, - {file = "protobuf-3.20.2-py2.py3-none-any.whl", hash = "sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019"}, - {file = "protobuf-3.20.2.tar.gz", hash = "sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = 
"protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] [[package]] @@ -2404,37 +2444,37 @@ tests = ["pytest"] [[package]] name = "pyarrow" -version = "12.0.0" +version = "12.0.1" description = "Python library for Apache Arrow" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "pyarrow-12.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:3b97649c8a9a09e1d8dc76513054f1331bd9ece78ee39365e6bf6bc7503c1e94"}, - {file = "pyarrow-12.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc4ea634dacb03936f50fcf59574a8e727f90c17c24527e488d8ceb52ae284de"}, - {file = "pyarrow-12.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d568acfca3faa565d663e53ee34173be8e23a95f78f2abfdad198010ec8f745"}, - {file = "pyarrow-12.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b50bb9a82dca38a002d7cbd802a16b1af0f8c50ed2ec94a319f5f2afc047ee9"}, - {file = "pyarrow-12.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d1733b1ea086b3c101427d0e57e2be3eb964686e83c2363862a887bb5c41fa8"}, - {file = "pyarrow-12.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:a7cd32fe77f967fe08228bc100433273020e58dd6caced12627bcc0a7675a513"}, - {file = "pyarrow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92fb031e6777847f5c9b01eaa5aa0c9033e853ee80117dce895f116d8b0c3ca3"}, - {file = "pyarrow-12.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:280289ebfd4ac3570f6b776515baa01e4dcbf17122c401e4b7170a27c4be63fd"}, - {file = "pyarrow-12.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:272f147d4f8387bec95f17bb58dcfc7bc7278bb93e01cb7b08a0e93a8921e18e"}, - {file = "pyarrow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:0846ace49998825eda4722f8d7f83fa05601c832549c9087ea49d6d5397d8cec"}, - {file = "pyarrow-12.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:993287136369aca60005ee7d64130f9466489c4f7425f5c284315b0a5401ccd9"}, - {file = "pyarrow-12.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a7b6a765ee4f88efd7d8348d9a1f804487d60799d0428b6ddf3344eaef37282"}, - {file = 
"pyarrow-12.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c4fce253d5bdc8d62f11cfa3da5b0b34b562c04ce84abb8bd7447e63c2b327"}, - {file = "pyarrow-12.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e6be4d85707fc8e7a221c8ab86a40449ce62559ce25c94321df7c8500245888f"}, - {file = "pyarrow-12.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ea830d9f66bfb82d30b5794642f83dd0e4a718846462d22328981e9eb149cba8"}, - {file = "pyarrow-12.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7b5b9f60d9ef756db59bec8d90e4576b7df57861e6a3d6a8bf99538f68ca15b3"}, - {file = "pyarrow-12.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99e559d27db36ad3a33868a475f03e3129430fc065accc839ef4daa12c6dab6"}, - {file = "pyarrow-12.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b0810864a593b89877120972d1f7af1d1c9389876dbed92b962ed81492d3ffc"}, - {file = "pyarrow-12.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:23a77d97f4d101ddfe81b9c2ee03a177f0e590a7e68af15eafa06e8f3cf05976"}, - {file = "pyarrow-12.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:2cc63e746221cddb9001f7281dee95fd658085dd5b717b076950e1ccc607059c"}, - {file = "pyarrow-12.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8c26912607e26c2991826bbaf3cf2b9c8c3e17566598c193b492f058b40d3a4"}, - {file = "pyarrow-12.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8b90efc290e99a81d06015f3a46601c259ecc81ffb6d8ce288c91bd1b868c9"}, - {file = "pyarrow-12.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2466be046b81863be24db370dffd30a2e7894b4f9823fb60ef0a733c31ac6256"}, - {file = "pyarrow-12.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:0e36425b1c1cbf5447718b3f1751bf86c58f2b3ad299f996cd9b1aa040967656"}, - {file = "pyarrow-12.0.0.tar.gz", hash = "sha256:19c812d303610ab5d664b7b1de4051ae23565f9f94d04cbea9e50569746ae1ee"}, + {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, + {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, + {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, + {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, + {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, + {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, + {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, + {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, + {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, + {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, + {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, ] [package.dependencies] @@ -2469,14 +2509,14 @@ plugins = ["importlib-metadata"] [[package]] name = "pyparsing" -version = "3.0.9" +version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, ] [package.extras] @@ -2484,14 +2524,14 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.3.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = 
"pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"}, + {file = "pytest-7.3.2.tar.gz", hash = "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"}, ] [package.dependencies] @@ -2503,18 +2543,18 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -2627,89 +2667,89 @@ files = [ [[package]] name = "pyzmq" -version = "25.0.2" +version = "25.1.0" description = "Python bindings for 0MQ" category = "main" optional = true python-versions = ">=3.6" files = [ - {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"}, - {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"}, - {file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"}, - {file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"}, - {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"}, - {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"}, - {file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"}, - {file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"}, - {file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"}, - {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"}, - {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"}, - {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"}, - {file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"}, - {file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"}, - {file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"}, - {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"}, - {file = 
"pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"}, - {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"}, - {file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"}, - {file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"}, - {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"}, - {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"}, - {file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"}, - {file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"}, - {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"}, - {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"}, - {file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"}, - {file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"}, - {file = 
"pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"}, - {file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"}, + {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, + {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, + {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, + {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, + {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, + {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, + {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = 
"sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, + {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, + {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, + {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", 
hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, + {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, + {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, + {file = "pyzmq-25.1.0.tar.gz", hash = 
"sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, ] [package.dependencies] @@ -2717,112 +2757,112 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "regex" -version = "2023.5.5" +version = "2023.6.3" description = "Alternative regular expression module, to replace re." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"}, - {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"}, - {file = "regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"}, - {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"}, - {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"}, - {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"}, - {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, - {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, - 
{file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, - {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, - {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, - {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, - {file = 
"regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, - {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, - {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, - {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, - {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, 
- {file = "regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = 
"regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = 
"regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, ] [[package]] name = "requests" -version = "2.30.0" +version = "2.31.0" description = 
"Python HTTP for Humans." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, - {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] @@ -2835,25 +2875,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "responses" -version = "0.18.0" -description = "A utility library for mocking out the `requests` Python library." -category = "main" -optional = true -python-versions = ">=3.7" -files = [ - {file = "responses-0.18.0-py3-none-any.whl", hash = "sha256:15c63ad16de13ee8e7182d99c9334f64fd81f1ee79f90748d527c28f7ca9dd51"}, - {file = "responses-0.18.0.tar.gz", hash = "sha256:380cad4c1c1dc942e5e8a8eaae0b4d4edf708f4f010db8b7bcfafad1fcd254ff"}, -] - -[package.dependencies] -requests = ">=2.0,<3.0" -urllib3 = ">=1.25.10" - -[package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"] - [[package]] name = "rich" version = "10.16.2" @@ -2876,14 +2897,14 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruamel-yaml" -version = "0.17.26" +version = "0.17.32" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "dev" optional = false python-versions = ">=3" files = [ - {file = "ruamel.yaml-0.17.26-py3-none-any.whl", hash = "sha256:25d0ee82a0a9a6f44683dcf8c282340def4074a4562f3a24f55695bb254c1693"}, - {file = "ruamel.yaml-0.17.26.tar.gz", hash = "sha256:baa2d0a5aad2034826c439ce61c142c07082b76f4791d54145e131206e998059"}, + {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, + {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, ] [package.dependencies] @@ -2964,6 +2985,67 @@ files = [ {file = "ruff-0.0.252.tar.gz", hash = "sha256:6992611ab7bdbe7204e4831c95ddd3febfeece2e6f5e44bbed044454c7db0f63"}, ] +[[package]] +name = "safetensors" +version = "0.3.1" +description = "Fast and Safe Tensor serialization" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "safetensors-0.3.1-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:2ae9b7dd268b4bae6624729dac86deb82104820e9786429b0583e5168db2f770"}, + {file = "safetensors-0.3.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:08c85c1934682f1e2cd904d38433b53cd2a98245a7cc31f5689f9322a2320bbf"}, + {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba625c7af9e1c5d0d91cb83d2fba97d29ea69d4db2015d9714d24c7f6d488e15"}, + {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b57d5890c619ec10d9f1b6426b8690d0c9c2868a90dc52f13fae6f6407ac141f"}, + {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c9f562ea696d50b95cadbeb1716dc476714a87792ffe374280c0835312cbfe2"}, + {file = 
"safetensors-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c115951b3a865ece8d98ee43882f2fd0a999c0200d6e6fec24134715ebe3b57"}, + {file = "safetensors-0.3.1-cp310-cp310-win32.whl", hash = "sha256:118f8f7503ea312fc7af27e934088a1b589fb1eff5a7dea2cd1de6c71ee33391"}, + {file = "safetensors-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:54846eaae25fded28a7bebbb66be563cad221b4c80daee39e2f55df5e5e0266f"}, + {file = "safetensors-0.3.1-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:5af82e10946c4822506db0f29269f43147e889054704dde994d4e22f0c37377b"}, + {file = "safetensors-0.3.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:626c86dd1d930963c8ea7f953a3787ae85322551e3a5203ac731d6e6f3e18f44"}, + {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e30677e6af1f4cc4f2832546e91dbb3b0aa7d575bfa473d2899d524e1ace08"}, + {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d534b80bc8d39945bb902f34b0454773971fe9e5e1f2142af451759d7e52b356"}, + {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ddd0ddd502cf219666e7d30f23f196cb87e829439b52b39f3e7da7918c3416df"}, + {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997a2cc14023713f423e6d16536d55cb16a3d72850f142e05f82f0d4c76d383b"}, + {file = "safetensors-0.3.1-cp311-cp311-win32.whl", hash = "sha256:6ae9ca63d9e22f71ec40550207bd284a60a6b4916ae6ca12c85a8d86bf49e0c3"}, + {file = "safetensors-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:62aa7421ca455418423e35029524489480adda53e3f702453580180ecfebe476"}, + {file = "safetensors-0.3.1-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:6d54b3ed367b6898baab75dfd057c24f36ec64d3938ffff2af981d56bfba2f42"}, + {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262423aeda91117010f8c607889066028f680fbb667f50cfe6eae96f22f9d150"}, + {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10efe2513a8327fd628cea13167089588acc23093ba132aecfc536eb9a4560fe"}, + {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:689b3d6a7ebce70ee9438267ee55ea89b575c19923876645e927d08757b552fe"}, + {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14cd9a87bc73ce06903e9f8ee8b05b056af6f3c9f37a6bd74997a16ed36ff5f4"}, + {file = "safetensors-0.3.1-cp37-cp37m-win32.whl", hash = "sha256:a77cb39624480d5f143c1cc272184f65a296f573d61629eff5d495d2e0541d3e"}, + {file = "safetensors-0.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9eff3190bfbbb52eef729911345c643f875ca4dbb374aa6c559675cfd0ab73db"}, + {file = "safetensors-0.3.1-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:05cbfef76e4daa14796db1bbb52072d4b72a44050c368b2b1f6fd3e610669a89"}, + {file = "safetensors-0.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:c49061461f4a81e5ec3415070a3f135530834c89cbd6a7db7cd49e3cb9d9864b"}, + {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cf7e73ca42974f098ce0cf4dd8918983700b6b07a4c6827d50c8daefca776e"}, + {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04f909442d6223ff0016cd2e1b2a95ef8039b92a558014627363a2e267213f62"}, + {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:2c573c5a0d5d45791ae8c179e26d74aff86e719056591aa7edb3ca7be55bc961"}, + {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6994043b12e717cf2a6ba69077ac41f0d3675b2819734f07f61819e854c622c7"}, + {file = "safetensors-0.3.1-cp38-cp38-win32.whl", hash = "sha256:158ede81694180a0dbba59422bc304a78c054b305df993c0c6e39c6330fa9348"}, + {file = "safetensors-0.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdc725beff7121ea8d39a7339f5a6abcb01daa189ea56290b67fe262d56e20f"}, + {file = "safetensors-0.3.1-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:cba910fcc9e5e64d32d62b837388721165e9c7e45d23bc3a38ad57694b77f40d"}, + {file = "safetensors-0.3.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a4f7dbfe7285573cdaddd85ef6fa84ebbed995d3703ab72d71257944e384612f"}, + {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54aed0802f9eaa83ca7b1cbb986bfb90b8e2c67b6a4bcfe245627e17dad565d4"}, + {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34b75a766f3cfc99fd4c33e329b76deae63f5f388e455d863a5d6e99472fca8e"}, + {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a0f31904f35dc14919a145b2d7a2d8842a43a18a629affe678233c4ea90b4af"}, + {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcf527ecc5f58907fd9031510378105487f318cc91ecdc5aee3c7cc8f46030a8"}, + {file = "safetensors-0.3.1-cp39-cp39-win32.whl", hash = "sha256:e2f083112cf97aa9611e2a05cc170a2795eccec5f6ff837f4565f950670a9d83"}, + {file = "safetensors-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f4f614b8e8161cd8a9ca19c765d176a82b122fa3d3387b77862145bfe9b4e93"}, + {file = "safetensors-0.3.1.tar.gz", hash = "sha256:571da56ff8d0bec8ae54923b621cda98d36dcef10feb36fd492c4d0c2cd0e869"}, +] + +[package.extras] +all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] +dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"] +torch = ["torch (>=1.10)"] + [[package]] name = "safety" version = "2.3.5" @@ -3127,19 +3209,19 @@ files = [ [[package]] name = "setuptools" -version = "67.7.2" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = 
"setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -3660,50 +3742,51 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] [[package]] name = "transformers" -version = "4.29.1" +version = "4.30.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "transformers-4.29.1-py3-none-any.whl", hash = "sha256:75f851f2420c26410edbdf4a2a1a5b434ab2b96aea36eb5931d06cc3b2e7b509"}, - {file = "transformers-4.29.1.tar.gz", hash = "sha256:3dc9cd198918e140468edbf37d7edf3b7a75633655ce0771ce323bbf8c118c4d"}, + {file = "transformers-4.30.2-py3-none-any.whl", hash = "sha256:c332e3a3097f9ed89ce556b403251235931c00237b8bc2d7adaa19d226c13f1d"}, + {file = "transformers-4.30.2.tar.gz", hash = "sha256:f4a8aac4e1baffab4033f4a345b0d7dc7957d12a4f1ba969afea08205a513045"}, ] [package.dependencies] -accelerate = {version = ">=0.19.0", optional = true, markers = "extra == \"torch\""} +accelerate = {version = ">=0.20.2", optional = true, markers = "extra == \"torch\""} filelock = "*" huggingface-hub = ">=0.14.1,<1.0" numpy = ">=1.17" packaging = ">=20.0" -protobuf = {version = "<=3.20.2", optional = true, markers = "extra == \"sentencepiece\""} +protobuf = {version = "<=3.20.3", optional = true, markers = "extra == \"sentencepiece\""} pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" +safetensors = ">=0.3.1" sentencepiece = {version = ">=0.1.91,<0.1.92 || >0.1.92", optional = true, markers = "extra == \"sentencepiece\""} tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" torch = {version = ">=1.9,<1.12.0 || >1.12.0", optional = true, markers = "extra == \"torch\""} tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.19.0)"] -agents = ["Pillow", "accelerate (>=0.19.0)", 
"datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] -all = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] +accelerate = ["accelerate (>=0.20.2)"] +agents = ["Pillow", "accelerate (>=0.20.2)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] +all = ["Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.3)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.19.0)", "deepspeed (>=0.8.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.6.9)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", 
"unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "numba (<0.57.0)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] +deepspeed = ["accelerate (>=0.20.2)", "deepspeed (>=0.8.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.2)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.3)", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", 
"flax (>=0.4.1,<=0.6.9)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.20.2)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.3)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] docs-specific = 
["hf-doc-builder"] fairscale = ["fairscale (>0.3)"] flax = ["flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] modelcreation = ["cookiecutter (==1.7.3)"] natten = ["natten (>=0.14.6)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] @@ -3713,21 +3796,21 @@ quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", ray = ["ray[tune]"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] +sentencepiece = ["protobuf (<=3.20.3)", "sentencepiece (>=0.1.91,!=0.1.92)"] serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.3)", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] timm = ["timm"] tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.19.0)", "torch (>=1.9,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch = ["accelerate (>=0.20.2)", "torch (>=1.9,!=1.12.0)"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] torch-vision = ["Pillow", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", 
"tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] +torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.3)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] video = ["av (==9.2.0)", "decord (==0.6.0)"] vision = ["Pillow"] @@ -3749,14 +3832,14 @@ test = ["mypy", "pytest", "typing-extensions"] [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] @@ -3773,14 +3856,14 @@ files = [ [[package]] name = "urllib3" -version = "2.0.2" +version = "2.0.3" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, + {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, ] [package.extras] @@ -3791,24 +3874,24 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.23.0" +version = "20.23.1" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, - {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, + {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, + {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.11,<4" -platformdirs = ">=3.2,<4" +filelock = ">=3.12,<4" +platformdirs = ">=3.5.1,<4" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", 
"sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] [[package]] name = "wcwidth" @@ -4069,4 +4152,4 @@ sklearn = ["scikit-learn", "joblib"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "a1543b6d375fa418c0fdba25768ba354b1810f465f1913ba3dc477b8c2e2cf7e" +content-hash = "8db9bef0c3d0f20e252eac4c373d1b385a3a4f1c3b5b880da9eb02fb05399779" diff --git a/requirements-dev.txt b/requirements-dev.txt index 1bc901d0..f505b106 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -accelerate==0.19.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +accelerate==0.20.3 ; python_full_version >= "3.8.1" and python_version < "3.12" aiohttp==3.8.4 ; python_full_version >= "3.8.1" and python_version < "3.12" aiosignal==1.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" alabaster==0.7.13 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -20,10 +20,10 @@ click==8.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" comm==0.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" commonmark==0.9.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -coverage[toml]==7.2.5 ; python_full_version >= "3.8.1" and python_version < "3.12" +coverage[toml]==7.2.7 ; python_full_version >= "3.8.1" and python_version < "3.12" cycler==0.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" darglint==1.8.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -datasets==2.12.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +datasets==2.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" debugpy==1.6.7 ; python_full_version >= "3.8.1" and python_version < "3.12" decorator==5.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" dill==0.3.6 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -32,31 +32,31 @@ docutils==0.19 ; python_full_version >= "3.8.1" and python_version < "3.12" dparse==0.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" exceptiongroup==1.1.1 ; python_full_version >= "3.8.1" and python_version < "3.11" executing==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -filelock==3.12.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -fonttools==4.39.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +filelock==3.12.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +fonttools==4.40.0 ; python_full_version >= "3.8.1" and python_version < "3.12" frozenlist==1.3.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -fsspec==2023.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -fsspec[http]==2023.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +fsspec==2023.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +fsspec[http]==2023.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" furo==2022.12.7 ; python_full_version >= "3.8.1" and python_version < "3.12" gitdb==4.0.10 ; python_full_version >= "3.8.1" and python_version < "3.12" gitpython==3.1.31 ; 
python_full_version >= "3.8.1" and python_version < "3.12" -huggingface-hub==0.14.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +huggingface-hub==0.15.1 ; python_full_version >= "3.8.1" and python_version < "3.12" identify==2.5.24 ; python_full_version >= "3.8.1" and python_version < "3.12" idna==3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" imagesize==1.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -importlib-metadata==6.6.0 ; python_full_version >= "3.8.1" and python_version < "3.10" +importlib-metadata==6.7.0 ; python_full_version >= "3.8.1" and python_version < "3.10" iniconfig==2.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipykernel==6.23.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipykernel==6.23.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ipython==8.12.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ipywidgets==8.0.6 ; python_full_version >= "3.8.1" and python_version < "3.12" jedi==0.18.2 ; python_full_version >= "3.8.1" and python_version < "3.12" jinja2==3.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" joblib==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -jupyter-client==8.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -jupyter-core==5.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +jupyter-client==8.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +jupyter-core==5.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" jupyterlab-widgets==3.0.7 ; python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -markupsafe==2.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +markupsafe==2.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib-inline==0.1.6 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib==3.5.3 ; python_full_version >= "3.8.1" and python_version < "3.12" mpmath==1.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -79,7 +79,7 @@ nvidia-cusparse-cu11==11.7.4.91 ; python_full_version >= "3.8.1" and python_vers nvidia-nccl-cu11==2.14.3 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" nvidia-nvtx-cu11==11.7.91 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" packaging==21.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -pandas==2.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +pandas==2.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" parso==0.8.3 ; python_full_version >= "3.8.1" and python_version < "3.12" pastel==0.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pathspec==0.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -87,38 +87,38 @@ pbr==5.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pexpect==4.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform != "win32" pickleshare==0.7.5 ; python_full_version >= "3.8.1" and python_version < "3.12" pillow==9.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -platformdirs==3.5.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pluggy==1.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +platformdirs==3.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pluggy==1.2.0 ; 
python_full_version >= "3.8.1" and python_version < "3.12" poethepoet==0.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pre-commit==2.21.0 ; python_full_version >= "3.8.1" and python_version < "3.12" prompt-toolkit==3.0.38 ; python_full_version >= "3.8.1" and python_version < "3.12" -protobuf==3.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +protobuf==3.20.3 ; python_full_version >= "3.8.1" and python_version < "3.12" psutil==5.9.5 ; python_full_version >= "3.8.1" and python_version < "3.12" ptyprocess==0.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform != "win32" pure-eval==0.2.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -pyarrow==12.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pyarrow==12.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pycparser==2.21 ; python_full_version >= "3.8.1" and python_version < "3.12" and implementation_name == "pypy" pygments==2.15.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" -pytest-cov==4.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -pytest==7.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +pyparsing==3.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pytest-cov==4.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pytest==7.3.2 ; python_full_version >= "3.8.1" and python_version < "3.12" python-dateutil==2.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pytz==2023.3 ; python_full_version >= "3.8.1" and python_version < "3.12" pywin32==306 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_full_version >= "3.8.1" and python_version < "3.12" pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -pyzmq==25.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -regex==2023.5.5 ; python_full_version >= "3.8.1" and python_version < "3.12" -requests==2.30.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -responses==0.18.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pyzmq==25.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +regex==2023.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +requests==2.31.0 ; python_full_version >= "3.8.1" and python_version < "3.12" rich==10.16.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ruamel-yaml-clib==0.2.7 ; platform_python_implementation == "CPython" and python_version < "3.12" and python_full_version >= "3.8.1" -ruamel-yaml==0.17.26 ; python_full_version >= "3.8.1" and python_version < "3.12" +ruamel-yaml==0.17.32 ; python_full_version >= "3.8.1" and python_version < "3.12" ruff==0.0.252 ; python_full_version >= "3.8.1" and python_version < "3.12" +safetensors==0.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" safety==2.3.5 ; python_full_version >= "3.8.1" and python_version < "3.12" scikit-learn==1.2.2 ; python_full_version >= "3.8.1" and python_version < "3.12" scipy==1.10.1 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.99 ; python_full_version >= "3.8.1" and python_version < "3.12" -setuptools==67.7.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +setuptools==68.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" six==1.16.0 ; python_full_version >= "3.8.1" and python_version < "3.12" smmap==5.0.0 ; 
python_full_version >= "3.8.1" and python_version < "3.12" snowballstemmer==2.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -147,12 +147,12 @@ torchtyping==0.1.4 ; python_full_version >= "3.8.1" and python_version < "3.12" tornado==6.3.2 ; python_full_version >= "3.8.1" and python_version < "3.12" tqdm==4.65.0 ; python_full_version >= "3.8.1" and python_version < "3.12" traitlets==5.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -transformers[sentencepiece,tokenizers,torch]==4.29.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +transformers[sentencepiece,tokenizers,torch]==4.30.2 ; python_full_version >= "3.8.1" and python_version < "3.12" typeguard==2.13.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -typing-extensions==4.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +typing-extensions==4.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" tzdata==2023.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -urllib3==2.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -virtualenv==20.23.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +urllib3==2.0.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +virtualenv==20.23.1 ; python_full_version >= "3.8.1" and python_version < "3.12" wcwidth==0.2.6 ; python_full_version >= "3.8.1" and python_version < "3.12" wheel==0.40.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" widgetsnbextension==4.0.7 ; python_full_version >= "3.8.1" and python_version < "3.12" diff --git a/requirements.txt b/requirements.txt index e9c7f858..4cd744e6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,18 @@ -accelerate==0.19.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +accelerate==0.20.3 ; python_full_version >= "3.8.1" and python_version < "3.12" captum==0.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" certifi==2023.5.7 ; python_full_version >= "3.8.1" and python_version < "3.12" charset-normalizer==3.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" commonmark==0.9.1 ; python_full_version >= "3.8.1" and python_version < "3.12" cycler==0.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -filelock==3.12.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -fonttools==4.39.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -fsspec==2023.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -huggingface-hub==0.14.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +filelock==3.12.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +fonttools==4.40.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +fsspec==2023.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +huggingface-hub==0.15.1 ; python_full_version >= "3.8.1" and python_version < "3.12" idna==3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" jinja2==3.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -markupsafe==2.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +markupsafe==2.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib==3.5.3 ; python_full_version >= "3.8.1" and python_version < "3.12" mpmath==1.3.0 ; python_full_version >= "3.8.1" 
and python_version < "3.12" networkx==3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -32,17 +32,18 @@ packaging==21.3 ; python_full_version >= "3.8.1" and python_version < "3.12" pastel==0.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pillow==9.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" poethepoet==0.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -protobuf==3.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +protobuf==3.20.3 ; python_full_version >= "3.8.1" and python_version < "3.12" psutil==5.9.5 ; python_full_version >= "3.8.1" and python_version < "3.12" pygments==2.15.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" +pyparsing==3.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" python-dateutil==2.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -regex==2023.5.5 ; python_full_version >= "3.8.1" and python_version < "3.12" -requests==2.30.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +regex==2023.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +requests==2.31.0 ; python_full_version >= "3.8.1" and python_version < "3.12" rich==10.16.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +safetensors==0.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.99 ; python_full_version >= "3.8.1" and python_version < "3.12" -setuptools==67.7.2 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" +setuptools==68.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" six==1.16.0 ; python_full_version >= "3.8.1" and python_version < "3.12" sympy==1.12 ; python_full_version >= "3.8.1" and python_version < "3.12" tokenizers==0.13.3 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -50,8 +51,8 @@ tomli==2.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" torch==2.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" torchtyping==0.1.4 ; python_full_version >= "3.8.1" and python_version < "3.12" tqdm==4.65.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -transformers[sentencepiece,tokenizers,torch]==4.29.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +transformers[sentencepiece,tokenizers,torch]==4.30.2 ; python_full_version >= "3.8.1" and python_version < "3.12" typeguard==2.13.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -typing-extensions==4.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -urllib3==2.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +typing-extensions==4.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +urllib3==2.0.3 ; python_full_version >= "3.8.1" and python_version < "3.12" wheel==0.40.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "linux" From 46831bb09cd725921e7946b84a5fc38c39b9b366 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Sat, 24 Jun 2023 15:34:45 +0200 Subject: [PATCH 4/7] Alignment utils and tests, todo auto align --- inseq/attr/feat/attribution_utils.py | 15 +- inseq/attr/feat/feature_attribution.py | 28 +++- inseq/attr/step_functions.py | 12 +- inseq/data/batch.py | 23 +-- inseq/models/attribution_model.py | 58 ++------ inseq/models/decoder_only.py | 8 +- 
inseq/models/encoder_decoder.py | 8 +- inseq/utils/__init__.py | 4 +- inseq/utils/alignment_utils.py | 148 ++++++++++++++++++++ tests/attr/feat/test_feature_attribution.py | 75 ++++++++++ 10 files changed, 284 insertions(+), 95 deletions(-) create mode 100644 inseq/utils/alignment_utils.py diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 9bb3b0a5..8387604e 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -4,7 +4,7 @@ import torch -from ...utils import MissingAlignmentsError, extract_signature_args +from ...utils import extract_signature_args, get_aligned_idx from ...utils.typing import ( OneOrMoreAttributionSequences, OneOrMoreIdSequences, @@ -105,16 +105,9 @@ def join_token_ids( ): curr_seq = [] for pos_idx, (token, token_idx) in enumerate(zip(target_tokens_seq, input_ids_seq)): - # Find all alignment pairs for the current original target - aligned_idxs = [c_idx for idx, c_idx in alignments_seq if idx == pos_idx] - if not aligned_idxs: - raise MissingAlignmentsError( - f"No alignment found for token at index {pos_idx}: {token} ({token_idx}). " - "Please provide alignment pairs that cover all original target tokens." - ) - contrast_position = min(aligned_idxs) - if token != contrast_target_tokens_seq[contrast_position]: - curr_seq.append(TokenWithId(f"{contrast_target_tokens_seq[contrast_position]} → {token}", -1)) + contrast_pos_idx = get_aligned_idx(pos_idx, alignments_seq) + if token != contrast_target_tokens_seq[contrast_pos_idx]: + curr_seq.append(TokenWithId(f"{contrast_target_tokens_seq[contrast_pos_idx]} → {token}", -1)) else: curr_seq.append(TokenWithId(token, token_idx)) sequences.append(curr_seq) diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index 55ac2e03..ec4e21c3 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -31,6 +31,7 @@ FeatureAttributionOutput, FeatureAttributionSequenceOutput, FeatureAttributionStepOutput, + get_batch_from_inputs, ) from ...data.viz import close_progress_bar, get_progress_bar, update_progress_bar from ...utils import ( @@ -306,16 +307,31 @@ def attribute( logger.debug("=" * 30 + f"\nfull batch: {batch}\n" + "=" * 30) # Sources are empty for decoder-only models sequences = self.attribution_model.formatter.get_text_sequences(self.attribution_model, batch) - contrast_batch, contrast_targets_alignments = self.attribution_model.formatter.get_contrast_options_from_args( - attribution_model=self.attribution_model, - args=attributed_fn_args, - target_tokens=batch.target_tokens, - ) + contrast_targets = attributed_fn_args.get("contrast_targets", None) + contrast_targets_alignments = attributed_fn_args.get("contrast_targets_alignments", None) + contrast_targets = [contrast_targets] if isinstance(contrast_targets, str) else contrast_targets + contrast_batch = None + if contrast_targets is not None: + contrast_batch = DecoderOnlyBatch.from_batch( + get_batch_from_inputs( + attribution_model=self.attribution_model, + inputs=contrast_targets, + as_targets=self.attribution_model.is_encoder_decoder, + ) + ) + contrast_targets_alignments = self.attribution_model.formatter.format_contrast_targets_alignments( + contrast_targets_alignments=contrast_targets_alignments, + target_tokens=batch.target_tokens, + ) + attributed_fn_args["contrast_targets_alignments"] = contrast_targets_alignments + if "contrast_targets_alignments" in step_scores_args: + 
step_scores_args["contrast_targets_alignments"] = contrast_targets_alignments target_tokens_with_ids = self.attribution_model.get_token_with_ids( batch, - contrast_batch=contrast_batch, + contrast_target_tokens=contrast_batch.target_tokens if contrast_batch is not None else None, contrast_targets_alignments=contrast_targets_alignments, ) + # Manages front padding for decoder-only models, using 0 as lower bound # when attr_pos_start exceeds target length. targets_lengths = [ diff --git a/inseq/attr/step_functions.py b/inseq/attr/step_functions.py index e942299e..ee0fe4fe 100644 --- a/inseq/attr/step_functions.py +++ b/inseq/attr/step_functions.py @@ -7,7 +7,7 @@ from transformers import AutoModelForCausalLM, AutoModelForSeq2SeqLM from transformers.modeling_outputs import ModelOutput -from ..data import FeatureAttributionInput, slice_batch_from_position +from ..data import DecoderOnlyBatch, FeatureAttributionInput, get_batch_from_inputs, slice_batch_from_position from ..data.aggregation_functions import DEFAULT_ATTRIBUTION_AGGREGATE_DICT from ..utils import extract_signature_args from ..utils.typing import EmbeddingsTensor, IdsTensor, SingleScorePerStepTensor, TargetIdsTensor @@ -127,10 +127,12 @@ def _get_contrast_output( """ c_tgt_ids = None if contrast_targets: - c_batch, contrast_targets_alignments = attribution_model.formatter.get_contrast_options_from_args( - attribution_model=attribution_model, - args={"contrast_targets": contrast_targets, "contrast_targets_alignments": contrast_targets_alignments}, - target_tokens=torch.zeros(decoder_input_ids.size(0), decoder_input_ids.size(1) + 1).long().tolist(), + c_batch = DecoderOnlyBatch.from_batch( + get_batch_from_inputs( + attribution_model=attribution_model, + inputs=contrast_targets, + as_targets=attribution_model.is_encoder_decoder, + ) ) curr_prefix_len = decoder_input_ids.size(1) if len(contrast_targets_alignments) > 0 and isinstance(contrast_targets_alignments[0], list): diff --git a/inseq/data/batch.py b/inseq/data/batch.py index ec5d4d14..dde9a0f5 100644 --- a/inseq/data/batch.py +++ b/inseq/data/batch.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from typing import List, Optional, Tuple, Union -from ..utils import MissingAlignmentsError +from ..utils import get_aligned_idx from ..utils.typing import EmbeddingsTensor, ExpandedTargetIdsTensor, IdsTensor, OneOrMoreTokenSequences from .data_utils import TensorWrapper @@ -233,23 +233,8 @@ def from_batch(self, batch: Batch) -> "DecoderOnlyBatch": def slice_batch_from_position( - batch: DecoderOnlyBatch, curr_position: int, alignments: Optional[List[Tuple[int, int]]] = None + batch: DecoderOnlyBatch, curr_idx: int, alignments: Optional[List[Tuple[int, int]]] = None ) -> Tuple[DecoderOnlyBatch, IdsTensor]: - truncate_idx = curr_position - if alignments: - if len(alignments) > 0 and isinstance(alignments[0], list): - alignments = alignments[0] - # Find all alignment pairs for the current original target - aligned_idxs = [c_idx for idx, c_idx in alignments if idx == curr_position] - - if not aligned_idxs: - raise MissingAlignmentsError( - f"No alignment found for original target token at index {curr_position}. " - "Please provide alignment pairs that cover all original target tokens." 
- ) - # Select the minimum index to identify the next target token - truncate_idx = min(aligned_idxs) - # We select the target token and truncate the batch up to the selected index + truncate_idx = get_aligned_idx(curr_idx, alignments) tgt_ids = batch.target_ids[:, truncate_idx] - batch = batch[:truncate_idx] - return batch, tgt_ids + return batch[:truncate_idx], tgt_ids diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index 8452377a..1ada9917 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -14,12 +14,12 @@ FeatureAttributionInput, FeatureAttributionOutput, FeatureAttributionStepOutput, - get_batch_from_inputs, ) from ..utils import ( MissingAttributionMethodError, check_device, format_input_texts, + get_adjusted_alignments, get_default_device, isnotebook, pretty_tensor, @@ -143,52 +143,24 @@ def get_text_sequences( raise NotImplementedError() @staticmethod - def get_contrast_options_from_args( - attribution_model: "AttributionModel", args: Dict[str, Any], target_tokens: List[List[str]] + def format_contrast_targets_alignments( + contrast_targets_alignments: Union[List[Tuple[int, int]], List[List[Tuple[int, int]]]], + target_tokens: List[List[str]], ) -> Tuple[DecoderOnlyBatch, Optional[List[List[Tuple[int, int]]]]]: - contrast_targets = args.get("contrast_targets", None) - contrast_targets_alignments = args.get("contrast_targets_alignments", None) - contrast_targets = [contrast_targets] if isinstance(contrast_targets, str) else contrast_targets - contrast_batch = None - adjusted_alignments = None - if contrast_targets is not None: - contrast_batch = DecoderOnlyBatch.from_batch( - get_batch_from_inputs( - attribution_model=attribution_model, - inputs=contrast_targets, - as_targets=attribution_model.is_encoder_decoder, - ) - ) + adjusted_alignments = [] + if contrast_targets_alignments: if isinstance(contrast_targets_alignments, list) and len(contrast_targets_alignments) > 0: if isinstance(contrast_targets_alignments[0], tuple): contrast_targets_alignments = [contrast_targets_alignments] if not isinstance(contrast_targets_alignments[0], list): raise ValueError("Invalid contrast_targets_alignments were provided.") + for seq_idx, tokens in enumerate(target_tokens): + if isinstance(contrast_targets_alignments, list): + aligns = contrast_targets_alignments[seq_idx] else: - contrast_targets_alignments = None - - if contrast_targets_alignments is None: - adjusted_alignments = [[(idx, idx) for idx, _ in enumerate(seq)] for seq in target_tokens] - else: - # Sort alignments - contrast_targets_alignments = [ - sorted(seq, key=lambda x: (x[0], x[1])) for seq in contrast_targets_alignments - ] - - # Filling alignments with missing tokens - # Assuming 1:1 mapping to cover all tokens from the original sequence - adjusted_alignments = [] - for seq_idx, seq in enumerate(target_tokens): - adjusted_seq_alignments = [] - for pair_idx, _ in enumerate(seq): - match_pairs = [x for x in contrast_targets_alignments[seq_idx] if x[0] == pair_idx] - if not match_pairs: - adjusted_seq_alignments.append((pair_idx, pair_idx)) - else: - adjusted_seq_alignments.append(match_pairs[0]) - adjusted_alignments.append(adjusted_seq_alignments) - - return contrast_batch, adjusted_alignments + aligns = contrast_targets_alignments + adjusted_alignments.append(get_adjusted_alignments(aligns, fill_missing_len=len(tokens))) + return adjusted_alignments class AttributionModel(ABC, torch.nn.Module): @@ -474,14 +446,14 @@ def embed(self, inputs: Union[TextInput, 
IdsTensor], as_targets: bool = False): def get_token_with_ids( self, batch: Union[EncoderDecoderBatch, DecoderOnlyBatch], - contrast_batch: Optional[DecoderOnlyBatch] = None, + contrast_target_tokens: Optional[OneOrMoreTokenSequences] = None, contrast_targets_alignments: Optional[List[List[Tuple[int, int]]]] = None, ) -> List[List[TokenWithId]]: - if contrast_batch is not None: + if contrast_target_tokens is not None: return join_token_ids( batch.target_tokens, batch.target_ids.tolist(), - contrast_batch.target_tokens, + contrast_target_tokens, contrast_targets_alignments, ) return join_token_ids(batch.target_tokens, batch.target_ids.tolist()) diff --git a/inseq/models/decoder_only.py b/inseq/models/decoder_only.py index 3e7ff21f..169862ed 100644 --- a/inseq/models/decoder_only.py +++ b/inseq/models/decoder_only.py @@ -12,8 +12,8 @@ FeatureAttributionInput, FeatureAttributionStepOutput, get_batch_from_inputs, - slice_batch_from_position, ) +from ..utils import get_aligned_idx from ..utils.typing import ( AttributionForwardInputs, EmbeddingsTensor, @@ -114,10 +114,8 @@ def enrich_step_output( target_ids = target_ids.unsqueeze(0) step_output.source = None if contrast_batch is not None: - offset = len(batch.input_tokens[0]) - contrast_batch, contrast_target_ids = slice_batch_from_position( - contrast_batch, offset, contrast_targets_alignments - ) + contrast_aligned_idx = get_aligned_idx(len(batch.target_tokens[0]), contrast_targets_alignments[0]) + contrast_target_ids = contrast_batch.target_ids[:, contrast_aligned_idx] step_output.target = join_token_ids( tokens=target_tokens, ids=attribution_model.convert_ids_to_tokens(contrast_target_ids), diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index 6db9fa1b..11f40264 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -12,8 +12,8 @@ FeatureAttributionInput, FeatureAttributionStepOutput, get_batch_from_inputs, - slice_batch_from_position, ) +from ..utils import get_aligned_idx from ..utils.typing import ( AttributionForwardInputs, EmbeddingsTensor, @@ -155,10 +155,8 @@ def enrich_step_output( target_ids = target_ids.unsqueeze(0) step_output.source = join_token_ids(batch.sources.input_tokens, batch.sources.input_ids.tolist()) if contrast_batch is not None: - offset = len(batch.targets.input_tokens[0]) - contrast_batch, contrast_target_ids = slice_batch_from_position( - contrast_batch, offset, contrast_targets_alignments - ) + contrast_aligned_idx = get_aligned_idx(len(batch.target_tokens[0]), contrast_targets_alignments[0]) + contrast_target_ids = contrast_batch.target_ids[:, contrast_aligned_idx] step_output.target = join_token_ids( tokens=target_tokens, ids=[[idx] for idx in target_ids.tolist()], diff --git a/inseq/utils/__init__.py b/inseq/utils/__init__.py index 4daada05..0b883513 100644 --- a/inseq/utils/__init__.py +++ b/inseq/utils/__init__.py @@ -1,3 +1,4 @@ +from .alignment_utils import get_adjusted_alignments, get_aligned_idx from .argparse import InseqArgumentParser from .cache import INSEQ_ARTIFACTS_CACHE, INSEQ_HOME_CACHE, cache_results from .errors import ( @@ -110,5 +111,6 @@ "json_advanced_load", "get_nn_submodule", "find_block_stack", - "get_post_variable_assignment_hook", + "get_adjusted_alignments", + "get_aligned_idx", ] diff --git a/inseq/utils/alignment_utils.py b/inseq/utils/alignment_utils.py new file mode 100644 index 00000000..182cdfd1 --- /dev/null +++ b/inseq/utils/alignment_utils.py @@ -0,0 +1,148 @@ +import re +from dataclasses import dataclass +from 
enum import Enum +from functools import lru_cache +from itertools import chain +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +from transformers import AutoModel, AutoTokenizer, PreTrainedModel, PreTrainedTokenizerBase + +from .errors import MissingAlignmentsError + + +@dataclass +class AlignedSequences: + source_tokens: List[str] + target_tokens: List[str] + alignments: List[Tuple[int, int]] + + +class AlignmentMethod(Enum): + AUTO = "auto" + + +@lru_cache +def get_aligner_model() -> PreTrainedModel: + return AutoModel.from_pretrained("sentence-transformers/LaBSE") + + +@lru_cache +def get_aligner_tokenizer() -> PreTrainedTokenizerBase: + return AutoTokenizer.from_pretrained("sentence-transformers/LaBSE") + + +def _preprocess_sequence_for_alignment(tokenized_seq: List[str]) -> Tuple[torch.Tensor, List[List[int]]]: + aligner_tokenizer = get_aligner_tokenizer() + idxs = [aligner_tokenizer.convert_tokens_to_ids(x) for x in tokenized_seq] + idxs = aligner_tokenizer.prepare_for_model( + list(chain(*idxs)), + return_tensors="pt", + truncation=True, + model_max_length=aligner_tokenizer.model_max_length, + )["input_ids"] + sub2word_map = [] + for i, word_list in enumerate(tokenized_seq): + sub2word_map += [i for x in word_list] + return idxs, sub2word_map + + +def _get_aligner_subword_aligns( + src: List[str], + tgt: List[str], + align_layer: int, + score_threshold: float, +) -> torch.Tensor: + aligner = get_aligner_model() + tokenizer = get_aligner_tokenizer() + src_tokenized = [tokenizer.tokenize(word) for word in src] + tgt_tokenized = [tokenizer.tokenize(word) for word in tgt] + ids_src, sub2word_map_src = _preprocess_sequence_for_alignment(src_tokenized) + ids_tgt, sub2word_map_tgt = _preprocess_sequence_for_alignment(tgt_tokenized) + aligner.eval() + with torch.no_grad(): + out_src = aligner(ids_src.unsqueeze(0), output_hidden_states=True)[2][align_layer][0, 1:-1] + out_tgt = aligner(ids_tgt.unsqueeze(0), output_hidden_states=True)[2][align_layer][0, 1:-1] + dot_prod = torch.matmul(out_src, out_tgt.transpose(-1, -2)) + softmax_srctgt = torch.nn.Softmax(dim=-1)(dot_prod) + softmax_tgtsrc = torch.nn.Softmax(dim=-2)(dot_prod) + softmax_inter = (softmax_srctgt > score_threshold) * (softmax_tgtsrc > score_threshold) + align_subwords = torch.nonzero(softmax_inter, as_tuple=False) + return align_subwords, sub2word_map_src, sub2word_map_tgt + + +def get_word_aligns( + src: Union[str, List[str]], + tgt: Union[str, List[str]], + split_pattern: str = r"\s+|\b", + align_layer: int = 8, + score_threshold: float = 1e-3, +) -> Dict[str, Any]: + if isinstance(src, str): + src = [word for word in re.split(split_pattern, src) if word] + if isinstance(tgt, str): + tgt = [word for word in re.split(split_pattern, tgt) if word] + align_subwords, sub2word_map_src, sub2word_map_tgt = _get_aligner_subword_aligns( + src, tgt, align_layer, score_threshold + ) + align_words = set() + for i, j in align_subwords: + align_words.add((sub2word_map_src[i], sub2word_map_tgt[j])) + word_alignments = [(src_idx, tgt_idx) for src_idx, tgt_idx in sorted(align_words, key=lambda x: (x[0], x[1]))] + return AlignedSequences( + source_tokens=src, + target_tokens=tgt, + alignments=word_alignments, + ) + + +def get_adjusted_alignments( + alignments: Union[List[Tuple[int, int]], str], + do_sort: bool = True, + fill_missing_len: Optional[int] = None, +) -> List[Tuple[int, int]]: + if alignments is None and isinstance(fill_missing_len, int): + alignments = [(idx, idx) for idx in range(fill_missing_len)] + elif 
isinstance(alignments, str): + if alignments == AlignmentMethod.AUTO: + raise NotImplementedError + # TODO: Implement alignment method. Wrap it in a try-except block that raises a Runtime error in case any + # of the steps fail. + # 1. Use LaBSE to get alignments at word level + # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. + # 2.1 Requires cleaning up the model tokens from special tokens and characters, check if something native + # exists in the tokenizer. + # 3. Propagate word-level alignments to token-level alignments. + else: + raise ValueError(f"Unknown alignment method: {alignments}") + if do_sort: + # Sort alignments + alignments = sorted(set(alignments), key=lambda x: (x[0], x[1])) + + # Filling alignments with missing tokens + if isinstance(fill_missing_len, int): + filled_alignments = [] + for pair_idx in range(fill_missing_len): + match_pairs = [x for x in alignments if x[0] == pair_idx] + if not match_pairs: + # Assuming 1:1 mapping to cover all tokens from the original sequence + filled_alignments.append((pair_idx, pair_idx)) + else: + # Use only first match for the source sequence + filled_alignments.append(match_pairs[0]) + alignments = filled_alignments + return alignments + + +def get_aligned_idx(src_idx: int, alignments: List[Tuple[int, int]]) -> int: + if alignments: + # Find all alignment pairs for the current original target + aligned_idxs = [t_idx for s_idx, t_idx in alignments if s_idx == src_idx] + if not aligned_idxs: + raise MissingAlignmentsError( + f"No alignment found for token at index {src_idx}. " + "Please provide alignment pairs that cover all original target tokens." + ) + # Select the minimum index to identify the next target token + return min(aligned_idxs) + return src_idx diff --git a/tests/attr/feat/test_feature_attribution.py b/tests/attr/feat/test_feature_attribution.py index 6fa507c2..10199f71 100644 --- a/tests/attr/feat/test_feature_attribution.py +++ b/tests/attr/feat/test_feature_attribution.py @@ -11,6 +11,11 @@ def saliency_mt_model_larger() -> HuggingfaceEncoderDecoderModel: return inseq.load_model("Helsinki-NLP/opus-mt-en-it", "saliency") +@fixture(scope="session") +def saliency_gpt_model_larger() -> HuggingfaceDecoderOnlyModel: + return inseq.load_model("gpt2", "saliency") + + @fixture(scope="session") def saliency_mt_model() -> HuggingfaceEncoderDecoderModel: return inseq.load_model("hf-internal-testing/tiny-random-MarianMTModel", "saliency") @@ -69,6 +74,76 @@ def test_contrastive_attribution_gpt(saliency_gpt_model: HuggingfaceDecoderOnlyM assert attribution_scores.shape == torch.Size([23, 5, 32]) +def test_contrastive_attribution_seq2seq_alignments(saliency_mt_model_larger: HuggingfaceEncoderDecoderModel): + aligned = { + "src": "UN peacekeepers", + "orig_tgt": "I soldati della pace ONU", + "contrast_tgt": "Le forze militari di pace delle Nazioni Unite", + "alignments": [[(0, 0), (1, 1), (2, 2), (3, 4), (4, 5), (5, 7), (6, 9)]], + "aligned_tgts": ["▁Le → ▁I", "▁forze → ▁soldati", "▁di → ▁della", "▁pace", "▁Nazioni → ▁ONU", ""], + } + out = saliency_mt_model_larger.attribute( + aligned["src"], + aligned["orig_tgt"], + attributed_fn="contrast_prob_diff", + step_scores=["contrast_prob_diff"], + contrast_targets=aligned["contrast_tgt"], + contrast_targets_alignments=aligned["alignments"], + show_progress=False, + ) + # Check tokens are aligned as expected + assert [t.token for t in out[0].target] == aligned["aligned_tgts"] + + # Check that a single list of alignments is correctly processed 
+ out_single_list = saliency_mt_model_larger.attribute( + aligned["src"], + aligned["orig_tgt"], + attributed_fn="contrast_prob_diff", + step_scores=["contrast_prob_diff"], + contrast_targets=aligned["contrast_tgt"], + contrast_targets_alignments=aligned["alignments"][0], + attribute_target=True, + show_progress=False, + ) + assert out[0].target == out_single_list[0].target + assert torch.allclose( + out[0].source_attributions, + out_single_list[0].source_attributions, + atol=8e-2, + ) + + # Check that providing only non-matching ids also works + out_non_matching_ids = saliency_mt_model_larger.attribute( + aligned["src"], + aligned["orig_tgt"], + attributed_fn="contrast_prob_diff", + step_scores=["contrast_prob_diff"], + contrast_targets=aligned["contrast_tgt"], + contrast_targets_alignments=[(3, 4), (4, 5), (5, 7), (6, 9)], + show_progress=False, + ) + assert out[0].target == out_non_matching_ids[0].target + assert torch.allclose( + out[0].source_attributions, + out_non_matching_ids[0].source_attributions, + atol=8e-2, + ) + + +def test_contrastive_attribution_gpt_alignments(saliency_gpt_model_larger: HuggingfaceDecoderOnlyModel): + out = saliency_gpt_model_larger.attribute( + "UN peacekeepers", + "UN peacekeepers were deployed in the region.", + attributed_fn="contrast_prob_diff", + contrast_targets="UN peacekeepers were sent to the war-torn region.", + contrast_targets_alignments=[(7, 10), (8, 11)], + step_scores=["contrast_prob_diff"], + show_progress=False, + ) + contrast_targets = ["UN", "Ġpeace", "keepers", "Ġwere", "Ġsent → Ġdeployed", "Ġto → Ġin", "Ġthe", "Ġregion", "."] + assert [t.token for t in out[0].target] == contrast_targets + + def test_mcd_weighted_attribution_seq2seq(saliency_mt_model, auxiliary_saliency_mt_model): """Runs a MCD-weighted feature attribution taking advantage of the custom feature attribution target function module. 
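
Before the next patch reworks this logic, it may help to see the contract of the two helpers introduced above in isolation: get_adjusted_alignments pads a possibly partial list of (target_idx, contrast_idx) pairs so that every original target position is covered, falling back to a 1:1 mapping, while get_aligned_idx resolves the contrast position for a given target position by taking the smallest aligned index. The following is a simplified standalone sketch of that behaviour, not the library functions themselves; the helper names and example pairs are illustrative only.

from typing import List, Tuple


def fill_alignments(pairs: List[Tuple[int, int]], length: int) -> List[Tuple[int, int]]:
    # Keep the first pair found for every target position; fall back to (i, i).
    filled = []
    for i in range(length):
        matches = [p for p in pairs if p[0] == i]
        filled.append(matches[0] if matches else (i, i))
    return filled


def aligned_idx(i: int, pairs: List[Tuple[int, int]]) -> int:
    # Smallest contrast index aligned to target position i.
    return min(c for t, c in pairs if t == i)


pairs = [(3, 4), (4, 5), (5, 7), (6, 9)]  # only the non-matching positions are provided
print(fill_alignments(pairs, 7))  # [(0, 0), (1, 1), (2, 2), (3, 4), (4, 5), (5, 7), (6, 9)]
print(aligned_idx(5, pairs))      # 7

This filling behaviour is why the test above can pass only the four non-matching pairs and still obtain the same output as the fully specified alignment list.
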
From 6b666c697d77bb6e532de82175606c236c01ecbb Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Sat, 24 Jun 2023 18:06:33 +0200 Subject: [PATCH 5/7] Started auto align logic --- inseq/attr/feat/feature_attribution.py | 3 ++ inseq/models/attribution_model.py | 29 ++++++++++--- inseq/utils/alignment_utils.py | 60 ++++++++++++++++++++------ 3 files changed, 72 insertions(+), 20 deletions(-) diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index ec4e21c3..a7c99528 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -321,7 +321,10 @@ def attribute( ) contrast_targets_alignments = self.attribution_model.formatter.format_contrast_targets_alignments( contrast_targets_alignments=contrast_targets_alignments, + target_sequences=sequences.targets, target_tokens=batch.target_tokens, + contrast_sequences=contrast_targets, + contrast_tokens=contrast_batch.target_tokens, ) attributed_fn_args["contrast_targets_alignments"] = contrast_targets_alignments if "contrast_targets_alignments" in step_scores_args: diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index 1ada9917..e642b907 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -144,22 +144,39 @@ def get_text_sequences( @staticmethod def format_contrast_targets_alignments( - contrast_targets_alignments: Union[List[Tuple[int, int]], List[List[Tuple[int, int]]]], + contrast_targets_alignments: Union[List[Tuple[int, int]], List[List[Tuple[int, int]]], str], + target_sequences: List[str], target_tokens: List[List[str]], + contrast_sequences: List[str], + contrast_tokens: List[List[str]], ) -> Tuple[DecoderOnlyBatch, Optional[List[List[Tuple[int, int]]]]]: - adjusted_alignments = [] + # Ensure that the contrast_targets_alignments are in the correct format (list of lists of idxs pairs) if contrast_targets_alignments: if isinstance(contrast_targets_alignments, list) and len(contrast_targets_alignments) > 0: if isinstance(contrast_targets_alignments[0], tuple): contrast_targets_alignments = [contrast_targets_alignments] if not isinstance(contrast_targets_alignments[0], list): raise ValueError("Invalid contrast_targets_alignments were provided.") - for seq_idx, tokens in enumerate(target_tokens): + elif not isinstance(str): + raise ValueError("Invalid contrast_targets_alignments were provided.") + + adjusted_alignments = [] + aligns = contrast_targets_alignments + for seq_idx, (tgt_seq, tgt_tok, c_seq, c_tok) in enumerate( + zip(target_sequences, target_tokens, contrast_sequences, contrast_tokens) + ): if isinstance(contrast_targets_alignments, list): aligns = contrast_targets_alignments[seq_idx] - else: - aligns = contrast_targets_alignments - adjusted_alignments.append(get_adjusted_alignments(aligns, fill_missing_len=len(tokens))) + adjusted_alignments.append( + get_adjusted_alignments( + aligns, + target_sequence=tgt_seq, + target_tokens=tgt_tok, + contrast_sequence=c_seq, + contrast_tokens=c_tok, + fill_missing=True, + ) + ) return adjusted_alignments diff --git a/inseq/utils/alignment_utils.py b/inseq/utils/alignment_utils.py index 182cdfd1..728575ea 100644 --- a/inseq/utils/alignment_utils.py +++ b/inseq/utils/alignment_utils.py @@ -1,3 +1,4 @@ +import logging import re from dataclasses import dataclass from enum import Enum @@ -10,6 +11,8 @@ from .errors import MissingAlignmentsError +logger = logging.getLogger(__name__) + @dataclass class AlignedSequences: @@ -98,31 +101,54 @@ def 
get_word_aligns( def get_adjusted_alignments( alignments: Union[List[Tuple[int, int]], str], + target_sequence: Optional[str] = None, + target_tokens: Optional[List[str]] = None, + contrast_sequence: Optional[str] = None, + contrast_tokens: Optional[List[str]] = None, do_sort: bool = True, - fill_missing_len: Optional[int] = None, + fill_missing: bool = False, ) -> List[Tuple[int, int]]: - if alignments is None and isinstance(fill_missing_len, int): - alignments = [(idx, idx) for idx in range(fill_missing_len)] + if fill_missing and not target_tokens: + raise ValueError("Missing target tokens. Please provide target tokens to fill missing alignments.") + if alignments is None and fill_missing: + alignments = [(idx, idx) for idx in range(len(target_tokens))] elif isinstance(alignments, str): if alignments == AlignmentMethod.AUTO: - raise NotImplementedError - # TODO: Implement alignment method. Wrap it in a try-except block that raises a Runtime error in case any - # of the steps fail. - # 1. Use LaBSE to get alignments at word level - # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. - # 2.1 Requires cleaning up the model tokens from special tokens and characters, check if something native - # exists in the tokenizer. - # 3. Propagate word-level alignments to token-level alignments. + if not target_sequence or not contrast_sequence or not target_tokens or not contrast_tokens: + raise ValueError( + "Missing required arguments to compute alignments. " + "Please provide target and contrast sequence and tokens." + ) + try: + # 1. Use aligner to get alignments at word level + align_seq = get_word_aligns(target_sequence, contrast_sequence) + raise NotImplementedError(f"{align_seq.alignments}") + # TODO: + # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. + # 2.1 Requires cleaning up the model tokens from special tokens and characters, check if smth native + # exists in the tokenizer. + # 2.2 Default behavior to handle missing aligns: let the step below fill them with 1:1 mapping. + # 3. Propagate word-level alignments to token-level alignments. + # 4. Log info about the produced alignments + except Exception as e: + logger.warning( + "Failed to compute alignments using the aligner. " + f"Please check the following error and provide custom alignments if needed.\n{e}" + ) + raise e else: - raise ValueError(f"Unknown alignment method: {alignments}") + raise ValueError( + f"Unknown alignment method: {alignments}. 
" + f"Available methods: {','.join([m.value for m in AlignmentMethod])}" + ) if do_sort: # Sort alignments alignments = sorted(set(alignments), key=lambda x: (x[0], x[1])) # Filling alignments with missing tokens - if isinstance(fill_missing_len, int): + if fill_missing: filled_alignments = [] - for pair_idx in range(fill_missing_len): + for pair_idx in range(len(target_tokens)): match_pairs = [x for x in alignments if x[0] == pair_idx] if not match_pairs: # Assuming 1:1 mapping to cover all tokens from the original sequence @@ -130,6 +156,12 @@ def get_adjusted_alignments( else: # Use only first match for the source sequence filled_alignments.append(match_pairs[0]) + if alignments != filled_alignments: + logger.warning( + f"Provided alignments do not cover all {len(target_tokens)} tokens from the original sequence.\n" + f"Filling missing position with 1:1 position alignments.\\Filled alignments: {filled_alignments}.\n" + 'Alternatively, use contrast_targets_alignments="auto" to produce custom alignments.' + ) alignments = filled_alignments return alignments From 195d87f2b6b7035a31b61393a71fc1034dd6082c Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Wed, 28 Jun 2023 23:57:49 +0200 Subject: [PATCH 6/7] Auto align working, tests missing --- inseq/attr/feat/feature_attribution.py | 21 ++- inseq/models/attribution_model.py | 15 +- inseq/models/huggingface_model.py | 32 ++++ inseq/utils/alignment_utils.py | 252 ++++++++++++++++++++----- 4 files changed, 264 insertions(+), 56 deletions(-) diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index a7c99528..2840294b 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -312,22 +312,25 @@ def attribute( contrast_targets = [contrast_targets] if isinstance(contrast_targets, str) else contrast_targets contrast_batch = None if contrast_targets is not None: - contrast_batch = DecoderOnlyBatch.from_batch( - get_batch_from_inputs( - attribution_model=self.attribution_model, - inputs=contrast_targets, - as_targets=self.attribution_model.is_encoder_decoder, - ) + as_targets = self.attribution_model.is_encoder_decoder + contrast_batch = get_batch_from_inputs( + attribution_model=self.attribution_model, + inputs=contrast_targets, + as_targets=as_targets, ) + contrast_batch = DecoderOnlyBatch.from_batch(contrast_batch) contrast_targets_alignments = self.attribution_model.formatter.format_contrast_targets_alignments( contrast_targets_alignments=contrast_targets_alignments, target_sequences=sequences.targets, - target_tokens=batch.target_tokens, + target_tokens=self.attribution_model.clean_tokens(batch.target_tokens, as_targets=as_targets), contrast_sequences=contrast_targets, - contrast_tokens=contrast_batch.target_tokens, + contrast_tokens=self.attribution_model.clean_tokens( + contrast_batch.target_tokens, as_targets=as_targets + ), + special_tokens=self.attribution_model.special_tokens, ) attributed_fn_args["contrast_targets_alignments"] = contrast_targets_alignments - if "contrast_targets_alignments" in step_scores_args: + if "contrast_targets" in step_scores_args: step_scores_args["contrast_targets_alignments"] = contrast_targets_alignments target_tokens_with_ids = self.attribution_model.get_token_with_ids( batch, diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index e642b907..a1a84e41 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -149,6 +149,7 @@ def format_contrast_targets_alignments( 
target_tokens: List[List[str]], contrast_sequences: List[str], contrast_tokens: List[List[str]], + special_tokens: List[str] = [], ) -> Tuple[DecoderOnlyBatch, Optional[List[List[Tuple[int, int]]]]]: # Ensure that the contrast_targets_alignments are in the correct format (list of lists of idxs pairs) if contrast_targets_alignments: @@ -157,7 +158,7 @@ def format_contrast_targets_alignments( contrast_targets_alignments = [contrast_targets_alignments] if not isinstance(contrast_targets_alignments[0], list): raise ValueError("Invalid contrast_targets_alignments were provided.") - elif not isinstance(str): + elif not isinstance(contrast_targets_alignments, str): raise ValueError("Invalid contrast_targets_alignments were provided.") adjusted_alignments = [] @@ -175,6 +176,7 @@ def format_contrast_targets_alignments( contrast_sequence=c_seq, contrast_tokens=c_tok, fill_missing=True, + special_tokens=special_tokens, ) ) return adjusted_alignments @@ -503,7 +505,7 @@ def encode( pass @abstractmethod - def decode(self, ids: IdsTensor, **kwargs) -> List[str]: + def decode(self, ids: IdsTensor, skip_special_tokens: bool = True) -> List[str]: pass @abstractmethod @@ -541,6 +543,15 @@ def convert_string_to_tokens( ) -> OneOrMoreTokenSequences: pass + @abstractmethod + def clean_tokens( + self, + tokens: OneOrMoreTokenSequences, + skip_special_tokens: bool = False, + as_targets: bool = False, + ): + pass + @property @abstractmethod def special_tokens(self) -> List[str]: diff --git a/inseq/models/huggingface_model.py b/inseq/models/huggingface_model.py index 2fbc3535..a5528fe8 100644 --- a/inseq/models/huggingface_model.py +++ b/inseq/models/huggingface_model.py @@ -355,6 +355,38 @@ def convert_string_to_tokens( return self.tokenizer.convert_ids_to_tokens(ids, skip_special_tokens) return [self.convert_string_to_tokens(t, skip_special_tokens, as_targets) for t in text] + def clean_tokens( + self, + tokens: OneOrMoreTokenSequences, + skip_special_tokens: bool = False, + as_targets: bool = False, + ) -> OneOrMoreTokenSequences: + """Cleans special characters from tokens. + + Args: + tokens (`OneOrMoreTokenSequences`): + A list containing one or more lists of tokens. + skip_special_tokens (`bool`, *optional*, defaults to True): + If true, special tokens are skipped. + as_targets (`bool`, *optional*, defaults to False): + If true, a target tokenizer is used to clean the tokens. + + Returns: + `OneOrMoreTokenSequences`: A list containing one or more lists of cleaned tokens. 
+ """ + if isinstance(tokens, list) and len(tokens) == 0: + return [] + elif isinstance(tokens[0], str): + clean_tokens = [] + for tok in tokens: + clean_tok = self.convert_tokens_to_string( + [tok], skip_special_tokens=skip_special_tokens, as_targets=as_targets + ) + if clean_tok: + clean_tokens.append(clean_tok) + return clean_tokens + return [self.clean_tokens(token_seq, skip_special_tokens, as_targets) for token_seq in tokens] + @property def special_tokens(self) -> List[str]: return self.tokenizer.all_special_tokens diff --git a/inseq/utils/alignment_utils.py b/inseq/utils/alignment_utils.py index 728575ea..44f3aca3 100644 --- a/inseq/utils/alignment_utils.py +++ b/inseq/utils/alignment_utils.py @@ -4,7 +4,7 @@ from enum import Enum from functools import lru_cache from itertools import chain -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union import torch from transformers import AutoModel, AutoTokenizer, PreTrainedModel, PreTrainedTokenizerBase @@ -13,6 +13,8 @@ logger = logging.getLogger(__name__) +ALIGN_MODEL_ID = "sentence-transformers/LaBSE" + @dataclass class AlignedSequences: @@ -20,6 +22,20 @@ class AlignedSequences: target_tokens: List[str] alignments: List[Tuple[int, int]] + @property + def aligned_tokens(self) -> List[Tuple[str, str]]: + return [(self.source_tokens[a_idx], self.target_tokens[b_idx]) for a_idx, b_idx in self.alignments] + + def reverse(self) -> "AlignedSequences": + return AlignedSequences( + source_tokens=self.target_tokens, + target_tokens=self.source_tokens, + alignments=[(b_idx, a_idx) for a_idx, b_idx in self.alignments], + ) + + def __str__(self) -> str: + return f"{', '.join([f'{a}→{b} ({self.source_tokens[a]}→{self.target_tokens[b]})'for a,b in self.alignments])}" + class AlignmentMethod(Enum): AUTO = "auto" @@ -27,12 +43,12 @@ class AlignmentMethod(Enum): @lru_cache def get_aligner_model() -> PreTrainedModel: - return AutoModel.from_pretrained("sentence-transformers/LaBSE") + return AutoModel.from_pretrained(ALIGN_MODEL_ID) @lru_cache def get_aligner_tokenizer() -> PreTrainedTokenizerBase: - return AutoTokenizer.from_pretrained("sentence-transformers/LaBSE") + return AutoTokenizer.from_pretrained(ALIGN_MODEL_ID) def _preprocess_sequence_for_alignment(tokenized_seq: List[str]) -> Tuple[torch.Tensor, List[List[int]]]: @@ -58,10 +74,10 @@ def _get_aligner_subword_aligns( ) -> torch.Tensor: aligner = get_aligner_model() tokenizer = get_aligner_tokenizer() - src_tokenized = [tokenizer.tokenize(word) for word in src] - tgt_tokenized = [tokenizer.tokenize(word) for word in tgt] - ids_src, sub2word_map_src = _preprocess_sequence_for_alignment(src_tokenized) - ids_tgt, sub2word_map_tgt = _preprocess_sequence_for_alignment(tgt_tokenized) + tok_aenized = [tokenizer.tokenize(word) for word in src] + tok_benized = [tokenizer.tokenize(word) for word in tgt] + ids_src, sub2word_map_src = _preprocess_sequence_for_alignment(tok_aenized) + ids_tgt, sub2word_map_tgt = _preprocess_sequence_for_alignment(tok_benized) aligner.eval() with torch.no_grad(): out_src = aligner(ids_src.unsqueeze(0), output_hidden_states=True)[2][align_layer][0, 1:-1] @@ -74,13 +90,13 @@ def _get_aligner_subword_aligns( return align_subwords, sub2word_map_src, sub2word_map_tgt -def get_word_aligns( +def compute_word_aligns( src: Union[str, List[str]], tgt: Union[str, List[str]], split_pattern: str = r"\s+|\b", align_layer: int = 8, score_threshold: float = 1e-3, -) -> Dict[str, Any]: +) -> AlignedSequences: if isinstance(src, str): src = 
[word for word in re.split(split_pattern, src) if word] if isinstance(tgt, str): @@ -91,14 +107,164 @@ def get_word_aligns( align_words = set() for i, j in align_subwords: align_words.add((sub2word_map_src[i], sub2word_map_tgt[j])) - word_alignments = [(src_idx, tgt_idx) for src_idx, tgt_idx in sorted(align_words, key=lambda x: (x[0], x[1]))] + word_alignments = [(a_idx, b_idx) for a_idx, b_idx in sorted(align_words, key=lambda x: (x[0], x[1]))] + return AlignedSequences( + source_tokens=src.copy(), + target_tokens=tgt.copy(), + alignments=word_alignments.copy(), + ) + + +def align_tokenizations( + tok_a: List[str], + tok_b: List[str], +) -> AlignedSequences: + """Align tokens from a sentence tokenized by different tokenizers. + + Args: + tok_a (:obj:`str` or :obj:`list` of :obj:`str`): + Sequence of tokens produced by the first tokenizer. + tok_b (:obj:`str` or :obj:`list` of :obj:`str`): + Sequence of tokens produced by the second tokenizer. + + Raises: + `ValueError`: Raised if the provided sequences do not have the same contents when concatenated. + """ + if "".join(tok_a) != "".join(tok_b): + raise ValueError( + "The provided sequences must have the same contents when concatenated.\n" + f"Sequence A: {tok_a}\nSequence B: {tok_b}\n" + ) + aligns = [] + orig_tok_a = tok_a.copy() + orig_tok_b = tok_b.copy() + a_idx, b_idx = 0, 0 + while a_idx < len(tok_a): + curr_tok_a = tok_a[a_idx] + curr_tok_b = tok_b[b_idx] + if curr_tok_a == curr_tok_b: + aligns.append((a_idx, b_idx)) + a_idx += 1 + b_idx += 1 + elif curr_tok_a in curr_tok_b: + aligns.append((a_idx, b_idx)) + tok_b[b_idx] = tok_b[b_idx].replace(curr_tok_a, "", 1) + a_idx += 1 + elif curr_tok_b in curr_tok_a: + aligns.append((a_idx, b_idx)) + tok_a[a_idx] = tok_a[a_idx].replace(curr_tok_b, "", 1) + b_idx += 1 + else: + raise ValueError( + f"Found mismatching tokens '{curr_tok_a}' and '{curr_tok_b}' when aligning tokens. " + "Please provide tokenizations that can be aligned." + ) + return AlignedSequences( + source_tokens=orig_tok_a, + target_tokens=orig_tok_b, + alignments=aligns.copy(), + ) + + +def propagate_alignments(aligns_a_b: AlignedSequences, aligns_b_c: AlignedSequences) -> AlignedSequences: + """Given two set of alignments corresponding to the aligned tokens of strings A and B + and those of strings B and C respectively, returns the alignment of tokens between + string A and C. + + Args: + aligns_a_b (:obj:`list` of :obj:`tuple` of :obj:`int`): List of alignment index pairs + between sequences A and B. + aligns_b_c (:obj:`list` of :obj:`tuple` of :obj:`int`): List of alignment index pairs + between sequences B and C. + + Returns: + :class:`AlignedSequences`: Alignment pairs between sequences A and C. 
+ """ + aligns_a_c = [] + for idx_a, idx_b_in_ab in aligns_a_b.alignments: + for idx_b_in_bc, idx_c in aligns_b_c.alignments: + if idx_b_in_ab == idx_b_in_bc: + aligns_a_c.append((idx_a, idx_c)) return AlignedSequences( - source_tokens=src, - target_tokens=tgt, - alignments=word_alignments, + source_tokens=aligns_a_b.source_tokens.copy(), + target_tokens=aligns_b_c.target_tokens.copy(), + alignments=aligns_a_c.copy(), ) +def auto_align_sequences( + a_sequence: Optional[str] = None, + a_tokens: Optional[List[str]] = None, + b_sequence: Optional[str] = None, + b_tokens: Optional[List[str]] = None, + filter_special_tokens: List[str] = [], + split_pattern: str = r"\s+|\b", +) -> AlignedSequences: + if not a_sequence or not b_sequence or not a_tokens or not b_tokens: + raise ValueError( + "Missing required arguments to compute alignments. Please provide target and contrast sequence and tokens." + ) + try: + for token in filter_special_tokens: + b_sequence = b_sequence.replace(token, "") + # 1. Use aligner to get alignments at word level + # Alignments are target to contrast word-level alignment pairs + a_words = [word for word in re.split(split_pattern, a_sequence) if word] + b_words = [word for word in re.split(split_pattern, b_sequence) if word] + a_to_b_word_align = compute_word_aligns(a_words, b_words) + # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. + # Requires cleaning up the model tokens from special tokens (special characters already removed) + clean_a_tokens = [] + removed_a_token_idxs = [] + for idx_a, tok_a in enumerate(a_tokens): + if tok_a not in filter_special_tokens: + clean_a_tokens += [tok_a.strip()] + else: + removed_a_token_idxs += [idx_a] + clean_b_tokens = [] + removed_b_token_idxs = [] + for idx_b, tok_b in enumerate(b_tokens): + if tok_b not in filter_special_tokens: + clean_b_tokens += [tok_b.strip()] + else: + removed_b_token_idxs += [idx_b] + if len(removed_a_token_idxs) != len(removed_b_token_idxs): + raise ValueError( + "The number of special tokens in the target and contrast sequences do not match. " + "Please provide sequences with the same number of special tokens." + ) + aligned_special_tokens = [(rm_a, rm_b) for rm_a, rm_b in zip(removed_a_token_idxs, removed_b_token_idxs)] + a_word_to_token_align = align_tokenizations(a_words, clean_a_tokens) + b_word_to_token_align = align_tokenizations(b_words, clean_b_tokens) + # 3. Propagate word-level alignments to token-level alignments. + # target token-level -> target word-level -> contrast word-level -> contrast token-level + # First step: get target token-level -> contrast word-level + a_token_to_word_align = a_word_to_token_align.reverse() + a_token_to_b_word_align = propagate_alignments(a_token_to_word_align, a_to_b_word_align) + # Second step: get target token-level -> contrast token-level using previous step outputs + a_to_b_token_align = propagate_alignments(a_token_to_b_word_align, b_word_to_token_align) + # 4. Add special tokens alignments + for s_idx_a, s_idx_b in aligned_special_tokens: + for pos, (idx_a, idx_b) in enumerate(a_to_b_token_align.alignments): + a_val, b_val = idx_a, idx_b + if idx_a >= s_idx_a: + a_val += 1 + if idx_b >= s_idx_b: + b_val += 1 + a_to_b_token_align.alignments[pos] = (a_val, b_val) + return AlignedSequences( + source_tokens=a_tokens, + target_tokens=b_tokens, + alignments=a_to_b_token_align.alignments + aligned_special_tokens, + ) + except Exception as e: + logger.warning( + "Failed to compute alignments using the aligner. 
" + f"Please check the following error and provide custom alignments if needed.\n{e}" + ) + raise e + + def get_adjusted_alignments( alignments: Union[List[Tuple[int, int]], str], target_sequence: Optional[str] = None, @@ -107,35 +273,24 @@ def get_adjusted_alignments( contrast_tokens: Optional[List[str]] = None, do_sort: bool = True, fill_missing: bool = False, + special_tokens: List[str] = [], ) -> List[Tuple[int, int]]: + is_auto_aligned = False if fill_missing and not target_tokens: raise ValueError("Missing target tokens. Please provide target tokens to fill missing alignments.") if alignments is None and fill_missing: alignments = [(idx, idx) for idx in range(len(target_tokens))] elif isinstance(alignments, str): - if alignments == AlignmentMethod.AUTO: - if not target_sequence or not contrast_sequence or not target_tokens or not contrast_tokens: - raise ValueError( - "Missing required arguments to compute alignments. " - "Please provide target and contrast sequence and tokens." - ) - try: - # 1. Use aligner to get alignments at word level - align_seq = get_word_aligns(target_sequence, contrast_sequence) - raise NotImplementedError(f"{align_seq.alignments}") - # TODO: - # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. - # 2.1 Requires cleaning up the model tokens from special tokens and characters, check if smth native - # exists in the tokenizer. - # 2.2 Default behavior to handle missing aligns: let the step below fill them with 1:1 mapping. - # 3. Propagate word-level alignments to token-level alignments. - # 4. Log info about the produced alignments - except Exception as e: - logger.warning( - "Failed to compute alignments using the aligner. " - f"Please check the following error and provide custom alignments if needed.\n{e}" - ) - raise e + if alignments == AlignmentMethod.AUTO.value: + auto_aligned = auto_align_sequences( + a_sequence=target_sequence, + a_tokens=target_tokens, + b_sequence=contrast_sequence, + b_tokens=contrast_tokens, + filter_special_tokens=special_tokens, + ) + is_auto_aligned = True + alignments = auto_aligned.alignments else: raise ValueError( f"Unknown alignment method: {alignments}. " @@ -149,32 +304,39 @@ def get_adjusted_alignments( if fill_missing: filled_alignments = [] for pair_idx in range(len(target_tokens)): - match_pairs = [x for x in alignments if x[0] == pair_idx] + match_pairs = [pair for pair in alignments if pair[0] == pair_idx] + if not match_pairs: # Assuming 1:1 mapping to cover all tokens from the original sequence filled_alignments.append((pair_idx, pair_idx)) else: - # Use only first match for the source sequence - filled_alignments.append(match_pairs[0]) + match_pairs_unaligned = [p for p in match_pairs if p[1] not in [f[1] for f in filled_alignments]] + # If found, use the first match that containing an unaligned target token, first match otherwise + valid_match = match_pairs_unaligned[0] if match_pairs_unaligned else match_pairs[0] + filled_alignments.append(valid_match) if alignments != filled_alignments: logger.warning( f"Provided alignments do not cover all {len(target_tokens)} tokens from the original sequence.\n" - f"Filling missing position with 1:1 position alignments.\\Filled alignments: {filled_alignments}.\n" - 'Alternatively, use contrast_targets_alignments="auto" to produce custom alignments.' + "Filling missing position with 1:1 position alignments." + ) + if is_auto_aligned: + logger.warning( + f"Using {ALIGN_MODEL_ID} for automatic alignments. 
Provide custom alignments for non-linguistic " + f"sequences, or for languages not covered by the aligner.\nGenerated alignments: {filled_alignments}" ) alignments = filled_alignments return alignments -def get_aligned_idx(src_idx: int, alignments: List[Tuple[int, int]]) -> int: +def get_aligned_idx(a_idx: int, alignments: List[Tuple[int, int]]) -> int: if alignments: # Find all alignment pairs for the current original target - aligned_idxs = [t_idx for s_idx, t_idx in alignments if s_idx == src_idx] + aligned_idxs = [t_idx for s_idx, t_idx in alignments if s_idx == a_idx] if not aligned_idxs: raise MissingAlignmentsError( - f"No alignment found for token at index {src_idx}. " + f"No alignment found for token at index {a_idx}. " "Please provide alignment pairs that cover all original target tokens." ) # Select the minimum index to identify the next target token return min(aligned_idxs) - return src_idx + return a_idx From 07f18ac59f690e37de7aa175a1cbbdd52bfad1a5 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Fri, 30 Jun 2023 09:35:59 +0200 Subject: [PATCH 7/7] Add tests for auto align --- inseq/utils/alignment_utils.py | 44 +++++++------ inseq/utils/misc.py | 12 ++++ tests/attr/feat/test_step_functions.py | 85 ++++++++++++++++++++++++++ 3 files changed, 118 insertions(+), 23 deletions(-) diff --git a/inseq/utils/alignment_utils.py b/inseq/utils/alignment_utils.py index 44f3aca3..30fb6a25 100644 --- a/inseq/utils/alignment_utils.py +++ b/inseq/utils/alignment_utils.py @@ -10,6 +10,7 @@ from transformers import AutoModel, AutoTokenizer, PreTrainedModel, PreTrainedTokenizerBase from .errors import MissingAlignmentsError +from .misc import clean_tokens logger = logging.getLogger(__name__) @@ -192,6 +193,20 @@ def propagate_alignments(aligns_a_b: AlignedSequences, aligns_b_c: AlignedSequen ) +def add_alignment_extra_positions( + alignments: List[Tuple[int, int]], extra_positions: List[Tuple[int, int]] +) -> List[Tuple[int, int]]: + for x_idx_a, x_idx_b in extra_positions: + for pos, (idx_a, idx_b) in enumerate(alignments): + a_val, b_val = idx_a, idx_b + if idx_a >= x_idx_a: + a_val += 1 + if idx_b >= x_idx_b: + b_val += 1 + alignments[pos] = (a_val, b_val) + return alignments + extra_positions + + def auto_align_sequences( a_sequence: Optional[str] = None, a_tokens: Optional[List[str]] = None, @@ -214,20 +229,8 @@ def auto_align_sequences( a_to_b_word_align = compute_word_aligns(a_words, b_words) # 2. Align word-level alignments to token-level alignments from the generative model tokenizer. # Requires cleaning up the model tokens from special tokens (special characters already removed) - clean_a_tokens = [] - removed_a_token_idxs = [] - for idx_a, tok_a in enumerate(a_tokens): - if tok_a not in filter_special_tokens: - clean_a_tokens += [tok_a.strip()] - else: - removed_a_token_idxs += [idx_a] - clean_b_tokens = [] - removed_b_token_idxs = [] - for idx_b, tok_b in enumerate(b_tokens): - if tok_b not in filter_special_tokens: - clean_b_tokens += [tok_b.strip()] - else: - removed_b_token_idxs += [idx_b] + clean_a_tokens, removed_a_token_idxs = clean_tokens(a_tokens, filter_special_tokens) + clean_b_tokens, removed_b_token_idxs = clean_tokens(b_tokens, filter_special_tokens) if len(removed_a_token_idxs) != len(removed_b_token_idxs): raise ValueError( "The number of special tokens in the target and contrast sequences do not match. 
" @@ -244,18 +247,13 @@ def auto_align_sequences( # Second step: get target token-level -> contrast token-level using previous step outputs a_to_b_token_align = propagate_alignments(a_token_to_b_word_align, b_word_to_token_align) # 4. Add special tokens alignments - for s_idx_a, s_idx_b in aligned_special_tokens: - for pos, (idx_a, idx_b) in enumerate(a_to_b_token_align.alignments): - a_val, b_val = idx_a, idx_b - if idx_a >= s_idx_a: - a_val += 1 - if idx_b >= s_idx_b: - b_val += 1 - a_to_b_token_align.alignments[pos] = (a_val, b_val) + a_to_b_aligns_with_special_tokens = add_alignment_extra_positions( + a_to_b_token_align.alignments.copy(), aligned_special_tokens + ) return AlignedSequences( source_tokens=a_tokens, target_tokens=b_tokens, - alignments=a_to_b_token_align.alignments + aligned_special_tokens, + alignments=a_to_b_aligns_with_special_tokens, ) except Exception as e: logger.warning( diff --git a/inseq/utils/misc.py b/inseq/utils/misc.py index c2e0cba3..105f9803 100644 --- a/inseq/utils/misc.py +++ b/inseq/utils/misc.py @@ -423,3 +423,15 @@ def get_cls_from_instance_type(mod, name, cls_lookup_map): if curr_class is None: raise ImportError(f"{imp_err}; add the class to `cls_lookup_map={{'{name}': Class}}` argument") return curr_class + + +def clean_tokens(tokens: List[str], remove_tokens: List[str]) -> Tuple[List[str], List[int]]: + """Removes tokens from a list of tokens and returns the cleaned list and the removed token indexes.""" + clean_tokens = [] + removed_token_idxs = [] + for idx, tok in enumerate(tokens): + if tok not in remove_tokens: + clean_tokens += [tok.strip()] + else: + removed_token_idxs += [idx] + return clean_tokens, removed_token_idxs diff --git a/tests/attr/feat/test_step_functions.py b/tests/attr/feat/test_step_functions.py index 86aaac89..4bb29e6d 100644 --- a/tests/attr/feat/test_step_functions.py +++ b/tests/attr/feat/test_step_functions.py @@ -51,3 +51,88 @@ def test_contrast_prob_consistency_enc_dec(saliency_mt_model: EncoderDecoderAttr ) regular_prob = out_regular.sequence_attributions[0].step_scores["probability"] assert all(c == r for c, r in zip(contrast_prob, regular_prob[-len(contrast_prob) :])) + + +def test_contrast_prob_diff_contrast_targets_auto_align_seq2seq(saliency_mt_model: EncoderDecoderAttributionModel): + out = saliency_mt_model.attribute( + ( + " UN peacekeepers, whom arrived in Haiti after the 2010 earthquake, are being blamed for the spread of the" + " disease which started near the troop's encampment." + ), + ( + "I soldati della pace dell'ONU, che sono arrivati ad Haiti dopo il terremoto del 2010, sono stati" + " incolpati per la diffusione della malattia che è iniziata vicino al campo delle truppe." + ), + attributed_fn="contrast_prob_diff", + step_scores=["contrast_prob_diff"], + contrast_targets=( + "Le forze di pace delle Nazioni Unite, arrivate ad Haiti dopo il terremoto del 2010, sono state accusate" + " di aver diffuso la malattia iniziata nei pressi dell'accampamento delle truppe." 
+ ), + contrast_targets_alignments="auto", + ) + contrast_targets = [ + "▁Le → ▁I", + "▁forze → ▁soldati", + "▁di → ▁della", + "▁pace", + "▁delle → ▁dell", + "▁delle → '", + "▁Nazioni → ONU", + ",", + "▁arriva → ▁che", + "te → ▁sono", + "▁arriva → ▁arrivati", + "▁ad", + "▁Haiti", + "▁dopo", + "▁il", + "▁terremoto", + "▁del", + "▁2010,", + "▁sono", + "▁state → ▁stati", + "▁accusa → ▁in", + "te → col", + "▁accusa → pati", + "▁di → ▁per", + "▁aver → ▁la", + "▁diffuso → ▁diffusione", + "▁la → ▁della", + "▁malattia", + "▁iniziata → ▁che", + "▁dell → ▁è", + "▁iniziata", + "▁pressi → ▁vicino", + "▁nei → ▁al", + "acca → ▁campo", + "▁delle", + "▁truppe", + ".", + "", + ] + assert [t.token for t in out[0].target] == contrast_targets + + +def test_contrast_prob_diff_contrast_targets_auto_align_gpt(saliency_gpt2: DecoderOnlyAttributionModel): + out = saliency_gpt2.attribute( + "", + "UN peacekeepers were deployed in the region.", + attributed_fn="contrast_prob_diff", + contrast_targets="<|endoftext|> UN peacekeepers were sent to the war-torn region.", + contrast_targets_alignments="auto", + step_scores=["contrast_prob_diff"], + ) + contrast_targets = [ + "<|endoftext|>", + "ĠUN", + "Ġpeace", + "keepers", + "Ġwere", + "Ġsent → Ġdeployed", + "Ġto → Ġin", + "Ġthe", + "Ġregion", + ".", + ] + assert [t.token for t in out[0].target] == contrast_targets
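
To close the series: the core of the auto-alignment added in the last two patches is a composition of alignment relations. Target tokens are mapped to target words, target words to contrast words via the LaBSE-based word aligner, and contrast words back to contrast tokens, with special-token positions re-inserted afterwards. Below is a minimal sketch of the composition step alone, illustrative only; the library versions additionally validate that the tokenizations match and handle special tokens.

from typing import List, Tuple


def compose(ab: List[Tuple[int, int]], bc: List[Tuple[int, int]]) -> List[Tuple[int, int]]:
    # Compose two alignment relations: pairs (a, b) and (b, c) yield (a, c).
    return sorted({(a, c) for a, b1 in ab for b2, c in bc if b1 == b2})


# target tokens -> target words (two subword pieces belonging to the same word)
tok2word = [(0, 0), (1, 0), (2, 1)]
# target words -> contrast words (as produced by the word-level aligner)
word2word = [(0, 1), (1, 2)]
# contrast words -> contrast tokens
word2tok = [(1, 2), (1, 3), (2, 4)]

print(compose(compose(tok2word, word2word), word2tok))
# [(0, 2), (0, 3), (1, 2), (1, 3), (2, 4)]

In the actual pipeline, the composed token-level alignment is then passed through the same filling step shown earlier, so any target position left uncovered by the aligner falls back to a 1:1 mapping before attribution.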