Dataset columns (each record below lists its values in this order):
  text          stringlengths   38 .. 361k
  type          stringclasses   1 value
  start         int64           156 .. 155k
  end           int64           451 .. 418k
  depth         int64           0 .. 0
  filepath      stringlengths   87 .. 141
  parent_class  null
  class_index   int64           0 .. 305
class FillMaskParameters(BaseInferenceType):
    """Additional inference parameters for Fill Mask"""

    targets: Optional[List[str]] = None
    """When passed, the model will limit the scores to the passed targets instead of looking up
    in the whole vocabulary. If the provided targets are not in the model vocab, they will be
    tokenized and the first resulting token will be used (with a warning, and that might be
    slower).
    """
    top_k: Optional[int] = None
    """When passed, overrides the number of predictions to return."""
class_definition
418
964
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/fill_mask.py
null
200
class FillMaskInput(BaseInferenceType):
    """Inputs for Fill Mask inference"""

    inputs: str
    """The text with masked tokens"""
    parameters: Optional[FillMaskParameters] = None
    """Additional inference parameters for Fill Mask"""
class_definition
978
1,221
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/fill_mask.py
null
201
class FillMaskOutputElement(BaseInferenceType):
    """Outputs of inference for the Fill Mask task"""

    score: float
    """The corresponding probability"""
    sequence: str
    """The corresponding input with the mask token prediction."""
    token: int
    """The predicted token id (to replace the masked one)."""
    token_str: Any
    fill_mask_output_token_str: Optional[str] = None
    """The predicted token (to replace the masked one)."""
class_definition
1,235
1,686
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/fill_mask.py
null
202
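These three types describe one request/response round trip for the Fill Mask task. A minimal sketch of how they compose, assuming the classes behave as the generated dataclasses they come from (the JSON response below is made up for illustration):

payload = FillMaskInput(
    inputs="Paris is the [MASK] of France.",
    parameters=FillMaskParameters(top_k=2),
)

# Server responses can be parsed permissively via BaseInferenceType (defined later in this dataset):
raw = b'[{"score": 0.97, "sequence": "Paris is the capital of France.", "token": 3007, "token_str": "capital"}]'
outputs = FillMaskOutputElement.parse_obj_as_list(raw)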
class ImageToTextGenerationParameters(BaseInferenceType):
    """Parametrization of the text generation process"""

    do_sample: Optional[bool] = None
    """Whether to use sampling instead of greedy decoding when generating new tokens."""
    early_stopping: Optional[Union[bool, "ImageToTextEarlyStoppingEnum"]] = None
    """Controls the stopping condition for beam-based methods."""
    epsilon_cutoff: Optional[float] = None
    """If set to float strictly between 0 and 1, only tokens with a conditional probability
    greater than epsilon_cutoff will be sampled. In the paper, suggested values range from
    3e-4 to 9e-4, depending on the size of the model. See [Truncation Sampling as Language
    Model Desmoothing](https://hf.co/papers/2210.15191) for more details.
    """
    eta_cutoff: Optional[float] = None
    """Eta sampling is a hybrid of locally typical sampling and epsilon sampling. If set to
    float strictly between 0 and 1, a token is only considered if it is greater than either
    eta_cutoff or sqrt(eta_cutoff) * exp(-entropy(softmax(next_token_logits))). The latter
    term is intuitively the expected next token probability, scaled by sqrt(eta_cutoff). In
    the paper, suggested values range from 3e-4 to 2e-3, depending on the size of the model.
    See [Truncation Sampling as Language Model Desmoothing](https://hf.co/papers/2210.15191)
    for more details.
    """
    max_length: Optional[int] = None
    """The maximum length (in tokens) of the generated text, including the input."""
    max_new_tokens: Optional[int] = None
    """The maximum number of tokens to generate. Takes precedence over max_length."""
    min_length: Optional[int] = None
    """The minimum length (in tokens) of the generated text, including the input."""
    min_new_tokens: Optional[int] = None
    """The minimum number of tokens to generate. Takes precedence over min_length."""
    num_beam_groups: Optional[int] = None
    """Number of groups to divide num_beams into in order to ensure diversity among different
    groups of beams. See [this paper](https://hf.co/papers/1610.02424) for more details.
    """
    num_beams: Optional[int] = None
    """Number of beams to use for beam search."""
    penalty_alpha: Optional[float] = None
    """The value balances the model confidence and the degeneration penalty in contrastive
    search decoding.
    """
    temperature: Optional[float] = None
    """The value used to modulate the next token probabilities."""
    top_k: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
    top_p: Optional[float] = None
    """If set to float < 1, only the smallest set of most probable tokens with probabilities
    that add up to top_p or higher are kept for generation.
    """
    typical_p: Optional[float] = None
    """Local typicality measures how similar the conditional probability of predicting a target
    token next is to the expected conditional probability of predicting a random token next,
    given the partial text already generated. If set to float < 1, the smallest set of the
    most locally typical tokens with probabilities that add up to typical_p or higher are kept
    for generation. See [this paper](https://hf.co/papers/2202.00666) for more details.
    """
    use_cache: Optional[bool] = None
    """Whether the model should use the past last key/values attentions to speed up decoding"""
class_definition
478
3,933
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_text.py
null
203
class ImageToTextParameters(BaseInferenceType):
    """Additional inference parameters for Image To Text"""

    max_new_tokens: Optional[int] = None
    """The maximum number of tokens to generate."""
    # Will be deprecated in the future when the renaming to `generation_parameters` is
    # implemented in transformers
    generate_kwargs: Optional[ImageToTextGenerationParameters] = None
    """Parametrization of the text generation process"""
class_definition
3,947
4,390
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_text.py
null
204
class ImageToTextInput(BaseInferenceType):
    """Inputs for Image To Text inference"""

    inputs: Any
    """The input image data"""
    parameters: Optional[ImageToTextParameters] = None
    """Additional inference parameters for Image To Text"""
class_definition
4,404
4,654
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_text.py
null
205
class ImageToTextOutput(BaseInferenceType):
    """Outputs of inference for the Image To Text task"""

    generated_text: Any
    image_to_text_output_generated_text: Optional[str] = None
    """The generated text."""
class_definition
4,668
4,886
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_text.py
null
206
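The parameters nest one level deep: task-level options live on ImageToTextParameters, while decoding options are threaded through generate_kwargs. A minimal sketch of a composed request (the image placeholder and beam values are illustrative):

request = ImageToTextInput(
    inputs="<base64-encoded image>",  # illustrative placeholder; actual encoding depends on the client
    parameters=ImageToTextParameters(
        max_new_tokens=30,
        generate_kwargs=ImageToTextGenerationParameters(num_beams=4, early_stopping=True),
    ),
)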
class AudioClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Audio Classification"""

    function_to_apply: Optional["AudioClassificationOutputTransform"] = None
    """The function to apply to the model outputs in order to retrieve the scores."""
    top_k: Optional[int] = None
    """When specified, limits the output to the top K most probable classes."""
class_definition
493
891
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/audio_classification.py
null
207
class AudioClassificationInput(BaseInferenceType):
    """Inputs for Audio Classification inference"""

    inputs: str
    """The input audio data as a base64-encoded string. If no `parameters` are provided, you can
    also provide the audio data as a raw bytes payload.
    """
    parameters: Optional[AudioClassificationParameters] = None
    """Additional inference parameters for Audio Classification"""
class_definition
905
1,315
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/audio_classification.py
null
208
class AudioClassificationOutputElement(BaseInferenceType):
    """Outputs for Audio Classification inference"""

    label: str
    """The predicted class label."""
    score: float
    """The corresponding probability."""
class_definition
1,329
1,551
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/audio_classification.py
null
209
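Classification outputs arrive as a JSON list with one element per class label; they can be parsed permissively through BaseInferenceType (shown near the end of this dataset). A sketch with a made-up response:

raw = b'[{"label": "dog", "score": 0.81}, {"label": "cat", "score": 0.19}]'
predictions = AudioClassificationOutputElement.parse_obj_as_list(raw)
best = max(predictions, key=lambda p: p.score)  # -> the "dog" element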
class VideoClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Video Classification"""

    frame_sampling_rate: Optional[int] = None
    """The sampling rate used to select frames from the video."""
    function_to_apply: Optional["VideoClassificationOutputTransform"] = None
    """The function to apply to the model outputs in order to retrieve the scores."""
    num_frames: Optional[int] = None
    """The number of sampled frames to consider for classification."""
    top_k: Optional[int] = None
    """When specified, limits the output to the top K most probable classes."""
class_definition
498
1,116
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/video_classification.py
null
210
class VideoClassificationInput(BaseInferenceType):
    """Inputs for Video Classification inference"""

    inputs: Any
    """The input video data"""
    parameters: Optional[VideoClassificationParameters] = None
    """Additional inference parameters for Video Classification"""
class_definition
1,130
1,410
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/video_classification.py
null
211
class VideoClassificationOutputElement(BaseInferenceType):
    """Outputs of inference for the Video Classification task"""

    label: str
    """The predicted class label."""
    score: float
    """The corresponding probability."""
class_definition
1,424
1,658
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/video_classification.py
null
212
class ImageToImageTargetSize(BaseInferenceType):
    """The size in pixel of the output image."""

    height: int
    width: int
class_definition
418
547
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_image.py
null
213
class ImageToImageParameters(BaseInferenceType):
    """Additional inference parameters for Image To Image"""

    guidance_scale: Optional[float] = None
    """For diffusion models. A higher guidance scale value encourages the model to generate
    images closely linked to the text prompt at the expense of lower image quality.
    """
    negative_prompt: Optional[List[str]] = None
    """One or several prompts to guide what NOT to include in image generation."""
    num_inference_steps: Optional[int] = None
    """For diffusion models. The number of denoising steps. More denoising steps usually lead to
    a higher quality image at the expense of slower inference.
    """
    target_size: Optional[ImageToImageTargetSize] = None
    """The size in pixel of the output image."""
class_definition
561
1,348
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_image.py
null
214
class ImageToImageInput(BaseInferenceType):
    """Inputs for Image To Image inference"""

    inputs: str
    """The input image data as a base64-encoded string. If no `parameters` are provided, you can
    also provide the image data as a raw bytes payload.
    """
    parameters: Optional[ImageToImageParameters] = None
    """Additional inference parameters for Image To Image"""
class_definition
1,362
1,746
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_image.py
null
215
class ImageToImageOutput(BaseInferenceType):
    """Outputs of inference for the Image To Image task"""

    image: Any
    """The output image returned as raw bytes in the payload."""
class_definition
1,760
1,944
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/image_to_image.py
null
216
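Because ImageToImageInput takes a base64-encoded string when parameters are present, building a request is mostly an encoding step. A sketch, with an illustrative file path:

import base64

with open("input.png", "rb") as f:  # illustrative path
    encoded = base64.b64encode(f.read()).decode()

request = ImageToImageInput(
    inputs=encoded,
    parameters=ImageToImageParameters(
        num_inference_steps=25,
        target_size=ImageToImageTargetSize(height=512, width=512),
    ),
)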
class SentenceSimilarityInputData(BaseInferenceType):
    sentences: List[str]
    """A list of strings which will be compared against the source_sentence."""
    source_sentence: str
    """The string that you wish to compare the other strings with. This can be a phrase,
    sentence, or longer passage, depending on the model being used.
    """
class_definition
424
772
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/sentence_similarity.py
null
217
class SentenceSimilarityInput(BaseInferenceType):
    """Inputs for Sentence similarity inference"""

    inputs: SentenceSimilarityInputData
    parameters: Optional[Dict[str, Any]] = None
    """Additional inference parameters for Sentence Similarity"""
class_definition
786
1,041
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/sentence_similarity.py
null
218
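A minimal sketch of a Sentence Similarity request built from these two types (the sentences are illustrative):

request = SentenceSimilarityInput(
    inputs=SentenceSimilarityInputData(
        source_sentence="That is a happy person",
        sentences=["That is a happy dog", "Today is a sunny day"],
    )
)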
class FeatureExtractionInput(BaseInferenceType):
    """Feature Extraction Input.

    Auto-generated from TEI specs.
    For more details, check out
    https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tei-import.ts.
    """

    inputs: str
    """The text to embed."""
    normalize: Optional[bool] = None
    prompt_name: Optional[str] = None
    """The name of the prompt that should be used for encoding. If not set, no prompt will be
    applied. Must be a key in the `sentence-transformers` configuration `prompts` dictionary.
    For example if ``prompt_name`` is "query" and ``prompts`` is {"query": "query: ", ...},
    then the sentence "What is the capital of France?" will be encoded as "query: What is the
    capital of France?" because the prompt text will be prepended before any text to encode.
    """
    truncate: Optional[bool] = None
    truncation_direction: Optional["FeatureExtractionInputTruncationDirection"] = None
class_definition
487
1,489
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/feature_extraction.py
null
219
class DepthEstimationInput(BaseInferenceType):
    """Inputs for Depth Estimation inference"""

    inputs: Any
    """The input image data"""
    parameters: Optional[Dict[str, Any]] = None
    """Additional inference parameters for Depth Estimation"""
class_definition
418
671
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/depth_estimation.py
null
220
class DepthEstimationOutput(BaseInferenceType):
    """Outputs of inference for the Depth Estimation task"""

    depth: Any
    """The predicted depth as an image"""
    predicted_depth: Any
    """The predicted depth as a tensor"""
class_definition
685
918
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/depth_estimation.py
null
221
class TextToSpeechGenerationParameters(BaseInferenceType):
    """Parametrization of the text generation process"""

    do_sample: Optional[bool] = None
    """Whether to use sampling instead of greedy decoding when generating new tokens."""
    early_stopping: Optional[Union[bool, "TextToSpeechEarlyStoppingEnum"]] = None
    """Controls the stopping condition for beam-based methods."""
    epsilon_cutoff: Optional[float] = None
    """If set to float strictly between 0 and 1, only tokens with a conditional probability
    greater than epsilon_cutoff will be sampled. In the paper, suggested values range from
    3e-4 to 9e-4, depending on the size of the model. See [Truncation Sampling as Language
    Model Desmoothing](https://hf.co/papers/2210.15191) for more details.
    """
    eta_cutoff: Optional[float] = None
    """Eta sampling is a hybrid of locally typical sampling and epsilon sampling. If set to
    float strictly between 0 and 1, a token is only considered if it is greater than either
    eta_cutoff or sqrt(eta_cutoff) * exp(-entropy(softmax(next_token_logits))). The latter
    term is intuitively the expected next token probability, scaled by sqrt(eta_cutoff). In
    the paper, suggested values range from 3e-4 to 2e-3, depending on the size of the model.
    See [Truncation Sampling as Language Model Desmoothing](https://hf.co/papers/2210.15191)
    for more details.
    """
    max_length: Optional[int] = None
    """The maximum length (in tokens) of the generated text, including the input."""
    max_new_tokens: Optional[int] = None
    """The maximum number of tokens to generate. Takes precedence over max_length."""
    min_length: Optional[int] = None
    """The minimum length (in tokens) of the generated text, including the input."""
    min_new_tokens: Optional[int] = None
    """The minimum number of tokens to generate. Takes precedence over min_length."""
    num_beam_groups: Optional[int] = None
    """Number of groups to divide num_beams into in order to ensure diversity among different
    groups of beams. See [this paper](https://hf.co/papers/1610.02424) for more details.
    """
    num_beams: Optional[int] = None
    """Number of beams to use for beam search."""
    penalty_alpha: Optional[float] = None
    """The value balances the model confidence and the degeneration penalty in contrastive
    search decoding.
    """
    temperature: Optional[float] = None
    """The value used to modulate the next token probabilities."""
    top_k: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
    top_p: Optional[float] = None
    """If set to float < 1, only the smallest set of most probable tokens with probabilities
    that add up to top_p or higher are kept for generation.
    """
    typical_p: Optional[float] = None
    """Local typicality measures how similar the conditional probability of predicting a target
    token next is to the expected conditional probability of predicting a random token next,
    given the partial text already generated. If set to float < 1, the smallest set of the
    most locally typical tokens with probabilities that add up to typical_p or higher are kept
    for generation. See [this paper](https://hf.co/papers/2202.00666) for more details.
    """
    use_cache: Optional[bool] = None
    """Whether the model should use the past last key/values attentions to speed up decoding"""
class_definition
479
3,936
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_to_speech.py
null
222
class TextToSpeechParameters(BaseInferenceType):
    """Additional inference parameters for Text To Speech"""

    # Will be deprecated in the future when the renaming to `generation_parameters` is
    # implemented in transformers
    generate_kwargs: Optional[TextToSpeechGenerationParameters] = None
    """Parametrization of the text generation process"""
class_definition
3,950
4,303
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_to_speech.py
null
223
class TextToSpeechInput(BaseInferenceType):
    """Inputs for Text To Speech inference"""

    inputs: str
    """The input text data"""
    parameters: Optional[TextToSpeechParameters] = None
    """Additional inference parameters for Text To Speech"""
class_definition
4,317
4,570
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_to_speech.py
null
224
class TextToSpeechOutput(BaseInferenceType):
    """Outputs for Text to Speech inference

    Outputs of inference for the Text To Audio task
    """

    audio: Any
    """The generated audio waveform."""
    sampling_rate: Any
    text_to_speech_output_sampling_rate: Optional[float] = None
    """The sampling rate of the generated audio waveform."""
class_definition
4,584
4,936
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_to_speech.py
null
225
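A sketch of a Text To Speech request that threads sampling options through generate_kwargs (the temperature value is illustrative):

request = TextToSpeechInput(
    inputs="Hello world",
    parameters=TextToSpeechParameters(
        generate_kwargs=TextToSpeechGenerationParameters(do_sample=True, temperature=0.8),
    ),
)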
class ZeroShotClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Zero Shot Classification"""

    candidate_labels: List[str]
    """The set of possible class labels to classify the text into."""
    hypothesis_template: Optional[str] = None
    """The sentence used in conjunction with `candidate_labels` to attempt the text
    classification by replacing the placeholder with the candidate labels.
    """
    multi_label: Optional[bool] = None
    """Whether multiple candidate labels can be true. If false, the scores are normalized such
    that the sum of the label likelihoods for each sequence is 1. If true, the labels are
    considered independent and probabilities are normalized for each candidate.
    """
class_definition
413
1,170
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_classification.py
null
226
class ZeroShotClassificationInput(BaseInferenceType):
    """Inputs for Zero Shot Classification inference"""

    inputs: str
    """The text to classify"""
    parameters: ZeroShotClassificationParameters
    """Additional inference parameters for Zero Shot Classification"""
class_definition
1,184
1,461
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_classification.py
null
227
class ZeroShotClassificationOutputElement(BaseInferenceType):
    """Outputs of inference for the Zero Shot Classification task"""

    label: str
    """The predicted class label."""
    score: float
    """The corresponding probability."""
class_definition
1,475
1,716
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_classification.py
null
228
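Note that `parameters` is required here, unlike most tasks above, because the candidate labels define the classification problem. A sketch (labels and text are illustrative):

request = ZeroShotClassificationInput(
    inputs="I have a problem with my iphone that needs to be resolved asap!!",
    parameters=ZeroShotClassificationParameters(
        candidate_labels=["urgent", "not urgent", "phone", "tablet"],
        multi_label=True,  # treat labels as independent instead of normalizing scores to sum to 1
    ),
)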
class TextClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Text Classification"""

    function_to_apply: Optional["TextClassificationOutputTransform"] = None
    """The function to apply to the model outputs in order to retrieve the scores."""
    top_k: Optional[int] = None
    """When specified, limits the output to the top K most probable classes."""
class_definition
492
887
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_classification.py
null
229
class TextClassificationInput(BaseInferenceType):
    """Inputs for Text Classification inference"""

    inputs: str
    """The text to classify"""
    parameters: Optional[TextClassificationParameters] = None
    """Additional inference parameters for Text Classification"""
class_definition
901
1,177
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_classification.py
null
230
class TextClassificationOutputElement(BaseInferenceType):
    """Outputs of inference for the Text Classification task"""

    label: str
    """The predicted class label."""
    score: float
    """The corresponding probability."""
class_definition
1,191
1,423
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_classification.py
null
231
class DocumentQuestionAnsweringInputData(BaseInferenceType):
    """One (document, question) pair to answer"""

    image: Any
    """The image on which the question is asked"""
    question: str
    """A question to ask of the document"""
class_definition
425
664
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/document_question_answering.py
null
232
class DocumentQuestionAnsweringParameters(BaseInferenceType):
    """Additional inference parameters for Document Question Answering"""

    doc_stride: Optional[int] = None
    """If the words in the document are too long to fit with the question for the model, it will
    be split into several chunks with some overlap. This argument controls the size of that
    overlap.
    """
    handle_impossible_answer: Optional[bool] = None
    """Whether to accept impossible as an answer"""
    lang: Optional[str] = None
    """Language to use while running OCR. Defaults to english."""
    max_answer_len: Optional[int] = None
    """The maximum length of predicted answers (e.g., only answers with a shorter length are
    considered).
    """
    max_question_len: Optional[int] = None
    """The maximum length of the question after tokenization. It will be truncated if needed."""
    max_seq_len: Optional[int] = None
    """The maximum length of the total sentence (context + question) in tokens of each chunk
    passed to the model. The context will be split into several chunks (using doc_stride as
    overlap) if needed.
    """
    top_k: Optional[int] = None
    """The number of answers to return (will be chosen by order of likelihood). Can return fewer
    than top_k answers if there are not enough options available within the context.
    """
    word_boxes: Optional[List[Union[List[float], str]]] = None
    """A list of words and bounding boxes (normalized 0->1000). If provided, the inference will
    skip the OCR step and use the provided bounding boxes instead.
    """
class_definition
678
2,267
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/document_question_answering.py
null
233
class DocumentQuestionAnsweringInput(BaseInferenceType):
    """Inputs for Document Question Answering inference"""

    inputs: DocumentQuestionAnsweringInputData
    """One (document, question) pair to answer"""
    parameters: Optional[DocumentQuestionAnsweringParameters] = None
    """Additional inference parameters for Document Question Answering"""
class_definition
2,281
2,637
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/document_question_answering.py
null
234
class DocumentQuestionAnsweringOutputElement(BaseInferenceType):
    """Outputs of inference for the Document Question Answering task"""

    answer: str
    """The answer to the question."""
    end: int
    """The end word index of the answer (in the OCR’d version of the input or provided word
    boxes).
    """
    score: float
    """The probability associated to the answer."""
    start: int
    """The start word index of the answer (in the OCR’d version of the input or provided word
    boxes).
    """
class_definition
2,651
3,169
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/document_question_answering.py
null
235
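A sketch of a Document Question Answering request; the image placeholder is illustrative, and word_boxes is left unset so the OCR step runs server-side:

request = DocumentQuestionAnsweringInput(
    inputs=DocumentQuestionAnsweringInputData(
        image="<base64-encoded document scan>",  # illustrative placeholder
        question="What is the invoice number?",
    ),
    parameters=DocumentQuestionAnsweringParameters(top_k=1),
)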
class TranslationParameters(BaseInferenceType):
    """Additional inference parameters for Translation"""

    clean_up_tokenization_spaces: Optional[bool] = None
    """Whether to clean up the potential extra spaces in the text output."""
    generate_parameters: Optional[Dict[str, Any]] = None
    """Additional parametrization of the text generation algorithm."""
    src_lang: Optional[str] = None
    """The source language of the text. Required for models that can translate from multiple
    languages.
    """
    tgt_lang: Optional[str] = None
    """Target language to translate to. Required for models that can translate to multiple
    languages.
    """
    truncation: Optional["TranslationTruncationStrategy"] = None
    """The truncation strategy to use."""
class_definition
534
1,308
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/translation.py
null
236
class TranslationInput(BaseInferenceType):
    """Inputs for Translation inference"""

    inputs: str
    """The text to translate."""
    parameters: Optional[TranslationParameters] = None
    """Additional inference parameters for Translation"""
class_definition
1,322
1,570
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/translation.py
null
237
class TranslationOutput(BaseInferenceType):
    """Outputs of inference for the Translation task"""

    translation_text: str
    """The translated text."""
class_definition
1,584
1,741
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/translation.py
null
238
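For multilingual checkpoints, the source and target languages must be spelled out. A sketch (the language-code format is model-dependent; plain ISO codes are an assumption here):

request = TranslationInput(
    inputs="Hello, how are you?",
    parameters=TranslationParameters(src_lang="en", tgt_lang="fr"),  # code format varies per model
)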
class TextGenerationInputGrammarType(BaseInferenceType):
    type: "TypeEnum"
    value: Any
    """A string that represents a [JSON Schema](https://json-schema.org/). JSON Schema is a
    declarative language that allows you to annotate JSON documents with types and
    descriptions.
    """
class_definition
465
754
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
239
class TextGenerationInputGenerateParameters(BaseInferenceType):
    adapter_id: Optional[str] = None
    """Lora adapter id"""
    best_of: Optional[int] = None
    """Generate best_of sequences and return the one with the highest token logprobs."""
    decoder_input_details: Optional[bool] = None
    """Whether to return decoder input token logprobs and ids."""
    details: Optional[bool] = None
    """Whether to return generation details."""
    do_sample: Optional[bool] = None
    """Activate logits sampling."""
    frequency_penalty: Optional[float] = None
    """The parameter for frequency penalty. 1.0 means no penalty. Penalize new tokens based on
    their existing frequency in the text so far, decreasing the model's likelihood to repeat
    the same line verbatim.
    """
    grammar: Optional[TextGenerationInputGrammarType] = None
    max_new_tokens: Optional[int] = None
    """Maximum number of tokens to generate."""
    repetition_penalty: Optional[float] = None
    """The parameter for repetition penalty. 1.0 means no penalty. See [this
    paper](https://arxiv.org/pdf/1909.05858.pdf) for more details.
    """
    return_full_text: Optional[bool] = None
    """Whether to prepend the prompt to the generated text"""
    seed: Optional[int] = None
    """Random sampling seed."""
    stop: Optional[List[str]] = None
    """Stop generating tokens if a member of `stop` is generated."""
    temperature: Optional[float] = None
    """The value used to modulate the logits distribution."""
    top_k: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
    top_n_tokens: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-n-filtering."""
    top_p: Optional[float] = None
    """Top-p value for nucleus sampling."""
    truncate: Optional[int] = None
    """Truncate inputs tokens to the given size."""
    typical_p: Optional[float] = None
    """Typical Decoding mass. See [Typical Decoding for Natural Language
    Generation](https://arxiv.org/abs/2202.00666) for more information.
    """
    watermark: Optional[bool] = None
    """Watermarking with [A Watermark for Large Language
    Models](https://arxiv.org/abs/2301.10226).
    """
class_definition
768
3,040
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
240
class TextGenerationInput(BaseInferenceType):
    """Text Generation Input.

    Auto-generated from TGI specs.
    For more details, check out
    https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
    """

    inputs: str
    parameters: Optional[TextGenerationInputGenerateParameters] = None
    stream: Optional[bool] = None
class_definition
3,054
3,434
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
241
class TextGenerationOutputPrefillToken(BaseInferenceType):
    id: int
    logprob: float
    text: str
class_definition
3,533
3,636
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
242
class TextGenerationOutputToken(BaseInferenceType):
    id: int
    logprob: float
    special: bool
    text: str
class_definition
3,650
3,764
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
243
class TextGenerationOutputBestOfSequence(BaseInferenceType):
    finish_reason: "TextGenerationOutputFinishReason"
    generated_text: str
    generated_tokens: int
    prefill: List[TextGenerationOutputPrefillToken]
    tokens: List[TextGenerationOutputToken]
    seed: Optional[int] = None
    top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
class_definition
3,778
4,140
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
244
class TextGenerationOutputDetails(BaseInferenceType):
    finish_reason: "TextGenerationOutputFinishReason"
    generated_tokens: int
    prefill: List[TextGenerationOutputPrefillToken]
    tokens: List[TextGenerationOutputToken]
    best_of_sequences: Optional[List[TextGenerationOutputBestOfSequence]] = None
    seed: Optional[int] = None
    top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
class_definition
4,154
4,566
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
245
class TextGenerationOutput(BaseInferenceType):
    """Text Generation Output.

    Auto-generated from TGI specs.
    For more details, check out
    https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
    """

    generated_text: str
    details: Optional[TextGenerationOutputDetails] = None
class_definition
4,580
4,923
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
246
class TextGenerationStreamOutputStreamDetails(BaseInferenceType):
    finish_reason: "TextGenerationOutputFinishReason"
    generated_tokens: int
    input_length: int
    seed: Optional[int] = None
class_definition
4,937
5,135
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
247
class TextGenerationStreamOutputToken(BaseInferenceType):
    id: int
    logprob: float
    special: bool
    text: str
class_definition
5,149
5,269
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
248
class TextGenerationStreamOutput(BaseInferenceType):
    """Text Generation Stream Output.

    Auto-generated from TGI specs.
    For more details, check out
    https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
    """

    index: int
    token: TextGenerationStreamOutputToken
    details: Optional[TextGenerationStreamOutputStreamDetails] = None
    generated_text: Optional[str] = None
    top_tokens: Optional[List[TextGenerationStreamOutputToken]] = None
class_definition
5,283
5,797
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/text_generation.py
null
249
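Each event of a TGI stream is a standalone JSON object that maps onto TextGenerationStreamOutput. A sketch parsing one made-up event:

raw = b'{"index": 0, "token": {"id": 264, "logprob": -0.41, "special": false, "text": " the"}}'
event = TextGenerationStreamOutput.parse_obj_as_instance(raw)
event.token.text      # " the" -- nested dict parsed into TextGenerationStreamOutputToken
event.generated_text  # None on intermediate events; set on the final one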
class ZeroShotObjectDetectionParameters(BaseInferenceType):
    """Additional inference parameters for Zero Shot Object Detection"""

    candidate_labels: List[str]
    """The candidate labels for this image"""
class_definition
403
614
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py
null
250
class ZeroShotObjectDetectionInput(BaseInferenceType):
    """Inputs for Zero Shot Object Detection inference"""

    inputs: str
    """The input image data as a base64-encoded string."""
    parameters: ZeroShotObjectDetectionParameters
    """Additional inference parameters for Zero Shot Object Detection"""
class_definition
628
939
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py
null
251
class ZeroShotObjectDetectionBoundingBox(BaseInferenceType):
    """The predicted bounding box. Coordinates are relative to the top left corner of the input
    image.
    """

    xmax: int
    xmin: int
    ymax: int
    ymin: int
class_definition
953
1,185
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py
null
252
class ZeroShotObjectDetectionOutputElement(BaseInferenceType):
    """Outputs of inference for the Zero Shot Object Detection task"""

    box: ZeroShotObjectDetectionBoundingBox
    """The predicted bounding box. Coordinates are relative to the top left corner of the input
    image.
    """
    label: str
    """A candidate label"""
    score: float
    """The associated score / probability"""
class_definition
1,199
1,597
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_object_detection.py
null
253
class AutomaticSpeechRecognitionGenerationParameters(BaseInferenceType):
    """Parametrization of the text generation process"""

    do_sample: Optional[bool] = None
    """Whether to use sampling instead of greedy decoding when generating new tokens."""
    early_stopping: Optional[Union[bool, "AutomaticSpeechRecognitionEarlyStoppingEnum"]] = None
    """Controls the stopping condition for beam-based methods."""
    epsilon_cutoff: Optional[float] = None
    """If set to float strictly between 0 and 1, only tokens with a conditional probability
    greater than epsilon_cutoff will be sampled. In the paper, suggested values range from
    3e-4 to 9e-4, depending on the size of the model. See [Truncation Sampling as Language
    Model Desmoothing](https://hf.co/papers/2210.15191) for more details.
    """
    eta_cutoff: Optional[float] = None
    """Eta sampling is a hybrid of locally typical sampling and epsilon sampling. If set to
    float strictly between 0 and 1, a token is only considered if it is greater than either
    eta_cutoff or sqrt(eta_cutoff) * exp(-entropy(softmax(next_token_logits))). The latter
    term is intuitively the expected next token probability, scaled by sqrt(eta_cutoff). In
    the paper, suggested values range from 3e-4 to 2e-3, depending on the size of the model.
    See [Truncation Sampling as Language Model Desmoothing](https://hf.co/papers/2210.15191)
    for more details.
    """
    max_length: Optional[int] = None
    """The maximum length (in tokens) of the generated text, including the input."""
    max_new_tokens: Optional[int] = None
    """The maximum number of tokens to generate. Takes precedence over max_length."""
    min_length: Optional[int] = None
    """The minimum length (in tokens) of the generated text, including the input."""
    min_new_tokens: Optional[int] = None
    """The minimum number of tokens to generate. Takes precedence over min_length."""
    num_beam_groups: Optional[int] = None
    """Number of groups to divide num_beams into in order to ensure diversity among different
    groups of beams. See [this paper](https://hf.co/papers/1610.02424) for more details.
    """
    num_beams: Optional[int] = None
    """Number of beams to use for beam search."""
    penalty_alpha: Optional[float] = None
    """The value balances the model confidence and the degeneration penalty in contrastive
    search decoding.
    """
    temperature: Optional[float] = None
    """The value used to modulate the next token probabilities."""
    top_k: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
    top_p: Optional[float] = None
    """If set to float < 1, only the smallest set of most probable tokens with probabilities
    that add up to top_p or higher are kept for generation.
    """
    typical_p: Optional[float] = None
    """Local typicality measures how similar the conditional probability of predicting a target
    token next is to the expected conditional probability of predicting a random token next,
    given the partial text already generated. If set to float < 1, the smallest set of the
    most locally typical tokens with probabilities that add up to typical_p or higher are kept
    for generation. See [this paper](https://hf.co/papers/2202.00666) for more details.
    """
    use_cache: Optional[bool] = None
    """Whether the model should use the past last key/values attentions to speed up decoding"""
class_definition
494
3,979
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py
null
254
class AutomaticSpeechRecognitionParameters(BaseInferenceType):
    """Additional inference parameters for Automatic Speech Recognition"""

    return_timestamps: Optional[bool] = None
    """Whether to output corresponding timestamps with the generated text"""
    # Will be deprecated in the future when the renaming to `generation_parameters` is
    # implemented in transformers
    generate_kwargs: Optional[AutomaticSpeechRecognitionGenerationParameters] = None
    """Parametrization of the text generation process"""
class_definition
3,993
4,510
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py
null
255
class AutomaticSpeechRecognitionInput(BaseInferenceType):
    """Inputs for Automatic Speech Recognition inference"""

    inputs: str
    """The input audio data as a base64-encoded string. If no `parameters` are provided, you can
    also provide the audio data as a raw bytes payload.
    """
    parameters: Optional[AutomaticSpeechRecognitionParameters] = None
    """Additional inference parameters for Automatic Speech Recognition"""
class_definition
4,524
4,964
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py
null
256
class AutomaticSpeechRecognitionOutputChunk(BaseInferenceType):
    text: str
    """A chunk of text identified by the model"""
    timestamps: List[float]
    """The start and end timestamps corresponding with the text"""
class_definition
4,978
5,200
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py
null
257
class AutomaticSpeechRecognitionOutput(BaseInferenceType):
    """Outputs of inference for the Automatic Speech Recognition task"""

    text: str
    """The recognized text."""
    chunks: Optional[List[AutomaticSpeechRecognitionOutputChunk]] = None
    """When returnTimestamps is enabled, chunks contains a list of audio chunks identified by
    the model.
    """
class_definition
5,214
5,581
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/automatic_speech_recognition.py
null
258
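A sketch of the request/response pair when timestamps are requested (the audio placeholder and response values are made up):

request = AutomaticSpeechRecognitionInput(
    inputs="<base64-encoded audio>",  # illustrative placeholder
    parameters=AutomaticSpeechRecognitionParameters(return_timestamps=True),
)

raw = b'{"text": "hello world", "chunks": [{"text": "hello world", "timestamps": [0.0, 1.2]}]}'
result = AutomaticSpeechRecognitionOutput.parse_obj_as_instance(raw)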
class SummarizationParameters(BaseInferenceType):
    """Additional inference parameters for summarization."""

    clean_up_tokenization_spaces: Optional[bool] = None
    """Whether to clean up the potential extra spaces in the text output."""
    generate_parameters: Optional[Dict[str, Any]] = None
    """Additional parametrization of the text generation algorithm."""
    truncation: Optional["SummarizationTruncationStrategy"] = None
    """The truncation strategy to use."""
class_definition
536
1,017
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/summarization.py
null
259
class SummarizationInput(BaseInferenceType):
    """Inputs for Summarization inference"""

    inputs: str
    """The input text to summarize."""
    parameters: Optional[SummarizationParameters] = None
    """Additional inference parameters for summarization."""
class_definition
1,031
1,294
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/summarization.py
null
260
class SummarizationOutput(BaseInferenceType):
    """Outputs of inference for the Summarization task"""

    summary_text: str
    """The summarized text."""
class_definition
1,308
1,465
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/summarization.py
null
261
class ZeroShotImageClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Zero Shot Image Classification"""

    candidate_labels: List[str]
    """The candidate labels for this image"""
    hypothesis_template: Optional[str] = None
    """The sentence used in conjunction with `candidate_labels` to attempt the image
    classification by replacing the placeholder with the candidate labels.
    """
class_definition
413
846
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_image_classification.py
null
262
class ZeroShotImageClassificationInput(BaseInferenceType):
    """Inputs for Zero Shot Image Classification inference"""

    inputs: str
    """The input image data to classify as a base64-encoded string."""
    parameters: ZeroShotImageClassificationParameters
    """Additional inference parameters for Zero Shot Image Classification"""
class_definition
860
1,199
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_image_classification.py
null
263
class ZeroShotImageClassificationOutputElement(BaseInferenceType):
    """Outputs of inference for the Zero Shot Image Classification task"""

    label: str
    """The predicted class label."""
    score: float
    """The corresponding probability."""
class_definition
1,213
1,465
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/zero_shot_image_classification.py
null
264
class ObjectDetectionParameters(BaseInferenceType):
    """Additional inference parameters for Object Detection"""

    threshold: Optional[float] = None
    """The probability necessary to make a prediction."""
class_definition
407
618
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/object_detection.py
null
265
class ObjectDetectionInput(BaseInferenceType):
    """Inputs for Object Detection inference"""

    inputs: str
    """The input image data as a base64-encoded string. If no `parameters` are provided, you can
    also provide the image data as a raw bytes payload.
    """
    parameters: Optional[ObjectDetectionParameters] = None
    """Additional inference parameters for Object Detection"""
class_definition
632
1,026
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/object_detection.py
null
266
class ObjectDetectionBoundingBox(BaseInferenceType):
    """The predicted bounding box. Coordinates are relative to the top left corner of the input
    image.
    """

    xmax: int
    """The x-coordinate of the bottom-right corner of the bounding box."""
    xmin: int
    """The x-coordinate of the top-left corner of the bounding box."""
    ymax: int
    """The y-coordinate of the bottom-right corner of the bounding box."""
    ymin: int
    """The y-coordinate of the top-left corner of the bounding box."""
class_definition
1,040
1,556
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/object_detection.py
null
267
class ObjectDetectionOutputElement(BaseInferenceType):
    """Outputs of inference for the Object Detection task"""

    box: ObjectDetectionBoundingBox
    """The predicted bounding box. Coordinates are relative to the top left corner of the input
    image.
    """
    label: str
    """The predicted label for the bounding box."""
    score: float
    """The associated score / probability."""
class_definition
1,570
1,967
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/object_detection.py
null
268
class TableQuestionAnsweringInputData(BaseInferenceType):
    """One (table, question) pair to answer"""

    question: str
    """The question to be answered about the table"""
    table: Dict[str, List[str]]
    """The table to serve as context for the questions"""
class_definition
428
695
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/table_question_answering.py
null
269
class TableQuestionAnsweringParameters(BaseInferenceType):
    """Additional inference parameters for Table Question Answering"""

    padding: Optional["Padding"] = None
    """Activates and controls padding."""
    sequential: Optional[bool] = None
    """Whether to do inference sequentially or as a batch. Batching is faster, but models like
    SQA require the inference to be done sequentially to extract relations within sequences,
    given their conversational nature.
    """
    truncation: Optional[bool] = None
    """Activates and controls truncation."""
class_definition
768
1,336
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/table_question_answering.py
null
270
class TableQuestionAnsweringInput(BaseInferenceType):
    """Inputs for Table Question Answering inference"""

    inputs: TableQuestionAnsweringInputData
    """One (table, question) pair to answer"""
    parameters: Optional[TableQuestionAnsweringParameters] = None
    """Additional inference parameters for Table Question Answering"""
class_definition
1,350
1,688
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/table_question_answering.py
null
271
class TableQuestionAnsweringOutputElement(BaseInferenceType):
    """Outputs of inference for the Table Question Answering task"""

    answer: str
    """The answer of the question given the table. If there is an aggregator, the answer will be
    preceded by `AGGREGATOR >`.
    """
    cells: List[str]
    """List of strings made up of the answer cell values."""
    coordinates: List[List[int]]
    """Coordinates of the cells of the answers."""
    aggregator: Optional[str] = None
    """If the model has an aggregator, this returns the aggregator."""
class_definition
1,702
2,260
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/table_question_answering.py
null
272
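The table is encoded column-wise, as a mapping from column name to cell values. A sketch (the figures are made up):

request = TableQuestionAnsweringInput(
    inputs=TableQuestionAnsweringInputData(
        question="How many employees does each company have?",
        table={
            "Company": ["Apple", "Microsoft"],
            "Employees": ["164000", "221000"],  # illustrative values; cells are strings
        },
    )
)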
class BaseInferenceType(dict):
    """Base class for all inference types.

    Object is a dataclass and a dict for backward compatibility but plan is to remove the dict
    part in the future.

    Handle parsing from dict, list and json strings in a permissive way to ensure
    future-compatibility (e.g. all fields are made optional, and non-expected fields are added
    as dict attributes).
    """

    @classmethod
    def parse_obj_as_list(cls: Type[T], data: Union[bytes, str, List, Dict]) -> List[T]:
        """Alias to parse server response and return a list of instances.

        See `parse_obj` for more details.
        """
        output = cls.parse_obj(data)
        if not isinstance(output, list):
            raise ValueError(f"Invalid input data for {cls}. Expected a list, but got {type(output)}.")
        return output

    @classmethod
    def parse_obj_as_instance(cls: Type[T], data: Union[bytes, str, List, Dict]) -> T:
        """Alias to parse server response and return a single instance.

        See `parse_obj` for more details.
        """
        output = cls.parse_obj(data)
        if isinstance(output, list):
            raise ValueError(f"Invalid input data for {cls}. Expected a single instance, but got a list.")
        return output

    @classmethod
    def parse_obj(cls: Type[T], data: Union[bytes, str, List, Dict]) -> Union[List[T], T]:
        """Parse server response as a dataclass or list of dataclasses.

        To enable future-compatibility, we want to handle cases where the server returns more
        fields than expected. In such cases, we don't want to raise an error but still create
        the dataclass object. Remaining fields are added as dict attributes.
        """
        # Parse server response (from bytes)
        if isinstance(data, bytes):
            data = data.decode()
        if isinstance(data, str):
            data = json.loads(data)

        # If a list, parse each item individually
        if isinstance(data, List):
            return [cls.parse_obj(d) for d in data]  # type: ignore [misc]

        # At this point, we expect a dict
        if not isinstance(data, dict):
            raise ValueError(f"Invalid data type: {type(data)}")

        init_values = {}
        other_values = {}
        for key, value in data.items():
            key = normalize_key(key)
            if key in cls.__dataclass_fields__ and cls.__dataclass_fields__[key].init:
                if isinstance(value, dict) or isinstance(value, list):
                    field_type = cls.__dataclass_fields__[key].type
                    # if `field_type` is a `BaseInferenceType`, parse it
                    if inspect.isclass(field_type) and issubclass(field_type, BaseInferenceType):
                        value = field_type.parse_obj(value)
                    # otherwise, recursively parse nested dataclasses (if possible)
                    # `get_args` handles Union and Optional for us
                    else:
                        expected_types = get_args(field_type)
                        for expected_type in expected_types:
                            if getattr(expected_type, "_name", None) == "List":
                                expected_type = get_args(expected_type)[0]  # assume same type for all items in the list
                            if inspect.isclass(expected_type) and issubclass(expected_type, BaseInferenceType):
                                value = expected_type.parse_obj(value)
                                break
                init_values[key] = value
            else:
                other_values[key] = value

        # Make all missing fields default to None
        # => ensure that dataclass initialization will never fail even if the server does not
        # return all fields.
        for key in cls.__dataclass_fields__:
            if key not in init_values:
                init_values[key] = None

        # Initialize dataclass with expected values
        item = cls(**init_values)

        # Add remaining fields as dict attributes
        item.update(other_values)
        return item

    def __post_init__(self):
        self.update(asdict(self))

    def __setitem__(self, __key: Any, __value: Any) -> None:
        # Hacky way to keep dataclass values in sync when dict is updated
        super().__setitem__(__key, __value)
        if __key in self.__dataclass_fields__ and getattr(self, __key, None) != __value:
            self.__setattr__(__key, __value)
        return

    def __setattr__(self, __name: str, __value: Any) -> None:
        # Hacky way to keep dict values in sync when dataclass is updated
        super().__setattr__(__name, __value)
        if self.get(__name) != __value:
            self[__name] = __value
        return
class_definition
855
5,707
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/base.py
null
273
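The permissive contract is easiest to see end to end. A sketch against one of the output types above, assuming the generated class is decorated as a dataclass; the extra "rank" field is made up to show how unknown fields are kept:

raw = b'{"label": "positive", "score": 0.99, "rank": 1}'  # "rank" is not a declared field
out = TextClassificationOutputElement.parse_obj_as_instance(raw)

out.label        # "positive" -- regular dataclass attribute
out["rank"]      # 1 -- unknown field preserved as a dict entry
out.score = 0.5  # __setattr__ mirrors the change into the dict view
out["score"]     # 0.5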
class AudioToAudioInput(BaseInferenceType):
    """Inputs for Audio to Audio inference"""

    inputs: Any
    """The input audio data"""
class_definition
402
539
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/audio_to_audio.py
null
274
class AudioToAudioOutputElement(BaseInferenceType):
    """Outputs of inference for the Audio To Audio task

    A generated audio file with its label.
    """

    blob: Any
    """The generated audio file."""
    content_type: str
    """The content type of the audio file."""
    label: str
    """The label of the audio file."""
class_definition
553
880
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/audio_to_audio.py
null
275
class TokenClassificationParameters(BaseInferenceType):
    """Additional inference parameters for Token Classification"""

    aggregation_strategy: Optional["TokenClassificationAggregationStrategy"] = None
    """The strategy used to fuse tokens based on model predictions"""
    ignore_labels: Optional[List[str]] = None
    """A list of labels to ignore"""
    stride: Optional[int] = None
    """The number of overlapping tokens between chunks when splitting the input text."""
class_definition
518
1,000
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/token_classification.py
null
276
class TokenClassificationInput(BaseInferenceType):
    """Inputs for Token Classification inference"""

    inputs: str
    """The input text data"""
    parameters: Optional[TokenClassificationParameters] = None
    """Additional inference parameters for Token Classification"""
class_definition
1,014
1,293
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/token_classification.py
null
277
class TokenClassificationOutputElement(BaseInferenceType):
    """Outputs of inference for the Token Classification task"""

    end: int
    """The character position in the input where this group ends."""
    score: float
    """The associated score / probability"""
    start: int
    """The character position in the input where this group begins."""
    word: str
    """The corresponding text"""
    entity: Optional[str] = None
    """The predicted label for a single token"""
    entity_group: Optional[str] = None
    """The predicted label for a group of one or more tokens"""
class_definition
1,307
1,893
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference/_generated/types/token_classification.py
null
278
class UserCommands(BaseHuggingfaceCLICommand):
    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        login_parser = parser.add_parser("login", help="Log in using a token from huggingface.co/settings/tokens")
        login_parser.add_argument(
            "--token",
            type=str,
            help="Token generated from https://huggingface.co/settings/tokens",
        )
        login_parser.add_argument(
            "--add-to-git-credential",
            action="store_true",
            help="Optional: Save token to git credential helper.",
        )
        login_parser.set_defaults(func=lambda args: LoginCommand(args))
        whoami_parser = parser.add_parser("whoami", help="Find out which huggingface.co account you are logged in as.")
        whoami_parser.set_defaults(func=lambda args: WhoamiCommand(args))
        logout_parser = parser.add_parser("logout", help="Log out")
        logout_parser.add_argument(
            "--token-name",
            type=str,
            help="Optional: Name of the access token to log out from.",
        )
        logout_parser.set_defaults(func=lambda args: LogoutCommand(args))

        auth_parser = parser.add_parser("auth", help="Other authentication related commands")
        auth_subparsers = auth_parser.add_subparsers(help="Authentication subcommands")
        auth_switch_parser = auth_subparsers.add_parser("switch", help="Switch between access tokens")
        auth_switch_parser.add_argument(
            "--token-name",
            type=str,
            help="Optional: Name of the access token to switch to.",
        )
        auth_switch_parser.add_argument(
            "--add-to-git-credential",
            action="store_true",
            help="Optional: Save token to git credential helper.",
        )
        auth_switch_parser.set_defaults(func=lambda args: AuthSwitchCommand(args))
        auth_list_parser = auth_subparsers.add_parser("list", help="List all stored access tokens")
        auth_list_parser.set_defaults(func=lambda args: AuthListCommand(args))

        # new system: git-based repo system
        repo_parser = parser.add_parser("repo", help="{create} Commands to interact with your huggingface.co repos.")
        repo_subparsers = repo_parser.add_subparsers(help="huggingface.co repos related commands")
        repo_create_parser = repo_subparsers.add_parser("create", help="Create a new repo on huggingface.co")
        repo_create_parser.add_argument(
            "name",
            type=str,
            help="Name for your repo. Will be namespaced under your username to build the repo id.",
        )
        repo_create_parser.add_argument(
            "--type",
            type=str,
            help='Optional: repo_type: set to "dataset" or "space" if creating a dataset or space, default is model.',
        )
        repo_create_parser.add_argument("--organization", type=str, help="Optional: organization namespace.")
        repo_create_parser.add_argument(
            "--space_sdk",
            type=str,
            help='Optional: Hugging Face Spaces SDK type. Required when --type is set to "space".',
            choices=SPACES_SDK_TYPES,
        )
        repo_create_parser.add_argument(
            "-y",
            "--yes",
            action="store_true",
            help="Optional: answer Yes to the prompt",
        )
        repo_create_parser.set_defaults(func=lambda args: RepoCreateCommand(args))
class_definition
2,238
5,700
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
279
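The command classes above only register parsers and a `func` default; the dispatch itself lives in the CLI entry point. A simplified, self-contained sketch of that dispatch loop, with the structure assumed from the `register_subcommand`/`set_defaults` pattern shown above:

    # Minimal sketch of how register_subcommand() classes are dispatched.
    from argparse import ArgumentParser

    def main():
        parser = ArgumentParser(prog="huggingface-cli")
        commands_parser = parser.add_subparsers(help="huggingface-cli command helpers")
        # Each command class registers its own subparser and a `func` default, e.g.:
        # UserCommands.register_subcommand(commands_parser)
        args = parser.parse_args()
        if not hasattr(args, "func"):
            parser.print_help()
            exit(1)
        service = args.func(args)  # e.g. lambda args: LoginCommand(args)
        service.run()

    if __name__ == "__main__":
        main()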
class BaseUserCommand: def __init__(self, args): self.args = args self._api = HfApi()
class_definition
5,703
5,808
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
280
class LoginCommand(BaseUserCommand): def run(self): logging.set_verbosity_info() login( token=self.args.token, add_to_git_credential=self.args.add_to_git_credential, )
class_definition
5,811
6,030
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
281
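LoginCommand is a thin wrapper around the public login() helper, so the programmatic equivalent is a one-liner (the token below is a placeholder):

    # Programmatic equivalent of `huggingface-cli login`.
    from huggingface_hub import login

    # With no token argument, login() prompts interactively instead.
    login(token="hf_xxx", add_to_git_credential=True)  # placeholder token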
class LogoutCommand(BaseUserCommand): def run(self): logging.set_verbosity_info() logout(token_name=self.args.token_name)
class_definition
6,033
6,174
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
282
class AuthSwitchCommand(BaseUserCommand): def run(self): logging.set_verbosity_info() token_name = self.args.token_name if token_name is None: token_name = self._select_token_name() if token_name is None: print("No token name provided. Aborting.") exit() auth_switch(token_name, add_to_git_credential=self.args.add_to_git_credential) def _select_token_name(self) -> Optional[str]: token_names = list(get_stored_tokens().keys()) if not token_names: logger.error("No stored tokens found. Please login first.") return None if _inquirer_py_available: return self._select_token_name_tui(token_names) # if inquirer is not available, use a simpler terminal UI print("Available stored tokens:") for i, token_name in enumerate(token_names, 1): print(f"{i}. {token_name}") while True: try: choice = input("Enter the number of the token to switch to (or 'q' to quit): ") if choice.lower() == "q": return None index = int(choice) - 1 if 0 <= index < len(token_names): return token_names[index] else: print("Invalid selection. Please try again.") except ValueError: print("Invalid input. Please enter a number or 'q' to quit.") def _select_token_name_tui(self, token_names: List[str]) -> Optional[str]: choices = [Choice(token_name, name=token_name) for token_name in token_names] try: return inquirer.select( message="Select a token to switch to:", choices=choices, default=None, ).execute() except KeyboardInterrupt: logger.info("Token selection cancelled.") return None
class_definition
6,177
8,133
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
283
class AuthListCommand(BaseUserCommand): def run(self): logging.set_verbosity_info() auth_list()
class_definition
8,136
8,251
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
284
class WhoamiCommand(BaseUserCommand): def run(self): token = get_token() if token is None: print("Not logged in") exit() try: info = self._api.whoami(token) print(info["name"]) orgs = [org["name"] for org in info["orgs"]] if orgs: print(ANSI.bold("orgs: "), ",".join(orgs)) if ENDPOINT != "https://huggingface.co": print(f"Authenticated through private endpoint: {ENDPOINT}") except HTTPError as e: print(e) print(ANSI.red(e.response.text)) exit(1)
class_definition
8,254
8,891
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
285
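WhoamiCommand boils down to a single HfApi.whoami() call; a minimal standalone version of the same check:

    # Minimal sketch mirroring WhoamiCommand.run().
    from huggingface_hub import HfApi, get_token

    token = get_token()
    if token is None:
        print("Not logged in")
    else:
        info = HfApi().whoami(token)
        print(info["name"])
        orgs = [org["name"] for org in info["orgs"]]
        if orgs:
            print("orgs:", ",".join(orgs))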
class RepoCreateCommand(BaseUserCommand): def run(self): token = get_token() if token is None: print("Not logged in") exit(1) try: stdout = subprocess.check_output(["git", "--version"]).decode("utf-8") print(ANSI.gray(stdout.strip())) except FileNotFoundError: print("Looks like you do not have git installed, please install.") try: stdout = subprocess.check_output(["git-lfs", "--version"]).decode("utf-8") print(ANSI.gray(stdout.strip())) except FileNotFoundError: print( ANSI.red( "Looks like you do not have git-lfs installed, please install." " You can install from https://git-lfs.github.com/." " Then run `git lfs install` (you only have to do this once)." ) ) print("") user = self._api.whoami(token)["name"] namespace = self.args.organization if self.args.organization is not None else user repo_id = f"{namespace}/{self.args.name}" if self.args.type not in REPO_TYPES: print("Invalid repo --type") exit(1) if self.args.type in REPO_TYPES_URL_PREFIXES: prefixed_repo_id = REPO_TYPES_URL_PREFIXES[self.args.type] + repo_id else: prefixed_repo_id = repo_id print(f"You are about to create {ANSI.bold(prefixed_repo_id)}") if not self.args.yes: choice = input("Proceed? [Y/n] ").lower() if not (choice == "" or choice == "y" or choice == "yes"): print("Abort") exit() try: url = self._api.create_repo( repo_id=repo_id, token=token, repo_type=self.args.type, space_sdk=self.args.space_sdk, ) except HTTPError as e: print(e) print(ANSI.red(e.response.text)) exit(1) print("\nYour repo now lives at:") print(f" {ANSI.bold(url)}") print("\nYou can clone it locally with the command below, and commit/push as usual.") print(f"\n git clone {url}") print("")
class_definition
8,894
11,167
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/user.py
null
286
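Stripped of the git/git-lfs checks and the confirmation prompt, RepoCreateCommand reduces to one HfApi.create_repo() call (the repo id below is a placeholder):

    # Minimal sketch of the non-interactive core of RepoCreateCommand.
    from huggingface_hub import HfApi

    api = HfApi()
    url = api.create_repo(
        repo_id="my-username/my-cool-model",  # placeholder
        repo_type="model",  # or "dataset" / "space" (pass space_sdk for spaces)
    )
    print(f"Your repo now lives at: {url}")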
class VersionCommand(BaseHuggingfaceCLICommand): def __init__(self, args): self.args = args @staticmethod def register_subcommand(parser: _SubParsersAction): version_parser = parser.add_parser("version", help="Print information about the huggingface-cli version.") version_parser.set_defaults(func=VersionCommand) def run(self) -> None: print(f"huggingface_hub version: {__version__}")
class_definition
830
1,265
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/version.py
null
287
class EnvironmentCommand(BaseHuggingfaceCLICommand): def __init__(self, args): self.args = args @staticmethod def register_subcommand(parser: _SubParsersAction): env_parser = parser.add_parser("env", help="Print information about the environment.") env_parser.set_defaults(func=EnvironmentCommand) def run(self) -> None: dump_environment_info()
class_definition
831
1,225
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/env.py
null
288
class UploadCommand(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): upload_parser = parser.add_parser("upload", help="Upload a file or a folder to a repo on the Hub") upload_parser.add_argument( "repo_id", type=str, help="The ID of the repo to upload to (e.g. `username/repo-name`)." ) upload_parser.add_argument( "local_path", nargs="?", help="Local path to the file or folder to upload. Defaults to current directory." ) upload_parser.add_argument( "path_in_repo", nargs="?", help="Path of the file or folder in the repo. Defaults to the relative path of the file or folder.", ) upload_parser.add_argument( "--repo-type", choices=["model", "dataset", "space"], default="model", help="Type of the repo to upload to (e.g. `dataset`).", ) upload_parser.add_argument( "--revision", type=str, help=( "An optional Git revision to push to. It can be a branch name or a PR reference. If revision does not" " exist and `--create-pr` is not set, a branch will be automatically created." ), ) upload_parser.add_argument( "--private", action="store_true", help=( "Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already" " exists." ), ) upload_parser.add_argument("--include", nargs="*", type=str, help="Glob patterns to match files to upload.") upload_parser.add_argument( "--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to upload." ) upload_parser.add_argument( "--delete", nargs="*", type=str, help="Glob patterns for files to be deleted from the repo while committing.", ) upload_parser.add_argument( "--commit-message", type=str, help="The summary / title / first line of the generated commit." ) upload_parser.add_argument("--commit-description", type=str, help="The description of the generated commit.") upload_parser.add_argument( "--create-pr", action="store_true", help="Whether to upload content as a new Pull Request." ) upload_parser.add_argument( "--every", type=float, help="If set, a background job is scheduled to create commits every `every` minutes.", ) upload_parser.add_argument( "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens" ) upload_parser.add_argument( "--quiet", action="store_true", help="If True, progress bars are disabled and only the path to the uploaded files is printed.", ) upload_parser.set_defaults(func=UploadCommand) def __init__(self, args: Namespace) -> None: self.repo_id: str = args.repo_id self.repo_type: Optional[str] = args.repo_type self.revision: Optional[str] = args.revision self.private: bool = args.private self.include: Optional[List[str]] = args.include self.exclude: Optional[List[str]] = args.exclude self.delete: Optional[List[str]] = args.delete self.commit_message: Optional[str] = args.commit_message self.commit_description: Optional[str] = args.commit_description self.create_pr: bool = args.create_pr self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli") self.quiet: bool = args.quiet # disable warnings and progress bars # Check `--every` is valid if args.every is not None and args.every <= 0: raise ValueError(f"`every` must be a positive value (got '{args.every}')") self.every: Optional[float] = args.every # Resolve `local_path` and `path_in_repo` repo_name: str = args.repo_id.split("/")[-1] # e.g. "Wauplin/my-cool-model" => "my-cool-model" self.local_path: str self.path_in_repo: str if args.local_path is None and os.path.isfile(repo_name): # Implicit case 1: user provided only a repo_id which happens to be a local file as well => upload it with same name self.local_path = repo_name self.path_in_repo = repo_name elif args.local_path is None and os.path.isdir(repo_name): # Implicit case 2: user provided only a repo_id which happens to be a local folder as well => upload it at root self.local_path = repo_name self.path_in_repo = "." elif args.local_path is None: # Implicit case 3: user provided only a repo_id that does not match a local file or folder # => the user must explicitly provide a local_path => raise exception raise ValueError(f"'{repo_name}' is not a local file or folder. Please set `local_path` explicitly.") elif args.path_in_repo is None and os.path.isfile(args.local_path): # Explicit local path to file, no path in repo => upload it at root with same name self.local_path = args.local_path self.path_in_repo = os.path.basename(args.local_path) elif args.path_in_repo is None: # Explicit local path to folder, no path in repo => upload at root self.local_path = args.local_path self.path_in_repo = "." else: # Finally, if both paths are explicit self.local_path = args.local_path self.path_in_repo = args.path_in_repo def run(self) -> None: if self.quiet: disable_progress_bars() with warnings.catch_warnings(): warnings.simplefilter("ignore") print(self._upload()) enable_progress_bars() else: logging.set_verbosity_info() print(self._upload()) logging.set_verbosity_warning() def _upload(self) -> str: if os.path.isfile(self.local_path): if self.include is not None and len(self.include) > 0: warnings.warn("Ignoring `--include` since a single file is uploaded.") if self.exclude is not None and len(self.exclude) > 0: warnings.warn("Ignoring `--exclude` since a single file is uploaded.") if self.delete is not None and len(self.delete) > 0: warnings.warn("Ignoring `--delete` since a single file is uploaded.") if not HF_HUB_ENABLE_HF_TRANSFER: logger.info( "Consider using `hf_transfer` for faster uploads. This solution comes with some limitations. See" " https://huggingface.co/docs/huggingface_hub/hf_transfer for more details." ) # Schedule commits if `every` is set if self.every is not None: if os.path.isfile(self.local_path): # If file => watch entire folder + use allow_patterns folder_path = os.path.dirname(self.local_path) path_in_repo = ( self.path_in_repo[: -len(self.local_path)] # remove filename from path_in_repo if self.path_in_repo.endswith(self.local_path) else self.path_in_repo ) allow_patterns = [self.local_path] ignore_patterns = [] else: folder_path = self.local_path path_in_repo = self.path_in_repo allow_patterns = self.include or [] ignore_patterns = self.exclude or [] if self.delete is not None and len(self.delete) > 0: warnings.warn("Ignoring `--delete` when uploading with scheduled commits.") scheduler = CommitScheduler( folder_path=folder_path, repo_id=self.repo_id, repo_type=self.repo_type, revision=self.revision, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, path_in_repo=path_in_repo, private=self.private, every=self.every, hf_api=self.api, ) print(f"Scheduling commits every {self.every} minutes to {scheduler.repo_id}.") try: # Block main thread until KeyboardInterrupt while True: time.sleep(100) except KeyboardInterrupt: scheduler.stop() return "Stopped scheduled commits." # Otherwise, create repo and proceed with the upload if not os.path.isfile(self.local_path) and not os.path.isdir(self.local_path): raise FileNotFoundError(f"No such file or directory: '{self.local_path}'.") repo_id = self.api.create_repo( repo_id=self.repo_id, repo_type=self.repo_type, exist_ok=True, private=self.private, space_sdk="gradio" if self.repo_type == "space" else None, # ^ We don't want it to fail when uploading to a Space => let's set Gradio by default. # ^ I'd rather not add CLI args to set it explicitly as we already have `huggingface-cli repo create` for that. ).repo_id # Check if branch already exists and if not, create it if self.revision is not None and not self.create_pr: try: self.api.repo_info(repo_id=repo_id, repo_type=self.repo_type, revision=self.revision) except RevisionNotFoundError: logger.info(f"Branch '{self.revision}' not found. Creating it...") self.api.create_branch(repo_id=repo_id, repo_type=self.repo_type, branch=self.revision, exist_ok=True) # ^ `exist_ok=True` to avoid race concurrency issues # File-based upload if os.path.isfile(self.local_path): return self.api.upload_file( path_or_fileobj=self.local_path, path_in_repo=self.path_in_repo, repo_id=repo_id, repo_type=self.repo_type, revision=self.revision, commit_message=self.commit_message, commit_description=self.commit_description, create_pr=self.create_pr, ) # Folder-based upload else: return self.api.upload_folder( folder_path=self.local_path, path_in_repo=self.path_in_repo, repo_id=repo_id, repo_type=self.repo_type, revision=self.revision, commit_message=self.commit_message, commit_description=self.commit_description, create_pr=self.create_pr, allow_patterns=self.include, ignore_patterns=self.exclude, delete_patterns=self.delete, )
class_definition
2,463
13,655
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/upload.py
null
289
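Outside the CLI, the two branches of _upload() map directly onto the public upload_file()/upload_folder() API; a minimal sketch with placeholder paths and repo id:

    # Minimal sketch of the two code paths UploadCommand._upload() chooses between.
    import os
    from huggingface_hub import HfApi

    api = HfApi()
    local_path = "model.safetensors"  # placeholder path

    if os.path.isfile(local_path):
        url = api.upload_file(
            path_or_fileobj=local_path,
            path_in_repo=os.path.basename(local_path),
            repo_id="my-username/my-cool-model",  # placeholder
        )
    else:
        url = api.upload_folder(
            folder_path=local_path,
            path_in_repo=".",
            repo_id="my-username/my-cool-model",  # placeholder
            allow_patterns=["*.safetensors"],  # mirrors --include
            ignore_patterns=["*.bin"],         # mirrors --exclude
        )
    print(url)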
class DeleteCacheCommand(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): delete_cache_parser = parser.add_parser("delete-cache", help="Delete revisions from the cache directory.") delete_cache_parser.add_argument( "--dir", type=str, default=None, help="cache directory (optional). Default to the default HuggingFace cache.", ) delete_cache_parser.add_argument( "--disable-tui", action="store_true", help=( "Disable Terminal User Interface (TUI) mode. Useful if your" " platform/terminal doesn't support the multiselect menu." ), ) delete_cache_parser.set_defaults(func=DeleteCacheCommand) def __init__(self, args: Namespace) -> None: self.cache_dir: Optional[str] = args.dir self.disable_tui: bool = args.disable_tui def run(self): """Run `delete-cache` command with or without TUI.""" # Scan cache directory hf_cache_info = scan_cache_dir(self.cache_dir) # Manual review from the user if self.disable_tui: selected_hashes = _manual_review_no_tui(hf_cache_info, preselected=[]) else: selected_hashes = _manual_review_tui(hf_cache_info, preselected=[]) # If deletion is not cancelled if len(selected_hashes) > 0 and _CANCEL_DELETION_STR not in selected_hashes: confirm_message = _get_expectations_str(hf_cache_info, selected_hashes) + " Confirm deletion ?" # Confirm deletion if self.disable_tui: confirmed = _ask_for_confirmation_no_tui(confirm_message) else: confirmed = _ask_for_confirmation_tui(confirm_message) # Deletion is confirmed if confirmed: strategy = hf_cache_info.delete_revisions(*selected_hashes) print("Start deletion.") strategy.execute() print( f"Done. Deleted {len(strategy.repos)} repo(s) and" f" {len(strategy.snapshots)} revision(s) for a total of" f" {strategy.expected_freed_size_str}." ) return # Deletion is cancelled print("Deletion is cancelled. Do nothing.")
class_definition
4,066
6,482
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/delete_cache.py
null
290
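The non-interactive core of delete-cache is scan_cache_dir() plus HFCacheInfo.delete_revisions(); a minimal sketch with a placeholder revision hash:

    # Minimal sketch of the deletion flow without the TUI.
    from huggingface_hub import scan_cache_dir

    cache_info = scan_cache_dir()
    strategy = cache_info.delete_revisions("abcdef1234567890")  # placeholder hash
    print(f"Will free {strategy.expected_freed_size_str}.")
    strategy.execute()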
class ANSI: """ Helper for en.wikipedia.org/wiki/ANSI_escape_code """ _bold = "\u001b[1m" _gray = "\u001b[90m" _red = "\u001b[31m" _reset = "\u001b[0m" _yellow = "\u001b[33m" @classmethod def bold(cls, s: str) -> str: return cls._format(s, cls._bold) @classmethod def gray(cls, s: str) -> str: return cls._format(s, cls._gray) @classmethod def red(cls, s: str) -> str: return cls._format(s, cls._bold + cls._red) @classmethod def yellow(cls, s: str) -> str: return cls._format(s, cls._yellow) @classmethod def _format(cls, s: str, code: str) -> str: if os.environ.get("NO_COLOR"): # See https://no-color.org/ return s return f"{code}{s}{cls._reset}"
class_definition
700
1,499
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/_cli_utils.py
null
291
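Usage of the helper is direct; note the NO_COLOR escape hatch handled in _format():

    # Minimal usage sketch for the ANSI helper above.
    import os
    from huggingface_hub.commands._cli_utils import ANSI

    print(ANSI.bold("repo-id"), ANSI.gray("(cached)"), ANSI.red("error"))
    os.environ["NO_COLOR"] = "1"  # see https://no-color.org/
    print(ANSI.yellow("printed without escape codes"))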
class ScanCacheCommand(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): scan_cache_parser = parser.add_parser("scan-cache", help="Scan cache directory.") scan_cache_parser.add_argument( "--dir", type=str, default=None, help="cache directory to scan (optional). Default to the default HuggingFace cache.", ) scan_cache_parser.add_argument( "-v", "--verbose", action="count", default=0, help="show a more verbose output", ) scan_cache_parser.set_defaults(func=ScanCacheCommand) def __init__(self, args: Namespace) -> None: self.verbosity: int = args.verbose self.cache_dir: Optional[str] = args.dir def run(self): try: t0 = time.time() hf_cache_info = scan_cache_dir(self.cache_dir) t1 = time.time() except CacheNotFound as exc: cache_dir = exc.cache_dir print(f"Cache directory not found: {cache_dir}") return self._print_hf_cache_info_as_table(hf_cache_info) print( f"\nDone in {round(t1-t0,1)}s. Scanned {len(hf_cache_info.repos)} repo(s)" f" for a total of {ANSI.red(hf_cache_info.size_on_disk_str)}." ) if len(hf_cache_info.warnings) > 0: message = f"Got {len(hf_cache_info.warnings)} warning(s) while scanning." if self.verbosity >= 3: print(ANSI.gray(message)) for warning in hf_cache_info.warnings: print(ANSI.gray(warning)) else: print(ANSI.gray(message + " Use -vvv to print details.")) def _print_hf_cache_info_as_table(self, hf_cache_info: HFCacheInfo) -> None: print(get_table(hf_cache_info, verbosity=self.verbosity))
class_definition
1,077
3,005
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/scan_cache.py
null
292
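The same report is available programmatically via scan_cache_dir(); the attribute names below follow the HFCacheInfo/CachedRepoInfo types referenced in the record:

    # Minimal sketch mirroring ScanCacheCommand.run() without the table rendering.
    from huggingface_hub import scan_cache_dir

    info = scan_cache_dir()
    print(f"Scanned {len(info.repos)} repo(s) for a total of {info.size_on_disk_str}.")
    for repo in info.repos:
        print(repo.repo_id, repo.repo_type, repo.size_on_disk_str)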
class DownloadCommand(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): download_parser = parser.add_parser("download", help="Download files from the Hub") download_parser.add_argument( "repo_id", type=str, help="ID of the repo to download from (e.g. `username/repo-name`)." ) download_parser.add_argument( "filenames", type=str, nargs="*", help="Files to download (e.g. `config.json`, `data/metadata.jsonl`)." ) download_parser.add_argument( "--repo-type", choices=["model", "dataset", "space"], default="model", help="Type of repo to download from (defaults to 'model').", ) download_parser.add_argument( "--revision", type=str, help="An optional Git revision id which can be a branch name, a tag, or a commit hash.", ) download_parser.add_argument( "--include", nargs="*", type=str, help="Glob patterns to match files to download." ) download_parser.add_argument( "--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to download." ) download_parser.add_argument( "--cache-dir", type=str, help="Path to the directory where to save the downloaded files." ) download_parser.add_argument( "--local-dir", type=str, help=( "If set, the downloaded file will be placed under this directory. Check out" " https://huggingface.co/docs/huggingface_hub/guides/download#download-files-to-local-folder for more" " details." ), ) download_parser.add_argument( "--local-dir-use-symlinks", choices=["auto", "True", "False"], help=("Deprecated and ignored. Downloading to a local directory does not use symlinks anymore."), ) download_parser.add_argument( "--force-download", action="store_true", help="If True, the files will be downloaded even if they are already cached.", ) download_parser.add_argument( "--resume-download", action="store_true", help="Deprecated and ignored. Downloading a file to local dir always attempts to resume previously interrupted downloads (unless hf-transfer is enabled).", ) download_parser.add_argument( "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens" ) download_parser.add_argument( "--quiet", action="store_true", help="If True, progress bars are disabled and only the path to the downloaded files is printed.", ) download_parser.add_argument( "--max-workers", type=int, default=8, help="Maximum number of workers to use for downloading files. Default is 8.", ) download_parser.set_defaults(func=DownloadCommand) def __init__(self, args: Namespace) -> None: self.token = args.token self.repo_id: str = args.repo_id self.filenames: List[str] = args.filenames self.repo_type: str = args.repo_type self.revision: Optional[str] = args.revision self.include: Optional[List[str]] = args.include self.exclude: Optional[List[str]] = args.exclude self.cache_dir: Optional[str] = args.cache_dir self.local_dir: Optional[str] = args.local_dir self.force_download: bool = args.force_download self.resume_download: Optional[bool] = args.resume_download or None self.quiet: bool = args.quiet self.max_workers: int = args.max_workers if args.local_dir_use_symlinks is not None: warnings.warn( "Ignoring --local-dir-use-symlinks. Downloading to a local directory does not use symlinks anymore.", FutureWarning, ) def run(self) -> None: if self.quiet: disable_progress_bars() with warnings.catch_warnings(): warnings.simplefilter("ignore") print(self._download()) # Print path to downloaded files enable_progress_bars() else: logging.set_verbosity_info() print(self._download()) # Print path to downloaded files logging.set_verbosity_warning() def _download(self) -> str: # Warn user if patterns are ignored if len(self.filenames) > 0: if self.include is not None and len(self.include) > 0: warnings.warn("Ignoring `--include` since filenames have been explicitly set.") if self.exclude is not None and len(self.exclude) > 0: warnings.warn("Ignoring `--exclude` since filenames have been explicitly set.") # Single file to download: use `hf_hub_download` if len(self.filenames) == 1: return hf_hub_download( repo_id=self.repo_id, repo_type=self.repo_type, revision=self.revision, filename=self.filenames[0], cache_dir=self.cache_dir, resume_download=self.resume_download, force_download=self.force_download, token=self.token, local_dir=self.local_dir, library_name="huggingface-cli", ) # Otherwise: use `snapshot_download` to ensure all files come from the same revision elif len(self.filenames) == 0: allow_patterns = self.include ignore_patterns = self.exclude else: allow_patterns = self.filenames ignore_patterns = None return snapshot_download( repo_id=self.repo_id, repo_type=self.repo_type, revision=self.revision, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, resume_download=self.resume_download, force_download=self.force_download, cache_dir=self.cache_dir, token=self.token, local_dir=self.local_dir, library_name="huggingface-cli", max_workers=self.max_workers, )
class_definition
1,763
8,182
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/download.py
null
293
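The command's branching reduces to hf_hub_download() for a single file and snapshot_download() otherwise; a minimal sketch against a public repo:

    # Minimal sketch of DownloadCommand._download()'s two branches.
    from huggingface_hub import hf_hub_download, snapshot_download

    # Exactly one filename => hf_hub_download
    path = hf_hub_download(repo_id="gpt2", filename="config.json")
    print(path)

    # No filenames => snapshot_download, optionally filtered with glob patterns
    folder = snapshot_download(repo_id="gpt2", allow_patterns=["*.json"])
    print(folder)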
class BaseHuggingfaceCLICommand(ABC): @staticmethod @abstractmethod def register_subcommand(parser: _SubParsersAction): raise NotImplementedError() @abstractmethod def run(self): raise NotImplementedError()
class_definition
684
927
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/__init__.py
null
294
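A new subcommand plugs into the CLI by implementing the two abstract members of this ABC; a hypothetical example (the "ping" command does not exist in the library):

    # Minimal sketch of a hypothetical subcommand built on BaseHuggingfaceCLICommand.
    from argparse import Namespace, _SubParsersAction
    from huggingface_hub.commands import BaseHuggingfaceCLICommand

    class PingCommand(BaseHuggingfaceCLICommand):  # hypothetical example command
        @staticmethod
        def register_subcommand(parser: _SubParsersAction):
            ping_parser = parser.add_parser("ping", help="Print pong.")
            ping_parser.set_defaults(func=PingCommand)

        def __init__(self, args: Namespace) -> None:
            self.args = args

        def run(self) -> None:
            print("pong")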
class LfsCommands(BaseHuggingfaceCLICommand): """ Implementation of a custom transfer agent for the transfer type "multipart" for git-lfs. This lets users upload large files >5GB 🔥. Spec for LFS custom transfer agent is: https://github.com/git-lfs/git-lfs/blob/master/docs/custom-transfers.md This introduces two commands to the CLI: 1. $ huggingface-cli lfs-enable-largefiles This should be executed once for each model repo that contains a model file >5GB. It's documented in the error message you get if you just try to git push a 5GB file without having enabled it before. 2. $ huggingface-cli lfs-multipart-upload This command is called by lfs directly and is not meant to be called by the user. """ @staticmethod def register_subcommand(parser: _SubParsersAction): enable_parser = parser.add_parser( "lfs-enable-largefiles", help="Configure your repository to enable upload of files > 5GB." ) enable_parser.add_argument("path", type=str, help="Local path to repository you want to configure.") enable_parser.set_defaults(func=lambda args: LfsEnableCommand(args)) # Command will get called by git-lfs, do not call it directly. upload_parser = parser.add_parser(LFS_MULTIPART_UPLOAD_COMMAND, add_help=False) upload_parser.set_defaults(func=lambda args: LfsUploadCommand(args))
class_definition
941
2,363
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/lfs.py
null
295
class LfsEnableCommand: def __init__(self, args): self.args = args def run(self): local_path = os.path.abspath(self.args.path) if not os.path.isdir(local_path): print("This does not look like a valid git repo.") exit(1) subprocess.run( "git config lfs.customtransfer.multipart.path huggingface-cli".split(), check=True, cwd=local_path, ) subprocess.run( f"git config lfs.customtransfer.multipart.args {LFS_MULTIPART_UPLOAD_COMMAND}".split(), check=True, cwd=local_path, ) print("Local repo set up for largefiles")
class_definition
2,366
3,048
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/lfs.py
null
296
class LfsUploadCommand: def __init__(self, args) -> None: self.args = args def run(self) -> None: # Immediately after invoking a custom transfer process, git-lfs # sends initiation data to the process over stdin. # This tells the process useful information about the configuration. init_msg = json.loads(sys.stdin.readline().strip()) if not (init_msg.get("event") == "init" and init_msg.get("operation") == "upload"): write_msg({"error": {"code": 32, "message": "Wrong lfs init operation"}}) sys.exit(1) # The transfer process should use the information it needs from the # initiation structure, and also perform any one-off setup tasks it # needs to do. It should then respond on stdout with a simple empty # confirmation structure, as follows: write_msg({}) # After the initiation exchange, git-lfs will send any number of # transfer requests to the stdin of the transfer process, in a serial sequence. while True: msg = read_msg() if msg is None: # When all transfers have been processed, git-lfs will send # a terminate event to the stdin of the transfer process. # On receiving this message the transfer process should # clean up and terminate. No response is expected. sys.exit(0) oid = msg["oid"] filepath = msg["path"] completion_url = msg["action"]["href"] header = msg["action"]["header"] chunk_size = int(header.pop("chunk_size")) presigned_urls: List[str] = list(header.values()) # Send a "started" progress event to allow other workers to start. # Otherwise they're delayed until first "progress" event is reported, # i.e. after the first 5GB by default (!) write_msg( { "event": "progress", "oid": oid, "bytesSoFar": 1, "bytesSinceLast": 0, } ) parts = [] with open(filepath, "rb") as file: for i, presigned_url in enumerate(presigned_urls): with SliceFileObj( file, seek_from=i * chunk_size, read_limit=chunk_size, ) as data: r = get_session().put(presigned_url, data=data) hf_raise_for_status(r) parts.append( { "etag": r.headers.get("etag"), "partNumber": i + 1, } ) # In order to support progress reporting while data is uploading / downloading, # the transfer process should post messages to stdout write_msg( { "event": "progress", "oid": oid, "bytesSoFar": (i + 1) * chunk_size, "bytesSinceLast": chunk_size, } ) # Not precise but that's ok. r = get_session().post( completion_url, json={ "oid": oid, "parts": parts, }, ) hf_raise_for_status(r) write_msg({"event": "complete", "oid": oid})
class_definition
3,623
7,341
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/lfs.py
null
297
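The record above references write_msg()/read_msg() helpers defined elsewhere in lfs.py; they are thin JSON-lines wrappers over stdio, as required by the git-lfs custom-transfer protocol. A plausible sketch, with the details assumed from that spec rather than taken from this record:

    # Plausible sketch of the stdio helpers used above (assumed from the
    # git-lfs custom-transfer spec; the real helpers live elsewhere in lfs.py).
    import json
    import sys
    from typing import Dict, Optional

    def write_msg(msg: Dict) -> None:
        """Write one JSON-lines message to stdout for git-lfs."""
        sys.stdout.write(json.dumps(msg) + "\n")
        sys.stdout.flush()

    def read_msg() -> Optional[Dict]:
        """Read one message from stdin; return None on the terminate event."""
        msg = json.loads(sys.stdin.readline().strip())
        if "terminate" in (msg.get("type"), msg.get("event")):
            return None
        return msg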
class UploadLargeFolderCommand(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): subparser = parser.add_parser("upload-large-folder", help="Upload a large folder to a repo on the Hub") subparser.add_argument( "repo_id", type=str, help="The ID of the repo to upload to (e.g. `username/repo-name`)." ) subparser.add_argument("local_path", type=str, help="Local path to the file or folder to upload.") subparser.add_argument( "--repo-type", choices=["model", "dataset", "space"], help="Type of the repo to upload to (e.g. `dataset`).", ) subparser.add_argument( "--revision", type=str, help=("An optional Git revision to push to. It can be a branch name or a PR reference."), ) subparser.add_argument( "--private", action="store_true", help=( "Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists." ), ) subparser.add_argument("--include", nargs="*", type=str, help="Glob patterns to match files to upload.") subparser.add_argument("--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to upload.") subparser.add_argument( "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens" ) subparser.add_argument( "--num-workers", type=int, help="Number of workers to use to hash, upload and commit files." ) subparser.add_argument("--no-report", action="store_true", help="Whether to disable regular status report.") subparser.add_argument("--no-bars", action="store_true", help="Whether to disable progress bars.") subparser.set_defaults(func=UploadLargeFolderCommand) def __init__(self, args: Namespace) -> None: self.repo_id: str = args.repo_id self.local_path: str = args.local_path self.repo_type: str = args.repo_type self.revision: Optional[str] = args.revision self.private: bool = args.private self.include: Optional[List[str]] = args.include self.exclude: Optional[List[str]] = args.exclude self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli") self.num_workers: Optional[int] = args.num_workers self.no_report: bool = args.no_report self.no_bars: bool = args.no_bars if not os.path.isdir(self.local_path): raise ValueError("Large upload is only supported for folders.") def run(self) -> None: logging.set_verbosity_info() print( ANSI.yellow( "You are about to upload a large folder to the Hub using `huggingface-cli upload-large-folder`. " "This is a new feature so feedback is very welcome!\n" "\n" "A few things to keep in mind:\n" " - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations\n" " - Do not start several processes in parallel.\n" " - You can interrupt and resume the process at any time. " "The script will pick up where it left off except for partially uploaded files that would have to be entirely reuploaded.\n" " - Do not upload the same folder to several repositories. If you need to do so, you must delete the `./.cache/huggingface/` folder first.\n" "\n" f"Some temporary metadata will be stored under `{self.local_path}/.cache/huggingface`.\n" " - You must not modify those files manually.\n" " - You must not delete the `./.cache/huggingface/` folder while a process is running.\n" " - You can delete the `./.cache/huggingface/` folder to reinitialize the upload state when the process is not running. Files will have to be hashed and preuploaded again, except for already committed files.\n" "\n" "If the process output is too verbose, you can disable the progress bars with `--no-bars`. " "You can also entirely disable the status report with `--no-report`.\n" "\n" "For more details, run `huggingface-cli upload-large-folder --help` or check the documentation at " "https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-large-folder." ) ) if self.no_bars: disable_progress_bars() self.api.upload_large_folder( repo_id=self.repo_id, folder_path=self.local_path, repo_type=self.repo_type, revision=self.revision, private=self.private, allow_patterns=self.include, ignore_patterns=self.exclude, num_workers=self.num_workers, print_report=not self.no_report, )
class_definition
1,040
6,127
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/upload_large_folder.py
null
298
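Programmatically, the command wraps HfApi.upload_large_folder(); a minimal sketch with placeholder ids, passing repo_type explicitly as the record does:

    # Minimal sketch of the resumable large-folder upload.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_large_folder(
        repo_id="my-username/my-large-dataset",  # placeholder
        folder_path="./data",
        repo_type="dataset",  # "model" / "dataset" / "space"
    )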
class TagCommands(BaseHuggingfaceCLICommand): @staticmethod def register_subcommand(parser: _SubParsersAction): tag_parser = parser.add_parser("tag", help="(create, list, delete) tags for a repo in the hub") tag_parser.add_argument("repo_id", type=str, help="The ID of the repo to tag (e.g. `username/repo-name`).") tag_parser.add_argument("tag", nargs="?", type=str, help="The name of the tag for creation or deletion.") tag_parser.add_argument("-m", "--message", type=str, help="The description of the tag to create.") tag_parser.add_argument("--revision", type=str, help="The git revision to tag.") tag_parser.add_argument( "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens." ) tag_parser.add_argument( "--repo-type", choices=["model", "dataset", "space"], default="model", help="Set the type of repository (model, dataset, or space).", ) tag_parser.add_argument("-y", "--yes", action="store_true", help="Answer Yes to prompts automatically.") tag_parser.add_argument("-l", "--list", action="store_true", help="List tags for a repository.") tag_parser.add_argument("-d", "--delete", action="store_true", help="Delete a tag for a repository.") tag_parser.set_defaults(func=lambda args: handle_commands(args))
class_definition
1,732
3,168
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/commands/tag.py
null
299
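The tag subcommands map onto HfApi.create_tag(), HfApi.list_repo_refs() and HfApi.delete_tag(); a minimal sketch with a placeholder repo id (method names from the public HfApi, not from this record):

    # Minimal sketch of the three operations `huggingface-cli tag` dispatches to.
    from huggingface_hub import HfApi

    api = HfApi()
    repo_id = "my-username/my-cool-model"  # placeholder

    api.create_tag(repo_id, tag="v1.0", tag_message="First release")
    print([ref.name for ref in api.list_repo_refs(repo_id).tags])
    api.delete_tag(repo_id, tag="v1.0")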