Dataset preview (column summary):
  text          string  lengths 38 to 361k
  type          string  1 distinct value ("class_definition")
  start         int64   156 to 155k
  end           int64   451 to 418k
  depth         int64   0 to 0
  filepath      string  lengths 87 to 141
  parent_class  null
  class_index   int64   0 to 305
class EvalResult: """ Flattened representation of individual evaluation results found in model-index of Model Cards. For more information on the model-index spec, see https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1. Args: task_type (`str`): The task identifier. Example: "image-classification". dataset_type (`str`): The dataset identifier. Example: "common_voice". Use dataset id from https://hf.co/datasets. dataset_name (`str`): A pretty name for the dataset. Example: "Common Voice (French)". metric_type (`str`): The metric identifier. Example: "wer". Use metric id from https://hf.co/metrics. metric_value (`Any`): The metric value. Example: 0.9 or "20.0 ± 1.2". task_name (`str`, *optional*): A pretty name for the task. Example: "Speech Recognition". dataset_config (`str`, *optional*): The name of the dataset configuration used in `load_dataset()`. Example: fr in `load_dataset("common_voice", "fr")`. See the `datasets` docs for more info: https://hf.co/docs/datasets/package_reference/loading_methods#datasets.load_dataset.name dataset_split (`str`, *optional*): The split used in `load_dataset()`. Example: "test". dataset_revision (`str`, *optional*): The revision (AKA Git Sha) of the dataset used in `load_dataset()`. Example: 5503434ddd753f426f4b38109466949a1217c2bb dataset_args (`Dict[str, Any]`, *optional*): The arguments passed during `Metric.compute()`. Example for `bleu`: `{"max_order": 4}` metric_name (`str`, *optional*): A pretty name for the metric. Example: "Test WER". metric_config (`str`, *optional*): The name of the metric configuration used in `load_metric()`. Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`. See the `datasets` docs for more info: https://huggingface.co/docs/datasets/v2.1.0/en/loading#load-configurations metric_args (`Dict[str, Any]`, *optional*): The arguments passed during `Metric.compute()`. Example for `bleu`: max_order: 4 verified (`bool`, *optional*): Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set. verify_token (`str`, *optional*): A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. source_name (`str`, *optional*): The name of the source of the evaluation result. Example: "Open LLM Leaderboard". source_url (`str`, *optional*): The URL of the source of the evaluation result. Example: "https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard". """ # Required # The task identifier # Example: automatic-speech-recognition task_type: str # The dataset identifier # Example: common_voice. Use dataset id from https://hf.co/datasets dataset_type: str # A pretty name for the dataset. # Example: Common Voice (French) dataset_name: str # The metric identifier # Example: wer. Use metric id from https://hf.co/metrics metric_type: str # Value of the metric. # Example: 20.0 or "20.0 ± 1.2" metric_value: Any # Optional # A pretty name for the task. # Example: Speech Recognition task_name: Optional[str] = None # The name of the dataset configuration used in `load_dataset()`. # Example: fr in `load_dataset("common_voice", "fr")`. 
# See the `datasets` docs for more info: # https://huggingface.co/docs/datasets/package_reference/loading_methods#datasets.load_dataset.name dataset_config: Optional[str] = None # The split used in `load_dataset()`. # Example: test dataset_split: Optional[str] = None # The revision (AKA Git Sha) of the dataset used in `load_dataset()`. # Example: 5503434ddd753f426f4b38109466949a1217c2bb dataset_revision: Optional[str] = None # The arguments passed during `Metric.compute()`. # Example for `bleu`: max_order: 4 dataset_args: Optional[Dict[str, Any]] = None # A pretty name for the metric. # Example: Test WER metric_name: Optional[str] = None # The name of the metric configuration used in `load_metric()`. # Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`. # See the `datasets` docs for more info: https://huggingface.co/docs/datasets/v2.1.0/en/loading#load-configurations metric_config: Optional[str] = None # The arguments passed during `Metric.compute()`. # Example for `bleu`: max_order: 4 metric_args: Optional[Dict[str, Any]] = None # Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set. verified: Optional[bool] = None # A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. verify_token: Optional[str] = None # The name of the source of the evaluation result. # Example: Open LLM Leaderboard source_name: Optional[str] = None # The URL of the source of the evaluation result. # Example: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard source_url: Optional[str] = None @property def unique_identifier(self) -> tuple: """Returns a tuple that uniquely identifies this evaluation.""" return ( self.task_type, self.dataset_type, self.dataset_config, self.dataset_split, self.dataset_revision, ) def is_equal_except_value(self, other: "EvalResult") -> bool: """ Return True if `self` and `other` describe exactly the same metric but with a different value. """ for key, _ in self.__dict__.items(): if key == "metric_value": continue # For metrics computed by Hugging Face's evaluation service, `verify_token` is derived from `metric_value`, # so we exclude it here in the comparison. if key != "verify_token" and getattr(self, key) != getattr(other, key): return False return True def __post_init__(self) -> None: if self.source_name is not None and self.source_url is None: raise ValueError("If `source_name` is provided, `source_url` must also be provided.")
class_definition
248
7,185
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/repocard_data.py
null
0
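A minimal usage sketch for the `EvalResult` record above, assuming `EvalResult` is the dataclass exported by `huggingface_hub`; all field values are illustrative and taken from the docstring examples.

```python
from huggingface_hub import EvalResult

result = EvalResult(
    task_type="automatic-speech-recognition",  # required: task identifier
    dataset_type="common_voice",               # required: dataset id from https://hf.co/datasets
    dataset_name="Common Voice (French)",      # required: pretty name for the dataset
    metric_type="wer",                         # required: metric id from https://hf.co/metrics
    metric_value=0.9,                          # required: the metric value
    dataset_config="fr",                       # optional: config passed to `load_dataset()`
    dataset_split="test",                      # optional
)

# `unique_identifier` groups results that describe the same evaluation setup
print(result.unique_identifier)
# ('automatic-speech-recognition', 'common_voice', 'fr', 'test', None)
```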
class CardData:
    """Structure containing metadata from a RepoCard.

    [`CardData`] is the parent class of [`ModelCardData`] and [`DatasetCardData`].

    Metadata can be exported as a dictionary or YAML. Export can be customized to alter the
    representation of the data (example: flatten evaluation results). `CardData` behaves as a
    dictionary (can get, pop, set values) but does not inherit from `dict` to allow this export step.
    """

    def __init__(self, ignore_metadata_errors: bool = False, **kwargs):
        self.__dict__.update(kwargs)

    def to_dict(self):
        """Converts CardData to a dict.

        Returns:
            `dict`: CardData represented as a dictionary ready to be dumped to a YAML block
            for inclusion in a README.md file.
        """
        data_dict = copy.deepcopy(self.__dict__)
        self._to_dict(data_dict)
        return {key: value for key, value in data_dict.items() if value is not None}

    def _to_dict(self, data_dict):
        """Use this method in child classes to alter the dict representation of the data. Alter the dict in-place.

        Args:
            data_dict (`dict`): The raw dict representation of the card data.
        """
        pass

    def to_yaml(self, line_break=None, original_order: Optional[List[str]] = None) -> str:
        """Dumps CardData to a YAML block for inclusion in a README.md file.

        Args:
            line_break (str, *optional*):
                The line break to use when dumping to yaml.

        Returns:
            `str`: CardData represented as a YAML block.
        """
        if original_order:
            self.__dict__ = {
                k: self.__dict__[k]
                for k in original_order + list(set(self.__dict__.keys()) - set(original_order))
                if k in self.__dict__
            }
        return yaml_dump(self.to_dict(), sort_keys=False, line_break=line_break).strip()

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return self.to_yaml()

    def get(self, key: str, default: Any = None) -> Any:
        """Get value for a given metadata key."""
        return self.__dict__.get(key, default)

    def pop(self, key: str, default: Any = None) -> Any:
        """Pop value for a given metadata key."""
        return self.__dict__.pop(key, default)

    def __getitem__(self, key: str) -> Any:
        """Get value for a given metadata key."""
        return self.__dict__[key]

    def __setitem__(self, key: str, value: Any) -> None:
        """Set value for a given metadata key."""
        self.__dict__[key] = value

    def __contains__(self, key: str) -> bool:
        """Check if a given metadata key is set."""
        return key in self.__dict__

    def __len__(self) -> int:
        """Return the number of metadata keys set."""
        return len(self.__dict__)
class_definition
7,199
10,080
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/repocard_data.py
null
1
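A short sketch of the dict-like interface and YAML export described above, assuming `CardData` is importable from the top-level `huggingface_hub` package like the other card classes; keys and values are illustrative.

```python
from huggingface_hub import CardData

data = CardData(language="en", license="mit")

# Dict-like access without inheriting from `dict`
data["tags"] = ["demo"]
assert "license" in data
assert data.get("missing", "default") == "default"

# `to_dict()` drops keys whose value is None; `to_yaml()` dumps a YAML block for a README.md
print(data.to_yaml())
# language: en
# license: mit
# tags:
# - demo
```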
class ModelCardData(CardData): """Model Card Metadata that is used by Hugging Face Hub when included at the top of your README.md Args: base_model (`str` or `List[str]`, *optional*): The identifier of the base model from which the model derives. This is applicable for example if your model is a fine-tune or adapter of an existing model. The value must be the ID of a model on the Hub (or a list of IDs if your model derives from multiple models). Defaults to None. datasets (`Union[str, List[str]]`, *optional*): Dataset or list of datasets that were used to train this model. Should be a dataset ID found on https://hf.co/datasets. Defaults to None. eval_results (`Union[List[EvalResult], EvalResult]`, *optional*): List of `huggingface_hub.EvalResult` that define evaluation results of the model. If provided, `model_name` is used to as a name on PapersWithCode's leaderboards. Defaults to `None`. language (`Union[str, List[str]]`, *optional*): Language of model's training data or metadata. It must be an ISO 639-1, 639-2 or 639-3 code (two/three letters), or a special value like "code", "multilingual". Defaults to `None`. library_name (`str`, *optional*): Name of library used by this model. Example: keras or any library from https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/src/model-libraries.ts. Defaults to None. license (`str`, *optional*): License of this model. Example: apache-2.0 or any license from https://huggingface.co/docs/hub/repositories-licenses. Defaults to None. license_name (`str`, *optional*): Name of the license of this model. Defaults to None. To be used in conjunction with `license_link`. Common licenses (Apache-2.0, MIT, CC-BY-SA-4.0) do not need a name. In that case, use `license` instead. license_link (`str`, *optional*): Link to the license of this model. Defaults to None. To be used in conjunction with `license_name`. Common licenses (Apache-2.0, MIT, CC-BY-SA-4.0) do not need a link. In that case, use `license` instead. metrics (`List[str]`, *optional*): List of metrics used to evaluate this model. Should be a metric name that can be found at https://hf.co/metrics. Example: 'accuracy'. Defaults to None. model_name (`str`, *optional*): A name for this model. It is used along with `eval_results` to construct the `model-index` within the card's metadata. The name you supply here is what will be used on PapersWithCode's leaderboards. If None is provided then the repo name is used as a default. Defaults to None. pipeline_tag (`str`, *optional*): The pipeline tag associated with the model. Example: "text-classification". tags (`List[str]`, *optional*): List of tags to add to your model that can be used when filtering on the Hugging Face Hub. Defaults to None. ignore_metadata_errors (`str`): If True, errors while parsing the metadata section will be ignored. Some information might be lost during the process. Use it at your own risk. kwargs (`dict`, *optional*): Additional metadata that will be added to the model card. Defaults to None. Example: ```python >>> from huggingface_hub import ModelCardData >>> card_data = ModelCardData( ... language="en", ... license="mit", ... library_name="timm", ... tags=['image-classification', 'resnet'], ... 
) >>> card_data.to_dict() {'language': 'en', 'license': 'mit', 'library_name': 'timm', 'tags': ['image-classification', 'resnet']} ``` """ def __init__( self, *, base_model: Optional[Union[str, List[str]]] = None, datasets: Optional[Union[str, List[str]]] = None, eval_results: Optional[List[EvalResult]] = None, language: Optional[Union[str, List[str]]] = None, library_name: Optional[str] = None, license: Optional[str] = None, license_name: Optional[str] = None, license_link: Optional[str] = None, metrics: Optional[List[str]] = None, model_name: Optional[str] = None, pipeline_tag: Optional[str] = None, tags: Optional[List[str]] = None, ignore_metadata_errors: bool = False, **kwargs, ): self.base_model = base_model self.datasets = datasets self.eval_results = eval_results self.language = language self.library_name = library_name self.license = license self.license_name = license_name self.license_link = license_link self.metrics = metrics self.model_name = model_name self.pipeline_tag = pipeline_tag self.tags = _to_unique_list(tags) model_index = kwargs.pop("model-index", None) if model_index: try: model_name, eval_results = model_index_to_eval_results(model_index) self.model_name = model_name self.eval_results = eval_results except (KeyError, TypeError) as error: if ignore_metadata_errors: logger.warning("Invalid model-index. Not loading eval results into CardData.") else: raise ValueError( f"Invalid `model_index` in metadata cannot be parsed: {error.__class__} {error}. Pass" " `ignore_metadata_errors=True` to ignore this error while loading a Model Card. Warning:" " some information will be lost. Use it at your own risk." ) super().__init__(**kwargs) if self.eval_results: if isinstance(self.eval_results, EvalResult): self.eval_results = [self.eval_results] if self.model_name is None: raise ValueError("Passing `eval_results` requires `model_name` to be set.") def _to_dict(self, data_dict): """Format the internal data dict. In this case, we convert eval results to a valid model index""" if self.eval_results is not None: data_dict["model-index"] = eval_results_to_model_index(self.model_name, self.eval_results) del data_dict["eval_results"], data_dict["model_name"]
class_definition
10,083
16,711
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/repocard_data.py
null
2
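A sketch of the `eval_results` handling described above: passing `eval_results` requires `model_name`, and `to_dict()` turns both into a `model-index` block. The model and dataset names are made up.

```python
from huggingface_hub import EvalResult, ModelCardData

card_data = ModelCardData(
    model_name="my-cool-model",  # required when `eval_results` is passed
    eval_results=[
        EvalResult(
            task_type="image-classification",
            dataset_type="beans",
            dataset_name="Beans",
            metric_type="accuracy",
            metric_value=0.9,
        )
    ],
    language="en",
    license="mit",
)

# `eval_results` and `model_name` are folded into the `model-index` entry on export
model_index = card_data.to_dict()["model-index"]
print(model_index[0]["name"])  # my-cool-model
```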
class DatasetCardData(CardData): """Dataset Card Metadata that is used by Hugging Face Hub when included at the top of your README.md Args: language (`List[str]`, *optional*): Language of dataset's data or metadata. It must be an ISO 639-1, 639-2 or 639-3 code (two/three letters), or a special value like "code", "multilingual". license (`Union[str, List[str]]`, *optional*): License(s) of this dataset. Example: apache-2.0 or any license from https://huggingface.co/docs/hub/repositories-licenses. annotations_creators (`Union[str, List[str]]`, *optional*): How the annotations for the dataset were created. Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'no-annotation', 'other'. language_creators (`Union[str, List[str]]`, *optional*): How the text-based data in the dataset was created. Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'other' multilinguality (`Union[str, List[str]]`, *optional*): Whether the dataset is multilingual. Options are: 'monolingual', 'multilingual', 'translation', 'other'. size_categories (`Union[str, List[str]]`, *optional*): The number of examples in the dataset. Options are: 'n<1K', '1K<n<10K', '10K<n<100K', '100K<n<1M', '1M<n<10M', '10M<n<100M', '100M<n<1B', '1B<n<10B', '10B<n<100B', '100B<n<1T', 'n>1T', and 'other'. source_datasets (`List[str]]`, *optional*): Indicates whether the dataset is an original dataset or extended from another existing dataset. Options are: 'original' and 'extended'. task_categories (`Union[str, List[str]]`, *optional*): What categories of task does the dataset support? task_ids (`Union[str, List[str]]`, *optional*): What specific tasks does the dataset support? paperswithcode_id (`str`, *optional*): ID of the dataset on PapersWithCode. pretty_name (`str`, *optional*): A more human-readable name for the dataset. (ex. "Cats vs. Dogs") train_eval_index (`Dict`, *optional*): A dictionary that describes the necessary spec for doing evaluation on the Hub. If not provided, it will be gathered from the 'train-eval-index' key of the kwargs. config_names (`Union[str, List[str]]`, *optional*): A list of the available dataset configs for the dataset. """ def __init__( self, *, language: Optional[Union[str, List[str]]] = None, license: Optional[Union[str, List[str]]] = None, annotations_creators: Optional[Union[str, List[str]]] = None, language_creators: Optional[Union[str, List[str]]] = None, multilinguality: Optional[Union[str, List[str]]] = None, size_categories: Optional[Union[str, List[str]]] = None, source_datasets: Optional[List[str]] = None, task_categories: Optional[Union[str, List[str]]] = None, task_ids: Optional[Union[str, List[str]]] = None, paperswithcode_id: Optional[str] = None, pretty_name: Optional[str] = None, train_eval_index: Optional[Dict] = None, config_names: Optional[Union[str, List[str]]] = None, ignore_metadata_errors: bool = False, **kwargs, ): self.annotations_creators = annotations_creators self.language_creators = language_creators self.language = language self.license = license self.multilinguality = multilinguality self.size_categories = size_categories self.source_datasets = source_datasets self.task_categories = task_categories self.task_ids = task_ids self.paperswithcode_id = paperswithcode_id self.pretty_name = pretty_name self.config_names = config_names # TODO - maybe handle this similarly to EvalResult? 
self.train_eval_index = train_eval_index or kwargs.pop("train-eval-index", None) super().__init__(**kwargs) def _to_dict(self, data_dict): data_dict["train-eval-index"] = data_dict.pop("train_eval_index")
class_definition
16,714
20,971
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/repocard_data.py
null
3
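A small usage sketch for `DatasetCardData`; the option values are illustrative picks from the docstring above.

```python
from huggingface_hub import DatasetCardData

card_data = DatasetCardData(
    language="en",
    license="mit",
    annotations_creators=["crowdsourced"],
    multilinguality="monolingual",
    size_categories=["10K<n<100K"],
    task_categories=["text-classification"],
    pretty_name="My Text Classification Dataset",
)

# Keys left as None (e.g. `train_eval_index`, exported as `train-eval-index`) are dropped
print(card_data.to_yaml())
```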
class SpaceCardData(CardData): """Space Card Metadata that is used by Hugging Face Hub when included at the top of your README.md To get an exhaustive reference of Spaces configuration, please visit https://huggingface.co/docs/hub/spaces-config-reference#spaces-configuration-reference. Args: title (`str`, *optional*) Title of the Space. sdk (`str`, *optional*) SDK of the Space (one of `gradio`, `streamlit`, `docker`, or `static`). sdk_version (`str`, *optional*) Version of the used SDK (if Gradio/Streamlit sdk). python_version (`str`, *optional*) Python version used in the Space (if Gradio/Streamlit sdk). app_file (`str`, *optional*) Path to your main application file (which contains either gradio or streamlit Python code, or static html code). Path is relative to the root of the repository. app_port (`str`, *optional*) Port on which your application is running. Used only if sdk is `docker`. license (`str`, *optional*) License of this model. Example: apache-2.0 or any license from https://huggingface.co/docs/hub/repositories-licenses. duplicated_from (`str`, *optional*) ID of the original Space if this is a duplicated Space. models (List[`str`], *optional*) List of models related to this Space. Should be a dataset ID found on https://hf.co/models. datasets (`List[str]`, *optional*) List of datasets related to this Space. Should be a dataset ID found on https://hf.co/datasets. tags (`List[str]`, *optional*) List of tags to add to your Space that can be used when filtering on the Hub. ignore_metadata_errors (`str`): If True, errors while parsing the metadata section will be ignored. Some information might be lost during the process. Use it at your own risk. kwargs (`dict`, *optional*): Additional metadata that will be added to the space card. Example: ```python >>> from huggingface_hub import SpaceCardData >>> card_data = SpaceCardData( ... title="Dreambooth Training", ... license="mit", ... sdk="gradio", ... duplicated_from="multimodalart/dreambooth-training" ... ) >>> card_data.to_dict() {'title': 'Dreambooth Training', 'sdk': 'gradio', 'license': 'mit', 'duplicated_from': 'multimodalart/dreambooth-training'} ``` """ def __init__( self, *, title: Optional[str] = None, sdk: Optional[str] = None, sdk_version: Optional[str] = None, python_version: Optional[str] = None, app_file: Optional[str] = None, app_port: Optional[int] = None, license: Optional[str] = None, duplicated_from: Optional[str] = None, models: Optional[List[str]] = None, datasets: Optional[List[str]] = None, tags: Optional[List[str]] = None, ignore_metadata_errors: bool = False, **kwargs, ): self.title = title self.sdk = sdk self.sdk_version = sdk_version self.python_version = python_version self.app_file = app_file self.app_port = app_port self.license = license self.duplicated_from = duplicated_from self.models = models self.datasets = datasets self.tags = _to_unique_list(tags) super().__init__(**kwargs)
class_definition
20,974
24,542
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/repocard_data.py
null
4
class BaseModel:  # type: ignore [no-redef]
    def __init__(self, *args, **kwargs) -> None:
        raise ImportError(
            "You must have `pydantic` installed to use `WebhookPayload`. This is an optional dependency that"
            " should be installed separately. Please run `pip install --upgrade pydantic` and retry."
        )
class_definition
986
1,347
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
5
class ObjectId(BaseModel):
    id: str
class_definition
1,991
2,029
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
6
class WebhookPayloadUrl(BaseModel):
    web: str
    api: Optional[str] = None
class_definition
2,032
2,110
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
7
class WebhookPayloadMovedTo(BaseModel):
    name: str
    owner: ObjectId
class_definition
2,113
2,186
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
8
class WebhookPayloadWebhook(ObjectId):
    version: SupportedWebhookVersion
class_definition
2,189
2,264
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
9
class WebhookPayloadEvent(BaseModel):
    action: WebhookEvent_T
    scope: str
class_definition
2,267
2,346
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
10
class WebhookPayloadDiscussionChanges(BaseModel):
    base: str
    mergeCommitId: Optional[str] = None
class_definition
2,349
2,452
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
11
class WebhookPayloadComment(ObjectId):
    author: ObjectId
    hidden: bool
    content: Optional[str] = None
    url: WebhookPayloadUrl
class_definition
2,455
2,592
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
12
class WebhookPayloadDiscussion(ObjectId):
    num: int
    author: ObjectId
    url: WebhookPayloadUrl
    title: str
    isPullRequest: bool
    status: DiscussionStatus_T
    changes: Optional[WebhookPayloadDiscussionChanges] = None
    pinned: Optional[bool] = None
class_definition
2,595
2,863
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
13
class WebhookPayloadRepo(ObjectId):
    owner: ObjectId
    head_sha: Optional[str] = None
    name: str
    private: bool
    subdomain: Optional[str] = None
    tags: Optional[List[str]] = None
    type: Literal["dataset", "model", "space"]
    url: WebhookPayloadUrl
class_definition
2,866
3,135
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
14
class WebhookPayloadUpdatedRef(BaseModel):
    ref: str
    oldSha: Optional[str] = None
    newSha: Optional[str] = None
class_definition
3,138
3,259
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
15
class WebhookPayload(BaseModel):
    event: WebhookPayloadEvent
    repo: WebhookPayloadRepo
    discussion: Optional[WebhookPayloadDiscussion] = None
    comment: Optional[WebhookPayloadComment] = None
    webhook: WebhookPayloadWebhook
    movedTo: Optional[WebhookPayloadMovedTo] = None
    updatedRefs: Optional[List[WebhookPayloadUpdatedRef]] = None
class_definition
3,262
3,616
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_payload.py
null
16
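A sketch of validating an incoming payload with the pydantic models above (requires `pydantic`; see the fallback `BaseModel` earlier). The payload shape follows the field definitions, but the concrete values, the `repo.content` scope and the webhook `version` of 3 are assumptions, not an official example.

```python
from huggingface_hub import WebhookPayload

# Nested dicts are coerced into the nested pydantic models at validation time
payload = WebhookPayload(
    event={"action": "update", "scope": "repo.content"},
    repo={
        "id": "000000000000000000000001",
        "owner": {"id": "000000000000000000000002"},
        "name": "username/my-model",
        "private": False,
        "type": "model",
        "url": {"web": "https://huggingface.co/username/my-model"},
    },
    webhook={"id": "000000000000000000000003", "version": 3},
)

if payload.repo.type == "model" and payload.event.action == "update":
    print(f"Model {payload.repo.name} was updated")
```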
class MixinInfo:
    model_card_template: str
    model_card_data: ModelCardData
    repo_url: Optional[str] = None
    docs_url: Optional[str] = None
class_definition
1,902
2,052
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hub_mixin.py
null
17
class ModelHubMixin: """ A generic mixin to integrate ANY machine learning framework with the Hub. To integrate your framework, your model class must inherit from this class. Custom logic for saving/loading models have to be overwritten in [`_from_pretrained`] and [`_save_pretrained`]. [`PyTorchModelHubMixin`] is a good example of mixin integration with the Hub. Check out our [integration guide](../guides/integrations) for more instructions. When inheriting from [`ModelHubMixin`], you can define class-level attributes. These attributes are not passed to `__init__` but to the class definition itself. This is useful to define metadata about the library integrating [`ModelHubMixin`]. For more details on how to integrate the mixin with your library, checkout the [integration guide](../guides/integrations). Args: repo_url (`str`, *optional*): URL of the library repository. Used to generate model card. docs_url (`str`, *optional*): URL of the library documentation. Used to generate model card. model_card_template (`str`, *optional*): Template of the model card. Used to generate model card. Defaults to a generic template. language (`str` or `List[str]`, *optional*): Language supported by the library. Used to generate model card. library_name (`str`, *optional*): Name of the library integrating ModelHubMixin. Used to generate model card. license (`str`, *optional*): License of the library integrating ModelHubMixin. Used to generate model card. E.g: "apache-2.0" license_name (`str`, *optional*): Name of the library integrating ModelHubMixin. Used to generate model card. Only used if `license` is set to `other`. E.g: "coqui-public-model-license". license_link (`str`, *optional*): URL to the license of the library integrating ModelHubMixin. Used to generate model card. Only used if `license` is set to `other` and `license_name` is set. E.g: "https://coqui.ai/cpml". pipeline_tag (`str`, *optional*): Tag of the pipeline. Used to generate model card. E.g. "text-classification". tags (`List[str]`, *optional*): Tags to be added to the model card. Used to generate model card. E.g. ["x-custom-tag", "arxiv:2304.12244"] coders (`Dict[Type, Tuple[Callable, Callable]]`, *optional*): Dictionary of custom types and their encoders/decoders. Used to encode/decode arguments that are not jsonable by default. E.g dataclasses, argparse.Namespace, OmegaConf, etc. Example: ```python >>> from huggingface_hub import ModelHubMixin # Inherit from ModelHubMixin >>> class MyCustomModel( ... ModelHubMixin, ... library_name="my-library", ... tags=["x-custom-tag", "arxiv:2304.12244"], ... repo_url="https://github.com/huggingface/my-cool-library", ... docs_url="https://huggingface.co/docs/my-cool-library", ... # ^ optional metadata to generate model card ... ): ... def __init__(self, size: int = 512, device: str = "cpu"): ... # define how to initialize your model ... super().__init__() ... ... ... ... def _save_pretrained(self, save_directory: Path) -> None: ... # define how to serialize your model ... ... ... ... @classmethod ... def from_pretrained( ... cls: Type[T], ... pretrained_model_name_or_path: Union[str, Path], ... *, ... force_download: bool = False, ... resume_download: Optional[bool] = None, ... proxies: Optional[Dict] = None, ... token: Optional[Union[str, bool]] = None, ... cache_dir: Optional[Union[str, Path]] = None, ... local_files_only: bool = False, ... revision: Optional[str] = None, ... **model_kwargs, ... ) -> T: ... # define how to deserialize your model ... ... 
>>> model = MyCustomModel(size=256, device="gpu") # Save model weights to local directory >>> model.save_pretrained("my-awesome-model") # Push model weights to the Hub >>> model.push_to_hub("my-awesome-model") # Download and initialize weights from the Hub >>> reloaded_model = MyCustomModel.from_pretrained("username/my-awesome-model") >>> reloaded_model.size 256 # Model card has been correctly populated >>> from huggingface_hub import ModelCard >>> card = ModelCard.load("username/my-awesome-model") >>> card.data.tags ["x-custom-tag", "pytorch_model_hub_mixin", "model_hub_mixin"] >>> card.data.library_name "my-library" ``` """ _hub_mixin_config: Optional[Union[dict, "DataclassInstance"]] = None # ^ optional config attribute automatically set in `from_pretrained` _hub_mixin_info: MixinInfo # ^ information about the library integrating ModelHubMixin (used to generate model card) _hub_mixin_inject_config: bool # whether `_from_pretrained` expects `config` or not _hub_mixin_init_parameters: Dict[str, inspect.Parameter] # __init__ parameters _hub_mixin_jsonable_default_values: Dict[str, Any] # default values for __init__ parameters _hub_mixin_jsonable_custom_types: Tuple[Type, ...] # custom types that can be encoded/decoded _hub_mixin_coders: Dict[Type, CODER_T] # encoders/decoders for custom types # ^ internal values to handle config def __init_subclass__( cls, *, # Generic info for model card repo_url: Optional[str] = None, docs_url: Optional[str] = None, # Model card template model_card_template: str = DEFAULT_MODEL_CARD, # Model card metadata language: Optional[List[str]] = None, library_name: Optional[str] = None, license: Optional[str] = None, license_name: Optional[str] = None, license_link: Optional[str] = None, pipeline_tag: Optional[str] = None, tags: Optional[List[str]] = None, # How to encode/decode arguments with custom type into a JSON config? coders: Optional[ Dict[Type, CODER_T] # Key is a type. # Value is a tuple (encoder, decoder). 
# Example: {MyCustomType: (lambda x: x.value, lambda data: MyCustomType(data))} ] = None, ) -> None: """Inspect __init__ signature only once when subclassing + handle modelcard.""" super().__init_subclass__() # Will be reused when creating modelcard tags = tags or [] tags.append("model_hub_mixin") # Initialize MixinInfo if not existent info = MixinInfo(model_card_template=model_card_template, model_card_data=ModelCardData()) # If parent class has a MixinInfo, inherit from it as a copy if hasattr(cls, "_hub_mixin_info"): # Inherit model card template from parent class if not explicitly set if model_card_template == DEFAULT_MODEL_CARD: info.model_card_template = cls._hub_mixin_info.model_card_template # Inherit from parent model card data info.model_card_data = ModelCardData(**cls._hub_mixin_info.model_card_data.to_dict()) # Inherit other info info.docs_url = cls._hub_mixin_info.docs_url info.repo_url = cls._hub_mixin_info.repo_url cls._hub_mixin_info = info # Update MixinInfo with metadata if model_card_template is not None and model_card_template != DEFAULT_MODEL_CARD: info.model_card_template = model_card_template if repo_url is not None: info.repo_url = repo_url if docs_url is not None: info.docs_url = docs_url if language is not None: info.model_card_data.language = language if library_name is not None: info.model_card_data.library_name = library_name if license is not None: info.model_card_data.license = license if license_name is not None: info.model_card_data.license_name = license_name if license_link is not None: info.model_card_data.license_link = license_link if pipeline_tag is not None: info.model_card_data.pipeline_tag = pipeline_tag if tags is not None: if info.model_card_data.tags is not None: info.model_card_data.tags.extend(tags) else: info.model_card_data.tags = tags info.model_card_data.tags = sorted(set(info.model_card_data.tags)) # Handle encoders/decoders for args cls._hub_mixin_coders = coders or {} cls._hub_mixin_jsonable_custom_types = tuple(cls._hub_mixin_coders.keys()) # Inspect __init__ signature to handle config cls._hub_mixin_init_parameters = dict(inspect.signature(cls.__init__).parameters) cls._hub_mixin_jsonable_default_values = { param.name: cls._encode_arg(param.default) for param in cls._hub_mixin_init_parameters.values() if param.default is not inspect.Parameter.empty and cls._is_jsonable(param.default) } cls._hub_mixin_inject_config = "config" in inspect.signature(cls._from_pretrained).parameters def __new__(cls: Type[T], *args, **kwargs) -> T: """Create a new instance of the class and handle config. 3 cases: - If `self._hub_mixin_config` is already set, do nothing. - If `config` is passed as a dataclass, set it as `self._hub_mixin_config`. - Otherwise, build `self._hub_mixin_config` from default values and passed values. 
""" instance = super().__new__(cls) # If `config` is already set, return early if instance._hub_mixin_config is not None: return instance # Infer passed values passed_values = { **{ key: value for key, value in zip( # [1:] to skip `self` parameter list(cls._hub_mixin_init_parameters)[1:], args, ) }, **kwargs, } # If config passed as dataclass => set it and return early if is_dataclass(passed_values.get("config")): instance._hub_mixin_config = passed_values["config"] return instance # Otherwise, build config from default + passed values init_config = { # default values **cls._hub_mixin_jsonable_default_values, # passed values **{ key: cls._encode_arg(value) # Encode custom types as jsonable value for key, value in passed_values.items() if instance._is_jsonable(value) # Only if jsonable or we have a custom encoder }, } passed_config = init_config.pop("config", {}) # Populate `init_config` with provided config if isinstance(passed_config, dict): init_config.update(passed_config) # Set `config` attribute and return if init_config != {}: instance._hub_mixin_config = init_config return instance @classmethod def _is_jsonable(cls, value: Any) -> bool: """Check if a value is JSON serializable.""" if isinstance(value, cls._hub_mixin_jsonable_custom_types): return True return is_jsonable(value) @classmethod def _encode_arg(cls, arg: Any) -> Any: """Encode an argument into a JSON serializable format.""" for type_, (encoder, _) in cls._hub_mixin_coders.items(): if isinstance(arg, type_): if arg is None: return None return encoder(arg) return arg @classmethod def _decode_arg(cls, expected_type: Type[ARGS_T], value: Any) -> Optional[ARGS_T]: """Decode a JSON serializable value into an argument.""" if is_simple_optional_type(expected_type): if value is None: return None expected_type = unwrap_simple_optional_type(expected_type) # Dataclass => handle it if is_dataclass(expected_type): return _load_dataclass(expected_type, value) # type: ignore[return-value] # Otherwise => check custom decoders for type_, (_, decoder) in cls._hub_mixin_coders.items(): if inspect.isclass(expected_type) and issubclass(expected_type, type_): return decoder(value) # Otherwise => don't decode return value def save_pretrained( self, save_directory: Union[str, Path], *, config: Optional[Union[dict, "DataclassInstance"]] = None, repo_id: Optional[str] = None, push_to_hub: bool = False, model_card_kwargs: Optional[Dict[str, Any]] = None, **push_to_hub_kwargs, ) -> Optional[str]: """ Save weights in local directory. Args: save_directory (`str` or `Path`): Path to directory in which the model weights and configuration will be saved. config (`dict` or `DataclassInstance`, *optional*): Model configuration specified as a key/value dictionary or a dataclass instance. push_to_hub (`bool`, *optional*, defaults to `False`): Whether or not to push your model to the Huggingface Hub after saving it. repo_id (`str`, *optional*): ID of your repository on the Hub. Used only if `push_to_hub=True`. Will default to the folder name if not provided. model_card_kwargs (`Dict[str, Any]`, *optional*): Additional arguments passed to the model card template to customize the model card. push_to_hub_kwargs: Additional key word arguments passed along to the [`~ModelHubMixin.push_to_hub`] method. Returns: `str` or `None`: url of the commit on the Hub if `push_to_hub=True`, `None` otherwise. """ save_directory = Path(save_directory) save_directory.mkdir(parents=True, exist_ok=True) # Remove config.json if already exists. 
After `_save_pretrained` we don't want to overwrite config.json # as it might have been saved by the custom `_save_pretrained` already. However we do want to overwrite # an existing config.json if it was not saved by `_save_pretrained`. config_path = save_directory / constants.CONFIG_NAME config_path.unlink(missing_ok=True) # save model weights/files (framework-specific) self._save_pretrained(save_directory) # save config (if provided and if not serialized yet in `_save_pretrained`) if config is None: config = self._hub_mixin_config if config is not None: if is_dataclass(config): config = asdict(config) # type: ignore[arg-type] if not config_path.exists(): config_str = json.dumps(config, sort_keys=True, indent=2) config_path.write_text(config_str) # save model card model_card_path = save_directory / "README.md" model_card_kwargs = model_card_kwargs if model_card_kwargs is not None else {} if not model_card_path.exists(): # do not overwrite if already exists self.generate_model_card(**model_card_kwargs).save(save_directory / "README.md") # push to the Hub if required if push_to_hub: kwargs = push_to_hub_kwargs.copy() # soft-copy to avoid mutating input if config is not None: # kwarg for `push_to_hub` kwargs["config"] = config if repo_id is None: repo_id = save_directory.name # Defaults to `save_directory` name return self.push_to_hub(repo_id=repo_id, model_card_kwargs=model_card_kwargs, **kwargs) return None def _save_pretrained(self, save_directory: Path) -> None: """ Overwrite this method in subclass to define how to save your model. Check out our [integration guide](../guides/integrations) for instructions. Args: save_directory (`str` or `Path`): Path to directory in which the model weights and configuration will be saved. """ raise NotImplementedError @classmethod @validate_hf_hub_args def from_pretrained( cls: Type[T], pretrained_model_name_or_path: Union[str, Path], *, force_download: bool = False, resume_download: Optional[bool] = None, proxies: Optional[Dict] = None, token: Optional[Union[str, bool]] = None, cache_dir: Optional[Union[str, Path]] = None, local_files_only: bool = False, revision: Optional[str] = None, **model_kwargs, ) -> T: """ Download a model from the Huggingface Hub and instantiate it. Args: pretrained_model_name_or_path (`str`, `Path`): - Either the `model_id` (string) of a model hosted on the Hub, e.g. `bigscience/bloom`. - Or a path to a `directory` containing model weights saved using [`~transformers.PreTrainedModel.save_pretrained`], e.g., `../path/to/my_model_directory/`. revision (`str`, *optional*): Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the latest commit on `main` branch. force_download (`bool`, *optional*, defaults to `False`): Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding the existing cache. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on every request. token (`str` or `bool`, *optional*): The token to use as HTTP bearer authorization for remote files. By default, it will use the token cached when running `huggingface-cli login`. cache_dir (`str`, `Path`, *optional*): Path to the folder where cached files are stored. local_files_only (`bool`, *optional*, defaults to `False`): If `True`, avoid downloading the file and return the path to the local cached file if it exists. 
model_kwargs (`Dict`, *optional*): Additional kwargs to pass to the model during initialization. """ model_id = str(pretrained_model_name_or_path) config_file: Optional[str] = None if os.path.isdir(model_id): if constants.CONFIG_NAME in os.listdir(model_id): config_file = os.path.join(model_id, constants.CONFIG_NAME) else: logger.warning(f"{constants.CONFIG_NAME} not found in {Path(model_id).resolve()}") else: try: config_file = hf_hub_download( repo_id=model_id, filename=constants.CONFIG_NAME, revision=revision, cache_dir=cache_dir, force_download=force_download, proxies=proxies, resume_download=resume_download, token=token, local_files_only=local_files_only, ) except HfHubHTTPError as e: logger.info(f"{constants.CONFIG_NAME} not found on the HuggingFace Hub: {str(e)}") # Read config config = None if config_file is not None: with open(config_file, "r", encoding="utf-8") as f: config = json.load(f) # Decode custom types in config for key, value in config.items(): if key in cls._hub_mixin_init_parameters: expected_type = cls._hub_mixin_init_parameters[key].annotation if expected_type is not inspect.Parameter.empty: config[key] = cls._decode_arg(expected_type, value) # Populate model_kwargs from config for param in cls._hub_mixin_init_parameters.values(): if param.name not in model_kwargs and param.name in config: model_kwargs[param.name] = config[param.name] # Check if `config` argument was passed at init if "config" in cls._hub_mixin_init_parameters and "config" not in model_kwargs: # Decode `config` argument if it was passed config_annotation = cls._hub_mixin_init_parameters["config"].annotation config = cls._decode_arg(config_annotation, config) # Forward config to model initialization model_kwargs["config"] = config # Inject config if `**kwargs` are expected if is_dataclass(cls): for key in cls.__dataclass_fields__: if key not in model_kwargs and key in config: model_kwargs[key] = config[key] elif any(param.kind == inspect.Parameter.VAR_KEYWORD for param in cls._hub_mixin_init_parameters.values()): for key, value in config.items(): if key not in model_kwargs: model_kwargs[key] = value # Finally, also inject if `_from_pretrained` expects it if cls._hub_mixin_inject_config and "config" not in model_kwargs: model_kwargs["config"] = config instance = cls._from_pretrained( model_id=str(model_id), revision=revision, cache_dir=cache_dir, force_download=force_download, proxies=proxies, resume_download=resume_download, local_files_only=local_files_only, token=token, **model_kwargs, ) # Implicitly set the config as instance attribute if not already set by the class # This way `config` will be available when calling `save_pretrained` or `push_to_hub`. if config is not None and (getattr(instance, "_hub_mixin_config", None) in (None, {})): instance._hub_mixin_config = config return instance @classmethod def _from_pretrained( cls: Type[T], *, model_id: str, revision: Optional[str], cache_dir: Optional[Union[str, Path]], force_download: bool, proxies: Optional[Dict], resume_download: Optional[bool], local_files_only: bool, token: Optional[Union[str, bool]], **model_kwargs, ) -> T: """Overwrite this method in subclass to define how to load your model from pretrained. Use [`hf_hub_download`] or [`snapshot_download`] to download files from the Hub before loading them. Most args taken as input can be directly passed to those 2 methods. If needed, you can add more arguments to this method using "model_kwargs". 
For example [`PyTorchModelHubMixin._from_pretrained`] takes as input a `map_location` parameter to set on which device the model should be loaded. Check out our [integration guide](../guides/integrations) for more instructions. Args: model_id (`str`): ID of the model to load from the Huggingface Hub (e.g. `bigscience/bloom`). revision (`str`, *optional*): Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the latest commit on `main` branch. force_download (`bool`, *optional*, defaults to `False`): Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding the existing cache. proxies (`Dict[str, str]`, *optional*): A dictionary of proxy servers to use by protocol or endpoint (e.g., `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`). token (`str` or `bool`, *optional*): The token to use as HTTP bearer authorization for remote files. By default, it will use the token cached when running `huggingface-cli login`. cache_dir (`str`, `Path`, *optional*): Path to the folder where cached files are stored. local_files_only (`bool`, *optional*, defaults to `False`): If `True`, avoid downloading the file and return the path to the local cached file if it exists. model_kwargs: Additional keyword arguments passed along to the [`~ModelHubMixin._from_pretrained`] method. """ raise NotImplementedError @validate_hf_hub_args def push_to_hub( self, repo_id: str, *, config: Optional[Union[dict, "DataclassInstance"]] = None, commit_message: str = "Push model using huggingface_hub.", private: Optional[bool] = None, token: Optional[str] = None, branch: Optional[str] = None, create_pr: Optional[bool] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, delete_patterns: Optional[Union[List[str], str]] = None, model_card_kwargs: Optional[Dict[str, Any]] = None, ) -> str: """ Upload model checkpoint to the Hub. Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more details. Args: repo_id (`str`): ID of the repository to push to (example: `"username/my-model"`). config (`dict` or `DataclassInstance`, *optional*): Model configuration specified as a key/value dictionary or a dataclass instance. commit_message (`str`, *optional*): Message to commit while pushing. private (`bool`, *optional*): Whether the repository created should be private. If `None` (default), the repo will be public unless the organization's default is private. token (`str`, *optional*): The token to use as HTTP bearer authorization for remote files. By default, it will use the token cached when running `huggingface-cli login`. branch (`str`, *optional*): The git branch on which to push the model. This defaults to `"main"`. create_pr (`boolean`, *optional*): Whether or not to create a Pull Request from `branch` with that commit. Defaults to `False`. allow_patterns (`List[str]` or `str`, *optional*): If provided, only files matching at least one pattern are pushed. ignore_patterns (`List[str]` or `str`, *optional*): If provided, files matching any of the patterns are not pushed. delete_patterns (`List[str]` or `str`, *optional*): If provided, remote files matching any of the patterns will be deleted from the repo. 
model_card_kwargs (`Dict[str, Any]`, *optional*): Additional arguments passed to the model card template to customize the model card. Returns: The url of the commit of your model in the given repository. """ api = HfApi(token=token) repo_id = api.create_repo(repo_id=repo_id, private=private, exist_ok=True).repo_id # Push the files to the repo in a single commit with SoftTemporaryDirectory() as tmp: saved_path = Path(tmp) / repo_id self.save_pretrained(saved_path, config=config, model_card_kwargs=model_card_kwargs) return api.upload_folder( repo_id=repo_id, repo_type="model", folder_path=saved_path, commit_message=commit_message, revision=branch, create_pr=create_pr, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, delete_patterns=delete_patterns, ) def generate_model_card(self, *args, **kwargs) -> ModelCard: card = ModelCard.from_template( card_data=self._hub_mixin_info.model_card_data, template_str=self._hub_mixin_info.model_card_template, repo_url=self._hub_mixin_info.repo_url, docs_url=self._hub_mixin_info.docs_url, **kwargs, ) return card
class_definition
2,055
31,281
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hub_mixin.py
null
18
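A minimal sketch of the config round-trip implemented above: `__new__` records the jsonable `__init__` arguments, so `save_pretrained` can write a `config.json` without extra code. The class, file name and directory are illustrative, and `_from_pretrained` is omitted, so this toy class can save but not reload.

```python
from pathlib import Path
from huggingface_hub import ModelHubMixin

class MyTinyModel(ModelHubMixin, library_name="my-library", tags=["x-demo"]):
    def __init__(self, hidden_size: int = 16, layers: int = 2):
        super().__init__()
        self.hidden_size = hidden_size
        self.layers = layers

    def _save_pretrained(self, save_directory: Path) -> None:
        # Framework-specific serialization would go here; this toy class has no real weights.
        (save_directory / "weights.txt").write_text("placeholder")

model = MyTinyModel(hidden_size=32)

# Jsonable __init__ arguments were captured automatically by `__new__`
print(model._hub_mixin_config)  # {'hidden_size': 32, 'layers': 2}

# Writes weights.txt, config.json and a generated README.md (model card) to the directory
model.save_pretrained("my-tiny-model")
```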
class PyTorchModelHubMixin(ModelHubMixin): """ Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to PyTorch models. The model is set in evaluation mode by default using `model.eval()` (dropout modules are deactivated). To train the model, you should first set it back in training mode with `model.train()`. See [`ModelHubMixin`] for more details on how to use the mixin. Example: ```python >>> import torch >>> import torch.nn as nn >>> from huggingface_hub import PyTorchModelHubMixin >>> class MyModel( ... nn.Module, ... PyTorchModelHubMixin, ... library_name="keras-nlp", ... repo_url="https://github.com/keras-team/keras-nlp", ... docs_url="https://keras.io/keras_nlp/", ... # ^ optional metadata to generate model card ... ): ... def __init__(self, hidden_size: int = 512, vocab_size: int = 30000, output_size: int = 4): ... super().__init__() ... self.param = nn.Parameter(torch.rand(hidden_size, vocab_size)) ... self.linear = nn.Linear(output_size, vocab_size) ... def forward(self, x): ... return self.linear(x + self.param) >>> model = MyModel(hidden_size=256) # Save model weights to local directory >>> model.save_pretrained("my-awesome-model") # Push model weights to the Hub >>> model.push_to_hub("my-awesome-model") # Download and initialize weights from the Hub >>> model = MyModel.from_pretrained("username/my-awesome-model") >>> model.hidden_size 256 ``` """ def __init_subclass__(cls, *args, tags: Optional[List[str]] = None, **kwargs) -> None: tags = tags or [] tags.append("pytorch_model_hub_mixin") kwargs["tags"] = tags return super().__init_subclass__(*args, **kwargs) def _save_pretrained(self, save_directory: Path) -> None: """Save weights from a Pytorch model to a local directory.""" model_to_save = self.module if hasattr(self, "module") else self # type: ignore save_model_as_safetensor(model_to_save, str(save_directory / constants.SAFETENSORS_SINGLE_FILE)) @classmethod def _from_pretrained( cls, *, model_id: str, revision: Optional[str], cache_dir: Optional[Union[str, Path]], force_download: bool, proxies: Optional[Dict], resume_download: Optional[bool], local_files_only: bool, token: Union[str, bool, None], map_location: str = "cpu", strict: bool = False, **model_kwargs, ): """Load Pytorch pretrained weights and return the loaded model.""" model = cls(**model_kwargs) if os.path.isdir(model_id): print("Loading weights from local directory") model_file = os.path.join(model_id, constants.SAFETENSORS_SINGLE_FILE) return cls._load_as_safetensor(model, model_file, map_location, strict) else: try: model_file = hf_hub_download( repo_id=model_id, filename=constants.SAFETENSORS_SINGLE_FILE, revision=revision, cache_dir=cache_dir, force_download=force_download, proxies=proxies, resume_download=resume_download, token=token, local_files_only=local_files_only, ) return cls._load_as_safetensor(model, model_file, map_location, strict) except EntryNotFoundError: model_file = hf_hub_download( repo_id=model_id, filename=constants.PYTORCH_WEIGHTS_NAME, revision=revision, cache_dir=cache_dir, force_download=force_download, proxies=proxies, resume_download=resume_download, token=token, local_files_only=local_files_only, ) return cls._load_as_pickle(model, model_file, map_location, strict) @classmethod def _load_as_pickle(cls, model: T, model_file: str, map_location: str, strict: bool) -> T: state_dict = torch.load(model_file, map_location=torch.device(map_location), weights_only=True) model.load_state_dict(state_dict, strict=strict) # type: ignore model.eval() # type: 
ignore return model @classmethod def _load_as_safetensor(cls, model: T, model_file: str, map_location: str, strict: bool) -> T: if packaging.version.parse(safetensors.__version__) < packaging.version.parse("0.4.3"): # type: ignore [attr-defined] load_model_as_safetensor(model, model_file, strict=strict) # type: ignore [arg-type] if map_location != "cpu": logger.warning( "Loading model weights on other devices than 'cpu' is not supported natively in your version of safetensors." " This means that the model is loaded on 'cpu' first and then copied to the device." " This leads to a slower loading time." " Please update safetensors to version 0.4.3 or above for improved performance." ) model.to(map_location) # type: ignore [attr-defined] else: safetensors.torch.load_model(model, model_file, strict=strict, device=map_location) # type: ignore [arg-type] return model
class_definition
31,284
36,922
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hub_mixin.py
null
19
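A save/reload round trip for the mixin above (requires `torch` and `safetensors`); the class name and directory are illustrative. `map_location` and `strict` are the extra keyword arguments accepted by `_from_pretrained`.

```python
import torch.nn as nn
from huggingface_hub import PyTorchModelHubMixin

class TinyNet(nn.Module, PyTorchModelHubMixin):
    def __init__(self, hidden_size: int = 8):
        super().__init__()
        self.fc = nn.Linear(hidden_size, hidden_size)

    def forward(self, x):
        return self.fc(x)

# Writes model.safetensors, config.json and README.md to the local directory
TinyNet(hidden_size=16).save_pretrained("tiny-net")

# config.json restores `hidden_size`; weights are loaded from the safetensors file
reloaded = TinyNet.from_pretrained("tiny-net", map_location="cpu", strict=True)
assert reloaded.fc.in_features == 16
```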
class WebhooksServer: """ The [`WebhooksServer`] class lets you create an instance of a Gradio app that can receive Huggingface webhooks. These webhooks can be registered using the [`~WebhooksServer.add_webhook`] decorator. Webhook endpoints are added to the app as a POST endpoint to the FastAPI router. Once all the webhooks are registered, the `launch` method has to be called to start the app. It is recommended to accept [`WebhookPayload`] as the first argument of the webhook function. It is a Pydantic model that contains all the information about the webhook event. The data will be parsed automatically for you. Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your WebhooksServer and deploy it on a Space. <Tip warning={true}> `WebhooksServer` is experimental. Its API is subject to change in the future. </Tip> <Tip warning={true}> You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`). </Tip> Args: ui (`gradio.Blocks`, optional): A Gradio UI instance to be used as the Space landing page. If `None`, a UI displaying instructions about the configured webhooks is created. webhook_secret (`str`, optional): A secret key to verify incoming webhook requests. You can set this value to any secret you want as long as you also configure it in your [webhooks settings panel](https://huggingface.co/settings/webhooks). You can also set this value as the `WEBHOOK_SECRET` environment variable. If no secret is provided, the webhook endpoints are opened without any security. Example: ```python import gradio as gr from huggingface_hub import WebhooksServer, WebhookPayload with gr.Blocks() as ui: ... app = WebhooksServer(ui=ui, webhook_secret="my_secret_key") @app.add_webhook("/say_hello") async def hello(payload: WebhookPayload): return {"message": "hello"} app.launch() ``` """ def __new__(cls, *args, **kwargs) -> "WebhooksServer": if not is_gradio_available(): raise ImportError( "You must have `gradio` installed to use `WebhooksServer`. Please run `pip install --upgrade gradio`" " first." ) if not is_fastapi_available(): raise ImportError( "You must have `fastapi` installed to use `WebhooksServer`. Please run `pip install --upgrade fastapi`" " first." ) return super().__new__(cls) def __init__( self, ui: Optional["gr.Blocks"] = None, webhook_secret: Optional[str] = None, ) -> None: self._ui = ui self.webhook_secret = webhook_secret or os.getenv("WEBHOOK_SECRET") self.registered_webhooks: Dict[str, Callable] = {} _warn_on_empty_secret(self.webhook_secret) def add_webhook(self, path: Optional[str] = None) -> Callable: """ Decorator to add a webhook to the [`WebhooksServer`] server. Args: path (`str`, optional): The URL path to register the webhook function. If not provided, the function name will be used as the path. In any case, all webhooks are registered under `/webhooks`. Raises: ValueError: If the provided path is already registered as a webhook. Example: ```python from huggingface_hub import WebhooksServer, WebhookPayload app = WebhooksServer() @app.add_webhook async def trigger_training(payload: WebhookPayload): if payload.repo.type == "dataset" and payload.event.action == "update": # Trigger a training job if a dataset is updated ... app.launch() ``` """ # Usage: directly as decorator. Example: `@app.add_webhook` if callable(path): # If path is a function, it means it was used as a decorator without arguments return self.add_webhook()(path) # Usage: provide a path. 
Example: `@app.add_webhook(...)` @wraps(FastAPI.post) def _inner_post(*args, **kwargs): func = args[0] abs_path = f"/webhooks/{(path or func.__name__).strip('/')}" if abs_path in self.registered_webhooks: raise ValueError(f"Webhook {abs_path} already exists.") self.registered_webhooks[abs_path] = func return _inner_post def launch(self, prevent_thread_lock: bool = False, **launch_kwargs: Any) -> None: """Launch the Gradio app and register webhooks to the underlying FastAPI server. Input parameters are forwarded to Gradio when launching the app. """ ui = self._ui or self._get_default_ui() # Start Gradio App # - as non-blocking so that webhooks can be added afterwards # - as shared if launch locally (to debug webhooks) launch_kwargs.setdefault("share", _is_local) self.fastapi_app, _, _ = ui.launch(prevent_thread_lock=True, **launch_kwargs) # Register webhooks to FastAPI app for path, func in self.registered_webhooks.items(): # Add secret check if required if self.webhook_secret is not None: func = _wrap_webhook_to_check_secret(func, webhook_secret=self.webhook_secret) # Add route to FastAPI app self.fastapi_app.post(path)(func) # Print instructions and block main thread space_host = os.environ.get("SPACE_HOST") url = "https://" + space_host if space_host is not None else (ui.share_url or ui.local_url) url = url.strip("/") message = "\nWebhooks are correctly setup and ready to use:" message += "\n" + "\n".join(f" - POST {url}{webhook}" for webhook in self.registered_webhooks) message += "\nGo to https://huggingface.co/settings/webhooks to setup your webhooks." print(message) if not prevent_thread_lock: ui.block_thread() def _get_default_ui(self) -> "gr.Blocks": """Default UI if not provided (lists webhooks and provides basic instructions).""" import gradio as gr with gr.Blocks() as ui: gr.Markdown("# This is an app to process 🤗 Webhooks") gr.Markdown( "Webhooks are a foundation for MLOps-related features. They allow you to listen for new changes on" " specific repos or to all repos belonging to particular set of users/organizations (not just your" " repos, but any repo). Check out this [guide](https://huggingface.co/docs/hub/webhooks) to get to" " know more about webhooks on the Huggingface Hub." ) gr.Markdown( f"{len(self.registered_webhooks)} webhook(s) are registered:" + "\n\n" + "\n ".join( f"- [{webhook_path}]({_get_webhook_doc_url(webhook.__name__, webhook_path)})" for webhook_path, webhook in self.registered_webhooks.items() ) ) gr.Markdown( "Go to https://huggingface.co/settings/webhooks to setup your webhooks." + "\nYou app is running locally. Please look at the logs to check the full URL you need to set." if _is_local else ( "\nThis app is running on a Space. You can find the corresponding URL in the options menu" " (top-right) > 'Embed the Space'. The URL looks like 'https://{username}-{repo_name}.hf.space'." ) ) return ui
class_definition
1,356
9,272
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_webhooks_server.py
null
20
class SpaceStage(str, Enum): """ Enumeration of the possible stages of a Space on the Hub. Value can be compared to a string: ```py assert SpaceStage.BUILDING == "BUILDING" ``` Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceInfo.ts#L61 (private url). """ # Copied from moon-landing > server > repo_types > SpaceInfo.ts (private repo) NO_APP_FILE = "NO_APP_FILE" CONFIG_ERROR = "CONFIG_ERROR" BUILDING = "BUILDING" BUILD_ERROR = "BUILD_ERROR" RUNNING = "RUNNING" RUNNING_BUILDING = "RUNNING_BUILDING" RUNTIME_ERROR = "RUNTIME_ERROR" DELETING = "DELETING" STOPPED = "STOPPED" PAUSED = "PAUSED"
class_definition
786
1,492
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_space_api.py
null
21
class SpaceHardware(str, Enum): """ Enumeration of the hardware options available to run your Space on the Hub. Value can be compared to a string: ```py assert SpaceHardware.CPU_BASIC == "cpu-basic" ``` Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceInfo.ts#L73 (private url). """ CPU_BASIC = "cpu-basic" CPU_UPGRADE = "cpu-upgrade" T4_SMALL = "t4-small" T4_MEDIUM = "t4-medium" L4X1 = "l4x1" L4X4 = "l4x4" ZERO_A10G = "zero-a10g" A10G_SMALL = "a10g-small" A10G_LARGE = "a10g-large" A10G_LARGEX2 = "a10g-largex2" A10G_LARGEX4 = "a10g-largex4" A100_LARGE = "a100-large" V5E_1X1 = "v5e-1x1" V5E_2X2 = "v5e-2x2" V5E_2X4 = "v5e-2x4"
class_definition
1,495
2,248
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_space_api.py
null
22
class SpaceStorage(str, Enum): """ Enumeration of persistent storage available for your Space on the Hub. Value can be compared to a string: ```py assert SpaceStorage.SMALL == "small" ``` Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceHardwareFlavor.ts#L24 (private url). """ SMALL = "small" MEDIUM = "medium" LARGE = "large"
class_definition
2,251
2,664
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_space_api.py
null
23
class SpaceRuntime: """ Contains information about the current runtime of a Space. Args: stage (`str`): Current stage of the space. Example: RUNNING. hardware (`str` or `None`): Current hardware of the space. Example: "cpu-basic". Can be `None` if Space is `BUILDING` for the first time. requested_hardware (`str` or `None`): Requested hardware. Can be different than `hardware` especially if the request has just been made. Example: "t4-medium". Can be `None` if no hardware has been requested yet. sleep_time (`int` or `None`): Number of seconds the Space will be kept alive after the last request. By default (if value is `None`), the Space will never go to sleep if it's running on an upgraded hardware, while it will go to sleep after 48 hours on a free 'cpu-basic' hardware. For more details, see https://huggingface.co/docs/hub/spaces-gpus#sleep-time. raw (`dict`): Raw response from the server. Contains more information about the Space runtime like number of replicas, number of cpu, memory size,... """ stage: SpaceStage hardware: Optional[SpaceHardware] requested_hardware: Optional[SpaceHardware] sleep_time: Optional[int] storage: Optional[SpaceStorage] raw: Dict def __init__(self, data: Dict) -> None: self.stage = data["stage"] self.hardware = data.get("hardware", {}).get("current") self.requested_hardware = data.get("hardware", {}).get("requested") self.sleep_time = data.get("gcTimeout") self.storage = data.get("storage") self.raw = data
class_definition
2,678
4,403
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_space_api.py
null
24
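The `__init__` above shows that `SpaceRuntime` is populated directly from the raw dictionary returned by the API. A minimal sketch of that parsing, using a hypothetical payload (only the keys read in `__init__` are assumed here; a real response contains more fields):

```python
from huggingface_hub import SpaceRuntime

# Hypothetical payload shaped after the keys read in SpaceRuntime.__init__
data = {
    "stage": "RUNNING",
    "hardware": {"current": "cpu-basic", "requested": "t4-medium"},
    "gcTimeout": 3600,  # sleep time in seconds
    "storage": "small",
}

runtime = SpaceRuntime(data)
print(runtime.stage)               # "RUNNING" (comparable to SpaceStage.RUNNING)
print(runtime.hardware)            # "cpu-basic"
print(runtime.requested_hardware)  # "t4-medium"
print(runtime.sleep_time)          # 3600
print(runtime.raw is data)         # True: the full payload stays accessible
```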
class SpaceVariable: """ Contains information about the current variables of a Space. Args: key (`str`): Variable key. Example: `"MODEL_REPO_ID"` value (`str`): Variable value. Example: `"the_model_repo_id"`. description (`str` or `None`): Description of the variable. Example: `"Model Repo ID of the implemented model"`. updated_at (`datetime` or `None`): Datetime of the last update of the variable (if the variable has been updated at least once). """ key: str value: str description: Optional[str] updated_at: Optional[datetime] def __init__(self, key: str, values: Dict) -> None: self.key = key self.value = values["value"] self.description = values.get("description") updated_at = values.get("updatedAt") self.updated_at = parse_datetime(updated_at) if updated_at is not None else None
class_definition
4,417
5,362
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_space_api.py
null
25
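Similarly, `SpaceVariable` is built from a key and the corresponding values dictionary. A small sketch with an illustrative payload; when `"updatedAt"` is absent, no datetime parsing is attempted:

```python
from huggingface_hub import SpaceVariable

var = SpaceVariable(
    key="MODEL_REPO_ID",
    values={"value": "the_model_repo_id", "description": "Model Repo ID of the implemented model"},
)
print(var.key)          # "MODEL_REPO_ID"
print(var.value)        # "the_model_repo_id"
print(var.description)  # "Model Repo ID of the implemented model"
print(var.updated_at)   # None (no "updatedAt" entry in the payload)
```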
class KerasModelHubMixin(ModelHubMixin): """ Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to Keras models. ```python >>> import tensorflow as tf >>> from huggingface_hub import KerasModelHubMixin >>> class MyModel(tf.keras.Model, KerasModelHubMixin): ... def __init__(self, **kwargs): ... super().__init__() ... self.config = kwargs.pop("config", None) ... self.dummy_inputs = ... ... self.layer = ... ... def call(self, *args): ... return ... >>> # Initialize and compile the model as you normally would >>> model = MyModel() >>> model.compile(...) >>> # Build the graph by training it or passing dummy inputs >>> _ = model(model.dummy_inputs) >>> # Save model weights to local directory >>> model.save_pretrained("my-awesome-model") >>> # Push model weights to the Hub >>> model.push_to_hub("my-awesome-model") >>> # Download and initialize weights from the Hub >>> model = MyModel.from_pretrained("username/super-cool-model") ``` """ def _save_pretrained(self, save_directory): save_pretrained_keras(self, save_directory) @classmethod def _from_pretrained( cls, model_id, revision, cache_dir, force_download, proxies, resume_download, local_files_only, token, config: Optional[Dict[str, Any]] = None, **model_kwargs, ): """Here we just call [`from_pretrained_keras`] function so both the mixin and functional APIs stay in sync. TODO - Some args above aren't used since we are calling snapshot_download instead of hf_hub_download. """ if keras is None: raise ImportError("Called a TensorFlow-specific function but could not import it.") # Root is either a local filepath matching model_id or a cached snapshot if not os.path.isdir(model_id): storage_folder = snapshot_download( repo_id=model_id, revision=revision, cache_dir=cache_dir, library_name="keras", library_version=get_tf_version(), ) else: storage_folder = model_id # TODO: change this in a future PR. We are not returning a KerasModelHubMixin instance here... model = keras.models.load_model(storage_folder) # For now, we add a new attribute, config, to store the config loaded from the hub/a local dir. model.config = config return model
class_definition
16,893
19,573
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/keras_mixin.py
null
26
class _FileToUpload: """Temporary dataclass to store info about files to upload. Not meant to be used directly.""" local_path: Path path_in_repo: str size_limit: int last_modified: float
class_definition
461
668
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_commit_scheduler.py
null
27
class CommitScheduler: """ Scheduler to upload a local folder to the Hub at regular intervals (e.g. push to hub every 5 minutes). The recommended way to use the scheduler is to use it as a context manager. This ensures that the scheduler is properly stopped and the last commit is triggered when the script ends. The scheduler can also be stopped manually with the `stop` method. Checkout the [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#scheduled-uploads) to learn more about how to use it. Args: repo_id (`str`): The id of the repo to commit to. folder_path (`str` or `Path`): Path to the local folder to upload regularly. every (`int` or `float`, *optional*): The number of minutes between each commit. Defaults to 5 minutes. path_in_repo (`str`, *optional*): Relative path of the directory in the repo, for example: `"checkpoints/"`. Defaults to the root folder of the repository. repo_type (`str`, *optional*): The type of the repo to commit to. Defaults to `model`. revision (`str`, *optional*): The revision of the repo to commit to. Defaults to `main`. private (`bool`, *optional*): Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists. token (`str`, *optional*): The token to use to commit to the repo. Defaults to the token saved on the machine. allow_patterns (`List[str]` or `str`, *optional*): If provided, only files matching at least one pattern are uploaded. ignore_patterns (`List[str]` or `str`, *optional*): If provided, files matching any of the patterns are not uploaded. squash_history (`bool`, *optional*): Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is useful to avoid degraded performances on the repo when it grows too large. hf_api (`HfApi`, *optional*): The [`HfApi`] client to use to commit to the Hub. Can be set with custom settings (user agent, token,...). Example: ```py >>> from pathlib import Path >>> from huggingface_hub import CommitScheduler # Scheduler uploads every 10 minutes >>> csv_path = Path("watched_folder/data.csv") >>> CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path=csv_path.parent, every=10) >>> with csv_path.open("a") as f: ... f.write("first line") # Some time later (...) >>> with csv_path.open("a") as f: ... f.write("second line") ``` Example using a context manager: ```py >>> from pathlib import Path >>> from huggingface_hub import CommitScheduler >>> with CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path="watched_folder", every=10) as scheduler: ... csv_path = Path("watched_folder/data.csv") ... with csv_path.open("a") as f: ... f.write("first line") ... (...) ... with csv_path.open("a") as f: ... 
f.write("second line") # Scheduler is now stopped and last commit have been triggered ``` """ def __init__( self, *, repo_id: str, folder_path: Union[str, Path], every: Union[int, float] = 5, path_in_repo: Optional[str] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, private: Optional[bool] = None, token: Optional[str] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, squash_history: bool = False, hf_api: Optional["HfApi"] = None, ) -> None: self.api = hf_api or HfApi(token=token) # Folder self.folder_path = Path(folder_path).expanduser().resolve() self.path_in_repo = path_in_repo or "" self.allow_patterns = allow_patterns if ignore_patterns is None: ignore_patterns = [] elif isinstance(ignore_patterns, str): ignore_patterns = [ignore_patterns] self.ignore_patterns = ignore_patterns + DEFAULT_IGNORE_PATTERNS if self.folder_path.is_file(): raise ValueError(f"'folder_path' must be a directory, not a file: '{self.folder_path}'.") self.folder_path.mkdir(parents=True, exist_ok=True) # Repository repo_url = self.api.create_repo(repo_id=repo_id, private=private, repo_type=repo_type, exist_ok=True) self.repo_id = repo_url.repo_id self.repo_type = repo_type self.revision = revision self.token = token # Keep track of already uploaded files self.last_uploaded: Dict[Path, float] = {} # key is local path, value is timestamp # Scheduler if not every > 0: raise ValueError(f"'every' must be a positive integer, not '{every}'.") self.lock = Lock() self.every = every self.squash_history = squash_history logger.info(f"Scheduled job to push '{self.folder_path}' to '{self.repo_id}' every {self.every} minutes.") self._scheduler_thread = Thread(target=self._run_scheduler, daemon=True) self._scheduler_thread.start() atexit.register(self._push_to_hub) self.__stopped = False def stop(self) -> None: """Stop the scheduler. A stopped scheduler cannot be restarted. Mostly for tests purposes. """ self.__stopped = True def __enter__(self) -> "CommitScheduler": return self def __exit__(self, exc_type, exc_value, traceback) -> None: # Upload last changes before exiting self.trigger().result() self.stop() return def _run_scheduler(self) -> None: """Dumb thread waiting between each scheduled push to Hub.""" while True: self.last_future = self.trigger() time.sleep(self.every * 60) if self.__stopped: break def trigger(self) -> Future: """Trigger a `push_to_hub` and return a future. This method is automatically called every `every` minutes. You can also call it manually to trigger a commit immediately, without waiting for the next scheduled commit. """ return self.api.run_as_future(self._push_to_hub) def _push_to_hub(self) -> Optional[CommitInfo]: if self.__stopped: # If stopped, already scheduled commits are ignored return None logger.info("(Background) scheduled commit triggered.") try: value = self.push_to_hub() if self.squash_history: logger.info("(Background) squashing repo history.") self.api.super_squash_history(repo_id=self.repo_id, repo_type=self.repo_type, branch=self.revision) return value except Exception as e: logger.error(f"Error while pushing to Hub: {e}") # Depending on the setup, error might be silenced raise def push_to_hub(self) -> Optional[CommitInfo]: """ Push folder to the Hub and return the commit info. <Tip warning={true}> This method is not meant to be called directly. It is run in the background by the scheduler, respecting a queue mechanism to avoid concurrent commits. 
Making a direct call to the method might lead to concurrency issues. </Tip> The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder and uploads only changed files. If no changes are found, the method returns without committing anything. If you want to change this behavior, you can inherit from [`CommitScheduler`] and override this method. This can be useful for example to compress data together in a single file before committing. For more details and examples, check out our [integration guide](https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#scheduled-uploads). """ # Check files to upload (with lock) with self.lock: logger.debug("Listing files to upload for scheduled commit.") # List files from folder (taken from `_prepare_upload_folder_additions`) relpath_to_abspath = { path.relative_to(self.folder_path).as_posix(): path for path in sorted(self.folder_path.glob("**/*")) # sorted to be deterministic if path.is_file() } prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else "" # Filter with pattern + filter out unchanged files + retrieve current file size files_to_upload: List[_FileToUpload] = [] for relpath in filter_repo_objects( relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns ): local_path = relpath_to_abspath[relpath] stat = local_path.stat() if self.last_uploaded.get(local_path) is None or self.last_uploaded[local_path] != stat.st_mtime: files_to_upload.append( _FileToUpload( local_path=local_path, path_in_repo=prefix + relpath, size_limit=stat.st_size, last_modified=stat.st_mtime, ) ) # Return if nothing to upload if len(files_to_upload) == 0: logger.debug("Dropping schedule commit: no changed file to upload.") return None # Convert `_FileToUpload` as `CommitOperationAdd` (=> compute file shas + limit to file size) logger.debug("Removing unchanged files since previous scheduled commit.") add_operations = [ CommitOperationAdd( # Cap the file to its current size, even if the user append data to it while a scheduled commit is happening path_or_fileobj=PartialFileIO(file_to_upload.local_path, size_limit=file_to_upload.size_limit), path_in_repo=file_to_upload.path_in_repo, ) for file_to_upload in files_to_upload ] # Upload files (append mode expected - no need for lock) logger.debug("Uploading files for scheduled commit.") commit_info = self.api.create_commit( repo_id=self.repo_id, repo_type=self.repo_type, operations=add_operations, commit_message="Scheduled Commit", revision=self.revision, ) # Successful commit: keep track of the latest "last_modified" for each file for file in files_to_upload: self.last_uploaded[file.local_path] = file.last_modified return commit_info
class_definition
671
11,802
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_commit_scheduler.py
null
28
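The docstring above notes that `push_to_hub` can be overridden to change what gets committed (for example to compress data into a single file). A sketch of such a subclass, assuming a hypothetical use case where all JSON files in the watched folder are merged before upload; the file names and merging logic are illustrative, not part of the library:

```python
import json
import tempfile
from pathlib import Path

from huggingface_hub import CommitInfo, CommitScheduler


class CompactingScheduler(CommitScheduler):
    """Example subclass: merge all *.json files into one payload before committing."""

    def push_to_hub(self) -> CommitInfo:
        # Collect every JSON file currently present in the watched folder.
        records = [json.loads(path.read_text()) for path in sorted(self.folder_path.glob("*.json"))]

        # Write the merged payload to a temporary file and upload only that file.
        with tempfile.TemporaryDirectory() as tmp:
            merged = Path(tmp) / "merged.json"
            merged.write_text(json.dumps(records))
            return self.api.upload_file(
                path_or_fileobj=merged,
                path_in_repo="merged.json",
                repo_id=self.repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
                commit_message="Scheduled Commit (compacted)",
            )
```

The subclass is used exactly like the base class, e.g. `CompactingScheduler(repo_id="my-dataset", repo_type="dataset", folder_path="watched_folder")`.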
class PartialFileIO(BytesIO): """A file-like object that reads only the first part of a file. Useful to upload a file to the Hub when the user might still be appending data to it. Only the first part of the file is uploaded (i.e. the part that was available when the filesystem was first scanned). In practice, only used internally by the CommitScheduler to regularly push a folder to the Hub with minimal disturbance for the user. The object is passed to `CommitOperationAdd`. Only supports `read`, `tell` and `seek` methods. Args: file_path (`str` or `Path`): Path to the file to read. size_limit (`int`): The maximum number of bytes to read from the file. If the file is larger than this, only the first part will be read (and uploaded). """ def __init__(self, file_path: Union[str, Path], size_limit: int) -> None: self._file_path = Path(file_path) self._file = self._file_path.open("rb") self._size_limit = min(size_limit, os.fstat(self._file.fileno()).st_size) def __del__(self) -> None: self._file.close() return super().__del__() def __repr__(self) -> str: return f"<PartialFileIO file_path={self._file_path} size_limit={self._size_limit}>" def __len__(self) -> int: return self._size_limit def __getattribute__(self, name: str): if name.startswith("_") or name in ("read", "tell", "seek"): # only 3 public methods supported return super().__getattribute__(name) raise NotImplementedError(f"PartialFileIO does not support '{name}'.") def tell(self) -> int: """Return the current file position.""" return self._file.tell() def seek(self, __offset: int, __whence: int = SEEK_SET) -> int: """Change the stream position to the given offset. Behavior is the same as a regular file, except that the position is capped to the size limit. """ if __whence == SEEK_END: # SEEK_END => set from the truncated end __offset = len(self) + __offset __whence = SEEK_SET pos = self._file.seek(__offset, __whence) if pos > self._size_limit: return self._file.seek(self._size_limit) return pos def read(self, __size: Optional[int] = -1) -> bytes: """Read at most `__size` bytes from the file. Behavior is the same as a regular file, except that it is capped to the size limit. """ current = self._file.tell() if __size is None or __size < 0: # Read until file limit truncated_size = self._size_limit - current else: # Read until file limit or __size truncated_size = min(__size, self._size_limit - current) return self._file.read(truncated_size)
class_definition
11,805
14,678
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_commit_scheduler.py
null
29
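A short sketch of the capping behavior described above. `PartialFileIO` is internal, so the import below uses the private module listed in this record; the temporary file is only for illustration:

```python
import tempfile
from pathlib import Path

from huggingface_hub._commit_scheduler import PartialFileIO

# Write 100 bytes, then wrap the file as if it had been scanned when it was 40 bytes long.
tmp = Path(tempfile.mkdtemp()) / "data.bin"
tmp.write_bytes(b"x" * 100)

partial = PartialFileIO(tmp, size_limit=40)
print(len(partial))           # 40: the object behaves as a 40-byte file
print(len(partial.read()))    # 40: read() never goes past the limit
partial.seek(0)
print(len(partial.read(60)))  # 40: even an explicit larger read is capped
```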
class LocalDownloadFilePaths: """ Paths to the files related to a download process in a local dir. Returned by [`get_local_download_paths`]. Attributes: file_path (`Path`): Path where the file will be saved. lock_path (`Path`): Path to the lock file used to ensure atomicity when reading/writing metadata. metadata_path (`Path`): Path to the metadata file. """ file_path: Path lock_path: Path metadata_path: Path def incomplete_path(self, etag: str) -> Path: """Return the path where a file will be temporarily downloaded before being moved to `file_path`.""" return self.metadata_path.with_suffix(f".{etag}.incomplete")
class_definition
1,891
2,627
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_local_folder.py
null
30
class LocalUploadFilePaths: """ Paths to the files related to an upload process in a local dir. Returned by [`get_local_upload_paths`]. Attributes: path_in_repo (`str`): Path of the file in the repo. file_path (`Path`): Path where the file will be saved. lock_path (`Path`): Path to the lock file used to ensure atomicity when reading/writing metadata. metadata_path (`Path`): Path to the metadata file. """ path_in_repo: str file_path: Path lock_path: Path metadata_path: Path
class_definition
2,654
3,250
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_local_folder.py
null
31
class LocalDownloadFileMetadata: """ Metadata about a file in the local directory related to a download process. Attributes: filename (`str`): Path of the file in the repo. commit_hash (`str`): Commit hash of the file in the repo. etag (`str`): ETag of the file in the repo. Used to check if the file has changed. For LFS files, this is the sha256 of the file. For regular files, it corresponds to the git hash. timestamp (`int`): Unix timestamp of when the metadata was saved i.e. when the metadata was accurate. """ filename: str commit_hash: str etag: str timestamp: float
class_definition
3,264
3,965
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_local_folder.py
null
32
class LocalUploadFileMetadata: """ Metadata about a file in the local directory related to an upload process. """ size: int # Default values correspond to "we don't know yet" timestamp: Optional[float] = None should_ignore: Optional[bool] = None sha256: Optional[str] = None upload_mode: Optional[str] = None is_uploaded: bool = False is_committed: bool = False def save(self, paths: LocalUploadFilePaths) -> None: """Save the metadata to disk.""" with WeakFileLock(paths.lock_path): with paths.metadata_path.open("w") as f: new_timestamp = time.time() f.write(str(new_timestamp) + "\n") f.write(str(self.size)) # never None f.write("\n") if self.should_ignore is not None: f.write(str(int(self.should_ignore))) f.write("\n") if self.sha256 is not None: f.write(self.sha256) f.write("\n") if self.upload_mode is not None: f.write(self.upload_mode) f.write("\n") f.write(str(int(self.is_uploaded)) + "\n") f.write(str(int(self.is_committed)) + "\n") self.timestamp = new_timestamp
class_definition
3,979
5,308
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_local_folder.py
null
33
class UploadInfo: """ Dataclass holding required information to determine whether a blob should be uploaded to the hub using the LFS protocol or the regular protocol Args: sha256 (`bytes`): SHA256 hash of the blob size (`int`): Size in bytes of the blob sample (`bytes`): First 512 bytes of the blob """ sha256: bytes size: int sample: bytes @classmethod def from_path(cls, path: str): size = getsize(path) with io.open(path, "rb") as file: sample = file.peek(512)[:512] sha = sha_fileobj(file) return cls(size=size, sha256=sha, sample=sample) @classmethod def from_bytes(cls, data: bytes): sha = sha256(data).digest() return cls(size=len(data), sample=data[:512], sha256=sha) @classmethod def from_fileobj(cls, fileobj: BinaryIO): sample = fileobj.read(512) fileobj.seek(0, io.SEEK_SET) sha = sha_fileobj(fileobj) size = fileobj.tell() fileobj.seek(0, io.SEEK_SET) return cls(size=size, sha256=sha, sample=sample)
class_definition
1,632
2,779
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/lfs.py
null
34
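The three constructors above derive the same triplet (`sha256`, `size`, 512-byte `sample`) from a path, raw bytes, or a file object. A minimal sketch using `from_bytes`, which depends only on the code shown here:

```python
from hashlib import sha256

from huggingface_hub.lfs import UploadInfo

data = b"hello world" * 100  # 1100 bytes

info = UploadInfo.from_bytes(data)
print(info.size)                             # 1100
print(len(info.sample))                      # 512: only the first 512 bytes are kept
print(info.sha256 == sha256(data).digest())  # True
```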
class PayloadPartT(TypedDict): partNumber: int etag: str
class_definition
5,640
5,704
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/lfs.py
null
35
class CompletionPayloadT(TypedDict): """Payload that will be sent to the Hub when uploading multi-part.""" oid: str parts: List[PayloadPartT]
class_definition
5,707
5,861
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/lfs.py
null
36
class HfFileMetadata: """Data structure containing information about a file versioned on the Hub. Returned by [`get_hf_file_metadata`] based on a URL. Args: commit_hash (`str`, *optional*): The commit_hash related to the file. etag (`str`, *optional*): Etag of the file on the server. location (`str`): URL from which to download the file. Can be a Hub URL or a CDN URL. size (`int`, *optional*): Size of the file. In case of an LFS file, contains the size of the actual LFS file, not the pointer. """ commit_hash: Optional[str] etag: Optional[str] location: str size: Optional[int]
class_definition
5,777
6,475
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/file_download.py
null
37
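As stated in the docstring, this structure is what [`get_hf_file_metadata`] returns for a file URL. A minimal sketch, building the URL with `hf_hub_url` (repo and filename are only examples):

```python
from huggingface_hub import get_hf_file_metadata, hf_hub_url

url = hf_hub_url(repo_id="gpt2", filename="config.json")
metadata = get_hf_file_metadata(url)

print(metadata.commit_hash)  # commit the file resolves to on the default revision
print(metadata.etag)         # server-side ETag (git hash, or sha256 for LFS files)
print(metadata.location)     # final download location (Hub or CDN)
print(metadata.size)         # size in bytes
```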
class Discussion: """ A Discussion or Pull Request on the Hub. This dataclass is not intended to be instantiated directly. Attributes: title (`str`): The title of the Discussion / Pull Request status (`str`): The status of the Discussion / Pull Request. It must be one of: * `"open"` * `"closed"` * `"merged"` (only for Pull Requests ) * `"draft"` (only for Pull Requests ) num (`int`): The number of the Discussion / Pull Request. repo_id (`str`): The id (`"{namespace}/{repo_name}"`) of the repo on which the Discussion / Pull Request was open. repo_type (`str`): The type of the repo on which the Discussion / Pull Request was open. Possible values are: `"model"`, `"dataset"`, `"space"`. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. is_pull_request (`bool`): Whether or not this is a Pull Request. created_at (`datetime`): The `datetime` of creation of the Discussion / Pull Request. endpoint (`str`): Endpoint of the Hub. Default is https://huggingface.co. git_reference (`str`, *optional*): (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise. url (`str`): (property) URL of the discussion on the Hub. """ title: str status: DiscussionStatus num: int repo_id: str repo_type: str author: str is_pull_request: bool created_at: datetime endpoint: str @property def git_reference(self) -> Optional[str]: """ If this is a Pull Request , returns the git reference to which changes can be pushed. Returns `None` otherwise. """ if self.is_pull_request: return f"refs/pr/{self.num}" return None @property def url(self) -> str: """Returns the URL of the discussion on the Hub.""" if self.repo_type is None or self.repo_type == constants.REPO_TYPE_MODEL: return f"{self.endpoint}/{self.repo_id}/discussions/{self.num}" return f"{self.endpoint}/{self.repo_type}s/{self.repo_id}/discussions/{self.num}"
class_definition
530
2,958
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
38
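The `git_reference` property is what allows pushing changes to a Pull Request. A short sketch of inspecting a discussion fetched with `HfApi.get_discussion_details` (repo id and discussion number are illustrative):

```python
from huggingface_hub import HfApi

api = HfApi()
discussion = api.get_discussion_details(repo_id="username/my-model", discussion_num=1)

print(discussion.title)
print(discussion.status)          # "open", "closed", "merged" or "draft"
print(discussion.is_pull_request)
print(discussion.git_reference)   # "refs/pr/1" for a Pull Request, None otherwise
print(discussion.url)             # link to the discussion on the Hub
```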
class DiscussionWithDetails(Discussion): """ Subclass of [`Discussion`]. Attributes: title (`str`): The title of the Discussion / Pull Request status (`str`): The status of the Discussion / Pull Request. It can be one of: * `"open"` * `"closed"` * `"merged"` (only for Pull Requests ) * `"draft"` (only for Pull Requests ) num (`int`): The number of the Discussion / Pull Request. repo_id (`str`): The id (`"{namespace}/{repo_name}"`) of the repo on which the Discussion / Pull Request was open. repo_type (`str`): The type of the repo on which the Discussion / Pull Request was open. Possible values are: `"model"`, `"dataset"`, `"space"`. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. is_pull_request (`bool`): Whether or not this is a Pull Request. created_at (`datetime`): The `datetime` of creation of the Discussion / Pull Request. events (`list` of [`DiscussionEvent`]) The list of [`DiscussionEvents`] in this Discussion or Pull Request. conflicting_files (`Union[List[str], bool, None]`, *optional*): A list of conflicting files if this is a Pull Request. `None` if `self.is_pull_request` is `False`. `True` if there are conflicting files but the list can't be retrieved. target_branch (`str`, *optional*): The branch into which changes are to be merged if this is a Pull Request . `None` if `self.is_pull_request` is `False`. merge_commit_oid (`str`, *optional*): If this is a merged Pull Request , this is set to the OID / SHA of the merge commit, `None` otherwise. diff (`str`, *optional*): The git diff if this is a Pull Request , `None` otherwise. endpoint (`str`): Endpoint of the Hub. Default is https://huggingface.co. git_reference (`str`, *optional*): (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise. url (`str`): (property) URL of the discussion on the Hub. """ events: List["DiscussionEvent"] conflicting_files: Union[List[str], bool, None] target_branch: Optional[str] merge_commit_oid: Optional[str] diff: Optional[str]
class_definition
2,972
5,564
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
39
class DiscussionEvent: """ An event in a Discussion or Pull Request. Use concrete classes: * [`DiscussionComment`] * [`DiscussionStatusChange`] * [`DiscussionCommit`] * [`DiscussionTitleChange`] Attributes: id (`str`): The ID of the event. An hexadecimal string. type (`str`): The type of the event. created_at (`datetime`): A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) object holding the creation timestamp for the event. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. """ id: str type: str created_at: datetime author: str _event: dict """Stores the original event data, in case we need to access it later."""
class_definition
5,578
6,507
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
40
class DiscussionComment(DiscussionEvent): """A comment in a Discussion / Pull Request. Subclass of [`DiscussionEvent`]. Attributes: id (`str`): The ID of the event. An hexadecimal string. type (`str`): The type of the event. created_at (`datetime`): A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) object holding the creation timestamp for the event. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. content (`str`): The raw markdown content of the comment. Mentions, links and images are not rendered. edited (`bool`): Whether or not this comment has been edited. hidden (`bool`): Whether or not this comment has been hidden. """ content: str edited: bool hidden: bool @property def rendered(self) -> str: """The rendered comment, as an HTML string""" return self._event["data"]["latest"]["html"] @property def last_edited_at(self) -> datetime: """The last edit time, as a `datetime` object.""" return parse_datetime(self._event["data"]["latest"]["updatedAt"]) @property def last_edited_by(self) -> str: """The username of the last editor. Can be `"deleted"` if the user has been deleted since.""" return self._event["data"]["latest"].get("author", {}).get("name", "deleted") @property def edit_history(self) -> List[dict]: """The edit history of the comment""" return self._event["data"]["history"] @property def number_of_edits(self) -> int: return len(self.edit_history)
class_definition
6,521
8,292
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
41
class DiscussionStatusChange(DiscussionEvent): """A change of status in a Discussion / Pull Request. Subclass of [`DiscussionEvent`]. Attributes: id (`str`): The ID of the event. An hexadecimal string. type (`str`): The type of the event. created_at (`datetime`): A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) object holding the creation timestamp for the event. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. new_status (`str`): The status of the Discussion / Pull Request after the change. It can be one of: * `"open"` * `"closed"` * `"merged"` (only for Pull Requests ) """ new_status: str
class_definition
8,306
9,238
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
42
class DiscussionCommit(DiscussionEvent): """A commit in a Pull Request. Subclass of [`DiscussionEvent`]. Attributes: id (`str`): The ID of the event. An hexadecimal string. type (`str`): The type of the event. created_at (`datetime`): A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) object holding the creation timestamp for the event. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. summary (`str`): The summary of the commit. oid (`str`): The OID / SHA of the commit, as a hexadecimal string. """ summary: str oid: str
class_definition
9,252
10,073
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
43
class DiscussionTitleChange(DiscussionEvent): """A rename event in a Discussion / Pull Request. Subclass of [`DiscussionEvent`]. Attributes: id (`str`): The ID of the event. An hexadecimal string. type (`str`): The type of the event. created_at (`datetime`): A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime) object holding the creation timestamp for the event. author (`str`): The username of the Discussion / Pull Request author. Can be `"deleted"` if the user has been deleted since. old_title (`str`): The previous title for the Discussion / Pull Request. new_title (`str`): The new title. """ old_title: str new_title: str
class_definition
10,087
10,936
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/community.py
null
44
class InferenceApi: """Client to configure requests and make calls to the HuggingFace Inference API. Example: ```python >>> from huggingface_hub.inference_api import InferenceApi >>> # Mask-fill example >>> inference = InferenceApi("bert-base-uncased") >>> inference(inputs="The goal of life is [MASK].") [{'sequence': 'the goal of life is life.', 'score': 0.10933292657136917, 'token': 2166, 'token_str': 'life'}] >>> # Question Answering example >>> inference = InferenceApi("deepset/roberta-base-squad2") >>> inputs = { ... "question": "What's my name?", ... "context": "My name is Clara and I live in Berkeley.", ... } >>> inference(inputs) {'score': 0.9326569437980652, 'start': 11, 'end': 16, 'answer': 'Clara'} >>> # Zero-shot example >>> inference = InferenceApi("typeform/distilbert-base-uncased-mnli") >>> inputs = "Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!" >>> params = {"candidate_labels": ["refund", "legal", "faq"]} >>> inference(inputs, params) {'sequence': 'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!', 'labels': ['refund', 'faq', 'legal'], 'scores': [0.9378499388694763, 0.04914155602455139, 0.013008488342165947]} >>> # Overriding configured task >>> inference = InferenceApi("bert-base-uncased", task="feature-extraction") >>> # Text-to-image >>> inference = InferenceApi("stabilityai/stable-diffusion-2-1") >>> inference("cat") <PIL.PngImagePlugin.PngImageFile image (...)> >>> # Return as raw response to parse the output yourself >>> inference = InferenceApi("mio/amadeus") >>> response = inference("hello world", raw_response=True) >>> response.headers {"Content-Type": "audio/flac", ...} >>> response.content # raw bytes from server b'(...)' ``` """ @validate_hf_hub_args @_deprecate_method( version="1.0", message=( "`InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out" " this guide to learn how to convert your script to use it:" " https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client." ), ) def __init__( self, repo_id: str, task: Optional[str] = None, token: Optional[str] = None, gpu: bool = False, ): """Inits headers and API call information. Args: repo_id (``str``): Id of repository (e.g. `user/bert-base-uncased`). task (``str``, `optional`, defaults ``None``): Whether to force a task instead of using task specified in the repository. token (`str`, `optional`): The API token to use as HTTP bearer authorization. This is not the authentication token. You can find the token in https://huggingface.co/settings/token. Alternatively, you can find both your organizations and personal API tokens using `HfApi().whoami(token)`. gpu (`bool`, `optional`, defaults `False`): Whether to use GPU instead of CPU for inference(requires Startup plan at least). """ self.options = {"wait_for_model": True, "use_gpu": gpu} self.headers = build_hf_headers(token=token) # Configure task model_info = HfApi(token=token).model_info(repo_id=repo_id) if not model_info.pipeline_tag and not task: raise ValueError( "Task not specified in the repository. Please add it to the model card" " using pipeline_tag" " (https://huggingface.co/docs#how-is-a-models-type-of-inference-api-and-widget-determined)" ) if task and task != model_info.pipeline_tag: if task not in ALL_TASKS: raise ValueError(f"Invalid task {task}. 
Make sure it's valid.") logger.warning( "You're using a different task than the one specified in the" " repository. Be sure to know what you're doing :)" ) self.task = task else: assert model_info.pipeline_tag is not None, "Pipeline tag cannot be None" self.task = model_info.pipeline_tag self.api_url = f"{constants.INFERENCE_ENDPOINT}/pipeline/{self.task}/{repo_id}" def __repr__(self): # Do not add headers to repr to avoid leaking token. return f"InferenceAPI(api_url='{self.api_url}', task='{self.task}', options={self.options})" def __call__( self, inputs: Optional[Union[str, Dict, List[str], List[List[str]]]] = None, params: Optional[Dict] = None, data: Optional[bytes] = None, raw_response: bool = False, ) -> Any: """Make a call to the Inference API. Args: inputs (`str` or `Dict` or `List[str]` or `List[List[str]]`, *optional*): Inputs for the prediction. params (`Dict`, *optional*): Additional parameters for the models. Will be sent as `parameters` in the payload. data (`bytes`, *optional*): Bytes content of the request. In this case, leave `inputs` and `params` empty. raw_response (`bool`, defaults to `False`): If `True`, the raw `Response` object is returned. You can parse its content as preferred. By default, the content is parsed into a more practical format (json dictionary or PIL Image for example). """ # Build payload payload: Dict[str, Any] = { "options": self.options, } if inputs: payload["inputs"] = inputs if params: payload["parameters"] = params # Make API call response = get_session().post(self.api_url, headers=self.headers, json=payload, data=data) # Let the user handle the response if raw_response: return response # By default, parse the response for the user. content_type = response.headers.get("Content-Type") or "" if content_type.startswith("image"): if not is_pillow_available(): raise ImportError( f"Task '{self.task}' returned as image but Pillow is not installed." " Please install it (`pip install Pillow`) or pass" " `raw_response=True` to get the raw `Response` object and parse" " the image by yourself." ) from PIL import Image return Image.open(io.BytesIO(response.content)) elif content_type == "application/json": return response.json() else: raise NotImplementedError( f"{content_type} output type is not implemented yet. You can pass" " `raw_response=True` to get the raw `Response` object and parse the" " output by yourself." )
class_definition
1,026
8,322
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/inference_api.py
null
45
class InferenceEndpointStatus(str, Enum): PENDING = "pending" INITIALIZING = "initializing" UPDATING = "updating" UPDATE_FAILED = "updateFailed" RUNNING = "running" PAUSED = "paused" FAILED = "failed" SCALED_TO_ZERO = "scaledToZero"
class_definition
516
780
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_inference_endpoints.py
null
46
class InferenceEndpointType(str, Enum): PUBLIC = "public" PROTECTED = "protected" PRIVATE = "private"
class_definition
783
896
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_inference_endpoints.py
null
47
class InferenceEndpoint: """ Contains information about a deployed Inference Endpoint. Args: name (`str`): The unique name of the Inference Endpoint. namespace (`str`): The namespace where the Inference Endpoint is located. repository (`str`): The name of the model repository deployed on this Inference Endpoint. status ([`InferenceEndpointStatus`]): The current status of the Inference Endpoint. url (`str`, *optional*): The URL of the Inference Endpoint, if available. Only a deployed Inference Endpoint will have a URL. framework (`str`): The machine learning framework used for the model. revision (`str`): The specific model revision deployed on the Inference Endpoint. task (`str`): The task associated with the deployed model. created_at (`datetime.datetime`): The timestamp when the Inference Endpoint was created. updated_at (`datetime.datetime`): The timestamp of the last update of the Inference Endpoint. type ([`InferenceEndpointType`]): The type of the Inference Endpoint (public, protected, private). raw (`Dict`): The raw dictionary data returned from the API. token (`str` or `bool`, *optional*): Authentication token for the Inference Endpoint, if set when requesting the API. Will default to the locally saved token if not provided. Pass `token=False` if you don't want to send your token to the server. Example: ```python >>> from huggingface_hub import get_inference_endpoint >>> endpoint = get_inference_endpoint("my-text-to-image") >>> endpoint InferenceEndpoint(name='my-text-to-image', ...) # Get status >>> endpoint.status 'running' >>> endpoint.url 'https://my-text-to-image.region.vendor.endpoints.huggingface.cloud' # Run inference >>> endpoint.client.text_to_image(...) # Pause endpoint to save $$$ >>> endpoint.pause() # ... # Resume and wait for deployment >>> endpoint.resume() >>> endpoint.wait() >>> endpoint.client.text_to_image(...) ``` """ # Field in __repr__ name: str = field(init=False) namespace: str repository: str = field(init=False) status: InferenceEndpointStatus = field(init=False) url: Optional[str] = field(init=False) # Other fields framework: str = field(repr=False, init=False) revision: str = field(repr=False, init=False) task: str = field(repr=False, init=False) created_at: datetime = field(repr=False, init=False) updated_at: datetime = field(repr=False, init=False) type: InferenceEndpointType = field(repr=False, init=False) # Raw dict from the API raw: Dict = field(repr=False) # Internal fields _token: Union[str, bool, None] = field(repr=False, compare=False) _api: "HfApi" = field(repr=False, compare=False) @classmethod def from_raw( cls, raw: Dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None ) -> "InferenceEndpoint": """Initialize object from raw dictionary.""" if api is None: from .hf_api import HfApi api = HfApi() if token is None: token = api.token # All other fields are populated in __post_init__ return cls(raw=raw, namespace=namespace, _token=token, _api=api) def __post_init__(self) -> None: """Populate fields from raw dictionary.""" self._populate_from_raw() @property def client(self) -> InferenceClient: """Returns a client to make predictions on this Inference Endpoint. Returns: [`InferenceClient`]: an inference client pointing to the deployed endpoint. Raises: [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed. """ if self.url is None: raise InferenceEndpointError( "Cannot create a client for this Inference Endpoint as it is not yet deployed. 
" "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again." ) return InferenceClient(model=self.url, token=self._token) @property def async_client(self) -> AsyncInferenceClient: """Returns a client to make predictions on this Inference Endpoint. Returns: [`AsyncInferenceClient`]: an asyncio-compatible inference client pointing to the deployed endpoint. Raises: [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed. """ if self.url is None: raise InferenceEndpointError( "Cannot create a client for this Inference Endpoint as it is not yet deployed. " "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again." ) return AsyncInferenceClient(model=self.url, token=self._token) def wait(self, timeout: Optional[int] = None, refresh_every: int = 5) -> "InferenceEndpoint": """Wait for the Inference Endpoint to be deployed. Information from the server will be fetched every 1s. If the Inference Endpoint is not deployed after `timeout` seconds, a [`InferenceEndpointTimeoutError`] will be raised. The [`InferenceEndpoint`] will be mutated in place with the latest data. Args: timeout (`int`, *optional*): The maximum time to wait for the Inference Endpoint to be deployed, in seconds. If `None`, will wait indefinitely. refresh_every (`int`, *optional*): The time to wait between each fetch of the Inference Endpoint status, in seconds. Defaults to 5s. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. Raises: [`InferenceEndpointError`] If the Inference Endpoint ended up in a failed state. [`InferenceEndpointTimeoutError`] If the Inference Endpoint is not deployed after `timeout` seconds. """ if timeout is not None and timeout < 0: raise ValueError("`timeout` cannot be negative.") if refresh_every <= 0: raise ValueError("`refresh_every` must be positive.") start = time.time() while True: if self.url is not None: # Means the URL is provisioned => check if the endpoint is reachable response = get_session().get(self.url, headers=self._api._build_hf_headers(token=self._token)) if response.status_code == 200: logger.info("Inference Endpoint is ready to be used.") return self if self.status == InferenceEndpointStatus.FAILED: raise InferenceEndpointError( f"Inference Endpoint {self.name} failed to deploy. Please check the logs for more information." ) if timeout is not None: if time.time() - start > timeout: raise InferenceEndpointTimeoutError("Timeout while waiting for Inference Endpoint to be deployed.") logger.info(f"Inference Endpoint is not deployed yet ({self.status}). Waiting {refresh_every}s...") time.sleep(refresh_every) self.fetch() def fetch(self) -> "InferenceEndpoint": """Fetch latest information about the Inference Endpoint. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. 
""" obj = self._api.get_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] self.raw = obj.raw self._populate_from_raw() return self def update( self, *, # Compute update accelerator: Optional[str] = None, instance_size: Optional[str] = None, instance_type: Optional[str] = None, min_replica: Optional[int] = None, max_replica: Optional[int] = None, scale_to_zero_timeout: Optional[int] = None, # Model update repository: Optional[str] = None, framework: Optional[str] = None, revision: Optional[str] = None, task: Optional[str] = None, custom_image: Optional[Dict] = None, secrets: Optional[Dict[str, str]] = None, ) -> "InferenceEndpoint": """Update the Inference Endpoint. This method allows the update of either the compute configuration, the deployed model, or both. All arguments are optional but at least one must be provided. This is an alias for [`HfApi.update_inference_endpoint`]. The current object is mutated in place with the latest data from the server. Args: accelerator (`str`, *optional*): The hardware accelerator to be used for inference (e.g. `"cpu"`). instance_size (`str`, *optional*): The size or type of the instance to be used for hosting the model (e.g. `"x4"`). instance_type (`str`, *optional*): The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). min_replica (`int`, *optional*): The minimum number of replicas (instances) to keep running for the Inference Endpoint. max_replica (`int`, *optional*): The maximum number of replicas (instances) to scale to for the Inference Endpoint. scale_to_zero_timeout (`int`, *optional*): The duration in minutes before an inactive endpoint is scaled to zero. repository (`str`, *optional*): The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). framework (`str`, *optional*): The machine learning framework used for the model (e.g. `"custom"`). revision (`str`, *optional*): The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). task (`str`, *optional*): The task on which to deploy the model (e.g. `"text-classification"`). custom_image (`Dict`, *optional*): A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). secrets (`Dict[str, str]`, *optional*): Secret values to inject in the container environment. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. """ # Make API call obj = self._api.update_inference_endpoint( name=self.name, namespace=self.namespace, accelerator=accelerator, instance_size=instance_size, instance_type=instance_type, min_replica=min_replica, max_replica=max_replica, scale_to_zero_timeout=scale_to_zero_timeout, repository=repository, framework=framework, revision=revision, task=task, custom_image=custom_image, secrets=secrets, token=self._token, # type: ignore [arg-type] ) # Mutate current object self.raw = obj.raw self._populate_from_raw() return self def pause(self) -> "InferenceEndpoint": """Pause the Inference Endpoint. A paused Inference Endpoint will not be charged. It can be resumed at any time using [`InferenceEndpoint.resume`]. This is different than scaling the Inference Endpoint to zero with [`InferenceEndpoint.scale_to_zero`], which would be automatically restarted when a request is made to it. This is an alias for [`HfApi.pause_inference_endpoint`]. 
The current object is mutated in place with the latest data from the server. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. """ obj = self._api.pause_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] self.raw = obj.raw self._populate_from_raw() return self def resume(self, running_ok: bool = True) -> "InferenceEndpoint": """Resume the Inference Endpoint. This is an alias for [`HfApi.resume_inference_endpoint`]. The current object is mutated in place with the latest data from the server. Args: running_ok (`bool`, *optional*): If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults to `True`. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. """ obj = self._api.resume_inference_endpoint( name=self.name, namespace=self.namespace, running_ok=running_ok, token=self._token ) # type: ignore [arg-type] self.raw = obj.raw self._populate_from_raw() return self def scale_to_zero(self) -> "InferenceEndpoint": """Scale Inference Endpoint to zero. An Inference Endpoint scaled to zero will not be charged. It will be resume on the next request to it, with a cold start delay. This is different than pausing the Inference Endpoint with [`InferenceEndpoint.pause`], which would require a manual resume with [`InferenceEndpoint.resume`]. This is an alias for [`HfApi.scale_to_zero_inference_endpoint`]. The current object is mutated in place with the latest data from the server. Returns: [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data. """ obj = self._api.scale_to_zero_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] self.raw = obj.raw self._populate_from_raw() return self def delete(self) -> None: """Delete the Inference Endpoint. This operation is not reversible. If you don't want to be charged for an Inference Endpoint, it is preferable to pause it with [`InferenceEndpoint.pause`] or scale it to zero with [`InferenceEndpoint.scale_to_zero`]. This is an alias for [`HfApi.delete_inference_endpoint`]. """ self._api.delete_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token) # type: ignore [arg-type] def _populate_from_raw(self) -> None: """Populate fields from raw dictionary. Called in __post_init__ + each time the Inference Endpoint is updated. """ # Repr fields self.name = self.raw["name"] self.repository = self.raw["model"]["repository"] self.status = self.raw["status"]["state"] self.url = self.raw["status"].get("url") # Other fields self.framework = self.raw["model"]["framework"] self.revision = self.raw["model"]["revision"] self.task = self.raw["model"]["task"] self.created_at = parse_datetime(self.raw["status"]["createdAt"]) self.updated_at = parse_datetime(self.raw["status"]["updatedAt"]) self.type = self.raw["type"]
class_definition
910
16,749
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/_inference_endpoints.py
null
48
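Beyond the pause/resume flow shown in the docstring example, the `update` method mutates the same object with a new compute or model configuration. A hedged sketch (endpoint name, replica counts and repository are illustrative):

```python
from huggingface_hub import get_inference_endpoint

endpoint = get_inference_endpoint("my-text-to-image")

# Update only the compute configuration; the object is mutated in place.
endpoint.update(min_replica=0, max_replica=2, scale_to_zero_timeout=15)
print(endpoint.status)

# Point the same endpoint to another model revision.
endpoint.update(repository="stabilityai/stable-diffusion-2-1", revision="main")
```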
class LastCommitInfo(dict): oid: str title: str date: datetime def __post_init__(self): # hack to make LastCommitInfo backward compatible self.update(asdict(self))
class_definition
9,657
9,846
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
49
class BlobLfsInfo(dict): size: int sha256: str pointer_size: int def __post_init__(self): # hack to make BlobLfsInfo backward compatible self.update(asdict(self))
class_definition
9,860
10,048
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
50
class BlobSecurityInfo(dict): safe: bool # duplicate information with "status" field, keeping it for backward compatibility status: str av_scan: Optional[Dict] pickle_import_scan: Optional[Dict] def __post_init__(self): # hack to make BlobSecurityInfo backward compatible self.update(asdict(self))
class_definition
10,062
10,390
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
51
class TransformersInfo(dict): auto_model: str custom_class: Optional[str] = None # possible `pipeline_tag` values: https://github.com/huggingface/huggingface.js/blob/3ee32554b8620644a6287e786b2a83bf5caf559c/packages/tasks/src/pipelines.ts#L72 pipeline_tag: Optional[str] = None processor: Optional[str] = None def __post_init__(self): # hack to make TransformersInfo backward compatible self.update(asdict(self))
class_definition
10,404
10,850
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
52
class SafeTensorsInfo(dict): parameters: Dict[str, int] total: int def __post_init__(self): # hack to make SafeTensorsInfo backward compatible self.update(asdict(self))
class_definition
10,864
11,054
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
53
class CommitInfo(str): """Data structure containing information about a newly created commit. Returned by any method that creates a commit on the Hub: [`create_commit`], [`upload_file`], [`upload_folder`], [`delete_file`], [`delete_folder`]. It inherits from `str` for backward compatibility but using methods specific to `str` is deprecated. Attributes: commit_url (`str`): Url where to find the commit. commit_message (`str`): The summary (first line) of the commit that has been created. commit_description (`str`): Description of the commit that has been created. Can be empty. oid (`str`): Commit hash id. Example: `"91c54ad1727ee830252e457677f467be0bfd8a57"`. pr_url (`str`, *optional*): Url to the PR that has been created, if any. Populated when `create_pr=True` is passed. pr_revision (`str`, *optional*): Revision of the PR that has been created, if any. Populated when `create_pr=True` is passed. Example: `"refs/pr/1"`. pr_num (`int`, *optional*): Number of the PR discussion that has been created, if any. Populated when `create_pr=True` is passed. Can be passed as `discussion_num` in [`get_discussion_details`]. Example: `1`. repo_url (`RepoUrl`): Repo URL of the commit containing info like repo_id, repo_type, etc. _url (`str`, *optional*): Legacy url for `str` compatibility. Can be the url to the uploaded file on the Hub (if returned by [`upload_file`]), to the uploaded folder on the Hub (if returned by [`upload_folder`]) or to the commit on the Hub (if returned by [`create_commit`]). Defaults to `commit_url`. It is deprecated to use this attribute. Please use `commit_url` instead. """ commit_url: str commit_message: str commit_description: str oid: str pr_url: Optional[str] = None # Computed from `commit_url` in `__post_init__` repo_url: RepoUrl = field(init=False) # Computed from `pr_url` in `__post_init__` pr_revision: Optional[str] = field(init=False) pr_num: Optional[str] = field(init=False) # legacy url for `str` compatibility (ex: url to uploaded file, url to uploaded folder, url to PR, etc.) _url: str = field(repr=False, default=None) # type: ignore # defaults to `commit_url` def __new__(cls, *args, commit_url: str, _url: Optional[str] = None, **kwargs): return str.__new__(cls, _url or commit_url) def __post_init__(self): """Populate pr-related fields after initialization. See https://docs.python.org/3.10/library/dataclasses.html#post-init-processing. """ # Repo info self.repo_url = RepoUrl(self.commit_url.split("/commit/")[0]) # PR info if self.pr_url is not None: self.pr_revision = _parse_revision_from_pr_url(self.pr_url) self.pr_num = int(self.pr_revision.split("/")[-1]) else: self.pr_revision = None self.pr_num = None
class_definition
11,068
14,227
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
54
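A short usage sketch for the record above: reading the PR-related fields from the `CommitInfo` returned by `upload_file` when `create_pr=True`. The repo id and file content are placeholders; running this for real requires write access to the target repo.

```python
from huggingface_hub import HfApi

api = HfApi()
commit = api.upload_file(
    path_or_fileobj=b"hello",
    path_in_repo="hello.txt",
    repo_id="my-user/my-model",  # placeholder repo_id
    create_pr=True,              # ask the Hub to open a pull request
)
print(commit.commit_url)   # url of the created commit
print(commit.pr_revision)  # e.g. "refs/pr/1" (only set because create_pr=True)
print(commit.pr_num)       # e.g. 1, usable as `discussion_num` in get_discussion_details
```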
class AccessRequest:
    """Data structure containing information about a user access request.

    Attributes:
        username (`str`):
            Username of the user who requested access.
        fullname (`str`):
            Fullname of the user who requested access.
        email (`Optional[str]`):
            Email of the user who requested access.
            Can only be `None` in the /accepted list if the user was granted access manually.
        timestamp (`datetime`):
            Timestamp of the request.
        status (`Literal["pending", "accepted", "rejected"]`):
            Status of the request. Can be one of `["pending", "accepted", "rejected"]`.
        fields (`Dict[str, Any]`, *optional*):
            Additional fields filled by the user in the gate form.
    """

    username: str
    fullname: str
    email: Optional[str]
    timestamp: datetime
    status: Literal["pending", "accepted", "rejected"]

    # Additional fields filled by the user in the gate form
    fields: Optional[Dict[str, Any]] = None
class_definition
14,241
15,282
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
55
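A possible way to consume the `AccessRequest` fields documented above is sketched below. It assumes the gated-repo helpers on `HfApi` (such as `list_pending_access_requests` and `accept_access_request`), which are not part of this excerpt, and a placeholder repo_id you actually administer.

```python
from huggingface_hub import HfApi

api = HfApi()
for request in api.list_pending_access_requests(repo_id="my-user/my-gated-model"):
    # status is "pending" for everything returned by this helper
    print(request.username, request.timestamp, request.status)
    if request.fields:
        print("  gate form answers:", request.fields)
    # api.accept_access_request(repo_id="my-user/my-gated-model", user=request.username)
```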
class WebhookWatchedItem:
    """Data structure containing information about the items watched by a webhook.

    Attributes:
        type (`Literal["dataset", "model", "org", "space", "user"]`):
            Type of the item to be watched. Can be one of `["dataset", "model", "org", "space", "user"]`.
        name (`str`):
            Name of the item to be watched. Can be the username, organization name, model name, dataset name or
            space name.
    """

    type: Literal["dataset", "model", "org", "space", "user"]
    name: str
class_definition
15,296
15,828
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
56
class WebhookInfo:
    """Data structure containing information about a webhook.

    Attributes:
        id (`str`):
            ID of the webhook.
        url (`str`):
            URL of the webhook.
        watched (`List[WebhookWatchedItem]`):
            List of items watched by the webhook, see [`WebhookWatchedItem`].
        domains (`List[WEBHOOK_DOMAIN_T]`):
            List of domains the webhook is watching. Can be one of `["repo", "discussions"]`.
        secret (`str`, *optional*):
            Secret of the webhook.
        disabled (`bool`):
            Whether the webhook is disabled or not.
    """

    id: str
    url: str
    watched: List[WebhookWatchedItem]
    domains: List[constants.WEBHOOK_DOMAIN_T]
    secret: Optional[str]
    disabled: bool
class_definition
15,842
16,618
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
57
class RepoUrl(str): """Subclass of `str` describing a repo URL on the Hub. `RepoUrl` is returned by `HfApi.create_repo`. It inherits from `str` for backward compatibility. At initialization, the URL is parsed to populate properties: - endpoint (`str`) - namespace (`Optional[str]`) - repo_name (`str`) - repo_id (`str`) - repo_type (`Literal["model", "dataset", "space"]`) - url (`str`) Args: url (`Any`): String value of the repo url. endpoint (`str`, *optional*): Endpoint of the Hub. Defaults to <https://huggingface.co>. Example: ```py >>> RepoUrl('https://huggingface.co/gpt2') RepoUrl('https://huggingface.co/gpt2', endpoint='https://huggingface.co', repo_type='model', repo_id='gpt2') >>> RepoUrl('https://hub-ci.huggingface.co/datasets/dummy_user/dummy_dataset', endpoint='https://hub-ci.huggingface.co') RepoUrl('https://hub-ci.huggingface.co/datasets/dummy_user/dummy_dataset', endpoint='https://hub-ci.huggingface.co', repo_type='dataset', repo_id='dummy_user/dummy_dataset') >>> RepoUrl('hf://datasets/my-user/my-dataset') RepoUrl('hf://datasets/my-user/my-dataset', endpoint='https://huggingface.co', repo_type='dataset', repo_id='user/dataset') >>> HfApi.create_repo("dummy_model") RepoUrl('https://huggingface.co/Wauplin/dummy_model', endpoint='https://huggingface.co', repo_type='model', repo_id='Wauplin/dummy_model') ``` Raises: [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If URL cannot be parsed. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If `repo_type` is unknown. """ def __new__(cls, url: Any, endpoint: Optional[str] = None): url = fix_hf_endpoint_in_url(url, endpoint=endpoint) return super(RepoUrl, cls).__new__(cls, url) def __init__(self, url: Any, endpoint: Optional[str] = None) -> None: super().__init__() # Parse URL self.endpoint = endpoint or constants.ENDPOINT repo_type, namespace, repo_name = repo_type_and_id_from_hf_id(self, hub_url=self.endpoint) # Populate fields self.namespace = namespace self.repo_name = repo_name self.repo_id = repo_name if namespace is None else f"{namespace}/{repo_name}" self.repo_type = repo_type or constants.REPO_TYPE_MODEL self.url = str(self) # just in case it's needed def __repr__(self) -> str: return f"RepoUrl('{self}', endpoint='{self.endpoint}', repo_type='{self.repo_type}', repo_id='{self.repo_id}')"
class_definition
16,621
19,267
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
58
class RepoSibling:
    """
    Contains basic information about a repo file inside a repo on the Hub.

    <Tip>

    All attributes of this class are optional except `rfilename`. This is because only the file names are returned
    when listing repositories on the Hub (with [`list_models`], [`list_datasets`] or [`list_spaces`]). If you need
    more information like file size, blob id or lfs details, you must request them specifically from one repo at a
    time (using [`model_info`], [`dataset_info`] or [`space_info`]) as it adds more constraints on the backend server
    to retrieve these.

    </Tip>

    Attributes:
        rfilename (str):
            file name, relative to the repo root.
        size (`int`, *optional*):
            The file's size, in bytes. This attribute is defined when `files_metadata` argument of [`repo_info`] is
            set to `True`. It's `None` otherwise.
        blob_id (`str`, *optional*):
            The file's git OID. This attribute is defined when `files_metadata` argument of [`repo_info`] is set to
            `True`. It's `None` otherwise.
        lfs (`BlobLfsInfo`, *optional*):
            The file's LFS metadata. This attribute is defined when `files_metadata` argument of [`repo_info`] is set
            to `True` and the file is stored with Git LFS. It's `None` otherwise.
    """

    rfilename: str
    size: Optional[int] = None
    blob_id: Optional[str] = None
    lfs: Optional[BlobLfsInfo] = None
class_definition
19,281
20,751
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
59
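As the docstring above notes, only `rfilename` is populated by the list_* endpoints; `size`, `blob_id` and `lfs` require a per-repo call with `files_metadata=True`. A sketch of that pattern is below; "gpt2" is just a convenient public example repo.

```python
from huggingface_hub import HfApi

api = HfApi()
info = api.model_info("gpt2", files_metadata=True)
for sibling in info.siblings:
    marker = "LFS" if sibling.lfs is not None else "   "
    print(f"{marker} {sibling.rfilename} ({sibling.size} bytes, blob {sibling.blob_id})")
```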
class RepoFile:
    """
    Contains information about a file on the Hub.

    Attributes:
        path (str):
            file path relative to the repo root.
        size (`int`):
            The file's size, in bytes.
        blob_id (`str`):
            The file's git OID.
        lfs (`BlobLfsInfo`):
            The file's LFS metadata.
        last_commit (`LastCommitInfo`, *optional*):
            The file's last commit metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`]
            are called with `expand=True`.
        security (`BlobSecurityInfo`, *optional*):
            The file's security scan metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`]
            are called with `expand=True`.
    """

    path: str
    size: int
    blob_id: str
    lfs: Optional[BlobLfsInfo] = None
    last_commit: Optional[LastCommitInfo] = None
    security: Optional[BlobSecurityInfo] = None

    def __init__(self, **kwargs):
        self.path = kwargs.pop("path")
        self.size = kwargs.pop("size")
        self.blob_id = kwargs.pop("oid")
        lfs = kwargs.pop("lfs", None)
        if lfs is not None:
            lfs = BlobLfsInfo(size=lfs["size"], sha256=lfs["oid"], pointer_size=lfs["pointerSize"])
        self.lfs = lfs
        last_commit = kwargs.pop("lastCommit", None) or kwargs.pop("last_commit", None)
        if last_commit is not None:
            last_commit = LastCommitInfo(
                oid=last_commit["id"], title=last_commit["title"], date=parse_datetime(last_commit["date"])
            )
        self.last_commit = last_commit
        security = kwargs.pop("securityFileStatus", None)
        if security is not None:
            safe = security["status"] == "safe"
            security = BlobSecurityInfo(
                safe=safe,
                status=security["status"],
                av_scan=security["avScan"],
                pickle_import_scan=security["pickleImportScan"],
            )
        self.security = security

        # backwards compatibility
        self.rfilename = self.path
        self.lastCommit = self.last_commit
class_definition
20,765
22,883
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
60
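`RepoFile.__init__` above maps the raw camelCase payload returned by the Hub onto snake_case attributes. Feeding it a hand-written payload makes that mapping visible; every value below is made up for illustration.

```python
from huggingface_hub.hf_api import RepoFile

raw = {
    "path": "model.safetensors",
    "size": 548105171,
    "oid": "9d2f6d2e9a8b8c7d6e5f4a3b2c1d0e9f8a7b6c5d",
    "lfs": {"size": 548105171, "oid": "a" * 64, "pointerSize": 134},
    "lastCommit": {"id": "abc123", "title": "Upload weights", "date": "2024-01-01T00:00:00.000Z"},
}
file = RepoFile(**raw)
print(file.path, file.size)                       # snake_case attributes
print(file.lfs.sha256[:8], file.lfs.pointer_size) # "lfs.oid" became BlobLfsInfo.sha256
print(file.last_commit.title, file.last_commit.date.year)
```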
class RepoFolder:
    """
    Contains information about a folder on the Hub.

    Attributes:
        path (str):
            folder path relative to the repo root.
        tree_id (`str`):
            The folder's git OID.
        last_commit (`LastCommitInfo`, *optional*):
            The folder's last commit metadata. Only defined if [`list_repo_tree`] and [`get_paths_info`]
            are called with `expand=True`.
    """

    path: str
    tree_id: str
    last_commit: Optional[LastCommitInfo] = None

    def __init__(self, **kwargs):
        self.path = kwargs.pop("path")
        self.tree_id = kwargs.pop("oid")
        last_commit = kwargs.pop("lastCommit", None) or kwargs.pop("last_commit", None)
        if last_commit is not None:
            last_commit = LastCommitInfo(
                oid=last_commit["id"], title=last_commit["title"], date=parse_datetime(last_commit["date"])
            )
        self.last_commit = last_commit
class_definition
22,897
23,852
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
61
class ModelInfo: """ Contains information about a model on the Hub. <Tip> Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made. In general, the more specific the query, the more information is returned. On the contrary, when listing models using [`list_models`] only a subset of the attributes are returned. </Tip> Attributes: id (`str`): ID of model. author (`str`, *optional*): Author of the model. sha (`str`, *optional*): Repo SHA at this particular revision. created_at (`datetime`, *optional*): Date of creation of the repo on the Hub. Note that the lowest value is `2022-03-02T23:29:04.000Z`, corresponding to the date when we began to store creation dates. last_modified (`datetime`, *optional*): Date of last commit to the repo. private (`bool`): Is the repo private. disabled (`bool`, *optional*): Is the repo disabled. downloads (`int`): Number of downloads of the model over the last 30 days. downloads_all_time (`int`): Cumulated number of downloads of the model since its creation. gated (`Literal["auto", "manual", False]`, *optional*): Is the repo gated. If so, whether there is manual or automatic approval. gguf (`Dict`, *optional*): GGUF information of the model. inference (`Literal["cold", "frozen", "warm"]`, *optional*): Status of the model on the inference API. Warm models are available for immediate use. Cold models will be loaded on first inference call. Frozen models are not available in Inference API. likes (`int`): Number of likes of the model. library_name (`str`, *optional*): Library associated with the model. tags (`List[str]`): List of tags of the model. Compared to `card_data.tags`, contains extra tags computed by the Hub (e.g. supported libraries, model's arXiv). pipeline_tag (`str`, *optional*): Pipeline tag associated with the model. mask_token (`str`, *optional*): Mask token used by the model. widget_data (`Any`, *optional*): Widget data associated with the model. model_index (`Dict`, *optional*): Model index for evaluation. config (`Dict`, *optional*): Model configuration. transformers_info (`TransformersInfo`, *optional*): Transformers-specific info (auto class, processor, etc.) associated with the model. trending_score (`int`, *optional*): Trending score of the model. card_data (`ModelCardData`, *optional*): Model Card Metadata as a [`huggingface_hub.repocard_data.ModelCardData`] object. siblings (`List[RepoSibling]`): List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the model. spaces (`List[str]`, *optional*): List of spaces using the model. safetensors (`SafeTensorsInfo`, *optional*): Model's safetensors information. security_repo_status (`Dict`, *optional*): Model's security scan status. 
""" id: str author: Optional[str] sha: Optional[str] created_at: Optional[datetime] last_modified: Optional[datetime] private: Optional[bool] disabled: Optional[bool] downloads: Optional[int] downloads_all_time: Optional[int] gated: Optional[Literal["auto", "manual", False]] gguf: Optional[Dict] inference: Optional[Literal["warm", "cold", "frozen"]] likes: Optional[int] library_name: Optional[str] tags: Optional[List[str]] pipeline_tag: Optional[str] mask_token: Optional[str] card_data: Optional[ModelCardData] widget_data: Optional[Any] model_index: Optional[Dict] config: Optional[Dict] transformers_info: Optional[TransformersInfo] trending_score: Optional[int] siblings: Optional[List[RepoSibling]] spaces: Optional[List[str]] safetensors: Optional[SafeTensorsInfo] security_repo_status: Optional[Dict] def __init__(self, **kwargs): self.id = kwargs.pop("id") self.author = kwargs.pop("author", None) self.sha = kwargs.pop("sha", None) last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) self.last_modified = parse_datetime(last_modified) if last_modified else None created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) self.created_at = parse_datetime(created_at) if created_at else None self.private = kwargs.pop("private", None) self.gated = kwargs.pop("gated", None) self.disabled = kwargs.pop("disabled", None) self.downloads = kwargs.pop("downloads", None) self.downloads_all_time = kwargs.pop("downloadsAllTime", None) self.likes = kwargs.pop("likes", None) self.library_name = kwargs.pop("library_name", None) self.gguf = kwargs.pop("gguf", None) self.inference = kwargs.pop("inference", None) self.tags = kwargs.pop("tags", None) self.pipeline_tag = kwargs.pop("pipeline_tag", None) self.mask_token = kwargs.pop("mask_token", None) self.trending_score = kwargs.pop("trendingScore", None) card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None) self.card_data = ( ModelCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data ) self.widget_data = kwargs.pop("widgetData", None) self.model_index = kwargs.pop("model-index", None) or kwargs.pop("model_index", None) self.config = kwargs.pop("config", None) transformers_info = kwargs.pop("transformersInfo", None) or kwargs.pop("transformers_info", None) self.transformers_info = TransformersInfo(**transformers_info) if transformers_info else None siblings = kwargs.pop("siblings", None) self.siblings = ( [ RepoSibling( rfilename=sibling["rfilename"], size=sibling.get("size"), blob_id=sibling.get("blobId"), lfs=( BlobLfsInfo( size=sibling["lfs"]["size"], sha256=sibling["lfs"]["sha256"], pointer_size=sibling["lfs"]["pointerSize"], ) if sibling.get("lfs") else None ), ) for sibling in siblings ] if siblings is not None else None ) self.spaces = kwargs.pop("spaces", None) safetensors = kwargs.pop("safetensors", None) self.safetensors = ( SafeTensorsInfo( parameters=safetensors["parameters"], total=safetensors["total"], ) if safetensors else None ) self.security_repo_status = kwargs.pop("securityRepoStatus", None) # backwards compatibility self.lastModified = self.last_modified self.cardData = self.card_data self.transformersInfo = self.transformers_info self.__dict__.update(**kwargs)
class_definition
23,866
31,439
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
62
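`ModelInfo.__init__` in the record above only requires `id`; everything else is optional, camelCase keys from the Hub payload (`createdAt`, `cardData`, ...) are normalized, and unknown keys are kept for forward compatibility. The minimal payload below is hand-written to show that behaviour; the repo id is a placeholder.

```python
from huggingface_hub.hf_api import ModelInfo

payload = {
    "id": "my-user/tiny-bert",                 # placeholder repo id
    "createdAt": "2023-05-04T12:00:00.000Z",
    "downloads": 42,
    "tags": ["bert", "fill-mask"],
    "cardData": {"license": "mit", "language": ["en"]},
}
model = ModelInfo(**payload)
print(model.created_at.isoformat())  # string parsed into a datetime
print(model.card_data.license)       # dict converted into a ModelCardData object
print(model.siblings)                # None: not part of the payload
```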
class DatasetInfo: """ Contains information about a dataset on the Hub. <Tip> Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made. In general, the more specific the query, the more information is returned. On the contrary, when listing datasets using [`list_datasets`] only a subset of the attributes are returned. </Tip> Attributes: id (`str`): ID of dataset. author (`str`): Author of the dataset. sha (`str`): Repo SHA at this particular revision. created_at (`datetime`, *optional*): Date of creation of the repo on the Hub. Note that the lowest value is `2022-03-02T23:29:04.000Z`, corresponding to the date when we began to store creation dates. last_modified (`datetime`, *optional*): Date of last commit to the repo. private (`bool`): Is the repo private. disabled (`bool`, *optional*): Is the repo disabled. gated (`Literal["auto", "manual", False]`, *optional*): Is the repo gated. If so, whether there is manual or automatic approval. downloads (`int`): Number of downloads of the dataset over the last 30 days. downloads_all_time (`int`): Cumulated number of downloads of the model since its creation. likes (`int`): Number of likes of the dataset. tags (`List[str]`): List of tags of the dataset. card_data (`DatasetCardData`, *optional*): Model Card Metadata as a [`huggingface_hub.repocard_data.DatasetCardData`] object. siblings (`List[RepoSibling]`): List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the dataset. paperswithcode_id (`str`, *optional*): Papers with code ID of the dataset. trending_score (`int`, *optional*): Trending score of the dataset. """ id: str author: Optional[str] sha: Optional[str] created_at: Optional[datetime] last_modified: Optional[datetime] private: Optional[bool] gated: Optional[Literal["auto", "manual", False]] disabled: Optional[bool] downloads: Optional[int] downloads_all_time: Optional[int] likes: Optional[int] paperswithcode_id: Optional[str] tags: Optional[List[str]] trending_score: Optional[int] card_data: Optional[DatasetCardData] siblings: Optional[List[RepoSibling]] def __init__(self, **kwargs): self.id = kwargs.pop("id") self.author = kwargs.pop("author", None) self.sha = kwargs.pop("sha", None) created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) self.created_at = parse_datetime(created_at) if created_at else None last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) self.last_modified = parse_datetime(last_modified) if last_modified else None self.private = kwargs.pop("private", None) self.gated = kwargs.pop("gated", None) self.disabled = kwargs.pop("disabled", None) self.downloads = kwargs.pop("downloads", None) self.downloads_all_time = kwargs.pop("downloadsAllTime", None) self.likes = kwargs.pop("likes", None) self.paperswithcode_id = kwargs.pop("paperswithcode_id", None) self.tags = kwargs.pop("tags", None) self.trending_score = kwargs.pop("trendingScore", None) card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None) self.card_data = ( DatasetCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data ) siblings = kwargs.pop("siblings", None) self.siblings = ( [ RepoSibling( rfilename=sibling["rfilename"], size=sibling.get("size"), blob_id=sibling.get("blobId"), lfs=( BlobLfsInfo( size=sibling["lfs"]["size"], sha256=sibling["lfs"]["sha256"], pointer_size=sibling["lfs"]["pointerSize"], ) if sibling.get("lfs") else None ), ) for sibling in siblings ] if 
siblings is not None else None ) # backwards compatibility self.lastModified = self.last_modified self.cardData = self.card_data self.__dict__.update(**kwargs)
class_definition
31,453
36,212
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
63
class SpaceInfo: """ Contains information about a Space on the Hub. <Tip> Most attributes of this class are optional. This is because the data returned by the Hub depends on the query made. In general, the more specific the query, the more information is returned. On the contrary, when listing spaces using [`list_spaces`] only a subset of the attributes are returned. </Tip> Attributes: id (`str`): ID of the Space. author (`str`, *optional*): Author of the Space. sha (`str`, *optional*): Repo SHA at this particular revision. created_at (`datetime`, *optional*): Date of creation of the repo on the Hub. Note that the lowest value is `2022-03-02T23:29:04.000Z`, corresponding to the date when we began to store creation dates. last_modified (`datetime`, *optional*): Date of last commit to the repo. private (`bool`): Is the repo private. gated (`Literal["auto", "manual", False]`, *optional*): Is the repo gated. If so, whether there is manual or automatic approval. disabled (`bool`, *optional*): Is the Space disabled. host (`str`, *optional*): Host URL of the Space. subdomain (`str`, *optional*): Subdomain of the Space. likes (`int`): Number of likes of the Space. tags (`List[str]`): List of tags of the Space. siblings (`List[RepoSibling]`): List of [`huggingface_hub.hf_api.RepoSibling`] objects that constitute the Space. card_data (`SpaceCardData`, *optional*): Space Card Metadata as a [`huggingface_hub.repocard_data.SpaceCardData`] object. runtime (`SpaceRuntime`, *optional*): Space runtime information as a [`huggingface_hub.hf_api.SpaceRuntime`] object. sdk (`str`, *optional*): SDK used by the Space. models (`List[str]`, *optional*): List of models used by the Space. datasets (`List[str]`, *optional*): List of datasets used by the Space. trending_score (`int`, *optional*): Trending score of the Space. 
""" id: str author: Optional[str] sha: Optional[str] created_at: Optional[datetime] last_modified: Optional[datetime] private: Optional[bool] gated: Optional[Literal["auto", "manual", False]] disabled: Optional[bool] host: Optional[str] subdomain: Optional[str] likes: Optional[int] sdk: Optional[str] tags: Optional[List[str]] siblings: Optional[List[RepoSibling]] trending_score: Optional[int] card_data: Optional[SpaceCardData] runtime: Optional[SpaceRuntime] models: Optional[List[str]] datasets: Optional[List[str]] def __init__(self, **kwargs): self.id = kwargs.pop("id") self.author = kwargs.pop("author", None) self.sha = kwargs.pop("sha", None) created_at = kwargs.pop("createdAt", None) or kwargs.pop("created_at", None) self.created_at = parse_datetime(created_at) if created_at else None last_modified = kwargs.pop("lastModified", None) or kwargs.pop("last_modified", None) self.last_modified = parse_datetime(last_modified) if last_modified else None self.private = kwargs.pop("private", None) self.gated = kwargs.pop("gated", None) self.disabled = kwargs.pop("disabled", None) self.host = kwargs.pop("host", None) self.subdomain = kwargs.pop("subdomain", None) self.likes = kwargs.pop("likes", None) self.sdk = kwargs.pop("sdk", None) self.tags = kwargs.pop("tags", None) self.trending_score = kwargs.pop("trendingScore", None) card_data = kwargs.pop("cardData", None) or kwargs.pop("card_data", None) self.card_data = ( SpaceCardData(**card_data, ignore_metadata_errors=True) if isinstance(card_data, dict) else card_data ) siblings = kwargs.pop("siblings", None) self.siblings = ( [ RepoSibling( rfilename=sibling["rfilename"], size=sibling.get("size"), blob_id=sibling.get("blobId"), lfs=( BlobLfsInfo( size=sibling["lfs"]["size"], sha256=sibling["lfs"]["sha256"], pointer_size=sibling["lfs"]["pointerSize"], ) if sibling.get("lfs") else None ), ) for sibling in siblings ] if siblings is not None else None ) runtime = kwargs.pop("runtime", None) self.runtime = SpaceRuntime(runtime) if runtime else None self.models = kwargs.pop("models", None) self.datasets = kwargs.pop("datasets", None) # backwards compatibility self.lastModified = self.last_modified self.cardData = self.card_data self.__dict__.update(**kwargs)
class_definition
36,226
41,445
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
64
class CollectionItem:
    """
    Contains information about an item of a Collection (model, dataset, Space or paper).

    Attributes:
        item_object_id (`str`):
            Unique ID of the item in the collection.
        item_id (`str`):
            ID of the underlying object on the Hub. Can be either a repo_id or a paper id
            e.g. `"jbilcke-hf/ai-comic-factory"`, `"2307.09288"`.
        item_type (`str`):
            Type of the underlying object. Can be one of `"model"`, `"dataset"`, `"space"` or `"paper"`.
        position (`int`):
            Position of the item in the collection.
        note (`str`, *optional*):
            Note associated with the item, as plain text.
    """

    item_object_id: str  # id in database
    item_id: str  # repo_id or paper id
    item_type: str
    position: int
    note: Optional[str] = None

    def __init__(
        self, _id: str, id: str, type: CollectionItemType_T, position: int, note: Optional[Dict] = None, **kwargs
    ) -> None:
        self.item_object_id: str = _id  # id in database
        self.item_id: str = id  # repo_id or paper id
        self.item_type: CollectionItemType_T = type
        self.position: int = position
        self.note: str = note["text"] if note is not None else None
class_definition
41,459
42,738
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
65
class Collection: """ Contains information about a Collection on the Hub. Attributes: slug (`str`): Slug of the collection. E.g. `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. title (`str`): Title of the collection. E.g. `"Recent models"`. owner (`str`): Owner of the collection. E.g. `"TheBloke"`. items (`List[CollectionItem]`): List of items in the collection. last_updated (`datetime`): Date of the last update of the collection. position (`int`): Position of the collection in the list of collections of the owner. private (`bool`): Whether the collection is private or not. theme (`str`): Theme of the collection. E.g. `"green"`. upvotes (`int`): Number of upvotes of the collection. description (`str`, *optional*): Description of the collection, as plain text. url (`str`): (property) URL of the collection on the Hub. """ slug: str title: str owner: str items: List[CollectionItem] last_updated: datetime position: int private: bool theme: str upvotes: int description: Optional[str] = None def __init__(self, **kwargs) -> None: self.slug = kwargs.pop("slug") self.title = kwargs.pop("title") self.owner = kwargs.pop("owner") self.items = [CollectionItem(**item) for item in kwargs.pop("items")] self.last_updated = parse_datetime(kwargs.pop("lastUpdated")) self.position = kwargs.pop("position") self.private = kwargs.pop("private") self.theme = kwargs.pop("theme") self.upvotes = kwargs.pop("upvotes") self.description = kwargs.pop("description", None) endpoint = kwargs.pop("endpoint", None) if endpoint is None: endpoint = constants.ENDPOINT self._url = f"{endpoint}/collections/{self.slug}" @property def url(self) -> str: """Returns the URL of the collection on the Hub.""" return self._url
class_definition
42,752
44,887
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
66
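A usage sketch for the two records above: fetching a collection and walking its items through the documented `Collection` / `CollectionItem` fields. It assumes `HfApi.get_collection`, which is not shown in this excerpt, and the slug is a placeholder.

```python
from huggingface_hub import HfApi

api = HfApi()
collection = api.get_collection("my-user/my-collection-64f9a55bb3115b4f513ec026")  # placeholder slug
print(collection.title, "-", collection.url)
for item in collection.items:
    print(f"[{item.item_type}] {item.item_id} (position {item.position})")
    if item.note:
        print("  note:", item.note)
```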
class GitRefInfo:
    """
    Contains information about a git reference for a repo on the Hub.

    Attributes:
        name (`str`):
            Name of the reference (e.g. tag name or branch name).
        ref (`str`):
            Full git ref on the Hub (e.g. `"refs/heads/main"` or `"refs/tags/v1.0"`).
        target_commit (`str`):
            OID of the target commit for the ref (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`)
    """

    name: str
    ref: str
    target_commit: str
class_definition
44,901
45,399
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
67
class GitRefs:
    """
    Contains information about all git references for a repo on the Hub.

    Object is returned by [`list_repo_refs`].

    Attributes:
        branches (`List[GitRefInfo]`):
            A list of [`GitRefInfo`] containing information about branches on the repo.
        converts (`List[GitRefInfo]`):
            A list of [`GitRefInfo`] containing information about "convert" refs on the repo.
            Converts are refs used (internally) to push preprocessed data in Dataset repos.
        tags (`List[GitRefInfo]`):
            A list of [`GitRefInfo`] containing information about tags on the repo.
        pull_requests (`List[GitRefInfo]`, *optional*):
            A list of [`GitRefInfo`] containing information about pull requests on the repo.
            Only returned if `include_prs=True` is set.
    """

    branches: List[GitRefInfo]
    converts: List[GitRefInfo]
    tags: List[GitRefInfo]
    pull_requests: Optional[List[GitRefInfo]] = None
class_definition
45,413
46,399
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
68
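The docstring above points to `list_repo_refs`, which returns the `GitRefs` structure. A short sketch of iterating its branches and tags follows; "gpt2" is only a convenient public repo, any repo_id works.

```python
from huggingface_hub import HfApi

api = HfApi()
refs = api.list_repo_refs("gpt2")
for branch in refs.branches:
    print("branch", branch.name, "->", branch.target_commit)
for tag in refs.tags:
    print("tag   ", tag.name, "->", tag.ref)
```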
class GitCommitInfo:
    """
    Contains information about a git commit for a repo on the Hub. Check out [`list_repo_commits`] for more details.

    Attributes:
        commit_id (`str`):
            OID of the commit (e.g. `"e7da7f221d5bf496a48136c0cd264e630fe9fcc8"`)
        authors (`List[str]`):
            List of authors of the commit.
        created_at (`datetime`):
            Datetime when the commit was created.
        title (`str`):
            Title of the commit. This is a free-text value entered by the authors.
        message (`str`):
            Description of the commit. This is a free-text value entered by the authors.
        formatted_title (`str`):
            Title of the commit formatted as HTML. Only returned if `formatted=True` is set.
        formatted_message (`str`):
            Description of the commit formatted as HTML. Only returned if `formatted=True` is set.
    """

    commit_id: str
    authors: List[str]
    created_at: datetime
    title: str
    message: str
    formatted_title: Optional[str]
    formatted_message: Optional[str]
class_definition
46,413
47,503
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
69
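A sketch of walking a repo's history with `list_repo_commits`, which the docstring above references, and printing the documented `GitCommitInfo` fields; "gpt2" is again just a public example repo.

```python
from huggingface_hub import HfApi

api = HfApi()
for commit in api.list_repo_commits("gpt2"):
    print(commit.commit_id[:8], commit.created_at.date(), commit.title)
    print("   authors:", ", ".join(commit.authors))
```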
class UserLikes:
    """
    Contains information about a user's likes on the Hub.

    Attributes:
        user (`str`):
            Name of the user for which we fetched the likes.
        total (`int`):
            Total number of likes.
        datasets (`List[str]`):
            List of datasets liked by the user (as repo_ids).
        models (`List[str]`):
            List of models liked by the user (as repo_ids).
        spaces (`List[str]`):
            List of spaces liked by the user (as repo_ids).
    """

    # Metadata
    user: str
    total: int

    # User likes
    datasets: List[str]
    models: List[str]
    spaces: List[str]
class_definition
47,517
48,168
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
70
class Organization:
    """
    Contains information about an organization on the Hub.

    Attributes:
        avatar_url (`str`):
            URL of the organization's avatar.
        name (`str`):
            Name of the organization on the Hub (unique).
        fullname (`str`):
            Organization's full name.
    """

    avatar_url: str
    name: str
    fullname: str

    def __init__(self, **kwargs) -> None:
        self.avatar_url = kwargs.pop("avatarUrl", "")
        self.name = kwargs.pop("name", "")
        self.fullname = kwargs.pop("fullname", "")

        # forward compatibility
        self.__dict__.update(**kwargs)
class_definition
48,182
48,827
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
71
class User: """ Contains information about a user on the Hub. Attributes: username (`str`): Name of the user on the Hub (unique). fullname (`str`): User's full name. avatar_url (`str`): URL of the user's avatar. details (`str`, *optional*): User's details. is_following (`bool`, *optional*): Whether the authenticated user is following this user. is_pro (`bool`, *optional*): Whether the user is a pro user. num_models (`int`, *optional*): Number of models created by the user. num_datasets (`int`, *optional*): Number of datasets created by the user. num_spaces (`int`, *optional*): Number of spaces created by the user. num_discussions (`int`, *optional*): Number of discussions initiated by the user. num_papers (`int`, *optional*): Number of papers authored by the user. num_upvotes (`int`, *optional*): Number of upvotes received by the user. num_likes (`int`, *optional*): Number of likes given by the user. num_following (`int`, *optional*): Number of users this user is following. num_followers (`int`, *optional*): Number of users following this user. orgs (list of [`Organization`]): List of organizations the user is part of. """ # Metadata username: str fullname: str avatar_url: str details: Optional[str] = None is_following: Optional[bool] = None is_pro: Optional[bool] = None num_models: Optional[int] = None num_datasets: Optional[int] = None num_spaces: Optional[int] = None num_discussions: Optional[int] = None num_papers: Optional[int] = None num_upvotes: Optional[int] = None num_likes: Optional[int] = None num_following: Optional[int] = None num_followers: Optional[int] = None orgs: List[Organization] = field(default_factory=list) def __init__(self, **kwargs) -> None: self.username = kwargs.pop("user", "") self.fullname = kwargs.pop("fullname", "") self.avatar_url = kwargs.pop("avatarUrl", "") self.is_following = kwargs.pop("isFollowing", None) self.is_pro = kwargs.pop("isPro", None) self.details = kwargs.pop("details", None) self.num_models = kwargs.pop("numModels", None) self.num_datasets = kwargs.pop("numDatasets", None) self.num_spaces = kwargs.pop("numSpaces", None) self.num_discussions = kwargs.pop("numDiscussions", None) self.num_papers = kwargs.pop("numPapers", None) self.num_upvotes = kwargs.pop("numUpvotes", None) self.num_likes = kwargs.pop("numLikes", None) self.num_following = kwargs.pop("numFollowing", None) self.num_followers = kwargs.pop("numFollowers", None) self.user_type = kwargs.pop("type", None) self.orgs = [Organization(**org) for org in kwargs.pop("orgs", [])] # forward compatibility self.__dict__.update(**kwargs)
class_definition
48,841
51,985
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
72
class PaperInfo: """ Contains information about a paper on the Hub. Attributes: id (`str`): arXiv paper ID. authors (`List[str]`, **optional**): Names of paper authors published_at (`datetime`, **optional**): Date paper published. title (`str`, **optional**): Title of the paper. summary (`str`, **optional**): Summary of the paper. upvotes (`int`, **optional**): Number of upvotes for the paper on the Hub. discussion_id (`str`, **optional**): Discussion ID for the paper on the Hub. source (`str`, **optional**): Source of the paper. comments (`int`, **optional**): Number of comments for the paper on the Hub. submitted_at (`datetime`, **optional**): Date paper appeared in daily papers on the Hub. submitted_by (`User`, **optional**): Information about who submitted the daily paper. """ id: str authors: Optional[List[str]] published_at: Optional[datetime] title: Optional[str] summary: Optional[str] upvotes: Optional[int] discussion_id: Optional[str] source: Optional[str] comments: Optional[int] submitted_at: Optional[datetime] submitted_by: Optional[User] def __init__(self, **kwargs) -> None: paper = kwargs.pop("paper", {}) self.id = kwargs.pop("id", None) or paper.pop("id", None) authors = paper.pop("authors", None) or kwargs.pop("authors", None) self.authors = [author.pop("name", None) for author in authors] if authors else None published_at = paper.pop("publishedAt", None) or kwargs.pop("publishedAt", None) self.published_at = parse_datetime(published_at) if published_at else None self.title = kwargs.pop("title", None) self.source = kwargs.pop("source", None) self.summary = paper.pop("summary", None) or kwargs.pop("summary", None) self.upvotes = paper.pop("upvotes", None) or kwargs.pop("upvotes", None) self.discussion_id = paper.pop("discussionId", None) or kwargs.pop("discussionId", None) self.comments = kwargs.pop("numComments", 0) submitted_at = kwargs.pop("publishedAt", None) or kwargs.pop("submittedOnDailyAt", None) self.submitted_at = parse_datetime(submitted_at) if submitted_at else None submitted_by = kwargs.pop("submittedBy", None) or kwargs.pop("submittedOnDailyBy", None) self.submitted_by = User(**submitted_by) if submitted_by else None # forward compatibility self.__dict__.update(**kwargs)
class_definition
51,999
54,668
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
73
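`PaperInfo.__init__` in the record above flattens the nested `"paper"` payload returned by the Hub's daily-papers endpoint into top-level attributes. The payload below is hand-written purely to show that flattening.

```python
from huggingface_hub.hf_api import PaperInfo

payload = {
    "paper": {
        "id": "2307.09288",
        "authors": [{"name": "First Author"}, {"name": "Second Author"}],
        "publishedAt": "2023-07-18T00:00:00.000Z",
        "summary": "A made-up summary.",
        "upvotes": 10,
    },
    "title": "A Made-Up Paper Title",
    "numComments": 3,
}
paper = PaperInfo(**payload)
print(paper.id, paper.published_at.year)  # "2307.09288" 2023
print(paper.authors)                      # ["First Author", "Second Author"]
print(paper.comments, paper.upvotes)      # 3 10
```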
class HfApi: """ Client to interact with the Hugging Face Hub via HTTP. The client is initialized with some high-level settings used in all requests made to the Hub (HF endpoint, authentication, user agents...). Using the `HfApi` client is preferred but not mandatory as all of its public methods are exposed directly at the root of `huggingface_hub`. Args: endpoint (`str`, *optional*): Endpoint of the Hub. Defaults to <https://huggingface.co>. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. library_name (`str`, *optional*): The name of the library that is making the HTTP request. Will be added to the user-agent header. Example: `"transformers"`. library_version (`str`, *optional*): The version of the library that is making the HTTP request. Will be added to the user-agent header. Example: `"4.24.0"`. user_agent (`str`, `dict`, *optional*): The user agent info in the form of a dictionary or a single string. It will be completed with information about the installed packages. headers (`dict`, *optional*): Additional headers to be sent with each request. Example: `{"X-My-Header": "value"}`. Headers passed here are taking precedence over the default headers. """ def __init__( self, endpoint: Optional[str] = None, token: Union[str, bool, None] = None, library_name: Optional[str] = None, library_version: Optional[str] = None, user_agent: Union[Dict, str, None] = None, headers: Optional[Dict[str, str]] = None, ) -> None: self.endpoint = endpoint if endpoint is not None else constants.ENDPOINT self.token = token self.library_name = library_name self.library_version = library_version self.user_agent = user_agent self.headers = headers self._thread_pool: Optional[ThreadPoolExecutor] = None def run_as_future(self, fn: Callable[..., R], *args, **kwargs) -> Future[R]: """ Run a method in the background and return a Future instance. The main goal is to run methods without blocking the main thread (e.g. to push data during a training). Background jobs are queued to preserve order but are not ran in parallel. If you need to speed-up your scripts by parallelizing lots of call to the API, you must setup and use your own [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor). Note: Most-used methods like [`upload_file`], [`upload_folder`] and [`create_commit`] have a `run_as_future: bool` argument to directly call them in the background. This is equivalent to calling `api.run_as_future(...)` on them but less verbose. Args: fn (`Callable`): The method to run in the background. *args, **kwargs: Arguments with which the method will be called. Return: `Future`: a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) instance to get the result of the task. Example: ```py >>> from huggingface_hub import HfApi >>> api = HfApi() >>> future = api.run_as_future(api.whoami) # instant >>> future.done() False >>> future.result() # wait until complete and return result (...) >>> future.done() True ``` """ if self._thread_pool is None: self._thread_pool = ThreadPoolExecutor(max_workers=1) self._thread_pool return self._thread_pool.submit(fn, *args, **kwargs) @validate_hf_hub_args def whoami(self, token: Union[bool, str, None] = None) -> Dict: """ Call HF API to know "whoami". 
Args: token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. """ r = get_session().get( f"{self.endpoint}/api/whoami-v2", headers=self._build_hf_headers( # If `token` is provided and not `None`, it will be used by default. # Otherwise, the token must be retrieved from cache or env variable. token=(token or self.token or True), ), ) try: hf_raise_for_status(r) except HTTPError as e: raise HTTPError( "Invalid user token. If you didn't pass a user token, make sure you " "are properly logged in by executing `huggingface-cli login`, and " "if you did pass a user token, double-check it's correct.", request=e.request, response=e.response, ) from e return r.json() @_deprecate_method( version="1.0", message=( "Permissions are more complex than when `get_token_permission` was first introduced. " "OAuth and fine-grain tokens allows for more detailed permissions. " "If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key." ), ) def get_token_permission( self, token: Union[bool, str, None] = None ) -> Literal["read", "write", "fineGrained", None]: """ Check if a given `token` is valid and return its permissions. <Tip warning={true}> This method is deprecated and will be removed in version 1.0. Permissions are more complex than when `get_token_permission` was first introduced. OAuth and fine-grain tokens allows for more detailed permissions. If you need to know the permissions associated with a token, please use `whoami` and check the `'auth'` key. </Tip> For more details about tokens, please refer to https://huggingface.co/docs/hub/security-tokens#what-are-user-access-tokens. Args: token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Literal["read", "write", "fineGrained", None]`: Permission granted by the token ("read" or "write"). Returns `None` if no token passed, if token is invalid or if role is not returned by the server. This typically happens when the token is an OAuth token. """ try: return self.whoami(token=token)["auth"]["accessToken"]["role"] except (LocalTokenNotFoundError, HTTPError, KeyError): return None def get_model_tags(self) -> Dict: """ List all valid model tags as a nested namespace object """ path = f"{self.endpoint}/api/models-tags-by-type" r = get_session().get(path) hf_raise_for_status(r) return r.json() def get_dataset_tags(self) -> Dict: """ List all valid dataset tags as a nested namespace object. 
""" path = f"{self.endpoint}/api/datasets-tags-by-type" r = get_session().get(path) hf_raise_for_status(r) return r.json() @validate_hf_hub_args def list_models( self, *, # Search-query parameter filter: Union[str, Iterable[str], None] = None, author: Optional[str] = None, gated: Optional[bool] = None, inference: Optional[Literal["cold", "frozen", "warm"]] = None, library: Optional[Union[str, List[str]]] = None, language: Optional[Union[str, List[str]]] = None, model_name: Optional[str] = None, task: Optional[Union[str, List[str]]] = None, trained_dataset: Optional[Union[str, List[str]]] = None, tags: Optional[Union[str, List[str]]] = None, search: Optional[str] = None, pipeline_tag: Optional[str] = None, emissions_thresholds: Optional[Tuple[float, float]] = None, # Sorting and pagination parameters sort: Union[Literal["last_modified"], str, None] = None, direction: Optional[Literal[-1]] = None, limit: Optional[int] = None, # Additional data to fetch expand: Optional[List[ExpandModelProperty_T]] = None, full: Optional[bool] = None, cardData: bool = False, fetch_config: bool = False, token: Union[bool, str, None] = None, ) -> Iterable[ModelInfo]: """ List models hosted on the Huggingface Hub, given some filters. Args: filter (`str` or `Iterable[str]`, *optional*): A string or list of string to filter models on the Hub. author (`str`, *optional*): A string which identify the author (user or organization) of the returned models. gated (`bool`, *optional*): A boolean to filter models on the Hub that are gated or not. By default, all models are returned. If `gated=True` is passed, only gated models are returned. If `gated=False` is passed, only non-gated models are returned. inference (`Literal["cold", "frozen", "warm"]`, *optional*): A string to filter models on the Hub by their state on the Inference API. Warm models are available for immediate use. Cold models will be loaded on first inference call. Frozen models are not available in Inference API. library (`str` or `List`, *optional*): A string or list of strings of foundational libraries models were originally trained from, such as pytorch, tensorflow, or allennlp. language (`str` or `List`, *optional*): A string or list of strings of languages, both by name and country code, such as "en" or "English" model_name (`str`, *optional*): A string that contain complete or partial names for models on the Hub, such as "bert" or "bert-base-cased" task (`str` or `List`, *optional*): A string or list of strings of tasks models were designed for, such as: "fill-mask" or "automatic-speech-recognition" trained_dataset (`str` or `List`, *optional*): A string tag or a list of string tags of the trained dataset for a model on the Hub. tags (`str` or `List`, *optional*): A string tag or a list of tags to filter models on the Hub by, such as `text-generation` or `spacy`. search (`str`, *optional*): A string that will be contained in the returned model ids. pipeline_tag (`str`, *optional*): A string pipeline tag to filter models on the Hub by, such as `summarization`. emissions_thresholds (`Tuple`, *optional*): A tuple of two ints or floats representing a minimum and maximum carbon footprint to filter the resulting models with in grams. sort (`Literal["last_modified"]` or `str`, *optional*): The key with which to sort the resulting models. Possible values are "last_modified", "trending_score", "created_at", "downloads" and "likes". direction (`Literal[-1]` or `int`, *optional*): Direction in which to sort. 
The value `-1` sorts by descending order while all other values sort by ascending order. limit (`int`, *optional*): The limit on the number of models fetched. Leaving this option to `None` fetches all models. expand (`List[ExpandModelProperty_T]`, *optional*): List properties to return in the response. When used, only the properties in the list will be returned. This parameter cannot be used if `full`, `cardData` or `fetch_config` are passed. Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"` and `"widgetData"`. full (`bool`, *optional*): Whether to fetch all model data, including the `last_modified`, the `sha`, the files and the `tags`. This is set to `True` by default when using a filter. cardData (`bool`, *optional*): Whether to grab the metadata for the model as well. Can contain useful information such as carbon emissions, metrics, and datasets trained on. fetch_config (`bool`, *optional*): Whether to fetch the model configs as well. This is not included in `full` due to its size. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[ModelInfo]`: an iterable of [`huggingface_hub.hf_api.ModelInfo`] objects. Example usage with the `filter` argument: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() # List all models >>> api.list_models() # List only the text classification models >>> api.list_models(filter="text-classification") # List only models from the AllenNLP library >>> api.list_models(filter="allennlp") ``` Example usage with the `search` argument: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() # List all models with "bert" in their name >>> api.list_models(search="bert") # List all models with "bert" in their name made by google >>> api.list_models(search="bert", author="google") ``` """ if expand and (full or cardData or fetch_config): raise ValueError("`expand` cannot be used if `full`, `cardData` or `fetch_config` are passed.") if emissions_thresholds is not None and cardData is None: raise ValueError("`emissions_thresholds` were passed without setting `cardData=True`.") path = f"{self.endpoint}/api/models" headers = self._build_hf_headers(token=token) params: Dict[str, Any] = {} # Build the filter list filter_list: List[str] = [] if filter: filter_list.extend([filter] if isinstance(filter, str) else filter) if library: filter_list.extend([library] if isinstance(library, str) else library) if task: filter_list.extend([task] if isinstance(task, str) else task) if trained_dataset: if isinstance(trained_dataset, str): trained_dataset = [trained_dataset] for dataset in trained_dataset: if not dataset.startswith("dataset:"): dataset = f"dataset:{dataset}" filter_list.append(dataset) if language: filter_list.extend([language] if isinstance(language, str) else language) if tags: filter_list.extend([tags] if isinstance(tags, str) else tags) if len(filter_list) > 0: params["filter"] = filter_list # Handle other query params if 
author: params["author"] = author if gated is not None: params["gated"] = gated if inference is not None: params["inference"] = inference if pipeline_tag: params["pipeline_tag"] = pipeline_tag search_list = [] if model_name: search_list.append(model_name) if search: search_list.append(search) if len(search_list) > 0: params["search"] = search_list if sort is not None: params["sort"] = ( "lastModified" if sort == "last_modified" else "trendingScore" if sort == "trending_score" else "createdAt" if sort == "created_at" else sort ) if direction is not None: params["direction"] = direction if limit is not None: params["limit"] = limit # Request additional data if full: params["full"] = True if fetch_config: params["config"] = True if cardData: params["cardData"] = True if expand: params["expand"] = expand # `items` is a generator items = paginate(path, params=params, headers=headers) if limit is not None: items = islice(items, limit) # Do not iterate over all pages for item in items: if "siblings" not in item: item["siblings"] = None model_info = ModelInfo(**item) if emissions_thresholds is None or _is_emission_within_threshold(model_info, *emissions_thresholds): yield model_info @validate_hf_hub_args def list_datasets( self, *, # Search-query parameter filter: Union[str, Iterable[str], None] = None, author: Optional[str] = None, benchmark: Optional[Union[str, List[str]]] = None, dataset_name: Optional[str] = None, gated: Optional[bool] = None, language_creators: Optional[Union[str, List[str]]] = None, language: Optional[Union[str, List[str]]] = None, multilinguality: Optional[Union[str, List[str]]] = None, size_categories: Optional[Union[str, List[str]]] = None, tags: Optional[Union[str, List[str]]] = None, task_categories: Optional[Union[str, List[str]]] = None, task_ids: Optional[Union[str, List[str]]] = None, search: Optional[str] = None, # Sorting and pagination parameters sort: Optional[Union[Literal["last_modified"], str]] = None, direction: Optional[Literal[-1]] = None, limit: Optional[int] = None, # Additional data to fetch expand: Optional[List[ExpandDatasetProperty_T]] = None, full: Optional[bool] = None, token: Union[bool, str, None] = None, ) -> Iterable[DatasetInfo]: """ List datasets hosted on the Huggingface Hub, given some filters. Args: filter (`str` or `Iterable[str]`, *optional*): A string or list of string to filter datasets on the hub. author (`str`, *optional*): A string which identify the author of the returned datasets. benchmark (`str` or `List`, *optional*): A string or list of strings that can be used to identify datasets on the Hub by their official benchmark. dataset_name (`str`, *optional*): A string or list of strings that can be used to identify datasets on the Hub by its name, such as `SQAC` or `wikineural` gated (`bool`, *optional*): A boolean to filter datasets on the Hub that are gated or not. By default, all datasets are returned. If `gated=True` is passed, only gated datasets are returned. If `gated=False` is passed, only non-gated datasets are returned. language_creators (`str` or `List`, *optional*): A string or list of strings that can be used to identify datasets on the Hub with how the data was curated, such as `crowdsourced` or `machine_generated`. language (`str` or `List`, *optional*): A string or list of strings representing a two-character language to filter datasets by on the Hub. multilinguality (`str` or `List`, *optional*): A string or list of strings representing a filter for datasets that contain multiple languages. 
size_categories (`str` or `List`, *optional*): A string or list of strings that can be used to identify datasets on the Hub by the size of the dataset such as `100K<n<1M` or `1M<n<10M`. tags (`str` or `List`, *optional*): A string tag or a list of tags to filter datasets on the Hub. task_categories (`str` or `List`, *optional*): A string or list of strings that can be used to identify datasets on the Hub by the designed task, such as `audio_classification` or `named_entity_recognition`. task_ids (`str` or `List`, *optional*): A string or list of strings that can be used to identify datasets on the Hub by the specific task such as `speech_emotion_recognition` or `paraphrase`. search (`str`, *optional*): A string that will be contained in the returned datasets. sort (`Literal["last_modified"]` or `str`, *optional*): The key with which to sort the resulting models. Possible values are "last_modified", "trending_score", "created_at", "downloads" and "likes". direction (`Literal[-1]` or `int`, *optional*): Direction in which to sort. The value `-1` sorts by descending order while all other values sort by ascending order. limit (`int`, *optional*): The limit on the number of datasets fetched. Leaving this option to `None` fetches all datasets. expand (`List[ExpandDatasetProperty_T]`, *optional*): List properties to return in the response. When used, only the properties in the list will be returned. This parameter cannot be used if `full` is passed. Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"` and `"trendingScore"`. full (`bool`, *optional*): Whether to fetch all dataset data, including the `last_modified`, the `card_data` and the files. Can contain useful information such as the PapersWithCode ID. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[DatasetInfo]`: an iterable of [`huggingface_hub.hf_api.DatasetInfo`] objects. Example usage with the `filter` argument: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() # List all datasets >>> api.list_datasets() # List only the text classification datasets >>> api.list_datasets(filter="task_categories:text-classification") # List only the datasets in russian for language modeling >>> api.list_datasets( ... filter=("language:ru", "task_ids:language-modeling") ... 
) # List FiftyOne datasets (identified by the tag "fiftyone" in dataset card) >>> api.list_datasets(tags="fiftyone") ``` Example usage with the `search` argument: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() # List all datasets with "text" in their name >>> api.list_datasets(search="text") # List all datasets with "text" in their name made by google >>> api.list_datasets(search="text", author="google") ``` """ if expand and full: raise ValueError("`expand` cannot be used if `full` is passed.") path = f"{self.endpoint}/api/datasets" headers = self._build_hf_headers(token=token) params: Dict[str, Any] = {} # Build `filter` list filter_list = [] if filter is not None: if isinstance(filter, str): filter_list.append(filter) else: filter_list.extend(filter) for key, value in ( ("benchmark", benchmark), ("language_creators", language_creators), ("language", language), ("multilinguality", multilinguality), ("size_categories", size_categories), ("task_categories", task_categories), ("task_ids", task_ids), ): if value: if isinstance(value, str): value = [value] for value_item in value: if not value_item.startswith(f"{key}:"): data = f"{key}:{value_item}" filter_list.append(data) if tags is not None: filter_list.extend([tags] if isinstance(tags, str) else tags) if len(filter_list) > 0: params["filter"] = filter_list # Handle other query params if author: params["author"] = author if gated is not None: params["gated"] = gated search_list = [] if dataset_name: search_list.append(dataset_name) if search: search_list.append(search) if len(search_list) > 0: params["search"] = search_list if sort is not None: params["sort"] = ( "lastModified" if sort == "last_modified" else "trendingScore" if sort == "trending_score" else "createdAt" if sort == "created_at" else sort ) if direction is not None: params["direction"] = direction if limit is not None: params["limit"] = limit # Request additional data if expand: params["expand"] = expand if full: params["full"] = True items = paginate(path, params=params, headers=headers) if limit is not None: items = islice(items, limit) # Do not iterate over all pages for item in items: if "siblings" not in item: item["siblings"] = None yield DatasetInfo(**item) @validate_hf_hub_args def list_spaces( self, *, # Search-query parameter filter: Union[str, Iterable[str], None] = None, author: Optional[str] = None, search: Optional[str] = None, datasets: Union[str, Iterable[str], None] = None, models: Union[str, Iterable[str], None] = None, linked: bool = False, # Sorting and pagination parameters sort: Union[Literal["last_modified"], str, None] = None, direction: Optional[Literal[-1]] = None, limit: Optional[int] = None, # Additional data to fetch expand: Optional[List[ExpandSpaceProperty_T]] = None, full: Optional[bool] = None, token: Union[bool, str, None] = None, ) -> Iterable[SpaceInfo]: """ List spaces hosted on the Huggingface Hub, given some filters. Args: filter (`str` or `Iterable`, *optional*): A string tag or list of tags that can be used to identify Spaces on the Hub. author (`str`, *optional*): A string which identify the author of the returned Spaces. search (`str`, *optional*): A string that will be contained in the returned Spaces. datasets (`str` or `Iterable`, *optional*): Whether to return Spaces that make use of a dataset. The name of a specific dataset can be passed as a string. models (`str` or `Iterable`, *optional*): Whether to return Spaces that make use of a model. The name of a specific model can be passed as a string. 
linked (`bool`, *optional*): Whether to return Spaces that make use of either a model or a dataset. sort (`Literal["last_modified"]` or `str`, *optional*): The key with which to sort the resulting models. Possible values are "last_modified", "trending_score", "created_at" and "likes". direction (`Literal[-1]` or `int`, *optional*): Direction in which to sort. The value `-1` sorts by descending order while all other values sort by ascending order. limit (`int`, *optional*): The limit on the number of Spaces fetched. Leaving this option to `None` fetches all Spaces. expand (`List[ExpandSpaceProperty_T]`, *optional*): List properties to return in the response. When used, only the properties in the list will be returned. This parameter cannot be used if `full` is passed. Possible values are `"author"`, `"cardData"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"createdAt"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"trendingScore"`. full (`bool`, *optional*): Whether to fetch all Spaces data, including the `last_modified`, `siblings` and `card_data` fields. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[SpaceInfo]`: an iterable of [`huggingface_hub.hf_api.SpaceInfo`] objects. """ if expand and full: raise ValueError("`expand` cannot be used if `full` is passed.") path = f"{self.endpoint}/api/spaces" headers = self._build_hf_headers(token=token) params: Dict[str, Any] = {} if filter is not None: params["filter"] = filter if author is not None: params["author"] = author if search is not None: params["search"] = search if sort is not None: params["sort"] = ( "lastModified" if sort == "last_modified" else "trendingScore" if sort == "trending_score" else "createdAt" if sort == "created_at" else sort ) if direction is not None: params["direction"] = direction if limit is not None: params["limit"] = limit if linked: params["linked"] = True if datasets is not None: params["datasets"] = datasets if models is not None: params["models"] = models # Request additional data if expand: params["expand"] = expand if full: params["full"] = True items = paginate(path, params=params, headers=headers) if limit is not None: items = islice(items, limit) # Do not iterate over all pages for item in items: if "siblings" not in item: item["siblings"] = None yield SpaceInfo(**item) @validate_hf_hub_args def like( self, repo_id: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> None: """ Like a given repo on the Hub (e.g. set as favorite). See also [`unlike`] and [`list_liked_repos`]. Args: repo_id (`str`): The repository to like. Example: `"user/my-cool-model"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if liking a dataset or space, `None` or `"model"` if liking a model. Default is `None`. 
Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. Example: ```python >>> from huggingface_hub import like, list_liked_repos, unlike >>> like("gpt2") >>> "gpt2" in list_liked_repos().models True >>> unlike("gpt2") >>> "gpt2" in list_liked_repos().models False ``` """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL response = get_session().post( url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) @validate_hf_hub_args def unlike( self, repo_id: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> None: """ Unlike a given repo on the Hub (e.g. remove from favorite list). See also [`like`] and [`list_liked_repos`]. Args: repo_id (`str`): The repository to unlike. Example: `"user/my-cool-model"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if unliking a dataset or space, `None` or `"model"` if unliking a model. Default is `None`. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. Example: ```python >>> from huggingface_hub import like, list_liked_repos, unlike >>> like("gpt2") >>> "gpt2" in list_liked_repos().models True >>> unlike("gpt2") >>> "gpt2" in list_liked_repos().models False ``` """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL response = get_session().delete( url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/like", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(response) @validate_hf_hub_args def list_liked_repos( self, user: Optional[str] = None, *, token: Union[bool, str, None] = None, ) -> UserLikes: """ List all public repos liked by a user on huggingface.co. This list is public so token is optional. If `user` is not passed, it defaults to the logged in user. See also [`like`] and [`unlike`]. Args: user (`str`, *optional*): Name of the user for which you want to fetch the likes. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`UserLikes`]: object containing the user name and 3 lists of repo ids (1 for models, 1 for datasets and 1 for Spaces). Raises: [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If `user` is not passed and no token found (either from argument or from machine). Example: ```python >>> from huggingface_hub import list_liked_repos >>> likes = list_liked_repos("julien-c") >>> likes.user "julien-c" >>> likes.models ["osanseviero/streamlit_1.15", "Xhaheen/ChatGPT_HF", ...] ``` """ # User is either provided explicitly or retrieved from current token. if user is None: me = self.whoami(token=token) if me["type"] == "user": user = me["name"] else: raise ValueError( "Cannot list liked repos. You must provide a 'user' as input or be logged in as a user." 
                )
        path = f"{self.endpoint}/api/users/{user}/likes"
        headers = self._build_hf_headers(token=token)
        likes = list(paginate(path, params={}, headers=headers))
        # Looping over a list of items similar to:
        # {
        #     'createdAt': '2021-09-09T21:53:27.000Z',
        #     'repo': {
        #         'name': 'PaddlePaddle/PaddleOCR',
        #         'type': 'space'
        #     }
        # }
        # Let's loop 3 times over the received list. Less efficient but more straightforward to read.
        return UserLikes(
            user=user,
            total=len(likes),
            models=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "model"],
            datasets=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "dataset"],
            spaces=[like["repo"]["name"] for like in likes if like["repo"]["type"] == "space"],
        )

    @validate_hf_hub_args
    def list_repo_likers(
        self,
        repo_id: str,
        *,
        repo_type: Optional[str] = None,
        token: Union[bool, str, None] = None,
    ) -> Iterable[User]:
        """
        List all users who liked a given repo on the Hugging Face Hub.

        See also [`like`] and [`list_liked_repos`].

        Args:
            repo_id (`str`):
                The repository to retrieve likers from. Example: `"user/my-cool-model"`.

            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved
                token, which is the recommended method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

            repo_type (`str`, *optional*):
                Set to `"dataset"` or `"space"` if listing likers from a dataset or a Space,
                `None` or `"model"` if listing from a model. Default is `None`.

        Returns:
            `Iterable[User]`: an iterable of [`huggingface_hub.hf_api.User`] objects.
        """

        # Construct the API endpoint
        if repo_type is None:
            repo_type = constants.REPO_TYPE_MODEL
        path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/likers"
        for liker in paginate(path, params={}, headers=self._build_hf_headers(token=token)):
            yield User(username=liker["user"], fullname=liker["fullname"], avatar_url=liker["avatarUrl"])

    @validate_hf_hub_args
    def model_info(
        self,
        repo_id: str,
        *,
        revision: Optional[str] = None,
        timeout: Optional[float] = None,
        securityStatus: Optional[bool] = None,
        files_metadata: bool = False,
        expand: Optional[List[ExpandModelProperty_T]] = None,
        token: Union[bool, str, None] = None,
    ) -> ModelInfo:
        """
        Get info on one specific model on huggingface.co.

        Model can be private if you pass an acceptable token or are logged in.

        Args:
            repo_id (`str`):
                A namespace (user or an organization) and a repo name separated
                by a `/`.
            revision (`str`, *optional*):
                The revision of the model repository from which to get the
                information.
            timeout (`float`, *optional*):
                Whether to set a timeout for the request to the Hub.
            securityStatus (`bool`, *optional*):
                Whether to retrieve the security status from the model
                repository as well. The security status will be returned in the
                `security_repo_status` field.
            files_metadata (`bool`, *optional*):
                Whether or not to retrieve metadata for files in the repository
                (size, LFS metadata, etc). Defaults to `False`.
            expand (`List[ExpandModelProperty_T]`, *optional*):
                List properties to return in the response. When used, only the properties in the list will be returned.
                This parameter cannot be used if `securityStatus` or `files_metadata` are passed.
Possible values are `"author"`, `"baseModels"`, `"cardData"`, `"childrenModelCount"`, `"config"`, `"createdAt"`, `"disabled"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"gguf"`, `"inference"`, `"lastModified"`, `"library_name"`, `"likes"`, `"mask_token"`, `"model-index"`, `"pipeline_tag"`, `"private"`, `"safetensors"`, `"sha"`, `"siblings"`, `"spaces"`, `"tags"`, `"transformersInfo"`, `"trendingScore"` and `"widgetData"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`huggingface_hub.hf_api.ModelInfo`]: The model repository information. <Tip> Raises the following errors: - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. - [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. </Tip> """ if expand and (securityStatus or files_metadata): raise ValueError("`expand` cannot be used if `securityStatus` or `files_metadata` are set.") headers = self._build_hf_headers(token=token) path = ( f"{self.endpoint}/api/models/{repo_id}" if revision is None else (f"{self.endpoint}/api/models/{repo_id}/revision/{quote(revision, safe='')}") ) params: Dict = {} if securityStatus: params["securityStatus"] = True if files_metadata: params["blobs"] = True if expand: params["expand"] = expand r = get_session().get(path, headers=headers, timeout=timeout, params=params) hf_raise_for_status(r) data = r.json() return ModelInfo(**data) @validate_hf_hub_args def dataset_info( self, repo_id: str, *, revision: Optional[str] = None, timeout: Optional[float] = None, files_metadata: bool = False, expand: Optional[List[ExpandDatasetProperty_T]] = None, token: Union[bool, str, None] = None, ) -> DatasetInfo: """ Get info on one specific dataset on huggingface.co. Dataset can be private if you pass an acceptable token. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. revision (`str`, *optional*): The revision of the dataset repository from which to get the information. timeout (`float`, *optional*): Whether to set a timeout for the request to the Hub. files_metadata (`bool`, *optional*): Whether or not to retrieve metadata for files in the repository (size, LFS metadata, etc). Defaults to `False`. expand (`List[ExpandDatasetProperty_T]`, *optional*): List properties to return in the response. When used, only the properties in the list will be returned. This parameter cannot be used if `files_metadata` is passed. Possible values are `"author"`, `"cardData"`, `"citation"`, `"createdAt"`, `"disabled"`, `"description"`, `"downloads"`, `"downloadsAllTime"`, `"gated"`, `"lastModified"`, `"likes"`, `"paperswithcode_id"`, `"private"`, `"siblings"`, `"sha"`, `"tags"` and `"trendingScore"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`hf_api.DatasetInfo`]: The dataset repository information. 
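        Example (a minimal usage sketch; `"username/my-dataset"` is an illustrative repo id, not a real one):

        ```python
        >>> from huggingface_hub import HfApi
        >>> api = HfApi()
        >>> info = api.dataset_info("username/my-dataset")
        >>> info.id, info.private, info.downloads  # a few attributes of the returned `DatasetInfo`
        >>> # Fetch only selected properties to reduce the response payload
        >>> api.dataset_info("username/my-dataset", expand=["downloads", "lastModified"])
        ```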
        <Tip>

        Raises the following errors:

            - [`~utils.RepositoryNotFoundError`]
              If the repository to download from cannot be found. This may be because it doesn't exist,
              or because it is set to `private` and you do not have access.
            - [`~utils.RevisionNotFoundError`]
              If the revision to download from cannot be found.

        </Tip>
        """
        if expand and files_metadata:
            raise ValueError("`expand` cannot be used if `files_metadata` is set.")
        headers = self._build_hf_headers(token=token)
        path = (
            f"{self.endpoint}/api/datasets/{repo_id}"
            if revision is None
            else (f"{self.endpoint}/api/datasets/{repo_id}/revision/{quote(revision, safe='')}")
        )
        params: Dict = {}
        if files_metadata:
            params["blobs"] = True
        if expand:
            params["expand"] = expand

        r = get_session().get(path, headers=headers, timeout=timeout, params=params)
        hf_raise_for_status(r)
        data = r.json()
        return DatasetInfo(**data)

    @validate_hf_hub_args
    def space_info(
        self,
        repo_id: str,
        *,
        revision: Optional[str] = None,
        timeout: Optional[float] = None,
        files_metadata: bool = False,
        expand: Optional[List[ExpandSpaceProperty_T]] = None,
        token: Union[bool, str, None] = None,
    ) -> SpaceInfo:
        """
        Get info on one specific Space on huggingface.co.

        Space can be private if you pass an acceptable token.

        Args:
            repo_id (`str`):
                A namespace (user or an organization) and a repo name separated
                by a `/`.
            revision (`str`, *optional*):
                The revision of the space repository from which to get the
                information.
            timeout (`float`, *optional*):
                Whether to set a timeout for the request to the Hub.
            files_metadata (`bool`, *optional*):
                Whether or not to retrieve metadata for files in the repository
                (size, LFS metadata, etc). Defaults to `False`.
            expand (`List[ExpandSpaceProperty_T]`, *optional*):
                List properties to return in the response. When used, only the properties in the list will be returned.
                This parameter cannot be used if `files_metadata` is passed.
                Possible values are `"author"`, `"cardData"`, `"createdAt"`, `"datasets"`, `"disabled"`, `"lastModified"`, `"likes"`, `"models"`, `"private"`, `"runtime"`, `"sdk"`, `"siblings"`, `"sha"`, `"subdomain"`, `"tags"` and `"trendingScore"`.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved
                token, which is the recommended method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Returns:
            [`~hf_api.SpaceInfo`]: The space repository information.

        <Tip>

        Raises the following errors:

            - [`~utils.RepositoryNotFoundError`]
              If the repository to download from cannot be found. This may be because it doesn't exist,
              or because it is set to `private` and you do not have access.
            - [`~utils.RevisionNotFoundError`]
              If the revision to download from cannot be found.
</Tip> """ if expand and files_metadata: raise ValueError("`expand` cannot be used if `files_metadata` is set.") headers = self._build_hf_headers(token=token) path = ( f"{self.endpoint}/api/spaces/{repo_id}" if revision is None else (f"{self.endpoint}/api/spaces/{repo_id}/revision/{quote(revision, safe='')}") ) params: Dict = {} if files_metadata: params["blobs"] = True if expand: params["expand"] = expand r = get_session().get(path, headers=headers, timeout=timeout, params=params) hf_raise_for_status(r) data = r.json() return SpaceInfo(**data) @validate_hf_hub_args def repo_info( self, repo_id: str, *, revision: Optional[str] = None, repo_type: Optional[str] = None, timeout: Optional[float] = None, files_metadata: bool = False, expand: Optional[Union[ExpandModelProperty_T, ExpandDatasetProperty_T, ExpandSpaceProperty_T]] = None, token: Union[bool, str, None] = None, ) -> Union[ModelInfo, DatasetInfo, SpaceInfo]: """ Get the info object for a given repo of a given type. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. revision (`str`, *optional*): The revision of the repository from which to get the information. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, `None` or `"model"` if getting repository info from a model. Default is `None`. timeout (`float`, *optional*): Whether to set a timeout for the request to the Hub. expand (`ExpandModelProperty_T` or `ExpandDatasetProperty_T` or `ExpandSpaceProperty_T`, *optional*): List properties to return in the response. When used, only the properties in the list will be returned. This parameter cannot be used if `files_metadata` is passed. For an exhaustive list of available properties, check out [`model_info`], [`dataset_info`] or [`space_info`]. files_metadata (`bool`, *optional*): Whether or not to retrieve metadata for files in the repository (size, LFS metadata, etc). Defaults to `False`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Union[SpaceInfo, DatasetInfo, ModelInfo]`: The repository information, as a [`huggingface_hub.hf_api.DatasetInfo`], [`huggingface_hub.hf_api.ModelInfo`] or [`huggingface_hub.hf_api.SpaceInfo`] object. <Tip> Raises the following errors: - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. - [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. </Tip> """ if repo_type is None or repo_type == "model": method = self.model_info elif repo_type == "dataset": method = self.dataset_info # type: ignore elif repo_type == "space": method = self.space_info # type: ignore else: raise ValueError("Unsupported repo type.") return method( repo_id, revision=revision, token=token, timeout=timeout, expand=expand, # type: ignore[arg-type] files_metadata=files_metadata, ) @validate_hf_hub_args def repo_exists( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> bool: """ Checks if a repository exists on the Hugging Face Hub. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. 
repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, `None` or `"model"` if getting repository info from a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: True if the repository exists, False otherwise. Examples: ```py >>> from huggingface_hub import repo_exists >>> repo_exists("google/gemma-7b") True >>> repo_exists("google/not-a-repo") False ``` """ try: self.repo_info(repo_id=repo_id, repo_type=repo_type, token=token) return True except GatedRepoError: return True # we don't have access but it exists except RepositoryNotFoundError: return False @validate_hf_hub_args def revision_exists( self, repo_id: str, revision: str, *, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> bool: """ Checks if a specific revision exists on a repo on the Hugging Face Hub. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. revision (`str`): The revision of the repository to check. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, `None` or `"model"` if getting repository info from a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: True if the repository and the revision exists, False otherwise. Examples: ```py >>> from huggingface_hub import revision_exists >>> revision_exists("google/gemma-7b", "float16") True >>> revision_exists("google/gemma-7b", "not-a-revision") False ``` """ try: self.repo_info(repo_id=repo_id, revision=revision, repo_type=repo_type, token=token) return True except RevisionNotFoundError: return False except RepositoryNotFoundError: return False @validate_hf_hub_args def file_exists( self, repo_id: str, filename: str, *, repo_type: Optional[str] = None, revision: Optional[str] = None, token: Union[str, bool, None] = None, ) -> bool: """ Checks if a file exists in a repository on the Hugging Face Hub. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. filename (`str`): The name of the file to check, for example: `"config.json"` repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if getting repository info from a dataset or a space, `None` or `"model"` if getting repository info from a model. Default is `None`. revision (`str`, *optional*): The revision of the repository from which to get the information. Defaults to `"main"` branch. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: True if the file exists, False otherwise. 
Examples: ```py >>> from huggingface_hub import file_exists >>> file_exists("bigcode/starcoder", "config.json") True >>> file_exists("bigcode/starcoder", "not-a-file") False >>> file_exists("bigcode/not-a-repo", "config.json") False ``` """ url = hf_hub_url( repo_id=repo_id, repo_type=repo_type, revision=revision, filename=filename, endpoint=self.endpoint ) try: if token is None: token = self.token get_hf_file_metadata(url, token=token) return True except GatedRepoError: # raise specifically on gated repo raise except (RepositoryNotFoundError, EntryNotFoundError, RevisionNotFoundError): return False @validate_hf_hub_args def list_repo_files( self, repo_id: str, *, revision: Optional[str] = None, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> List[str]: """ Get the list of files in a given repo. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. revision (`str`, *optional*): The revision of the repository from which to get the information. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[str]`: the list of files in a given repository. """ return [ f.rfilename for f in self.list_repo_tree( repo_id=repo_id, recursive=True, revision=revision, repo_type=repo_type, token=token ) if isinstance(f, RepoFile) ] @validate_hf_hub_args def list_repo_tree( self, repo_id: str, path_in_repo: Optional[str] = None, *, recursive: bool = False, expand: bool = False, revision: Optional[str] = None, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> Iterable[Union[RepoFile, RepoFolder]]: """ List a repo tree's files and folders and get information about them. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. path_in_repo (`str`, *optional*): Relative path of the tree (folder) in the repo, for example: `"checkpoints/1fec34a/results"`. Will default to the root tree (folder) of the repository. recursive (`bool`, *optional*, defaults to `False`): Whether to list tree's files and folders recursively. expand (`bool`, *optional*, defaults to `False`): Whether to fetch more information about the tree's files and folders (e.g. last commit and files' security scan results). This operation is more expensive for the server so only 50 results are returned per page (instead of 1000). As pagination is implemented in `huggingface_hub`, this is transparent for you except for the time it takes to get the results. revision (`str`, *optional*): The revision of the repository from which to get the tree. Defaults to `"main"` branch. repo_type (`str`, *optional*): The type of the repository from which to get the tree (`"model"`, `"dataset"` or `"space"`. Defaults to `"model"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. 
Returns: `Iterable[Union[RepoFile, RepoFolder]]`: The information about the tree's files and folders, as an iterable of [`RepoFile`] and [`RepoFolder`] objects. The order of the files and folders is not guaranteed. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.RevisionNotFoundError`]: If revision is not found (error 404) on the repo. [`~utils.EntryNotFoundError`]: If the tree (folder) does not exist (error 404) on the repo. Examples: Get information about a repo's tree. ```py >>> from huggingface_hub import list_repo_tree >>> repo_tree = list_repo_tree("lysandre/arxiv-nlp") >>> repo_tree <generator object HfApi.list_repo_tree at 0x7fa4088e1ac0> >>> list(repo_tree) [ RepoFile(path='.gitattributes', size=391, blob_id='ae8c63daedbd4206d7d40126955d4e6ab1c80f8f', lfs=None, last_commit=None, security=None), RepoFile(path='README.md', size=391, blob_id='43bd404b159de6fba7c2f4d3264347668d43af25', lfs=None, last_commit=None, security=None), RepoFile(path='config.json', size=554, blob_id='2f9618c3a19b9a61add74f70bfb121335aeef666', lfs=None, last_commit=None, security=None), RepoFile( path='flax_model.msgpack', size=497764107, blob_id='8095a62ccb4d806da7666fcda07467e2d150218e', lfs={'size': 497764107, 'sha256': 'd88b0d6a6ff9c3f8151f9d3228f57092aaea997f09af009eefd7373a77b5abb9', 'pointer_size': 134}, last_commit=None, security=None ), RepoFile(path='merges.txt', size=456318, blob_id='226b0752cac7789c48f0cb3ec53eda48b7be36cc', lfs=None, last_commit=None, security=None), RepoFile( path='pytorch_model.bin', size=548123560, blob_id='64eaa9c526867e404b68f2c5d66fd78e27026523', lfs={'size': 548123560, 'sha256': '9be78edb5b928eba33aa88f431551348f7466ba9f5ef3daf1d552398722a5436', 'pointer_size': 134}, last_commit=None, security=None ), RepoFile(path='vocab.json', size=898669, blob_id='b00361fece0387ca34b4b8b8539ed830d644dbeb', lfs=None, last_commit=None, security=None)] ] ``` Get even more information about a repo's tree (last commit and files' security scan results) ```py >>> from huggingface_hub import list_repo_tree >>> repo_tree = list_repo_tree("prompthero/openjourney-v4", expand=True) >>> list(repo_tree) [ RepoFolder( path='feature_extractor', tree_id='aa536c4ea18073388b5b0bc791057a7296a00398', last_commit={ 'oid': '47b62b20b20e06b9de610e840282b7e6c3d51190', 'title': 'Upload diffusers weights (#48)', 'date': datetime.datetime(2023, 3, 21, 9, 5, 27, tzinfo=datetime.timezone.utc) } ), RepoFolder( path='safety_checker', tree_id='65aef9d787e5557373fdf714d6c34d4fcdd70440', last_commit={ 'oid': '47b62b20b20e06b9de610e840282b7e6c3d51190', 'title': 'Upload diffusers weights (#48)', 'date': datetime.datetime(2023, 3, 21, 9, 5, 27, tzinfo=datetime.timezone.utc) } ), RepoFile( path='model_index.json', size=582, blob_id='d3d7c1e8c3e78eeb1640b8e2041ee256e24c9ee1', lfs=None, last_commit={ 'oid': 'b195ed2d503f3eb29637050a886d77bd81d35f0e', 'title': 'Fix deprecation warning by changing `CLIPFeatureExtractor` to `CLIPImageProcessor`. (#54)', 'date': datetime.datetime(2023, 5, 15, 21, 41, 59, tzinfo=datetime.timezone.utc) }, security={ 'safe': True, 'av_scan': {'virusFound': False, 'virusNames': None}, 'pickle_import_scan': None } ) ... 
] ``` """ repo_type = repo_type or constants.REPO_TYPE_MODEL revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION headers = self._build_hf_headers(token=token) encoded_path_in_repo = "/" + quote(path_in_repo, safe="") if path_in_repo else "" tree_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tree/{revision}{encoded_path_in_repo}" for path_info in paginate(path=tree_url, headers=headers, params={"recursive": recursive, "expand": expand}): yield (RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info)) @validate_hf_hub_args def list_repo_refs( self, repo_id: str, *, repo_type: Optional[str] = None, include_pull_requests: bool = False, token: Union[str, bool, None] = None, ) -> GitRefs: """ Get the list of refs of a given repo (both tags and branches). Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if listing refs from a dataset or a Space, `None` or `"model"` if listing from a model. Default is `None`. include_pull_requests (`bool`, *optional*): Whether to include refs from pull requests in the list. Defaults to `False`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Example: ```py >>> from huggingface_hub import HfApi >>> api = HfApi() >>> api.list_repo_refs("gpt2") GitRefs(branches=[GitRefInfo(name='main', ref='refs/heads/main', target_commit='e7da7f221d5bf496a48136c0cd264e630fe9fcc8')], converts=[], tags=[]) >>> api.list_repo_refs("bigcode/the-stack", repo_type='dataset') GitRefs( branches=[ GitRefInfo(name='main', ref='refs/heads/main', target_commit='18edc1591d9ce72aa82f56c4431b3c969b210ae3'), GitRefInfo(name='v1.1.a1', ref='refs/heads/v1.1.a1', target_commit='f9826b862d1567f3822d3d25649b0d6d22ace714') ], converts=[], tags=[ GitRefInfo(name='v1.0', ref='refs/tags/v1.0', target_commit='c37a8cd1e382064d8aced5e05543c5f7753834da') ] ) ``` Returns: [`GitRefs`]: object containing all information about branches and tags for a repo on the Hub. """ repo_type = repo_type or constants.REPO_TYPE_MODEL response = get_session().get( f"{self.endpoint}/api/{repo_type}s/{repo_id}/refs", headers=self._build_hf_headers(token=token), params={"include_prs": 1} if include_pull_requests else {}, ) hf_raise_for_status(response) data = response.json() def _format_as_git_ref_info(item: Dict) -> GitRefInfo: return GitRefInfo(name=item["name"], ref=item["ref"], target_commit=item["targetCommit"]) return GitRefs( branches=[_format_as_git_ref_info(item) for item in data["branches"]], converts=[_format_as_git_ref_info(item) for item in data["converts"]], tags=[_format_as_git_ref_info(item) for item in data["tags"]], pull_requests=[_format_as_git_ref_info(item) for item in data["pullRequests"]] if include_pull_requests else None, ) @validate_hf_hub_args def list_repo_commits( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None, revision: Optional[str] = None, formatted: bool = False, ) -> List[GitCommitInfo]: """ Get the list of commits of a given revision for a repo on the Hub. Commits are sorted by date (last commit first). Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. 
repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if listing commits from a dataset or a Space, `None` or `"model"` if listing from a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. formatted (`bool`): Whether to return the HTML-formatted title and description of the commits. Defaults to False. Example: ```py >>> from huggingface_hub import HfApi >>> api = HfApi() # Commits are sorted by date (last commit first) >>> initial_commit = api.list_repo_commits("gpt2")[-1] # Initial commit is always a system commit containing the `.gitattributes` file. >>> initial_commit GitCommitInfo( commit_id='9b865efde13a30c13e0a33e536cf3e4a5a9d71d8', authors=['system'], created_at=datetime.datetime(2019, 2, 18, 10, 36, 15, tzinfo=datetime.timezone.utc), title='initial commit', message='', formatted_title=None, formatted_message=None ) # Create an empty branch by deriving from initial commit >>> api.create_branch("gpt2", "new_empty_branch", revision=initial_commit.commit_id) ``` Returns: List[[`GitCommitInfo`]]: list of objects containing information about the commits for a repo on the Hub. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.RevisionNotFoundError`]: If revision is not found (error 404) on the repo. """ repo_type = repo_type or constants.REPO_TYPE_MODEL revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION # Paginate over results and return the list of commits. return [ GitCommitInfo( commit_id=item["id"], authors=[author["user"] for author in item["authors"]], created_at=parse_datetime(item["date"]), title=item["title"], message=item["message"], formatted_title=item.get("formatted", {}).get("title"), formatted_message=item.get("formatted", {}).get("message"), ) for item in paginate( f"{self.endpoint}/api/{repo_type}s/{repo_id}/commits/{revision}", headers=self._build_hf_headers(token=token), params={"expand[]": "formatted"} if formatted else {}, ) ] @validate_hf_hub_args def get_paths_info( self, repo_id: str, paths: Union[List[str], str], *, expand: bool = False, revision: Optional[str] = None, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> List[Union[RepoFile, RepoFolder]]: """ Get information about a repo's paths. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. paths (`Union[List[str], str]`, *optional*): The paths to get information about. If a path do not exist, it is ignored without raising an exception. expand (`bool`, *optional*, defaults to `False`): Whether to fetch more information about the paths (e.g. last commit and files' security scan results). This operation is more expensive for the server so only 50 results are returned per page (instead of 1000). As pagination is implemented in `huggingface_hub`, this is transparent for you except for the time it takes to get the results. revision (`str`, *optional*): The revision of the repository from which to get the information. Defaults to `"main"` branch. 
repo_type (`str`, *optional*): The type of the repository from which to get the information (`"model"`, `"dataset"` or `"space"`. Defaults to `"model"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[Union[RepoFile, RepoFolder]]`: The information about the paths, as a list of [`RepoFile`] and [`RepoFolder`] objects. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.RevisionNotFoundError`]: If revision is not found (error 404) on the repo. Example: ```py >>> from huggingface_hub import get_paths_info >>> paths_info = get_paths_info("allenai/c4", ["README.md", "en"], repo_type="dataset") >>> paths_info [ RepoFile(path='README.md', size=2379, blob_id='f84cb4c97182890fc1dbdeaf1a6a468fd27b4fff', lfs=None, last_commit=None, security=None), RepoFolder(path='en', tree_id='dc943c4c40f53d02b31ced1defa7e5f438d5862e', last_commit=None) ] ``` """ repo_type = repo_type or constants.REPO_TYPE_MODEL revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION headers = self._build_hf_headers(token=token) response = get_session().post( f"{self.endpoint}/api/{repo_type}s/{repo_id}/paths-info/{revision}", data={ "paths": paths if isinstance(paths, list) else [paths], "expand": expand, }, headers=headers, ) hf_raise_for_status(response) paths_info = response.json() return [ RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info) for path_info in paths_info ] @validate_hf_hub_args def super_squash_history( self, repo_id: str, *, branch: Optional[str] = None, commit_message: Optional[str] = None, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ) -> None: """Squash commit history on a branch for a repo on the Hub. Squashing the repo history is useful when you know you'll make hundreds of commits and you don't want to clutter the history. Squashing commits can only be performed from the head of a branch. <Tip warning={true}> Once squashed, the commit history cannot be retrieved. This is a non-revertible operation. </Tip> <Tip warning={true}> Once the history of a branch has been squashed, it is not possible to merge it back into another branch since their history will have diverged. </Tip> Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. branch (`str`, *optional*): The branch to squash. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The commit message to use for the squashed commit. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if listing commits from a dataset or a Space, `None` or `"model"` if listing from a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. 
[`~utils.RevisionNotFoundError`]: If the branch to squash cannot be found. [`~utils.BadRequestError`]: If invalid reference for a branch. You cannot squash history on tags. Example: ```py >>> from huggingface_hub import HfApi >>> api = HfApi() # Create repo >>> repo_id = api.create_repo("test-squash").repo_id # Make a lot of commits. >>> api.upload_file(repo_id=repo_id, path_in_repo="file.txt", path_or_fileobj=b"content") >>> api.upload_file(repo_id=repo_id, path_in_repo="lfs.bin", path_or_fileobj=b"content") >>> api.upload_file(repo_id=repo_id, path_in_repo="file.txt", path_or_fileobj=b"another_content") # Squash history >>> api.super_squash_history(repo_id=repo_id) ``` """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL if repo_type not in constants.REPO_TYPES: raise ValueError("Invalid repo type") if branch is None: branch = constants.DEFAULT_REVISION # Prepare request url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/super-squash/{branch}" headers = self._build_hf_headers(token=token) commit_message = commit_message or f"Super-squash branch '{branch}' using huggingface_hub" # Super-squash response = get_session().post(url=url, headers=headers, json={"message": commit_message}) hf_raise_for_status(response) @validate_hf_hub_args def create_repo( self, repo_id: str, *, token: Union[str, bool, None] = None, private: Optional[bool] = None, repo_type: Optional[str] = None, exist_ok: bool = False, resource_group_id: Optional[str] = None, space_sdk: Optional[str] = None, space_hardware: Optional[SpaceHardware] = None, space_storage: Optional[SpaceStorage] = None, space_sleep_time: Optional[int] = None, space_secrets: Optional[List[Dict[str, str]]] = None, space_variables: Optional[List[Dict[str, str]]] = None, ) -> RepoUrl: """Create an empty repo on the HuggingFace Hub. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. private (`bool`, *optional*): Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. exist_ok (`bool`, *optional*, defaults to `False`): If `True`, do not raise an error if repo already exists. resource_group_id (`str`, *optional*): Resource group in which to create the repo. Resource groups is only available for organizations and allow to define which members of the organization can access the resource. The ID of a resource group can be found in the URL of the resource's page on the Hub (e.g. `"66670e5163145ca562cb1988"`). To learn more about resource groups, see https://huggingface.co/docs/hub/en/security-resource-groups. space_sdk (`str`, *optional*): Choice of SDK to use if repo_type is "space". Can be "streamlit", "gradio", "docker", or "static". space_hardware (`SpaceHardware` or `str`, *optional*): Choice of Hardware if repo_type is "space". See [`SpaceHardware`] for a complete list. space_storage (`SpaceStorage` or `str`, *optional*): Choice of persistent storage tier. Example: `"small"`. 
See [`SpaceStorage`] for a complete list. space_sleep_time (`int`, *optional*): Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure the sleep time (value is fixed to 48 hours of inactivity). See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. space_secrets (`List[Dict[str, str]]`, *optional*): A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. space_variables (`List[Dict[str, str]]`, *optional*): A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables. Returns: [`RepoUrl`]: URL to the newly created repo. Value is a subclass of `str` containing attributes like `endpoint`, `repo_type` and `repo_id`. """ organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id) path = f"{self.endpoint}/api/repos/create" if repo_type not in constants.REPO_TYPES: raise ValueError("Invalid repo type") json: Dict[str, Any] = {"name": name, "organization": organization} if private is not None: json["private"] = private if repo_type is not None: json["type"] = repo_type if repo_type == "space": if space_sdk is None: raise ValueError( "No space_sdk provided. `create_repo` expects space_sdk to be one" f" of {constants.SPACES_SDK_TYPES} when repo_type is 'space'`" ) if space_sdk not in constants.SPACES_SDK_TYPES: raise ValueError(f"Invalid space_sdk. Please choose one of {constants.SPACES_SDK_TYPES}.") json["sdk"] = space_sdk if space_sdk is not None and repo_type != "space": warnings.warn("Ignoring provided space_sdk because repo_type is not 'space'.") function_args = [ "space_hardware", "space_storage", "space_sleep_time", "space_secrets", "space_variables", ] json_keys = ["hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"] values = [space_hardware, space_storage, space_sleep_time, space_secrets, space_variables] if repo_type == "space": json.update({k: v for k, v in zip(json_keys, values) if v is not None}) else: provided_space_args = [key for key, value in zip(function_args, values) if value is not None] if provided_space_args: warnings.warn(f"Ignoring provided {', '.join(provided_space_args)} because repo_type is not 'space'.") if getattr(self, "_lfsmultipartthresh", None): # Testing purposes only. # See https://github.com/huggingface/huggingface_hub/pull/733/files#r820604472 json["lfsmultipartthresh"] = self._lfsmultipartthresh # type: ignore if resource_group_id is not None: json["resourceGroupId"] = resource_group_id headers = self._build_hf_headers(token=token) while True: r = get_session().post(path, headers=headers, json=json) if r.status_code == 409 and "Cannot create repo: another conflicting operation is in progress" in r.text: # Since https://github.com/huggingface/moon-landing/pull/7272 (private repo), it is not possible to # concurrently create repos on the Hub for a same user. This is rarely an issue, except when running # tests. To avoid any inconvenience, we retry to create the repo for this specific error. 
# NOTE: This could have being fixed directly in the tests but adding it here should fixed CIs for all # dependent libraries. # NOTE: If a fix is implemented server-side, we should be able to remove this retry mechanism. logger.debug("Create repo failed due to a concurrency issue. Retrying...") continue break try: hf_raise_for_status(r) except HTTPError as err: if exist_ok and err.response.status_code == 409: # Repo already exists and `exist_ok=True` pass elif exist_ok and err.response.status_code == 403: # No write permission on the namespace but repo might already exist try: self.repo_info(repo_id=repo_id, repo_type=repo_type, token=token) if repo_type is None or repo_type == constants.REPO_TYPE_MODEL: return RepoUrl(f"{self.endpoint}/{repo_id}") return RepoUrl(f"{self.endpoint}/{repo_type}/{repo_id}") except HfHubHTTPError: raise err else: raise d = r.json() return RepoUrl(d["url"], endpoint=self.endpoint) @validate_hf_hub_args def delete_repo( self, repo_id: str, *, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, missing_ok: bool = False, ) -> None: """ Delete a repo from the HuggingFace Hub. CAUTION: this is irreversible. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. missing_ok (`bool`, *optional*, defaults to `False`): If `True`, do not raise an error if repo does not exist. Raises: [`~utils.RepositoryNotFoundError`] If the repository to delete from cannot be found and `missing_ok` is set to False (default). """ organization, name = repo_id.split("/") if "/" in repo_id else (None, repo_id) path = f"{self.endpoint}/api/repos/delete" if repo_type not in constants.REPO_TYPES: raise ValueError("Invalid repo type") json = {"name": name, "organization": organization} if repo_type is not None: json["type"] = repo_type headers = self._build_hf_headers(token=token) r = get_session().delete(path, headers=headers, json=json) try: hf_raise_for_status(r) except RepositoryNotFoundError: if not missing_ok: raise @_deprecate_method(version="0.29", message="Please use `update_repo_settings` instead.") @validate_hf_hub_args def update_repo_visibility( self, repo_id: str, private: bool = False, *, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, ) -> Dict[str, bool]: """Update the visibility setting of a repository. Deprecated. Use `update_repo_settings` instead. Args: repo_id (`str`, *optional*): A namespace (user or an organization) and a repo name separated by a `/`. private (`bool`, *optional*, defaults to `False`): Whether the repository should be private. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. Returns: The HTTP response in json. 
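        Example (a short sketch of the recommended replacement; the repo id is illustrative):

        ```python
        >>> from huggingface_hub import HfApi
        >>> api = HfApi()
        >>> # Deprecated:
        >>> api.update_repo_visibility("username/my-model", private=True)
        >>> # Preferred equivalent:
        >>> api.update_repo_settings("username/my-model", private=True)
        ```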
<Tip> Raises the following errors: - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL # default repo type r = get_session().put( url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings", headers=self._build_hf_headers(token=token), json={"private": private}, ) hf_raise_for_status(r) return r.json() @validate_hf_hub_args def update_repo_settings( self, repo_id: str, *, gated: Optional[Literal["auto", "manual", False]] = None, private: Optional[bool] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, ) -> None: """ Update the settings of a repository, including gated access and visibility. To give more control over how repos are used, the Hub allows repo authors to enable access requests for their repos, and also to set the visibility of the repo to private. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a /. gated (`Literal["auto", "manual", False]`, *optional*): The gated status for the repository. If set to `None` (default), the `gated` setting of the repository won't be updated. * "auto": The repository is gated, and access requests are automatically approved or denied based on predefined criteria. * "manual": The repository is gated, and access requests require manual approval. * False : The repository is not gated, and anyone can access it. private (`bool`, *optional*): Whether the repository should be private. token (`Union[str, bool, None]`, *optional*): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass False. repo_type (`str`, *optional*): The type of the repository to update settings from (`"model"`, `"dataset"` or `"space"`). Defaults to `"model"`. Raises: [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If gated is not one of "auto", "manual", or False. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If repo_type is not one of the values in constants.REPO_TYPES. [`~utils.HfHubHTTPError`]: If the request to the Hugging Face Hub API fails. [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL # default repo type # Check if both gated and private are None if gated is None and private is None: raise ValueError("At least one of 'gated' or 'private' must be provided.") # Build headers headers = self._build_hf_headers(token=token) # Prepare the JSON payload for the PUT request payload: Dict = {} if gated is not None: if gated not in ["auto", "manual", False]: raise ValueError(f"Invalid gated status, must be one of 'auto', 'manual', or False. 
Got '{gated}'.") payload["gated"] = gated if private is not None: payload["private"] = private r = get_session().put( url=f"{self.endpoint}/api/{repo_type}s/{repo_id}/settings", headers=headers, json=payload, ) hf_raise_for_status(r) def move_repo( self, from_id: str, to_id: str, *, repo_type: Optional[str] = None, token: Union[str, bool, None] = None, ): """ Moving a repository from namespace1/repo_name1 to namespace2/repo_name2 Note there are certain limitations. For more information about moving repositories, please see https://hf.co/docs/hub/repositories-settings#renaming-or-transferring-a-repo. Args: from_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. Original repository identifier. to_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. Final repository identifier. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. <Tip> Raises the following errors: - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ if len(from_id.split("/")) != 2: raise ValueError(f"Invalid repo_id: {from_id}. It should have a namespace (:namespace:/:repo_name:)") if len(to_id.split("/")) != 2: raise ValueError(f"Invalid repo_id: {to_id}. It should have a namespace (:namespace:/:repo_name:)") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL # Hub won't accept `None`. json = {"fromRepo": from_id, "toRepo": to_id, "type": repo_type} path = f"{self.endpoint}/api/repos/move" headers = self._build_hf_headers(token=token) r = get_session().post(path, headers=headers, json=json) try: hf_raise_for_status(r) except HfHubHTTPError as e: e.append_to_message( "\nFor additional documentation please see" " https://hf.co/docs/hub/repositories-settings#renaming-or-transferring-a-repo." ) raise @overload def create_commit( # type: ignore self, repo_id: str, operations: Iterable[CommitOperation], *, commit_message: str, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, num_threads: int = 5, parent_commit: Optional[str] = None, run_as_future: Literal[False] = ..., ) -> CommitInfo: ... @overload def create_commit( self, repo_id: str, operations: Iterable[CommitOperation], *, commit_message: str, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, num_threads: int = 5, parent_commit: Optional[str] = None, run_as_future: Literal[True] = ..., ) -> Future[CommitInfo]: ... 
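    # Usage sketch for `create_commit` (illustrative only: the repo id and file names below are
    # placeholders, not part of the API). A single call can mix additions and deletions, which are
    # applied together in one atomic commit:
    #
    #     from huggingface_hub import CommitOperationAdd, CommitOperationDelete, HfApi
    #     api = HfApi()
    #     api.create_commit(
    #         repo_id="username/my-model",
    #         operations=[
    #             CommitOperationAdd(path_in_repo="weights.safetensors", path_or_fileobj="./weights.safetensors"),
    #             CommitOperationDelete(path_in_repo="old_weights.bin"),
    #         ],
    #         commit_message="Replace weights",
    #     )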
@validate_hf_hub_args @future_compatible def create_commit( self, repo_id: str, operations: Iterable[CommitOperation], *, commit_message: str, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, num_threads: int = 5, parent_commit: Optional[str] = None, run_as_future: bool = False, ) -> Union[CommitInfo, Future[CommitInfo]]: """ Creates a commit in the given repo, deleting & uploading files as needed. <Tip warning={true}> The input list of `CommitOperation` will be mutated during the commit process. Do not reuse the same objects for multiple commits. </Tip> <Tip warning={true}> `create_commit` assumes that the repo already exists on the Hub. If you get a Client error 404, please make sure you are authenticated and that `repo_id` and `repo_type` are set correctly. If repo does not exist, create it first using [`~hf_api.create_repo`]. </Tip> <Tip warning={true}> `create_commit` is limited to 25k LFS files and a 1GB payload for regular files. </Tip> Args: repo_id (`str`): The repository in which the commit will be created, for example: `"username/custom_transformers"` operations (`Iterable` of [`~hf_api.CommitOperation`]): An iterable of operations to include in the commit, either: - [`~hf_api.CommitOperationAdd`] to upload a file - [`~hf_api.CommitOperationDelete`] to delete a file - [`~hf_api.CommitOperationCopy`] to copy a file Operation objects will be mutated to include information relative to the upload. Do not reuse the same objects for multiple commits. commit_message (`str`): The summary (first line) of the commit that will be created. commit_description (`str`, *optional*): The description of the commit that will be created token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. num_threads (`int`, *optional*): Number of concurrent threads for uploading files. Defaults to 5. Setting it to 2 means at most 2 files will be uploaded concurrently. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. run_as_future (`bool`, *optional*): Whether or not to run this method in the background. 
Background jobs are run sequentially without blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) object. Defaults to `False`. Returns: [`CommitInfo`] or `Future`: Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will contain the result when executed. Raises: [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If commit message is empty. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If parent commit is not a valid commit OID. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If a README.md file with an invalid metadata section is committed. In this case, the commit will fail early, before trying to upload any file. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If `create_pr` is `True` and revision is neither `None` nor `"main"`. [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. """ if parent_commit is not None and not constants.REGEX_COMMIT_OID.fullmatch(parent_commit): raise ValueError( f"`parent_commit` is not a valid commit OID. It must match the following regex: {constants.REGEX_COMMIT_OID}" ) if commit_message is None or len(commit_message) == 0: raise ValueError("`commit_message` can't be empty, please pass a value.") commit_description = commit_description if commit_description is not None else "" repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") unquoted_revision = revision or constants.DEFAULT_REVISION revision = quote(unquoted_revision, safe="") create_pr = create_pr if create_pr is not None else False headers = self._build_hf_headers(token=token) operations = list(operations) additions = [op for op in operations if isinstance(op, CommitOperationAdd)] copies = [op for op in operations if isinstance(op, CommitOperationCopy)] nb_additions = len(additions) nb_copies = len(copies) nb_deletions = len(operations) - nb_additions - nb_copies for addition in additions: if addition._is_committed: raise ValueError( f"CommitOperationAdd {addition} has already being committed and cannot be reused. Please create a" " new CommitOperationAdd object if you want to create a new commit." ) if repo_type != "dataset": for addition in additions: if addition.path_in_repo.endswith((".arrow", ".parquet")): warnings.warn( f"It seems that you are about to commit a data file ({addition.path_in_repo}) to a {repo_type}" " repository. You are sure this is intended? If you are trying to upload a dataset, please" " set `repo_type='dataset'` or `--repo-type=dataset` in a CLI." ) logger.debug( f"About to commit to the hub: {len(additions)} addition(s), {len(copies)} copie(s) and" f" {nb_deletions} deletion(s)." ) # If updating a README.md file, make sure the metadata format is valid # It's better to fail early than to fail after all the files have been uploaded. 
for addition in additions: if addition.path_in_repo == "README.md": with addition.as_file() as file: content = file.read().decode() self._validate_yaml(content, repo_type=repo_type, token=token) # Skip other additions after `README.md` has been processed break # If updating twice the same file or update then delete a file in a single commit _warn_on_overwriting_operations(operations) self.preupload_lfs_files( repo_id=repo_id, additions=additions, token=token, repo_type=repo_type, revision=unquoted_revision, # first-class methods take unquoted revision create_pr=create_pr, num_threads=num_threads, free_memory=False, # do not remove `CommitOperationAdd.path_or_fileobj` on LFS files for "normal" users ) # Remove no-op operations (files that have not changed) operations_without_no_op = [] for operation in operations: if ( isinstance(operation, CommitOperationAdd) and operation._remote_oid is not None and operation._remote_oid == operation._local_oid ): # File already exists on the Hub and has not changed: we can skip it. logger.debug(f"Skipping upload for '{operation.path_in_repo}' as the file has not changed.") continue operations_without_no_op.append(operation) if len(operations) != len(operations_without_no_op): logger.info( f"Removing {len(operations) - len(operations_without_no_op)} file(s) from commit that have not changed." ) # Return early if empty commit if len(operations_without_no_op) == 0: logger.warning("No files have been modified since last commit. Skipping to prevent empty commit.") # Get latest commit info try: info = self.repo_info(repo_id=repo_id, repo_type=repo_type, revision=unquoted_revision, token=token) except RepositoryNotFoundError as e: e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) raise # Return commit info based on latest commit url_prefix = self.endpoint if repo_type is not None and repo_type != constants.REPO_TYPE_MODEL: url_prefix = f"{url_prefix}/{repo_type}s" return CommitInfo( commit_url=f"{url_prefix}/{repo_id}/commit/{info.sha}", commit_message=commit_message, commit_description=commit_description, oid=info.sha, # type: ignore[arg-type] ) files_to_copy = _fetch_files_to_copy( copies=copies, repo_type=repo_type, repo_id=repo_id, headers=headers, revision=unquoted_revision, endpoint=self.endpoint, ) commit_payload = _prepare_commit_payload( operations=operations, files_to_copy=files_to_copy, commit_message=commit_message, commit_description=commit_description, parent_commit=parent_commit, ) commit_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/commit/{revision}" def _payload_as_ndjson() -> Iterable[bytes]: for item in commit_payload: yield json.dumps(item).encode() yield b"\n" headers = { # See https://github.com/huggingface/huggingface_hub/issues/1085#issuecomment-1265208073 "Content-Type": "application/x-ndjson", **headers, } data = b"".join(_payload_as_ndjson()) params = {"create_pr": "1"} if create_pr else None try: commit_resp = get_session().post(url=commit_url, headers=headers, data=data, params=params) hf_raise_for_status(commit_resp, endpoint_name="commit") except RepositoryNotFoundError as e: e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) raise except EntryNotFoundError as e: if nb_deletions > 0 and "A file with this name doesn't exist" in str(e): e.append_to_message( "\nMake sure to differentiate file and folder paths in delete" " operations with a trailing '/' or using `is_folder=True/False`." 
) raise # Mark additions as committed (cannot be reused in another commit) for addition in additions: addition._is_committed = True commit_data = commit_resp.json() return CommitInfo( commit_url=commit_data["commitUrl"], commit_message=commit_message, commit_description=commit_description, oid=commit_data["commitOid"], pr_url=commit_data["pullRequestUrl"] if create_pr else None, ) def preupload_lfs_files( self, repo_id: str, additions: Iterable[CommitOperationAdd], *, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, num_threads: int = 5, free_memory: bool = True, gitignore_content: Optional[str] = None, ): """Pre-upload LFS files to S3 in preparation on a future commit. This method is useful if you are generating the files to upload on-the-fly and you don't want to store them in memory before uploading them all at once. <Tip warning={true}> This is a power-user method. You shouldn't need to call it directly to make a normal commit. Use [`create_commit`] directly instead. </Tip> <Tip warning={true}> Commit operations will be mutated during the process. In particular, the attached `path_or_fileobj` will be removed after the upload to save memory (and replaced by an empty `bytes` object). Do not reuse the same objects except to pass them to [`create_commit`]. If you don't want to remove the attached content from the commit operation object, pass `free_memory=False`. </Tip> Args: repo_id (`str`): The repository in which you will commit the files, for example: `"username/custom_transformers"`. operations (`Iterable` of [`CommitOperationAdd`]): The list of files to upload. Warning: the objects in this list will be mutated to include information relative to the upload. Do not reuse the same objects for multiple commits. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): The type of repository to upload to (e.g. `"model"` -default-, `"dataset"` or `"space"`). revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. create_pr (`boolean`, *optional*): Whether or not you plan to create a Pull Request with that commit. Defaults to `False`. num_threads (`int`, *optional*): Number of concurrent threads for uploading files. Defaults to 5. Setting it to 2 means at most 2 files will be uploaded concurrently. gitignore_content (`str`, *optional*): The content of the `.gitignore` file to know which files should be ignored. The order of priority is to first check if `gitignore_content` is passed, then check if the `.gitignore` file is present in the list of files to commit and finally default to the `.gitignore` file already hosted on the Hub (if any). Example: ```py >>> from huggingface_hub import CommitOperationAdd, preupload_lfs_files, create_commit, create_repo >>> repo_id = create_repo("test_preupload").repo_id # Generate and preupload LFS files one by one >>> operations = [] # List of all `CommitOperationAdd` objects that will be generated >>> for i in range(5): ... content = ... # generate binary content ... addition = CommitOperationAdd(path_in_repo=f"shard_{i}_of_5.bin", path_or_fileobj=content) ... preupload_lfs_files(repo_id, additions=[addition]) # upload + free memory ... 
operations.append(addition) # Create commit >>> create_commit(repo_id, operations=operations, commit_message="Commit all shards") ``` """ repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION create_pr = create_pr if create_pr is not None else False headers = self._build_hf_headers(token=token) # Check if a `gitignore` file is being committed to the Hub. additions = list(additions) if gitignore_content is None: for addition in additions: if addition.path_in_repo == ".gitignore": with addition.as_file() as f: gitignore_content = f.read().decode() break # Filter out already uploaded files new_additions = [addition for addition in additions if not addition._is_uploaded] # Check which new files are LFS try: _fetch_upload_modes( additions=new_additions, repo_type=repo_type, repo_id=repo_id, headers=headers, revision=revision, endpoint=self.endpoint, create_pr=create_pr or False, gitignore_content=gitignore_content, ) except RepositoryNotFoundError as e: e.append_to_message(_CREATE_COMMIT_NO_REPO_ERROR_MESSAGE) raise # Filter out regular files new_lfs_additions = [addition for addition in new_additions if addition._upload_mode == "lfs"] # Filter out files listed in .gitignore new_lfs_additions_to_upload = [] for addition in new_lfs_additions: if addition._should_ignore: logger.debug(f"Skipping upload for LFS file '{addition.path_in_repo}' (ignored by gitignore file).") else: new_lfs_additions_to_upload.append(addition) if len(new_lfs_additions) != len(new_lfs_additions_to_upload): logger.info( f"Skipped upload for {len(new_lfs_additions) - len(new_lfs_additions_to_upload)} LFS file(s) " "(ignored by gitignore file)." ) # Upload new LFS files _upload_lfs_files( additions=new_lfs_additions_to_upload, repo_type=repo_type, repo_id=repo_id, headers=headers, endpoint=self.endpoint, num_threads=num_threads, # If `create_pr`, we don't want to check user permission on the revision as users with read permission # should still be able to create PRs even if they don't have write permission on the target branch of the # PR (i.e. `revision`). revision=revision if not create_pr else None, ) for addition in new_lfs_additions_to_upload: addition._is_uploaded = True if free_memory: addition.path_or_fileobj = b"" @overload def upload_file( # type: ignore self, *, path_or_fileobj: Union[str, Path, bytes, BinaryIO], path_in_repo: str, repo_id: str, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, run_as_future: Literal[False] = ..., ) -> CommitInfo: ... @overload def upload_file( self, *, path_or_fileobj: Union[str, Path, bytes, BinaryIO], path_in_repo: str, repo_id: str, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, run_as_future: Literal[True] = ..., ) -> Future[CommitInfo]: ... 
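# Usage sketch for `create_commit` / `preupload_lfs_files` defined above (`create_commit`
# calls `preupload_lfs_files` under the hood). Repo id and file names are illustrative and
# assume the repo already exists on the Hub:
#
#     >>> from huggingface_hub import CommitOperationAdd, CommitOperationDelete, HfApi
#     >>> api = HfApi()
#     >>> operations = [
#     ...     CommitOperationAdd(path_in_repo="weights.bin", path_or_fileobj=b"..."),
#     ...     CommitOperationDelete(path_in_repo="old_weights.bin"),
#     ... ]
#     >>> api.create_commit(
#     ...     repo_id="username/my-model",
#     ...     operations=operations,
#     ...     commit_message="Replace old weights",
#     ... )
#     CommitInfo(commit_url='...', commit_message='Replace old weights', ...)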
@validate_hf_hub_args @future_compatible def upload_file( self, *, path_or_fileobj: Union[str, Path, bytes, BinaryIO], path_in_repo: str, repo_id: str, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, run_as_future: bool = False, ) -> Union[CommitInfo, Future[CommitInfo]]: """ Upload a local file (up to 50 GB) to the given repo. The upload is done through a HTTP post request, and doesn't require git or git-lfs to be installed. Args: path_or_fileobj (`str`, `Path`, `bytes`, or `IO`): Path to a file on the local machine or binary data stream / fileobj / buffer. path_in_repo (`str`): Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"` repo_id (`str`): The repository to which the file will be uploaded, for example: `"username/custom_transformers"` token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The summary / title / first line of the generated commit commit_description (`str` *optional*) The description of the generated commit create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. run_as_future (`bool`, *optional*): Whether or not to run this method in the background. Background jobs are run sequentially without blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) object. Defaults to `False`. Returns: [`CommitInfo`] or `Future`: Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will contain the result when executed. 
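Example of a non-blocking upload (a minimal sketch; `run_as_future=True` returns a standard [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) and the repo id is illustrative):

```python
>>> from huggingface_hub import HfApi
>>> api = HfApi()
>>> future = api.upload_file(
...     path_or_fileobj=b"some bytes",
...     path_in_repo="data.bin",
...     repo_id="username/my-model",
...     run_as_future=True,
... )
>>> future.done()
False
>>> future.result()  # blocks until the upload is complete
CommitInfo(...)
```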
<Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. - [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. </Tip> <Tip warning={true}> `upload_file` assumes that the repo already exists on the Hub. If you get a Client error 404, please make sure you are authenticated and that `repo_id` and `repo_type` are set correctly. If repo does not exist, create it first using [`~hf_api.create_repo`]. </Tip> Example: ```python >>> from huggingface_hub import upload_file >>> with open("./local/filepath", "rb") as fobj: ... upload_file( ... path_or_fileobj=fileobj, ... path_in_repo="remote/file/path.h5", ... repo_id="username/my-dataset", ... repo_type="dataset", ... token="my_token", ... ) "https://huggingface.co/datasets/username/my-dataset/blob/main/remote/file/path.h5" >>> upload_file( ... path_or_fileobj=".\\\\local\\\\file\\\\path", ... path_in_repo="remote/file/path.h5", ... repo_id="username/my-model", ... token="my_token", ... ) "https://huggingface.co/username/my-model/blob/main/remote/file/path.h5" >>> upload_file( ... path_or_fileobj=".\\\\local\\\\file\\\\path", ... path_in_repo="remote/file/path.h5", ... repo_id="username/my-model", ... token="my_token", ... create_pr=True, ... ) "https://huggingface.co/username/my-model/blob/refs%2Fpr%2F1/remote/file/path.h5" ``` """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") commit_message = ( commit_message if commit_message is not None else f"Upload {path_in_repo} with huggingface_hub" ) operation = CommitOperationAdd( path_or_fileobj=path_or_fileobj, path_in_repo=path_in_repo, ) commit_info = self.create_commit( repo_id=repo_id, repo_type=repo_type, operations=[operation], commit_message=commit_message, commit_description=commit_description, token=token, revision=revision, create_pr=create_pr, parent_commit=parent_commit, ) if commit_info.pr_url is not None: revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="") if repo_type in constants.REPO_TYPES_URL_PREFIXES: repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id revision = revision if revision is not None else constants.DEFAULT_REVISION return CommitInfo( commit_url=commit_info.commit_url, commit_message=commit_info.commit_message, commit_description=commit_info.commit_description, oid=commit_info.oid, pr_url=commit_info.pr_url, # Similar to `hf_hub_url` but it's "blob" instead of "resolve" # TODO: remove this in v1.0 _url=f"{self.endpoint}/{repo_id}/blob/{revision}/{path_in_repo}", ) @overload def upload_folder( # type: ignore self, *, repo_id: str, folder_path: Union[str, Path], path_in_repo: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, delete_patterns: 
Optional[Union[List[str], str]] = None, run_as_future: Literal[False] = ..., ) -> CommitInfo: ... @overload def upload_folder( # type: ignore self, *, repo_id: str, folder_path: Union[str, Path], path_in_repo: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, delete_patterns: Optional[Union[List[str], str]] = None, run_as_future: Literal[True] = ..., ) -> Future[CommitInfo]: ... @validate_hf_hub_args @future_compatible def upload_folder( self, *, repo_id: str, folder_path: Union[str, Path], path_in_repo: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, delete_patterns: Optional[Union[List[str], str]] = None, run_as_future: bool = False, ) -> Union[CommitInfo, Future[CommitInfo]]: """ Upload a local folder to the given repo. The upload is done through a HTTP requests, and doesn't require git or git-lfs to be installed. The structure of the folder will be preserved. Files with the same name already present in the repository will be overwritten. Others will be left untouched. Use the `allow_patterns` and `ignore_patterns` arguments to specify which files to upload. These parameters accept either a single pattern or a list of patterns. Patterns are Standard Wildcards (globbing patterns) as documented [here](https://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm). If both `allow_patterns` and `ignore_patterns` are provided, both constraints apply. By default, all files from the folder are uploaded. Use the `delete_patterns` argument to specify remote files you want to delete. Input type is the same as for `allow_patterns` (see above). If `path_in_repo` is also provided, the patterns are matched against paths relative to this folder. For example, `upload_folder(..., path_in_repo="experiment", delete_patterns="logs/*")` will delete any remote file under `./experiment/logs/`. Note that the `.gitattributes` file will not be deleted even if it matches the patterns. Any `.git/` folder present in any subdirectory will be ignored. However, please be aware that the `.gitignore` file is not taken into account. Uses `HfApi.create_commit` under the hood. Args: repo_id (`str`): The repository to which the file will be uploaded, for example: `"username/custom_transformers"` folder_path (`str` or `Path`): Path to the folder to upload on the local file system path_in_repo (`str`, *optional*): Relative path of the directory in the repo, for example: `"checkpoints/1fec34a/results"`. Will default to the root folder of the repository. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. 
repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The summary / title / first line of the generated commit. Defaults to: `f"Upload {path_in_repo} with huggingface_hub"` commit_description (`str` *optional*): The description of the generated commit create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. allow_patterns (`List[str]` or `str`, *optional*): If provided, only files matching at least one pattern are uploaded. ignore_patterns (`List[str]` or `str`, *optional*): If provided, files matching any of the patterns are not uploaded. delete_patterns (`List[str]` or `str`, *optional*): If provided, remote files matching any of the patterns will be deleted from the repo while committing new files. This is useful if you don't know which files have already been uploaded. Note: to avoid discrepancies the `.gitattributes` file is not deleted even if it matches the pattern. run_as_future (`bool`, *optional*): Whether or not to run this method in the background. Background jobs are run sequentially without blocking the main thread. Passing `run_as_future=True` will return a [Future](https://docs.python.org/3/library/concurrent.futures.html#future-objects) object. Defaults to `False`. Returns: [`CommitInfo`] or `Future`: Instance of [`CommitInfo`] containing information about the newly created commit (commit hash, commit url, pr url, commit message,...). If `run_as_future=True` is passed, returns a Future object which will contain the result when executed. <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid </Tip> <Tip warning={true}> `upload_folder` assumes that the repo already exists on the Hub. If you get a Client error 404, please make sure you are authenticated and that `repo_id` and `repo_type` are set correctly. If repo does not exist, create it first using [`~hf_api.create_repo`]. </Tip> <Tip> When dealing with a large folder (thousands of files or hundreds of GB), we recommend using [`~hf_api.upload_large_folder`] instead. </Tip> Example: ```python # Upload checkpoints folder except the log files >>> upload_folder( ... folder_path="local/checkpoints", ... path_in_repo="remote/experiment/checkpoints", ... repo_id="username/my-dataset", ... repo_type="datasets", ... token="my_token", ... 
ignore_patterns="**/logs/*.txt", ... ) # "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints" # Upload checkpoints folder including logs while deleting existing logs from the repo # Useful if you don't know exactly which log files have already being pushed >>> upload_folder( ... folder_path="local/checkpoints", ... path_in_repo="remote/experiment/checkpoints", ... repo_id="username/my-dataset", ... repo_type="datasets", ... token="my_token", ... delete_patterns="**/logs/*.txt", ... ) "https://huggingface.co/datasets/username/my-dataset/tree/main/remote/experiment/checkpoints" # Upload checkpoints folder while creating a PR >>> upload_folder( ... folder_path="local/checkpoints", ... path_in_repo="remote/experiment/checkpoints", ... repo_id="username/my-dataset", ... repo_type="datasets", ... token="my_token", ... create_pr=True, ... ) "https://huggingface.co/datasets/username/my-dataset/tree/refs%2Fpr%2F1/remote/experiment/checkpoints" ``` """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") # By default, upload folder to the root directory in repo. if path_in_repo is None: path_in_repo = "" # Do not upload .git folder if ignore_patterns is None: ignore_patterns = [] elif isinstance(ignore_patterns, str): ignore_patterns = [ignore_patterns] ignore_patterns += DEFAULT_IGNORE_PATTERNS delete_operations = self._prepare_folder_deletions( repo_id=repo_id, repo_type=repo_type, revision=constants.DEFAULT_REVISION if create_pr else revision, token=token, path_in_repo=path_in_repo, delete_patterns=delete_patterns, ) add_operations = self._prepare_upload_folder_additions( folder_path, path_in_repo, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, token=token, repo_type=repo_type, ) # Optimize operations: if some files will be overwritten, we don't need to delete them first if len(add_operations) > 0: added_paths = set(op.path_in_repo for op in add_operations) delete_operations = [ delete_op for delete_op in delete_operations if delete_op.path_in_repo not in added_paths ] commit_operations = delete_operations + add_operations commit_message = commit_message or "Upload folder using huggingface_hub" commit_info = self.create_commit( repo_type=repo_type, repo_id=repo_id, operations=commit_operations, commit_message=commit_message, commit_description=commit_description, token=token, revision=revision, create_pr=create_pr, parent_commit=parent_commit, ) # Create url to uploaded folder (for legacy return value) if create_pr and commit_info.pr_url is not None: revision = quote(_parse_revision_from_pr_url(commit_info.pr_url), safe="") if repo_type in constants.REPO_TYPES_URL_PREFIXES: repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id revision = revision if revision is not None else constants.DEFAULT_REVISION return CommitInfo( commit_url=commit_info.commit_url, commit_message=commit_info.commit_message, commit_description=commit_info.commit_description, oid=commit_info.oid, pr_url=commit_info.pr_url, # Similar to `hf_hub_url` but it's "tree" instead of "resolve" # TODO: remove this in v1.0 _url=f"{self.endpoint}/{repo_id}/tree/{revision}/{path_in_repo}", ) @validate_hf_hub_args def delete_file( self, path_in_repo: str, repo_id: str, *, token: Union[str, bool, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, 
create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, ) -> CommitInfo: """ Deletes a file in the given repo. Args: path_in_repo (`str`): Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"` repo_id (`str`): The repository from which the file will be deleted, for example: `"username/custom_transformers"` token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if the file is in a dataset or space, `None` or `"model"` if in a model. Default is `None`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The summary / title / first line of the generated commit. Defaults to `f"Delete {path_in_repo} with huggingface_hub"`. commit_description (`str` *optional*) The description of the generated commit create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. - [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. - [`~utils.EntryNotFoundError`] If the file to download cannot be found. </Tip> """ commit_message = ( commit_message if commit_message is not None else f"Delete {path_in_repo} with huggingface_hub" ) operations = [CommitOperationDelete(path_in_repo=path_in_repo)] return self.create_commit( repo_id=repo_id, repo_type=repo_type, token=token, operations=operations, revision=revision, commit_message=commit_message, commit_description=commit_description, create_pr=create_pr, parent_commit=parent_commit, ) @validate_hf_hub_args def delete_files( self, repo_id: str, delete_patterns: List[str], *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, ) -> CommitInfo: """ Delete files from a repository on the Hub. 
If a folder path is provided, the entire folder is deleted as well as all files it contained. Args: repo_id (`str`): The repository from which the folder will be deleted, for example: `"username/custom_transformers"` delete_patterns (`List[str]`): List of files or folders to delete. Each string can either be a file path, a folder path or a Unix shell-style wildcard. E.g. `["file.txt", "folder/", "data/*.parquet"]` token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. to the stored token. repo_type (`str`, *optional*): Type of the repo to delete files from. Can be `"model"`, `"dataset"` or `"space"`. Defaults to `"model"`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The summary (first line) of the generated commit. Defaults to `f"Delete files using huggingface_hub"`. commit_description (`str` *optional*) The description of the generated commit. create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. """ operations = self._prepare_folder_deletions( repo_id=repo_id, repo_type=repo_type, delete_patterns=delete_patterns, path_in_repo="", revision=revision ) if commit_message is None: commit_message = f"Delete files {' '.join(delete_patterns)} with huggingface_hub" return self.create_commit( repo_id=repo_id, repo_type=repo_type, token=token, operations=operations, revision=revision, commit_message=commit_message, commit_description=commit_description, create_pr=create_pr, parent_commit=parent_commit, ) @validate_hf_hub_args def delete_folder( self, path_in_repo: str, repo_id: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, commit_message: Optional[str] = None, commit_description: Optional[str] = None, create_pr: Optional[bool] = None, parent_commit: Optional[str] = None, ) -> CommitInfo: """ Deletes a folder in the given repo. Simple wrapper around [`create_commit`] method. Args: path_in_repo (`str`): Relative folder path in the repo, for example: `"checkpoints/1fec34a"`. repo_id (`str`): The repository from which the folder will be deleted, for example: `"username/custom_transformers"` token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). 
To disable authentication, pass `False`. to the stored token. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if the folder is in a dataset or space, `None` or `"model"` if in a model. Default is `None`. revision (`str`, *optional*): The git revision to commit from. Defaults to the head of the `"main"` branch. commit_message (`str`, *optional*): The summary / title / first line of the generated commit. Defaults to `f"Delete folder {path_in_repo} with huggingface_hub"`. commit_description (`str` *optional*) The description of the generated commit. create_pr (`boolean`, *optional*): Whether or not to create a Pull Request with that commit. Defaults to `False`. If `revision` is not set, PR is opened against the `"main"` branch. If `revision` is set and is a branch, PR is opened against this branch. If `revision` is set and is not a branch name (example: a commit oid), an `RevisionNotFoundError` is returned by the server. parent_commit (`str`, *optional*): The OID / SHA of the parent commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. If specified and `create_pr` is `False`, the commit will fail if `revision` does not point to `parent_commit`. If specified and `create_pr` is `True`, the pull request will be created from `parent_commit`. Specifying `parent_commit` ensures the repo has not changed before committing the changes, and can be especially useful if the repo is updated / committed to concurrently. """ return self.create_commit( repo_id=repo_id, repo_type=repo_type, token=token, operations=[CommitOperationDelete(path_in_repo=path_in_repo, is_folder=True)], revision=revision, commit_message=( commit_message if commit_message is not None else f"Delete folder {path_in_repo} with huggingface_hub" ), commit_description=commit_description, create_pr=create_pr, parent_commit=parent_commit, ) def upload_large_folder( self, repo_id: str, folder_path: Union[str, Path], *, repo_type: str, # Repo type is required! revision: Optional[str] = None, private: Optional[bool] = None, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, num_workers: Optional[int] = None, print_report: bool = True, print_report_every: int = 60, ) -> None: """Upload a large folder to the Hub in the most resilient way possible. Several workers are started to upload files in an optimized way. Before being committed to a repo, files must be hashed and be pre-uploaded if they are LFS files. Workers will perform these tasks for each file in the folder. At each step, some metadata information about the upload process is saved in the folder under `.cache/.huggingface/` to be able to resume the process if interrupted. The whole process might result in several commits. Args: repo_id (`str`): The repository to which the file will be uploaded. E.g. `"HuggingFaceTB/smollm-corpus"`. folder_path (`str` or `Path`): Path to the folder to upload on the local file system. repo_type (`str`): Type of the repository. Must be one of `"model"`, `"dataset"` or `"space"`. Unlike in all other `HfApi` methods, `repo_type` is explicitly required here. This is to avoid any mistake when uploading a large folder to the Hub, and therefore prevent from having to re-upload everything. revision (`str`, `optional`): The branch to commit to. If not provided, the `main` branch will be used. private (`bool`, `optional`): Whether the repository should be private. If `None` (default), the repo will be public unless the organization's default is private. 
allow_patterns (`List[str]` or `str`, *optional*): If provided, only files matching at least one pattern are uploaded. ignore_patterns (`List[str]` or `str`, *optional*): If provided, files matching any of the patterns are not uploaded. num_workers (`int`, *optional*): Number of workers to start. Defaults to `os.cpu_count() - 2` (minimum 2). A higher number of workers may speed up the process if your machine allows it. However, on machines with a slower connection, it is recommended to keep the number of workers low to ensure better resumability. Indeed, partially uploaded files will have to be completely re-uploaded if the process is interrupted. print_report (`bool`, *optional*): Whether to print a report of the upload progress. Defaults to True. Report is printed to `sys.stdout` every X seconds (60 by defaults) and overwrites the previous report. print_report_every (`int`, *optional*): Frequency at which the report is printed. Defaults to 60 seconds. <Tip> A few things to keep in mind: - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations - Do not start several processes in parallel. - You can interrupt and resume the process at any time. - Do not upload the same folder to several repositories. If you need to do so, you must delete the local `.cache/.huggingface/` folder first. </Tip> <Tip warning={true}> While being much more robust to upload large folders, `upload_large_folder` is more limited than [`upload_folder`] feature-wise. In practice: - you cannot set a custom `path_in_repo`. If you want to upload to a subfolder, you need to set the proper structure locally. - you cannot set a custom `commit_message` and `commit_description` since multiple commits are created. - you cannot delete from the repo while uploading. Please make a separate commit first. - you cannot create a PR directly. Please create a PR first (from the UI or using [`create_pull_request`]) and then commit to it by passing `revision`. </Tip> **Technical details:** `upload_large_folder` process is as follow: 1. (Check parameters and setup.) 2. Create repo if missing. 3. List local files to upload. 4. Start workers. Workers can perform the following tasks: - Hash a file. - Get upload mode (regular or LFS) for a list of files. - Pre-upload an LFS file. - Commit a bunch of files. Once a worker finishes a task, it will move on to the next task based on the priority list (see below) until all files are uploaded and committed. 5. While workers are up, regularly print a report to sys.stdout. Order of priority: 1. Commit if more than 5 minutes since last commit attempt (and at least 1 file). 2. Commit if at least 150 files are ready to commit. 3. Get upload mode if at least 10 files have been hashed. 4. Pre-upload LFS file if at least 1 file and no worker is pre-uploading. 5. Hash file if at least 1 file and no worker is hashing. 6. Get upload mode if at least 1 file and no worker is getting upload mode. 7. Pre-upload LFS file if at least 1 file (exception: if hf_transfer is enabled, only 1 worker can preupload LFS at a time). 8. Hash file if at least 1 file to hash. 9. Get upload mode if at least 1 file to get upload mode. 10. Commit if at least 1 file to commit and at least 1 min since last commit attempt. 11. Commit if at least 1 file to commit and all other queues are empty. Special rules: - If `hf_transfer` is enabled, only 1 LFS uploader at a time. Otherwise the CPU would be bloated by `hf_transfer`. - Only one worker can commit at a time. 
- If no tasks are available, the worker waits for 10 seconds before checking again. """ return upload_large_folder_internal( self, repo_id=repo_id, folder_path=folder_path, repo_type=repo_type, revision=revision, private=private, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, num_workers=num_workers, print_report=print_report, print_report_every=print_report_every, ) @validate_hf_hub_args def get_hf_file_metadata( self, *, url: str, token: Union[bool, str, None] = None, proxies: Optional[Dict] = None, timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT, ) -> HfFileMetadata: """Fetch metadata of a file versioned on the Hub for a given url. Args: url (`str`): File url, for example returned by [`hf_hub_url`]. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. proxies (`dict`, *optional*): Dictionary mapping protocol to the URL of the proxy passed to `requests.request`. timeout (`float`, *optional*, defaults to 10): How many seconds to wait for the server to send metadata before giving up. Returns: A [`HfFileMetadata`] object containing metadata such as location, etag, size and commit_hash. """ if token is None: # Cannot do `token = token or self.token` as token can be `False`. token = self.token return get_hf_file_metadata( url=url, token=token, proxies=proxies, timeout=timeout, library_name=self.library_name, library_version=self.library_version, user_agent=self.user_agent, ) @validate_hf_hub_args def hf_hub_download( self, repo_id: str, filename: str, *, subfolder: Optional[str] = None, repo_type: Optional[str] = None, revision: Optional[str] = None, cache_dir: Union[str, Path, None] = None, local_dir: Union[str, Path, None] = None, force_download: bool = False, proxies: Optional[Dict] = None, etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, token: Union[bool, str, None] = None, local_files_only: bool = False, # Deprecated args resume_download: Optional[bool] = None, force_filename: Optional[str] = None, local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", ) -> str: """Download a given file if it's not already present in the local cache. The new cache file layout looks like this: - The cache directory contains one subfolder per repo_id (namespaced by repo type) - inside each repo folder: - refs is a list of the latest known revision => commit_hash pairs - blobs contains the actual file blobs (identified by their git-sha or sha256, depending on whether they're LFS files or not) - snapshots contains one subfolder per commit, each "commit" contains the subset of the files that have been resolved at that particular commit. Each filename is a symlink to the blob at that particular commit. ``` [ 96] . 
└── [ 160] models--julien-c--EsperBERTo-small ├── [ 160] blobs │ ├── [321M] 403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd │ ├── [ 398] 7cb18dc9bafbfcf74629a4b760af1b160957a83e │ └── [1.4K] d7edf6bd2a681fb0175f7735299831ee1b22b812 ├── [ 96] refs │ └── [ 40] main └── [ 128] snapshots ├── [ 128] 2439f60ef33a0d46d85da5001d52aeda5b00ce9f │ ├── [ 52] README.md -> ../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812 │ └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd └── [ 128] bbc77c8132af1cc5cf678da3f1ddf2de43606d48 ├── [ 52] README.md -> ../../blobs/7cb18dc9bafbfcf74629a4b760af1b160957a83e └── [ 76] pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd ``` If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` to store some metadata related to the downloaded files. While this mechanism is not as robust as the main cache-system, it's optimized for regularly pulling the latest version of a repository. Args: repo_id (`str`): A user or an organization name and a repo name separated by a `/`. filename (`str`): The name of the file in the repo. subfolder (`str`, *optional*): An optional value corresponding to a folder inside the repository. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if downloading from a dataset or space, `None` or `"model"` if downloading from a model. Default is `None`. revision (`str`, *optional*): An optional Git revision id which can be a branch name, a tag, or a commit hash. cache_dir (`str`, `Path`, *optional*): Path to the folder where cached files are stored. local_dir (`str` or `Path`, *optional*): If provided, the downloaded file will be placed under this directory. force_download (`bool`, *optional*, defaults to `False`): Whether the file should be downloaded even if it already exists in the local cache. proxies (`dict`, *optional*): Dictionary mapping protocol to the URL of the proxy passed to `requests.request`. etag_timeout (`float`, *optional*, defaults to `10`): When fetching ETag, how many seconds to wait for the server to send data before giving up which is passed to `requests.request`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. local_files_only (`bool`, *optional*, defaults to `False`): If `True`, avoid downloading the file and return the path to the local cached file if it exists. Returns: `str`: Local path of file or if networking is off, last version of file cached on disk. Raises: [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. [`~utils.EntryNotFoundError`] If the file to download cannot be found. [`~utils.LocalEntryNotFoundError`] If network is disabled or unavailable and file is not found in cache. [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) If `token=True` but the token cannot be found. 
[`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) If ETag cannot be determined. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) If some parameter value is invalid. """ from .file_download import hf_hub_download if token is None: # Cannot do `token = token or self.token` as token can be `False`. token = self.token return hf_hub_download( repo_id=repo_id, filename=filename, subfolder=subfolder, repo_type=repo_type, revision=revision, endpoint=self.endpoint, library_name=self.library_name, library_version=self.library_version, cache_dir=cache_dir, local_dir=local_dir, local_dir_use_symlinks=local_dir_use_symlinks, user_agent=self.user_agent, force_download=force_download, force_filename=force_filename, proxies=proxies, etag_timeout=etag_timeout, resume_download=resume_download, token=token, headers=self.headers, local_files_only=local_files_only, ) @validate_hf_hub_args def snapshot_download( self, repo_id: str, *, repo_type: Optional[str] = None, revision: Optional[str] = None, cache_dir: Union[str, Path, None] = None, local_dir: Union[str, Path, None] = None, proxies: Optional[Dict] = None, etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT, force_download: bool = False, token: Union[bool, str, None] = None, local_files_only: bool = False, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, max_workers: int = 8, tqdm_class: Optional[base_tqdm] = None, # Deprecated args local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto", resume_download: Optional[bool] = None, ) -> str: """Download repo files. Download a whole snapshot of a repo's files at the specified revision. This is useful when you want all files from a repo, because you don't know which ones you will need a priori. All files are nested inside a folder in order to keep their actual filename relative to that folder. You can also filter which files to download using `allow_patterns` and `ignore_patterns`. If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir` to store some metadata related to the downloaded files.While this mechanism is not as robust as the main cache-system, it's optimized for regularly pulling the latest version of a repository. An alternative would be to clone the repo but this requires git and git-lfs to be installed and properly configured. It is also not possible to filter which files to download when cloning a repository using git. Args: repo_id (`str`): A user or an organization name and a repo name separated by a `/`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if downloading from a dataset or space, `None` or `"model"` if downloading from a model. Default is `None`. revision (`str`, *optional*): An optional Git revision id which can be a branch name, a tag, or a commit hash. cache_dir (`str`, `Path`, *optional*): Path to the folder where cached files are stored. local_dir (`str` or `Path`, *optional*): If provided, the downloaded files will be placed under this directory. proxies (`dict`, *optional*): Dictionary mapping protocol to the URL of the proxy passed to `requests.request`. etag_timeout (`float`, *optional*, defaults to `10`): When fetching ETag, how many seconds to wait for the server to send data before giving up which is passed to `requests.request`. 
force_download (`bool`, *optional*, defaults to `False`): Whether the file should be downloaded even if it already exists in the local cache. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. local_files_only (`bool`, *optional*, defaults to `False`): If `True`, avoid downloading the file and return the path to the local cached file if it exists. allow_patterns (`List[str]` or `str`, *optional*): If provided, only files matching at least one pattern are downloaded. ignore_patterns (`List[str]` or `str`, *optional*): If provided, files matching any of the patterns are not downloaded. max_workers (`int`, *optional*): Number of concurrent threads to download files (1 thread = 1 file download). Defaults to 8. tqdm_class (`tqdm`, *optional*): If provided, overwrites the default behavior for the progress bar. Passed argument must inherit from `tqdm.auto.tqdm` or at least mimic its behavior. Note that the `tqdm_class` is not passed to each individual download. Defaults to the custom HF progress bar that can be disabled by setting `HF_HUB_DISABLE_PROGRESS_BARS` environment variable. Returns: `str`: folder path of the repo snapshot. Raises: [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. [`~utils.RevisionNotFoundError`] If the revision to download from cannot be found. [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError) If `token=True` and the token cannot be found. [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if ETag cannot be determined. [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid. """ from ._snapshot_download import snapshot_download if token is None: # Cannot do `token = token or self.token` as token can be `False`. token = self.token return snapshot_download( repo_id=repo_id, repo_type=repo_type, revision=revision, endpoint=self.endpoint, cache_dir=cache_dir, local_dir=local_dir, local_dir_use_symlinks=local_dir_use_symlinks, library_name=self.library_name, library_version=self.library_version, user_agent=self.user_agent, proxies=proxies, etag_timeout=etag_timeout, resume_download=resume_download, force_download=force_download, token=token, local_files_only=local_files_only, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns, max_workers=max_workers, tqdm_class=tqdm_class, ) def get_safetensors_metadata( self, repo_id: str, *, repo_type: Optional[str] = None, revision: Optional[str] = None, token: Union[bool, str, None] = None, ) -> SafetensorsRepoMetadata: """ Parse metadata for a safetensors repo on the Hub. We first check if the repo has a single safetensors file or a sharded safetensors repo. If it's a single safetensors file, we parse the metadata from this file. If it's a sharded safetensors repo, we parse the metadata from the index file and then parse the metadata from each shard. To parse metadata from a single safetensors file, use [`parse_safetensors_file_metadata`]. For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format. 
        Args:
            repo_id (`str`):
                A user or an organization name and a repo name separated by a `/`.
            repo_type (`str`, *optional*):
                Set to `"dataset"` or `"space"` if the file is in a dataset or space, `None` or `"model"` if in a
                model. Default is `None`.
            revision (`str`, *optional*):
                The git revision to fetch the file from. Can be a branch name, a tag, or a commit hash. Defaults to
                the head of the `"main"` branch.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Returns:
            [`SafetensorsRepoMetadata`]: information related to the safetensors repo.

        Raises:
            [`NotASafetensorsRepoError`]
                If the repo is not a safetensors repo, i.e. it doesn't have either a `model.safetensors` or a
                `model.safetensors.index.json` file.
            [`SafetensorsParsingError`]
                If a safetensors file header couldn't be parsed correctly.

        Example:
        ```py
        # Parse repo with single weights file
        >>> metadata = get_safetensors_metadata("bigscience/bloomz-560m")
        >>> metadata
        SafetensorsRepoMetadata(
            metadata=None,
            sharded=False,
            weight_map={'h.0.input_layernorm.bias': 'model.safetensors', ...},
            files_metadata={'model.safetensors': SafetensorsFileMetadata(...)}
        )
        >>> metadata.files_metadata["model.safetensors"].metadata
        {'format': 'pt'}

        # Parse repo with sharded model
        >>> metadata = get_safetensors_metadata("bigscience/bloom")
        Parse safetensors files: 100%|██████████████████████████████████████████| 72/72 [00:12<00:00, 5.78it/s]
        >>> metadata
        SafetensorsRepoMetadata(metadata={'total_size': 352494542848}, sharded=True, weight_map={...}, files_metadata={...})
        >>> len(metadata.files_metadata)
        72  # All safetensors files have been fetched

        # Parse a repo that is not a safetensors repo
        >>> get_safetensors_metadata("runwayml/stable-diffusion-v1-5")
        NotASafetensorsRepoError: 'runwayml/stable-diffusion-v1-5' is not a safetensors repo. Couldn't find 'model.safetensors.index.json' or 'model.safetensors' files.
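        # Illustrative sketch (not an official recipe): reuse the sharded `metadata`
        # parsed above and aggregate the parsed `TensorInfo.shape` lists to estimate
        # the total number of tensor elements across all shards.
        >>> import math
        >>> sum(
        ...     math.prod(tensor.shape)
        ...     for file_metadata in metadata.files_metadata.values()
        ...     for tensor in file_metadata.tensors.values()
        ... )  # total element count, roughly the parameter count of the checkpoint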
``` """ if self.file_exists( # Single safetensors file => non-sharded model repo_id=repo_id, filename=constants.SAFETENSORS_SINGLE_FILE, repo_type=repo_type, revision=revision, token=token, ): file_metadata = self.parse_safetensors_file_metadata( repo_id=repo_id, filename=constants.SAFETENSORS_SINGLE_FILE, repo_type=repo_type, revision=revision, token=token, ) return SafetensorsRepoMetadata( metadata=None, sharded=False, weight_map={ tensor_name: constants.SAFETENSORS_SINGLE_FILE for tensor_name in file_metadata.tensors.keys() }, files_metadata={constants.SAFETENSORS_SINGLE_FILE: file_metadata}, ) elif self.file_exists( # Multiple safetensors files => sharded with index repo_id=repo_id, filename=constants.SAFETENSORS_INDEX_FILE, repo_type=repo_type, revision=revision, token=token, ): # Fetch index index_file = self.hf_hub_download( repo_id=repo_id, filename=constants.SAFETENSORS_INDEX_FILE, repo_type=repo_type, revision=revision, token=token, ) with open(index_file) as f: index = json.load(f) weight_map = index.get("weight_map", {}) # Fetch metadata per shard files_metadata = {} def _parse(filename: str) -> None: files_metadata[filename] = self.parse_safetensors_file_metadata( repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, token=token ) thread_map( _parse, set(weight_map.values()), desc="Parse safetensors files", tqdm_class=hf_tqdm, ) return SafetensorsRepoMetadata( metadata=index.get("metadata", None), sharded=True, weight_map=weight_map, files_metadata=files_metadata, ) else: # Not a safetensors repo raise NotASafetensorsRepoError( f"'{repo_id}' is not a safetensors repo. Couldn't find '{constants.SAFETENSORS_INDEX_FILE}' or '{constants.SAFETENSORS_SINGLE_FILE}' files." ) def parse_safetensors_file_metadata( self, repo_id: str, filename: str, *, repo_type: Optional[str] = None, revision: Optional[str] = None, token: Union[bool, str, None] = None, ) -> SafetensorsFileMetadata: """ Parse metadata from a safetensors file on the Hub. To parse metadata from all safetensors files in a repo at once, use [`get_safetensors_metadata`]. For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format. Args: repo_id (`str`): A user or an organization name and a repo name separated by a `/`. filename (`str`): The name of the file in the repo. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if the file is in a dataset or space, `None` or `"model"` if in a model. Default is `None`. revision (`str`, *optional*): The git revision to fetch the file from. Can be a branch name, a tag, or a commit hash. Defaults to the head of the `"main"` branch. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SafetensorsFileMetadata`]: information related to a safetensors file. Raises: [`NotASafetensorsRepoError`]: If the repo is not a safetensors repo i.e. doesn't have either a `model.safetensors` or a `model.safetensors.index.json` file. [`SafetensorsParsingError`]: If a safetensors file header couldn't be parsed correctly. """ url = hf_hub_url( repo_id=repo_id, filename=filename, repo_type=repo_type, revision=revision, endpoint=self.endpoint ) _headers = self._build_hf_headers(token=token) # 1. 
Fetch first 100kb # Empirically, 97% of safetensors files have a metadata size < 100kb (over the top 1000 models on the Hub). # We assume fetching 100kb is faster than making 2 GET requests. Therefore we always fetch the first 100kb to # avoid the 2nd GET in most cases. # See https://github.com/huggingface/huggingface_hub/pull/1855#discussion_r1404286419. response = get_session().get(url, headers={**_headers, "range": "bytes=0-100000"}) hf_raise_for_status(response) # 2. Parse metadata size metadata_size = struct.unpack("<Q", response.content[:8])[0] if metadata_size > constants.SAFETENSORS_MAX_HEADER_LENGTH: raise SafetensorsParsingError( f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " f"'{revision or constants.DEFAULT_REVISION}'): safetensors header is too big. Maximum supported size is " f"{constants.SAFETENSORS_MAX_HEADER_LENGTH} bytes (got {metadata_size})." ) # 3.a. Get metadata from payload if metadata_size <= 100000: metadata_as_bytes = response.content[8 : 8 + metadata_size] else: # 3.b. Request full metadata response = get_session().get(url, headers={**_headers, "range": f"bytes=8-{metadata_size+7}"}) hf_raise_for_status(response) metadata_as_bytes = response.content # 4. Parse json header try: metadata_as_dict = json.loads(metadata_as_bytes.decode(errors="ignore")) except json.JSONDecodeError as e: raise SafetensorsParsingError( f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " f"'{revision or constants.DEFAULT_REVISION}'): header is not json-encoded string. Please make sure this is a " "correctly formatted safetensors file." ) from e try: return SafetensorsFileMetadata( metadata=metadata_as_dict.get("__metadata__", {}), tensors={ key: TensorInfo( dtype=tensor["dtype"], shape=tensor["shape"], data_offsets=tuple(tensor["data_offsets"]), # type: ignore ) for key, tensor in metadata_as_dict.items() if key != "__metadata__" }, ) except (KeyError, IndexError) as e: raise SafetensorsParsingError( f"Failed to parse safetensors header for '{filename}' (repo '{repo_id}', revision " f"'{revision or constants.DEFAULT_REVISION}'): header format not recognized. Please make sure this is a correctly" " formatted safetensors file." ) from e @validate_hf_hub_args def create_branch( self, repo_id: str, *, branch: str, revision: Optional[str] = None, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, exist_ok: bool = False, ) -> None: """ Create a new branch for a repo on the Hub, starting from the specified revision (defaults to `main`). To find a revision suiting your needs, you can use [`list_repo_refs`] or [`list_repo_commits`]. Args: repo_id (`str`): The repository in which the branch will be created. Example: `"user/my-cool-model"`. branch (`str`): The name of the branch to create. revision (`str`, *optional*): The git revision to create the branch from. It can be a branch name or the OID/SHA of a commit, as a hexadecimal string. Defaults to the head of the `"main"` branch. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if creating a branch on a dataset or space, `None` or `"model"` if tagging a model. Default is `None`. 
exist_ok (`bool`, *optional*, defaults to `False`): If `True`, do not raise an error if branch already exists. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.BadRequestError`]: If invalid reference for a branch. Ex: `refs/pr/5` or 'refs/foo/bar'. [`~utils.HfHubHTTPError`]: If the branch already exists on the repo (error 409) and `exist_ok` is set to `False`. """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL branch = quote(branch, safe="") # Prepare request branch_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/branch/{branch}" headers = self._build_hf_headers(token=token) payload = {} if revision is not None: payload["startingPoint"] = revision # Create branch response = get_session().post(url=branch_url, headers=headers, json=payload) try: hf_raise_for_status(response) except HfHubHTTPError as e: if exist_ok and e.response.status_code == 409: return elif exist_ok and e.response.status_code == 403: # No write permission on the namespace but branch might already exist try: refs = self.list_repo_refs(repo_id=repo_id, repo_type=repo_type, token=token) for branch_ref in refs.branches: if branch_ref.name == branch: return # Branch already exists => do not raise except HfHubHTTPError: pass # We raise the original error if the branch does not exist raise @validate_hf_hub_args def delete_branch( self, repo_id: str, *, branch: str, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> None: """ Delete a branch from a repo on the Hub. Args: repo_id (`str`): The repository in which a branch will be deleted. Example: `"user/my-cool-model"`. branch (`str`): The name of the branch to delete. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if creating a branch on a dataset or space, `None` or `"model"` if tagging a model. Default is `None`. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.HfHubHTTPError`]: If trying to delete a protected branch. Ex: `main` cannot be deleted. [`~utils.HfHubHTTPError`]: If trying to delete a branch that does not exist. """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL branch = quote(branch, safe="") # Prepare request branch_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/branch/{branch}" headers = self._build_hf_headers(token=token) # Delete branch response = get_session().delete(url=branch_url, headers=headers) hf_raise_for_status(response) @validate_hf_hub_args def create_tag( self, repo_id: str, *, tag: str, tag_message: Optional[str] = None, revision: Optional[str] = None, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, exist_ok: bool = False, ) -> None: """ Tag a given commit of a repo on the Hub. Args: repo_id (`str`): The repository in which a commit will be tagged. Example: `"user/my-cool-model"`. tag (`str`): The name of the tag to create. tag_message (`str`, *optional*): The description of the tag to create. revision (`str`, *optional*): The git revision to tag. 
It can be a branch name or the OID/SHA of a commit, as a hexadecimal string. Shorthands (7 first characters) are also supported. Defaults to the head of the `"main"` branch. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if tagging a dataset or space, `None` or `"model"` if tagging a model. Default is `None`. exist_ok (`bool`, *optional*, defaults to `False`): If `True`, do not raise an error if tag already exists. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.RevisionNotFoundError`]: If revision is not found (error 404) on the repo. [`~utils.HfHubHTTPError`]: If the branch already exists on the repo (error 409) and `exist_ok` is set to `False`. """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL revision = quote(revision, safe="") if revision is not None else constants.DEFAULT_REVISION # Prepare request tag_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tag/{revision}" headers = self._build_hf_headers(token=token) payload = {"tag": tag} if tag_message is not None: payload["message"] = tag_message # Tag response = get_session().post(url=tag_url, headers=headers, json=payload) try: hf_raise_for_status(response) except HfHubHTTPError as e: if not (e.response.status_code == 409 and exist_ok): raise @validate_hf_hub_args def delete_tag( self, repo_id: str, *, tag: str, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> None: """ Delete a tag from a repo on the Hub. Args: repo_id (`str`): The repository in which a tag will be deleted. Example: `"user/my-cool-model"`. tag (`str`): The name of the tag to delete. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if tagging a dataset or space, `None` or `"model"` if tagging a model. Default is `None`. Raises: [`~utils.RepositoryNotFoundError`]: If repository is not found (error 404): wrong repo_id/repo_type, private but not authenticated or repo does not exist. [`~utils.RevisionNotFoundError`]: If tag is not found. """ if repo_type is None: repo_type = constants.REPO_TYPE_MODEL tag = quote(tag, safe="") # Prepare request tag_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tag/{tag}" headers = self._build_hf_headers(token=token) # Un-tag response = get_session().delete(url=tag_url, headers=headers) hf_raise_for_status(response) @validate_hf_hub_args def get_full_repo_name( self, model_id: str, *, organization: Optional[str] = None, token: Union[bool, str, None] = None, ): """ Returns the repository name for a given model ID and optional organization. Args: model_id (`str`): The name of the model. organization (`str`, *optional*): If passed, the repository name will be in the organization namespace instead of the user namespace. token (Union[bool, str, None], optional): A valid user access token (string). 
Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `str`: The repository name in the user's namespace ({username}/{model_id}) if no organization is passed, and under the organization namespace ({organization}/{model_id}) otherwise. """ if organization is None: if "/" in model_id: username = model_id.split("/")[0] else: username = self.whoami(token=token)["name"] # type: ignore return f"{username}/{model_id}" else: return f"{organization}/{model_id}" @validate_hf_hub_args def get_repo_discussions( self, repo_id: str, *, author: Optional[str] = None, discussion_type: Optional[constants.DiscussionTypeFilter] = None, discussion_status: Optional[constants.DiscussionStatusFilter] = None, repo_type: Optional[str] = None, token: Union[bool, str, None] = None, ) -> Iterator[Discussion]: """ Fetches Discussions and Pull Requests for the given repo. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. author (`str`, *optional*): Pass a value to filter by discussion author. `None` means no filter. Default is `None`. discussion_type (`str`, *optional*): Set to `"pull_request"` to fetch only pull requests, `"discussion"` to fetch only discussions. Set to `"all"` or `None` to fetch both. Default is `None`. discussion_status (`str`, *optional*): Set to `"open"` (respectively `"closed"`) to fetch only open (respectively closed) discussions. Set to `"all"` or `None` to fetch both. Default is `None`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if fetching from a dataset or space, `None` or `"model"` if fetching from a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterator[Discussion]`: An iterator of [`Discussion`] objects. Example: Collecting all discussions of a repo in a list: ```python >>> from huggingface_hub import get_repo_discussions >>> discussions_list = list(get_repo_discussions(repo_id="bert-base-uncased")) ``` Iterating over discussions of a repo: ```python >>> from huggingface_hub import get_repo_discussions >>> for discussion in get_repo_discussions(repo_id="bert-base-uncased"): ... 
print(discussion.num, discussion.title) ``` """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL if discussion_type is not None and discussion_type not in constants.DISCUSSION_TYPES: raise ValueError(f"Invalid discussion_type, must be one of {constants.DISCUSSION_TYPES}") if discussion_status is not None and discussion_status not in constants.DISCUSSION_STATUS: raise ValueError(f"Invalid discussion_status, must be one of {constants.DISCUSSION_STATUS}") headers = self._build_hf_headers(token=token) path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions" params: Dict[str, Union[str, int]] = {} if discussion_type is not None: params["type"] = discussion_type if discussion_status is not None: params["status"] = discussion_status if author is not None: params["author"] = author def _fetch_discussion_page(page_index: int): params["p"] = page_index resp = get_session().get(path, headers=headers, params=params) hf_raise_for_status(resp) paginated_discussions = resp.json() total = paginated_discussions["count"] start = paginated_discussions["start"] discussions = paginated_discussions["discussions"] has_next = (start + len(discussions)) < total return discussions, has_next has_next, page_index = True, 0 while has_next: discussions, has_next = _fetch_discussion_page(page_index=page_index) for discussion in discussions: yield Discussion( title=discussion["title"], num=discussion["num"], author=discussion.get("author", {}).get("name", "deleted"), created_at=parse_datetime(discussion["createdAt"]), status=discussion["status"], repo_id=discussion["repo"]["name"], repo_type=discussion["repo"]["type"], is_pull_request=discussion["isPullRequest"], endpoint=self.endpoint, ) page_index = page_index + 1 @validate_hf_hub_args def get_discussion_details( self, repo_id: str, discussion_num: int, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None, ) -> DiscussionWithDetails: """Fetches a Discussion's / Pull Request 's details from the Hub. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionWithDetails`] <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. 
</Tip> """ if not isinstance(discussion_num, int) or discussion_num <= 0: raise ValueError("Invalid discussion_num, must be a positive integer") if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions/{discussion_num}" headers = self._build_hf_headers(token=token) resp = get_session().get(path, params={"diff": "1"}, headers=headers) hf_raise_for_status(resp) discussion_details = resp.json() is_pull_request = discussion_details["isPullRequest"] target_branch = discussion_details["changes"]["base"] if is_pull_request else None conflicting_files = discussion_details["filesWithConflicts"] if is_pull_request else None merge_commit_oid = discussion_details["changes"].get("mergeCommitId", None) if is_pull_request else None return DiscussionWithDetails( title=discussion_details["title"], num=discussion_details["num"], author=discussion_details.get("author", {}).get("name", "deleted"), created_at=parse_datetime(discussion_details["createdAt"]), status=discussion_details["status"], repo_id=discussion_details["repo"]["name"], repo_type=discussion_details["repo"]["type"], is_pull_request=discussion_details["isPullRequest"], events=[deserialize_event(evt) for evt in discussion_details["events"]], conflicting_files=conflicting_files, target_branch=target_branch, merge_commit_oid=merge_commit_oid, diff=discussion_details.get("diff"), endpoint=self.endpoint, ) @validate_hf_hub_args def create_discussion( self, repo_id: str, title: str, *, token: Union[bool, str, None] = None, description: Optional[str] = None, repo_type: Optional[str] = None, pull_request: bool = False, ) -> DiscussionWithDetails: """Creates a Discussion or Pull Request. Pull Requests created programmatically will be in `"draft"` status. Creating a Pull Request with changes can also be done at once with [`HfApi.create_commit`]. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. title (`str`): The title of the discussion. It can be up to 200 characters long, and must be at least 3 characters long. Leading and trailing whitespaces will be stripped. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. description (`str`, *optional*): An optional description for the Pull Request. Defaults to `"Discussion opened with the huggingface_hub Python library"` pull_request (`bool`, *optional*): Whether to create a Pull Request or discussion. If `True`, creates a Pull Request. If `False`, creates a discussion. Defaults to `False`. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. Returns: [`DiscussionWithDetails`] <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. 
</Tip>""" if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL if description is not None: description = description.strip() description = ( description if description else ( f"{'Pull Request' if pull_request else 'Discussion'} opened with the" " [huggingface_hub Python" " library](https://huggingface.co/docs/huggingface_hub)" ) ) headers = self._build_hf_headers(token=token) resp = get_session().post( f"{self.endpoint}/api/{repo_type}s/{repo_id}/discussions", json={ "title": title.strip(), "description": description, "pullRequest": pull_request, }, headers=headers, ) hf_raise_for_status(resp) num = resp.json()["num"] return self.get_discussion_details( repo_id=repo_id, repo_type=repo_type, discussion_num=num, token=token, ) @validate_hf_hub_args def create_pull_request( self, repo_id: str, title: str, *, token: Union[bool, str, None] = None, description: Optional[str] = None, repo_type: Optional[str] = None, ) -> DiscussionWithDetails: """Creates a Pull Request . Pull Requests created programmatically will be in `"draft"` status. Creating a Pull Request with changes can also be done at once with [`HfApi.create_commit`]; This is a wrapper around [`HfApi.create_discussion`]. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. title (`str`): The title of the discussion. It can be up to 200 characters long, and must be at least 3 characters long. Leading and trailing whitespaces will be stripped. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. description (`str`, *optional*): An optional description for the Pull Request. Defaults to `"Discussion opened with the huggingface_hub Python library"` repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. Returns: [`DiscussionWithDetails`] <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. 
</Tip>""" return self.create_discussion( repo_id=repo_id, title=title, token=token, description=description, repo_type=repo_type, pull_request=True, ) def _post_discussion_changes( self, *, repo_id: str, discussion_num: int, resource: str, body: Optional[dict] = None, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> requests.Response: """Internal utility to POST changes to a Discussion or Pull Request""" if not isinstance(discussion_num, int) or discussion_num <= 0: raise ValueError("Invalid discussion_num, must be a positive integer") if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL repo_id = f"{repo_type}s/{repo_id}" path = f"{self.endpoint}/api/{repo_id}/discussions/{discussion_num}/{resource}" headers = self._build_hf_headers(token=token) resp = requests.post(path, headers=headers, json=body) hf_raise_for_status(resp) return resp @validate_hf_hub_args def comment_discussion( self, repo_id: str, discussion_num: int, comment: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> DiscussionComment: """Creates a new comment on the given Discussion. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. comment (`str`): The content of the comment to create. Comments support markdown formatting. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionComment`]: the newly created comment Examples: ```python >>> comment = \"\"\" ... Hello @otheruser! ... ... # This is a title ... ... **This is bold**, *this is italic* and ~this is strikethrough~ ... And [this](http://url) is a link ... \"\"\" >>> HfApi().comment_discussion( ... repo_id="username/repo_name", ... discussion_num=34 ... comment=comment ... ) # DiscussionComment(id='deadbeef0000000', type='comment', ...) ``` <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ resp = self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource="comment", body={"comment": comment}, ) return deserialize_event(resp.json()["newMessage"]) # type: ignore @validate_hf_hub_args def rename_discussion( self, repo_id: str, discussion_num: int, new_title: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> DiscussionTitleChange: """Renames a Discussion. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. 
discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. new_title (`str`): The new title for the discussion repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionTitleChange`]: the title change event Examples: ```python >>> new_title = "New title, fixing a typo" >>> HfApi().rename_discussion( ... repo_id="username/repo_name", ... discussion_num=34 ... new_title=new_title ... ) # DiscussionTitleChange(id='deadbeef0000000', type='title-change', ...) ``` <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ resp = self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource="title", body={"title": new_title}, ) return deserialize_event(resp.json()["newTitle"]) # type: ignore @validate_hf_hub_args def change_discussion_status( self, repo_id: str, discussion_num: int, new_status: Literal["open", "closed"], *, token: Union[bool, str, None] = None, comment: Optional[str] = None, repo_type: Optional[str] = None, ) -> DiscussionStatusChange: """Closes or re-opens a Discussion or Pull Request. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. new_status (`str`): The new status for the discussion, either `"open"` or `"closed"`. comment (`str`, *optional*): An optional comment to post with the status change. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionStatusChange`]: the status change event Examples: ```python >>> new_title = "New title, fixing a typo" >>> HfApi().rename_discussion( ... repo_id="username/repo_name", ... discussion_num=34 ... new_title=new_title ... ) # DiscussionStatusChange(id='deadbeef0000000', type='status-change', ...) ``` <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. 
This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ if new_status not in ["open", "closed"]: raise ValueError("Invalid status, valid statuses are: 'open' and 'closed'") body: Dict[str, str] = {"status": new_status} if comment and comment.strip(): body["comment"] = comment.strip() resp = self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource="status", body=body, ) return deserialize_event(resp.json()["newStatus"]) # type: ignore @validate_hf_hub_args def merge_pull_request( self, repo_id: str, discussion_num: int, *, token: Union[bool, str, None] = None, comment: Optional[str] = None, repo_type: Optional[str] = None, ): """Merges a Pull Request. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. comment (`str`, *optional*): An optional comment to post with the status change. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionStatusChange`]: the status change event <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource="merge", body={"comment": comment.strip()} if comment and comment.strip() else None, ) @validate_hf_hub_args def edit_discussion_comment( self, repo_id: str, discussion_num: int, comment_id: str, new_content: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> DiscussionComment: """Edits a comment on a Discussion / Pull Request. Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. comment_id (`str`): The ID of the comment to edit. new_content (`str`): The new content of the comment. Comments support markdown formatting. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. 
Returns: [`DiscussionComment`]: the edited comment <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ resp = self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource=f"comment/{comment_id.lower()}/edit", body={"content": new_content}, ) return deserialize_event(resp.json()["updatedComment"]) # type: ignore @validate_hf_hub_args def hide_discussion_comment( self, repo_id: str, discussion_num: int, comment_id: str, *, token: Union[bool, str, None] = None, repo_type: Optional[str] = None, ) -> DiscussionComment: """Hides a comment on a Discussion / Pull Request. <Tip warning={true}> Hidden comments' content cannot be retrieved anymore. Hiding a comment is irreversible. </Tip> Args: repo_id (`str`): A namespace (user or an organization) and a repo name separated by a `/`. discussion_num (`int`): The number of the Discussion or Pull Request . Must be a strictly positive integer. comment_id (`str`): The ID of the comment to edit. repo_type (`str`, *optional*): Set to `"dataset"` or `"space"` if uploading to a dataset or space, `None` or `"model"` if uploading to a model. Default is `None`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`DiscussionComment`]: the hidden comment <Tip> Raises the following errors: - [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError) if the HuggingFace API returned an error - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if some parameter value is invalid - [`~utils.RepositoryNotFoundError`] If the repository to download from cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. </Tip> """ warnings.warn( "Hidden comments' content cannot be retrieved anymore. Hiding a comment is irreversible.", UserWarning, ) resp = self._post_discussion_changes( repo_id=repo_id, repo_type=repo_type, discussion_num=discussion_num, token=token, resource=f"comment/{comment_id.lower()}/hide", ) return deserialize_event(resp.json()["updatedComment"]) # type: ignore @validate_hf_hub_args def add_space_secret( self, repo_id: str, key: str, value: str, *, description: Optional[str] = None, token: Union[bool, str, None] = None, ) -> None: """Adds or updates a secret in a Space. Secrets allow to set secret keys or tokens to a Space without hardcoding them. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. key (`str`): Secret key. Example: `"GITHUB_API_KEY"` value (`str`): Secret value. Example: `"your_github_api_key"`. description (`str`, *optional*): Secret description. Example: `"Github API key to access the Github API"`. 
token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. """ payload = {"key": key, "value": value} if description is not None: payload["description"] = description r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/secrets", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(r) @validate_hf_hub_args def delete_space_secret(self, repo_id: str, key: str, *, token: Union[bool, str, None] = None) -> None: """Deletes a secret from a Space. Secrets allow to set secret keys or tokens to a Space without hardcoding them. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. key (`str`): Secret key. Example: `"GITHUB_API_KEY"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. """ r = get_session().delete( f"{self.endpoint}/api/spaces/{repo_id}/secrets", headers=self._build_hf_headers(token=token), json={"key": key}, ) hf_raise_for_status(r) @validate_hf_hub_args def get_space_variables(self, repo_id: str, *, token: Union[bool, str, None] = None) -> Dict[str, SpaceVariable]: """Gets all variables from a Space. Variables allow to set environment variables to a Space without hardcoding them. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables Args: repo_id (`str`): ID of the repo to query. Example: `"bigcode/in-the-stack"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. """ r = get_session().get( f"{self.endpoint}/api/spaces/{repo_id}/variables", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(r) return {k: SpaceVariable(k, v) for k, v in r.json().items()} @validate_hf_hub_args def add_space_variable( self, repo_id: str, key: str, value: str, *, description: Optional[str] = None, token: Union[bool, str, None] = None, ) -> Dict[str, SpaceVariable]: """Adds or updates a variable in a Space. Variables allow to set environment variables to a Space without hardcoding them. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. key (`str`): Variable key. Example: `"MODEL_REPO_ID"` value (`str`): Variable value. Example: `"the_model_repo_id"`. description (`str`): Description of the variable. Example: `"Model Repo ID of the implemented model"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. 
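        Example:
            A minimal sketch (the Space id, variable value and description below are placeholders):

            ```py
            >>> from huggingface_hub import HfApi
            >>> api = HfApi()
            >>> api.add_space_variable(
            ...     repo_id="my-user/my-space",
            ...     key="MODEL_REPO_ID",
            ...     value="my-user/my-model",
            ...     description="Repo id of the model served by this Space",
            ... )
            ```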
""" payload = {"key": key, "value": value} if description is not None: payload["description"] = description r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/variables", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(r) return {k: SpaceVariable(k, v) for k, v in r.json().items()} @validate_hf_hub_args def delete_space_variable( self, repo_id: str, key: str, *, token: Union[bool, str, None] = None ) -> Dict[str, SpaceVariable]: """Deletes a variable from a Space. Variables allow to set environment variables to a Space without hardcoding them. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. key (`str`): Variable key. Example: `"MODEL_REPO_ID"` token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. """ r = get_session().delete( f"{self.endpoint}/api/spaces/{repo_id}/variables", headers=self._build_hf_headers(token=token), json={"key": key}, ) hf_raise_for_status(r) return {k: SpaceVariable(k, v) for k, v in r.json().items()} @validate_hf_hub_args def get_space_runtime(self, repo_id: str, *, token: Union[bool, str, None] = None) -> SpaceRuntime: """Gets runtime information about a Space. Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. """ r = get_session().get( f"{self.endpoint}/api/spaces/{repo_id}/runtime", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(r) return SpaceRuntime(r.json()) @validate_hf_hub_args def request_space_hardware( self, repo_id: str, hardware: SpaceHardware, *, token: Union[bool, str, None] = None, sleep_time: Optional[int] = None, ) -> SpaceRuntime: """Request new hardware for a Space. Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. hardware (`str` or [`SpaceHardware`]): Hardware on which to run the Space. Example: `"t4-medium"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. sleep_time (`int`, *optional*): Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure the sleep time (value is fixed to 48 hours of inactivity). See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. Returns: [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. <Tip> It is also possible to request hardware directly when creating the Space repo! See [`create_repo`] for details. 
</Tip> """ if sleep_time is not None and hardware == SpaceHardware.CPU_BASIC: warnings.warn( "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", UserWarning, ) payload: Dict[str, Any] = {"flavor": hardware} if sleep_time is not None: payload["sleepTimeSeconds"] = sleep_time r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/hardware", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(r) return SpaceRuntime(r.json()) @validate_hf_hub_args def set_space_sleep_time( self, repo_id: str, sleep_time: int, *, token: Union[bool, str, None] = None ) -> SpaceRuntime: """Set a custom sleep time for a Space running on upgraded hardware.. Your Space will go to sleep after X seconds of inactivity. You are not billed when your Space is in "sleep" mode. If a new visitor lands on your Space, it will "wake it up". Only upgraded hardware can have a configurable sleep time. To know more about the sleep stage, please refer to https://huggingface.co/docs/hub/spaces-gpus#sleep-time. Args: repo_id (`str`): ID of the repo to update. Example: `"bigcode/in-the-stack"`. sleep_time (`int`, *optional*): Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want your Space to pause (default behavior for upgraded hardware). For free hardware, you can't configure the sleep time (value is fixed to 48 hours of inactivity). See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. <Tip> It is also possible to set a custom sleep time when requesting hardware with [`request_space_hardware`]. </Tip> """ r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/sleeptime", headers=self._build_hf_headers(token=token), json={"seconds": sleep_time}, ) hf_raise_for_status(r) runtime = SpaceRuntime(r.json()) hardware = runtime.requested_hardware or runtime.hardware if hardware == SpaceHardware.CPU_BASIC: warnings.warn( "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", UserWarning, ) return runtime @validate_hf_hub_args def pause_space(self, repo_id: str, *, token: Union[bool, str, None] = None) -> SpaceRuntime: """Pause your Space. A paused Space stops executing until manually restarted by its owner. This is different from the sleeping state in which free Spaces go after 48h of inactivity. Paused time is not billed to your account, no matter the hardware you've selected. To restart your Space, use [`restart_space`] and go to your Space settings page. For more details, please visit [the docs](https://huggingface.co/docs/hub/spaces-gpus#pause). Args: repo_id (`str`): ID of the Space to pause. Example: `"Salesforce/BLIP2"`. 
token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SpaceRuntime`]: Runtime information about your Space including `stage=PAUSED` and requested hardware. Raises: [`~utils.RepositoryNotFoundError`]: If your Space is not found (error 404). Most probably wrong repo_id or your space is private but you are not authenticated. [`~utils.HfHubHTTPError`]: 403 Forbidden: only the owner of a Space can pause it. If you want to manage a Space that you don't own, either ask the owner by opening a Discussion or duplicate the Space. [`~utils.BadRequestError`]: If your Space is a static Space. Static Spaces are always running and never billed. If you want to hide a static Space, you can set it to private. """ r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/pause", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(r) return SpaceRuntime(r.json()) @validate_hf_hub_args def restart_space( self, repo_id: str, *, token: Union[bool, str, None] = None, factory_reboot: bool = False ) -> SpaceRuntime: """Restart your Space. This is the only way to programmatically restart a Space if you've put it on Pause (see [`pause_space`]). You must be the owner of the Space to restart it. If you are using an upgraded hardware, your account will be billed as soon as the Space is restarted. You can trigger a restart no matter the current state of a Space. For more details, please visit [the docs](https://huggingface.co/docs/hub/spaces-gpus#pause). Args: repo_id (`str`): ID of the Space to restart. Example: `"Salesforce/BLIP2"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. factory_reboot (`bool`, *optional*): If `True`, the Space will be rebuilt from scratch without caching any requirements. Returns: [`SpaceRuntime`]: Runtime information about your Space. Raises: [`~utils.RepositoryNotFoundError`]: If your Space is not found (error 404). Most probably wrong repo_id or your space is private but you are not authenticated. [`~utils.HfHubHTTPError`]: 403 Forbidden: only the owner of a Space can restart it. If you want to restart a Space that you don't own, either ask the owner by opening a Discussion or duplicate the Space. [`~utils.BadRequestError`]: If your Space is a static Space. Static Spaces are always running and never billed. If you want to hide a static Space, you can set it to private. """ params = {} if factory_reboot: params["factory"] = "true" r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/restart", headers=self._build_hf_headers(token=token), params=params ) hf_raise_for_status(r) return SpaceRuntime(r.json()) @validate_hf_hub_args def duplicate_space( self, from_id: str, to_id: Optional[str] = None, *, private: Optional[bool] = None, token: Union[bool, str, None] = None, exist_ok: bool = False, hardware: Optional[SpaceHardware] = None, storage: Optional[SpaceStorage] = None, sleep_time: Optional[int] = None, secrets: Optional[List[Dict[str, str]]] = None, variables: Optional[List[Dict[str, str]]] = None, ) -> RepoUrl: """Duplicate a Space. 
Programmatically duplicate a Space. The new Space will be created in your account and will be in the same state as the original Space (running or paused). You can duplicate a Space no matter the current state of a Space. Args: from_id (`str`): ID of the Space to duplicate. Example: `"pharma/CLIP-Interrogator"`. to_id (`str`, *optional*): ID of the new Space. Example: `"dog/CLIP-Interrogator"`. If not provided, the new Space will have the same name as the original Space, but in your account. private (`bool`, *optional*): Whether the new Space should be private or not. Defaults to the same privacy as the original Space. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. exist_ok (`bool`, *optional*, defaults to `False`): If `True`, do not raise an error if repo already exists. hardware (`SpaceHardware` or `str`, *optional*): Choice of Hardware. Example: `"t4-medium"`. See [`SpaceHardware`] for a complete list. storage (`SpaceStorage` or `str`, *optional*): Choice of persistent storage tier. Example: `"small"`. See [`SpaceStorage`] for a complete list. sleep_time (`int`, *optional*): Number of seconds of inactivity to wait before a Space is put to sleep. Set to `-1` if you don't want your Space to sleep (default behavior for upgraded hardware). For free hardware, you can't configure the sleep time (value is fixed to 48 hours of inactivity). See https://huggingface.co/docs/hub/spaces-gpus#sleep-time for more details. secrets (`List[Dict[str, str]]`, *optional*): A list of secret keys to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets. variables (`List[Dict[str, str]]`, *optional*): A list of public environment variables to set in your Space. Each item is in the form `{"key": ..., "value": ..., "description": ...}` where description is optional. For more details, see https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables. Returns: [`RepoUrl`]: URL to the newly created repo. Value is a subclass of `str` containing attributes like `endpoint`, `repo_type` and `repo_id`. Raises: [`~utils.RepositoryNotFoundError`]: If one of `from_id` or `to_id` cannot be found. This may be because it doesn't exist, or because it is set to `private` and you do not have access. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): If the HuggingFace API returned an error Example: ```python >>> from huggingface_hub import duplicate_space # Duplicate a Space to your account >>> duplicate_space("multimodalart/dreambooth-training") RepoUrl('https://huggingface.co/spaces/nateraw/dreambooth-training',...) # Can set custom destination id and visibility flag. >>> duplicate_space("multimodalart/dreambooth-training", to_id="my-dreambooth", private=True) RepoUrl('https://huggingface.co/spaces/nateraw/my-dreambooth',...) 
``` """ # Parse to_id if provided parsed_to_id = RepoUrl(to_id) if to_id is not None else None # Infer target repo_id to_namespace = ( # set namespace manually or default to username parsed_to_id.namespace if parsed_to_id is not None and parsed_to_id.namespace is not None else self.whoami(token)["name"] ) to_repo_name = parsed_to_id.repo_name if to_id is not None else RepoUrl(from_id).repo_name # type: ignore # repository must be a valid repo_id (namespace/repo_name). payload: Dict[str, Any] = {"repository": f"{to_namespace}/{to_repo_name}"} keys = ["private", "hardware", "storageTier", "sleepTimeSeconds", "secrets", "variables"] values = [private, hardware, storage, sleep_time, secrets, variables] payload.update({k: v for k, v in zip(keys, values) if v is not None}) if sleep_time is not None and hardware == SpaceHardware.CPU_BASIC: warnings.warn( "If your Space runs on the default 'cpu-basic' hardware, it will go to sleep if inactive for more" " than 48 hours. This value is not configurable. If you don't want your Space to deactivate or if" " you want to set a custom sleep time, you need to upgrade to a paid Hardware.", UserWarning, ) r = get_session().post( f"{self.endpoint}/api/spaces/{from_id}/duplicate", headers=self._build_hf_headers(token=token), json=payload, ) try: hf_raise_for_status(r) except HTTPError as err: if exist_ok and err.response.status_code == 409: # Repo already exists and `exist_ok=True` pass else: raise return RepoUrl(r.json()["url"], endpoint=self.endpoint) @validate_hf_hub_args def request_space_storage( self, repo_id: str, storage: SpaceStorage, *, token: Union[bool, str, None] = None, ) -> SpaceRuntime: """Request persistent storage for a Space. Args: repo_id (`str`): ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`. storage (`str` or [`SpaceStorage`]): Storage tier. Either 'small', 'medium', or 'large'. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. <Tip> It is not possible to decrease persistent storage after its granted. To do so, you must delete it via [`delete_space_storage`]. </Tip> """ payload: Dict[str, SpaceStorage] = {"tier": storage} r = get_session().post( f"{self.endpoint}/api/spaces/{repo_id}/storage", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(r) return SpaceRuntime(r.json()) @validate_hf_hub_args def delete_space_storage( self, repo_id: str, *, token: Union[bool, str, None] = None, ) -> SpaceRuntime: """Delete persistent storage for a Space. Args: repo_id (`str`): ID of the Space to update. Example: `"open-llm-leaderboard/open_llm_leaderboard"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`SpaceRuntime`]: Runtime information about a Space including Space stage and hardware. Raises: [`BadRequestError`] If space has no persistent storage. 
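        Example (a minimal sketch of removing a Space's persistent storage; the Space id below is hypothetical):

        ```py
        >>> from huggingface_hub import HfApi
        >>> api = HfApi()
        # Remove the persistent storage tier from a Space you own.
        # Data stored on the persistent disk is lost and cannot be recovered.
        >>> runtime = api.delete_space_storage(repo_id="username/my-space")
        ```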
""" r = get_session().delete( f"{self.endpoint}/api/spaces/{repo_id}/storage", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(r) return SpaceRuntime(r.json()) ####################### # Inference Endpoints # ####################### def list_inference_endpoints( self, namespace: Optional[str] = None, *, token: Union[bool, str, None] = None ) -> List[InferenceEndpoint]: """Lists all inference endpoints for the given namespace. Args: namespace (`str`, *optional*): The namespace to list endpoints for. Defaults to the current user. Set to `"*"` to list all endpoints from all namespaces (i.e. personal namespace and all orgs the user belongs to). token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: List[`InferenceEndpoint`]: A list of all inference endpoints for the given namespace. Example: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() >>> api.list_inference_endpoints() [InferenceEndpoint(name='my-endpoint', ...), ...] ``` """ # Special case: list all endpoints for all namespaces the user has access to if namespace == "*": user = self.whoami(token=token) # List personal endpoints first endpoints: List[InferenceEndpoint] = list_inference_endpoints(namespace=self._get_namespace(token=token)) # Then list endpoints for all orgs the user belongs to and ignore 401 errors (no billing or no access) for org in user.get("orgs", []): try: endpoints += list_inference_endpoints(namespace=org["name"], token=token) except HfHubHTTPError as error: if error.response.status_code == 401: # Either no billing or user don't have access) logger.debug("Cannot list Inference Endpoints for org '%s': %s", org["name"], error) pass return endpoints # Normal case: list endpoints for a specific namespace namespace = namespace or self._get_namespace(token=token) response = get_session().get( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) return [ InferenceEndpoint.from_raw(endpoint, namespace=namespace, token=token) for endpoint in response.json()["items"] ] def create_inference_endpoint( self, name: str, *, repository: str, framework: str, accelerator: str, instance_size: str, instance_type: str, region: str, vendor: str, account_id: Optional[str] = None, min_replica: int = 0, max_replica: int = 1, scale_to_zero_timeout: int = 15, revision: Optional[str] = None, task: Optional[str] = None, custom_image: Optional[Dict] = None, secrets: Optional[Dict[str, str]] = None, type: InferenceEndpointType = InferenceEndpointType.PROTECTED, namespace: Optional[str] = None, token: Union[bool, str, None] = None, ) -> InferenceEndpoint: """Create a new Inference Endpoint. Args: name (`str`): The unique name for the new Inference Endpoint. repository (`str`): The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). framework (`str`): The machine learning framework used for the model (e.g. `"custom"`). accelerator (`str`): The hardware accelerator to be used for inference (e.g. `"cpu"`). instance_size (`str`): The size or type of the instance to be used for hosting the model (e.g. `"x4"`). instance_type (`str`): The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). 
region (`str`): The cloud region in which the Inference Endpoint will be created (e.g. `"us-east-1"`). vendor (`str`): The cloud provider or vendor where the Inference Endpoint will be hosted (e.g. `"aws"`). account_id (`str`, *optional*): The account ID used to link a VPC to a private Inference Endpoint (if applicable). min_replica (`int`, *optional*): The minimum number of replicas (instances) to keep running for the Inference Endpoint. Defaults to 0. max_replica (`int`, *optional*): The maximum number of replicas (instances) to scale to for the Inference Endpoint. Defaults to 1. scale_to_zero_timeout (`int`, *optional*): The duration in minutes before an inactive endpoint is scaled to zero. Defaults to 15. revision (`str`, *optional*): The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). task (`str`, *optional*): The task on which to deploy the model (e.g. `"text-classification"`). custom_image (`Dict`, *optional*): A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). secrets (`Dict[str, str]`, *optional*): Secret values to inject in the container environment. type ([`InferenceEndpointType]`, *optional*): The type of the Inference Endpoint, which can be `"protected"` (default), `"public"` or `"private"`. namespace (`str`, *optional*): The namespace where the Inference Endpoint will be created. Defaults to the current user's namespace. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`InferenceEndpoint`]: information about the updated Inference Endpoint. Example: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() >>> endpoint = api.create_inference_endpoint( ... "my-endpoint-name", ... repository="gpt2", ... framework="pytorch", ... task="text-generation", ... accelerator="cpu", ... vendor="aws", ... region="us-east-1", ... type="protected", ... instance_size="x2", ... instance_type="intel-icl", ... ) >>> endpoint InferenceEndpoint(name='my-endpoint-name', status="pending",...) # Run inference on the endpoint >>> endpoint.client.text_generation(...) "..." ``` ```python # Start an Inference Endpoint running Zephyr-7b-beta on TGI >>> from huggingface_hub import HfApi >>> api = HfApi() >>> endpoint = api.create_inference_endpoint( ... "aws-zephyr-7b-beta-0486", ... repository="HuggingFaceH4/zephyr-7b-beta", ... framework="pytorch", ... task="text-generation", ... accelerator="gpu", ... vendor="aws", ... region="us-east-1", ... type="protected", ... instance_size="x1", ... instance_type="nvidia-a10g", ... custom_image={ ... "health_route": "/health", ... "env": { ... "MAX_BATCH_PREFILL_TOKENS": "2048", ... "MAX_INPUT_LENGTH": "1024", ... "MAX_TOTAL_TOKENS": "1512", ... "MODEL_ID": "/repository" ... }, ... "url": "ghcr.io/huggingface/text-generation-inference:1.1.0", ... }, ... secrets={"MY_SECRET_KEY": "secret_value"}, ... 
) ``` """ namespace = namespace or self._get_namespace(token=token) image = {"custom": custom_image} if custom_image is not None else {"huggingface": {}} payload: Dict = { "accountId": account_id, "compute": { "accelerator": accelerator, "instanceSize": instance_size, "instanceType": instance_type, "scaling": { "maxReplica": max_replica, "minReplica": min_replica, "scaleToZeroTimeout": scale_to_zero_timeout, }, }, "model": { "framework": framework, "repository": repository, "revision": revision, "task": task, "image": image, }, "name": name, "provider": { "region": region, "vendor": vendor, }, "type": type, } if secrets: payload["model"]["secrets"] = secrets response = get_session().post( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(response) return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) def get_inference_endpoint( self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None ) -> InferenceEndpoint: """Get information about an Inference Endpoint. Args: name (`str`): The name of the Inference Endpoint to retrieve information about. namespace (`str`, *optional*): The namespace in which the Inference Endpoint is located. Defaults to the current user. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`InferenceEndpoint`]: information about the requested Inference Endpoint. Example: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() >>> endpoint = api.get_inference_endpoint("my-text-to-image") >>> endpoint InferenceEndpoint(name='my-text-to-image', ...) # Get status >>> endpoint.status 'running' >>> endpoint.url 'https://my-text-to-image.region.vendor.endpoints.huggingface.cloud' # Run inference >>> endpoint.client.text_to_image(...) ``` """ namespace = namespace or self._get_namespace(token=token) response = get_session().get( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) def update_inference_endpoint( self, name: str, *, # Compute update accelerator: Optional[str] = None, instance_size: Optional[str] = None, instance_type: Optional[str] = None, min_replica: Optional[int] = None, max_replica: Optional[int] = None, scale_to_zero_timeout: Optional[int] = None, # Model update repository: Optional[str] = None, framework: Optional[str] = None, revision: Optional[str] = None, task: Optional[str] = None, custom_image: Optional[Dict] = None, secrets: Optional[Dict[str, str]] = None, # Other namespace: Optional[str] = None, token: Union[bool, str, None] = None, ) -> InferenceEndpoint: """Update an Inference Endpoint. This method allows the update of either the compute configuration, the deployed model, or both. All arguments are optional but at least one must be provided. For convenience, you can also update an Inference Endpoint using [`InferenceEndpoint.update`]. Args: name (`str`): The name of the Inference Endpoint to update. accelerator (`str`, *optional*): The hardware accelerator to be used for inference (e.g. `"cpu"`). 
instance_size (`str`, *optional*): The size or type of the instance to be used for hosting the model (e.g. `"x4"`). instance_type (`str`, *optional*): The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`). min_replica (`int`, *optional*): The minimum number of replicas (instances) to keep running for the Inference Endpoint. max_replica (`int`, *optional*): The maximum number of replicas (instances) to scale to for the Inference Endpoint. scale_to_zero_timeout (`int`, *optional*): The duration in minutes before an inactive endpoint is scaled to zero. repository (`str`, *optional*): The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`). framework (`str`, *optional*): The machine learning framework used for the model (e.g. `"custom"`). revision (`str`, *optional*): The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`). task (`str`, *optional*): The task on which to deploy the model (e.g. `"text-classification"`). custom_image (`Dict`, *optional*): A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples). secrets (`Dict[str, str]`, *optional*): Secret values to inject in the container environment. namespace (`str`, *optional*): The namespace where the Inference Endpoint will be updated. Defaults to the current user's namespace. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`InferenceEndpoint`]: information about the updated Inference Endpoint. """ namespace = namespace or self._get_namespace(token=token) # Populate only the fields that are not None payload: Dict = defaultdict(lambda: defaultdict(dict)) if accelerator is not None: payload["compute"]["accelerator"] = accelerator if instance_size is not None: payload["compute"]["instanceSize"] = instance_size if instance_type is not None: payload["compute"]["instanceType"] = instance_type if max_replica is not None: payload["compute"]["scaling"]["maxReplica"] = max_replica if min_replica is not None: payload["compute"]["scaling"]["minReplica"] = min_replica if scale_to_zero_timeout is not None: payload["compute"]["scaling"]["scaleToZeroTimeout"] = scale_to_zero_timeout if repository is not None: payload["model"]["repository"] = repository if framework is not None: payload["model"]["framework"] = framework if revision is not None: payload["model"]["revision"] = revision if task is not None: payload["model"]["task"] = task if custom_image is not None: payload["model"]["image"] = {"custom": custom_image} if secrets is not None: payload["model"]["secrets"] = secrets response = get_session().put( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}", headers=self._build_hf_headers(token=token), json=payload, ) hf_raise_for_status(response) return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) def delete_inference_endpoint( self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None ) -> None: """Delete an Inference Endpoint. This operation is not reversible. 
        If you don't want to be charged for an Inference Endpoint, it is preferable to pause it with
        [`pause_inference_endpoint`] or scale it to zero with [`scale_to_zero_inference_endpoint`].

        For convenience, you can also delete an Inference Endpoint using [`InferenceEndpoint.delete`].

        Args:
            name (`str`):
                The name of the Inference Endpoint to delete.
            namespace (`str`, *optional*):
                The namespace in which the Inference Endpoint is located. Defaults to the current user.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.
        """
        namespace = namespace or self._get_namespace(token=token)
        response = get_session().delete(
            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}",
            headers=self._build_hf_headers(token=token),
        )
        hf_raise_for_status(response)

    def pause_inference_endpoint(
        self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None
    ) -> InferenceEndpoint:
        """Pause an Inference Endpoint.

        A paused Inference Endpoint will not be charged. It can be resumed at any time using
        [`resume_inference_endpoint`]. This is different from scaling the Inference Endpoint to zero with
        [`scale_to_zero_inference_endpoint`]: a scaled-to-zero Endpoint is automatically restarted when a request is
        made to it.

        For convenience, you can also pause an Inference Endpoint using [`InferenceEndpoint.pause`].

        Args:
            name (`str`):
                The name of the Inference Endpoint to pause.
            namespace (`str`, *optional*):
                The namespace in which the Inference Endpoint is located. Defaults to the current user.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Returns:
            [`InferenceEndpoint`]: information about the paused Inference Endpoint.
        """
        namespace = namespace or self._get_namespace(token=token)
        response = get_session().post(
            f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/pause",
            headers=self._build_hf_headers(token=token),
        )
        hf_raise_for_status(response)
        return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token)

    def resume_inference_endpoint(
        self,
        name: str,
        *,
        namespace: Optional[str] = None,
        running_ok: bool = True,
        token: Union[bool, str, None] = None,
    ) -> InferenceEndpoint:
        """Resume an Inference Endpoint.

        For convenience, you can also resume an Inference Endpoint using [`InferenceEndpoint.resume`].

        Args:
            name (`str`):
                The name of the Inference Endpoint to resume.
            namespace (`str`, *optional*):
                The namespace in which the Inference Endpoint is located. Defaults to the current user.
            running_ok (`bool`, *optional*):
                If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults
                to `True`.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Returns:
            [`InferenceEndpoint`]: information about the resumed Inference Endpoint.
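        Example (a minimal sketch of resuming a paused endpoint; the endpoint name below is hypothetical):

        ```py
        >>> from huggingface_hub import HfApi
        >>> api = HfApi()
        # Resume a paused endpoint. With the default `running_ok=True`, calling this
        # on an already-running endpoint simply returns its current status.
        >>> endpoint = api.resume_inference_endpoint("my-endpoint-name")
        >>> endpoint.wait()  # optionally block until the endpoint is fully deployed
        >>> endpoint.status
        'running'
        ```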
""" namespace = namespace or self._get_namespace(token=token) response = get_session().post( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/resume", headers=self._build_hf_headers(token=token), ) try: hf_raise_for_status(response) except HfHubHTTPError as error: # If already running (and it's ok), then fetch current status and return if running_ok and error.response.status_code == 400 and "already running" in error.response.text: return self.get_inference_endpoint(name, namespace=namespace, token=token) # Otherwise, raise the error raise return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) def scale_to_zero_inference_endpoint( self, name: str, *, namespace: Optional[str] = None, token: Union[bool, str, None] = None ) -> InferenceEndpoint: """Scale Inference Endpoint to zero. An Inference Endpoint scaled to zero will not be charged. It will be resume on the next request to it, with a cold start delay. This is different than pausing the Inference Endpoint with [`pause_inference_endpoint`], which would require a manual resume with [`resume_inference_endpoint`]. For convenience, you can also scale an Inference Endpoint to zero using [`InferenceEndpoint.scale_to_zero`]. Args: name (`str`): The name of the Inference Endpoint to scale to zero. namespace (`str`, *optional*): The namespace in which the Inference Endpoint is located. Defaults to the current user. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`InferenceEndpoint`]: information about the scaled-to-zero Inference Endpoint. """ namespace = namespace or self._get_namespace(token=token) response = get_session().post( f"{constants.INFERENCE_ENDPOINTS_ENDPOINT}/endpoint/{namespace}/{name}/scale-to-zero", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) return InferenceEndpoint.from_raw(response.json(), namespace=namespace, token=token) def _get_namespace(self, token: Union[bool, str, None] = None) -> str: """Get the default namespace for the current user.""" me = self.whoami(token=token) if me["type"] == "user": return me["name"] else: raise ValueError( "Cannot determine default namespace. You must provide a 'namespace' as input or be logged in as a" " user." ) ######################## # Collection Endpoints # ######################## @validate_hf_hub_args def list_collections( self, *, owner: Union[List[str], str, None] = None, item: Union[List[str], str, None] = None, sort: Optional[Literal["lastModified", "trending", "upvotes"]] = None, limit: Optional[int] = None, token: Union[bool, str, None] = None, ) -> Iterable[Collection]: """List collections on the Huggingface Hub, given some filters. <Tip warning={true}> When listing collections, the item list per collection is truncated to 4 items maximum. To retrieve all items from a collection, you must use [`get_collection`]. </Tip> Args: owner (`List[str]` or `str`, *optional*): Filter by owner's username. item (`List[str]` or `str`, *optional*): Filter collections containing a particular items. Example: `"models/teknium/OpenHermes-2.5-Mistral-7B"`, `"datasets/squad"` or `"papers/2311.12983"`. sort (`Literal["lastModified", "trending", "upvotes"]`, *optional*): Sort collections by last modified, trending or upvotes. 
limit (`int`, *optional*): Maximum number of collections to be returned. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[Collection]`: an iterable of [`Collection`] objects. """ # Construct the API endpoint path = f"{self.endpoint}/api/collections" headers = self._build_hf_headers(token=token) params: Dict = {} if owner is not None: params.update({"owner": owner}) if item is not None: params.update({"item": item}) if sort is not None: params.update({"sort": sort}) if limit is not None: params.update({"limit": limit}) # Paginate over the results until limit is reached items = paginate(path, headers=headers, params=params) if limit is not None: items = islice(items, limit) # Do not iterate over all pages # Parse as Collection and return for position, collection_data in enumerate(items): yield Collection(position=position, **collection_data) def get_collection(self, collection_slug: str, *, token: Union[bool, str, None] = None) -> Collection: """Gets information about a Collection on the Hub. Args: collection_slug (`str`): Slug of the collection of the Hub. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`Collection`] Example: ```py >>> from huggingface_hub import get_collection >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026") >>> collection.title 'Recent models' >>> len(collection.items) 37 >>> collection.items[0] CollectionItem( item_object_id='651446103cd773a050bf64c2', item_id='TheBloke/U-Amethyst-20B-AWQ', item_type='model', position=88, note=None ) ``` """ r = get_session().get( f"{self.endpoint}/api/collections/{collection_slug}", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(r) return Collection(**{**r.json(), "endpoint": self.endpoint}) def create_collection( self, title: str, *, namespace: Optional[str] = None, description: Optional[str] = None, private: bool = False, exists_ok: bool = False, token: Union[bool, str, None] = None, ) -> Collection: """Create a new Collection on the Hub. Args: title (`str`): Title of the collection to create. Example: `"Recent models"`. namespace (`str`, *optional*): Namespace of the collection to create (username or org). Will default to the owner name. description (`str`, *optional*): Description of the collection to create. private (`bool`, *optional*): Whether the collection should be private or not. Defaults to `False` (i.e. public collection). exists_ok (`bool`, *optional*): If `True`, do not raise an error if collection already exists. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`Collection`] Example: ```py >>> from huggingface_hub import create_collection >>> collection = create_collection( ... title="ICCV 2023", ... 
description="Portfolio of models, papers and demos I presented at ICCV 2023", ... ) >>> collection.slug "username/iccv-2023-64f9a55bb3115b4f513ec026" ``` """ if namespace is None: namespace = self.whoami(token)["name"] payload = { "title": title, "namespace": namespace, "private": private, } if description is not None: payload["description"] = description r = get_session().post( f"{self.endpoint}/api/collections", headers=self._build_hf_headers(token=token), json=payload ) try: hf_raise_for_status(r) except HTTPError as err: if exists_ok and err.response.status_code == 409: # Collection already exists and `exists_ok=True` slug = r.json()["slug"] return self.get_collection(slug, token=token) else: raise return Collection(**{**r.json(), "endpoint": self.endpoint}) def update_collection_metadata( self, collection_slug: str, *, title: Optional[str] = None, description: Optional[str] = None, position: Optional[int] = None, private: Optional[bool] = None, theme: Optional[str] = None, token: Union[bool, str, None] = None, ) -> Collection: """Update metadata of a collection on the Hub. All arguments are optional. Only provided metadata will be updated. Args: collection_slug (`str`): Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. title (`str`): Title of the collection to update. description (`str`, *optional*): Description of the collection to update. position (`int`, *optional*): New position of the collection in the list of collections of the user. private (`bool`, *optional*): Whether the collection should be private or not. theme (`str`, *optional*): Theme of the collection on the Hub. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`Collection`] Example: ```py >>> from huggingface_hub import update_collection_metadata >>> collection = update_collection_metadata( ... collection_slug="username/iccv-2023-64f9a55bb3115b4f513ec026", ... title="ICCV Oct. 2023" ... description="Portfolio of models, datasets, papers and demos I presented at ICCV Oct. 2023", ... private=False, ... theme="pink", ... ) >>> collection.slug "username/iccv-oct-2023-64f9a55bb3115b4f513ec026" # ^collection slug got updated but not the trailing ID ``` """ payload = { "position": position, "private": private, "theme": theme, "title": title, "description": description, } r = get_session().patch( f"{self.endpoint}/api/collections/{collection_slug}", headers=self._build_hf_headers(token=token), # Only send not-none values to the API json={key: value for key, value in payload.items() if value is not None}, ) hf_raise_for_status(r) return Collection(**{**r.json()["data"], "endpoint": self.endpoint}) def delete_collection( self, collection_slug: str, *, missing_ok: bool = False, token: Union[bool, str, None] = None ) -> None: """Delete a collection on the Hub. Args: collection_slug (`str`): Slug of the collection to delete. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. missing_ok (`bool`, *optional*): If `True`, do not raise an error if collection doesn't exists. token (Union[bool, str, None], optional): A valid user access token (string). 
Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Example: ```py >>> from huggingface_hub import delete_collection >>> collection = delete_collection("username/useless-collection-64f9a55bb3115b4f513ec026", missing_ok=True) ``` <Tip warning={true}> This is a non-revertible action. A deleted collection cannot be restored. </Tip> """ r = get_session().delete( f"{self.endpoint}/api/collections/{collection_slug}", headers=self._build_hf_headers(token=token) ) try: hf_raise_for_status(r) except HTTPError as err: if missing_ok and err.response.status_code == 404: # Collection doesn't exists and `missing_ok=True` return else: raise def add_collection_item( self, collection_slug: str, item_id: str, item_type: CollectionItemType_T, *, note: Optional[str] = None, exists_ok: bool = False, token: Union[bool, str, None] = None, ) -> Collection: """Add an item to a collection on the Hub. Args: collection_slug (`str`): Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. item_id (`str`): ID of the item to add to the collection. It can be the ID of a repo on the Hub (e.g. `"facebook/bart-large-mnli"`) or a paper id (e.g. `"2307.09288"`). item_type (`str`): Type of the item to add. Can be one of `"model"`, `"dataset"`, `"space"` or `"paper"`. note (`str`, *optional*): A note to attach to the item in the collection. The maximum size for a note is 500 characters. exists_ok (`bool`, *optional*): If `True`, do not raise an error if item already exists. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`Collection`] Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the item you try to add to the collection does not exist on the Hub. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 409 if the item you try to add to the collection is already in the collection (and exists_ok=False) Example: ```py >>> from huggingface_hub import add_collection_item >>> collection = add_collection_item( ... collection_slug="davanstrien/climate-64f99dc2a5067f6b65531bab", ... item_id="pierre-loic/climate-news-articles", ... item_type="dataset" ... ) >>> collection.items[-1].item_id "pierre-loic/climate-news-articles" # ^item got added to the collection on last position # Add item with a note >>> add_collection_item( ... collection_slug="davanstrien/climate-64f99dc2a5067f6b65531bab", ... item_id="datasets/climate_fever", ... item_type="dataset" ... note="This dataset adopts the FEVER methodology that consists of 1,535 real-world claims regarding climate-change collected on the internet." ... ) (...) 
``` """ payload: Dict[str, Any] = {"item": {"id": item_id, "type": item_type}} if note is not None: payload["note"] = note r = get_session().post( f"{self.endpoint}/api/collections/{collection_slug}/items", headers=self._build_hf_headers(token=token), json=payload, ) try: hf_raise_for_status(r) except HTTPError as err: if exists_ok and err.response.status_code == 409: # Item already exists and `exists_ok=True` return self.get_collection(collection_slug, token=token) else: raise return Collection(**{**r.json(), "endpoint": self.endpoint}) def update_collection_item( self, collection_slug: str, item_object_id: str, *, note: Optional[str] = None, position: Optional[int] = None, token: Union[bool, str, None] = None, ) -> None: """Update an item in a collection. Args: collection_slug (`str`): Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. item_object_id (`str`): ID of the item in the collection. This is not the id of the item on the Hub (repo_id or paper id). It must be retrieved from a [`CollectionItem`] object. Example: `collection.items[0].item_object_id`. note (`str`, *optional*): A note to attach to the item in the collection. The maximum size for a note is 500 characters. position (`int`, *optional*): New position of the item in the collection. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Example: ```py >>> from huggingface_hub import get_collection, update_collection_item # Get collection first >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026") # Update item based on its ID (add note + update position) >>> update_collection_item( ... collection_slug="TheBloke/recent-models-64f9a55bb3115b4f513ec026", ... item_object_id=collection.items[-1].item_object_id, ... note="Newly updated model!" ... position=0, ... ) ``` """ payload = {"position": position, "note": note} r = get_session().patch( f"{self.endpoint}/api/collections/{collection_slug}/items/{item_object_id}", headers=self._build_hf_headers(token=token), # Only send not-none values to the API json={key: value for key, value in payload.items() if value is not None}, ) hf_raise_for_status(r) def delete_collection_item( self, collection_slug: str, item_object_id: str, *, missing_ok: bool = False, token: Union[bool, str, None] = None, ) -> None: """Delete an item from a collection. Args: collection_slug (`str`): Slug of the collection to update. Example: `"TheBloke/recent-models-64f9a55bb3115b4f513ec026"`. item_object_id (`str`): ID of the item in the collection. This is not the id of the item on the Hub (repo_id or paper id). It must be retrieved from a [`CollectionItem`] object. Example: `collection.items[0].item_object_id`. missing_ok (`bool`, *optional*): If `True`, do not raise an error if item doesn't exists. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. 
Example: ```py >>> from huggingface_hub import get_collection, delete_collection_item # Get collection first >>> collection = get_collection("TheBloke/recent-models-64f9a55bb3115b4f513ec026") # Delete item based on its ID >>> delete_collection_item( ... collection_slug="TheBloke/recent-models-64f9a55bb3115b4f513ec026", ... item_object_id=collection.items[-1].item_object_id, ... ) ``` """ r = get_session().delete( f"{self.endpoint}/api/collections/{collection_slug}/items/{item_object_id}", headers=self._build_hf_headers(token=token), ) try: hf_raise_for_status(r) except HTTPError as err: if missing_ok and err.response.status_code == 404: # Item already deleted and `missing_ok=True` return else: raise ########################## # Manage access requests # ########################## @validate_hf_hub_args def list_pending_access_requests( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> List[AccessRequest]: """ Get pending access requests for a given gated repo. A pending request means the user has requested access to the repo but the request has not been processed yet. If the approval mode is automatic, this list should be empty. Pending requests can be accepted or rejected using [`accept_access_request`] and [`reject_access_request`]. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to get access requests for. repo_type (`str`, *optional*): The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`, `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will be populated with user's answers. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 400 if the repo is not gated. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. Example: ```py >>> from huggingface_hub import list_pending_access_requests, accept_access_request # List pending requests >>> requests = list_pending_access_requests("meta-llama/Llama-2-7b") >>> len(requests) 411 >>> requests[0] [ AccessRequest( username='clem', fullname='Clem 🤗', email='***', timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc), status='pending', fields=None, ), ... ] # Accept Clem's request >>> accept_access_request("meta-llama/Llama-2-7b", "clem") ``` """ return self._list_access_requests(repo_id, "pending", repo_type=repo_type, token=token) @validate_hf_hub_args def list_accepted_access_requests( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> List[AccessRequest]: """ Get accepted access requests for a given gated repo. An accepted request means the user has requested access to the repo and the request has been accepted. The user can download any file of the repo. 
If the approval mode is automatic, this list should contains by default all requests. Accepted requests can be cancelled or rejected at any time using [`cancel_access_request`] and [`reject_access_request`]. A cancelled request will go back to the pending list while a rejected request will go to the rejected list. In both cases, the user will lose access to the repo. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to get access requests for. repo_type (`str`, *optional*): The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[AccessRequest]`: A list of [`AccessRequest`] objects. Each time contains a `username`, `email`, `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will be populated with user's answers. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 400 if the repo is not gated. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. Example: ```py >>> from huggingface_hub import list_accepted_access_requests >>> requests = list_accepted_access_requests("meta-llama/Llama-2-7b") >>> len(requests) 411 >>> requests[0] [ AccessRequest( username='clem', fullname='Clem 🤗', email='***', timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc), status='accepted', fields=None, ), ... ] ``` """ return self._list_access_requests(repo_id, "accepted", repo_type=repo_type, token=token) @validate_hf_hub_args def list_rejected_access_requests( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> List[AccessRequest]: """ Get rejected access requests for a given gated repo. A rejected request means the user has requested access to the repo and the request has been explicitly rejected by a repo owner (either you or another user from your organization). The user cannot download any file of the repo. Rejected requests can be accepted or cancelled at any time using [`accept_access_request`] and [`cancel_access_request`]. A cancelled request will go back to the pending list while an accepted request will go to the accepted list. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to get access requests for. repo_type (`str`, *optional*): The type of the repo to get access requests for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[AccessRequest]`: A list of [`AccessRequest`] objects. 
Each time contains a `username`, `email`, `status` and `timestamp` attribute. If the gated repo has a custom form, the `fields` attribute will be populated with user's answers. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 400 if the repo is not gated. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. Example: ```py >>> from huggingface_hub import list_rejected_access_requests >>> requests = list_rejected_access_requests("meta-llama/Llama-2-7b") >>> len(requests) 411 >>> requests[0] [ AccessRequest( username='clem', fullname='Clem 🤗', email='***', timestamp=datetime.datetime(2023, 11, 23, 18, 4, 53, 828000, tzinfo=datetime.timezone.utc), status='rejected', fields=None, ), ... ] ``` """ return self._list_access_requests(repo_id, "rejected", repo_type=repo_type, token=token) def _list_access_requests( self, repo_id: str, status: Literal["accepted", "rejected", "pending"], repo_type: Optional[str] = None, token: Union[bool, str, None] = None, ) -> List[AccessRequest]: if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL response = get_session().get( f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/{status}", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) return [ AccessRequest( username=request["user"]["user"], fullname=request["user"]["fullname"], email=request["user"].get("email"), status=request["status"], timestamp=parse_datetime(request["timestamp"]), fields=request.get("fields"), # only if custom fields in form ) for request in response.json() ] @validate_hf_hub_args def cancel_access_request( self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> None: """ Cancel an access request from a user for a given gated repo. A cancelled request will go back to the pending list and the user will lose access to the repo. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to cancel access request for. user (`str`): The username of the user which access request should be cancelled. repo_type (`str`, *optional*): The type of the repo to cancel access request for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 400 if the repo is not gated. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user does not exist on the Hub. 
[`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user access request cannot be found. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user access request is already in the pending list. """ self._handle_access_request(repo_id, user, "pending", repo_type=repo_type, token=token) @validate_hf_hub_args def accept_access_request( self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> None: """ Accept an access request from a user for a given gated repo. Once the request is accepted, the user will be able to download any file of the repo and access the community tab. If the approval mode is automatic, you don't have to accept requests manually. An accepted request can be cancelled or rejected at any time using [`cancel_access_request`] and [`reject_access_request`]. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to accept access request for. user (`str`): The username of the user which access request should be accepted. repo_type (`str`, *optional*): The type of the repo to accept access request for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 400 if the repo is not gated. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user does not exist on the Hub. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user access request cannot be found. [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user access request is already in the accepted list. """ self._handle_access_request(repo_id, user, "accepted", repo_type=repo_type, token=token) @validate_hf_hub_args def reject_access_request( self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> None: """ Reject an access request from a user for a given gated repo. A rejected request will go to the rejected list. The user cannot download any file of the repo. Rejected requests can be accepted or cancelled at any time using [`accept_access_request`] and [`cancel_access_request`]. A cancelled request will go back to the pending list while an accepted request will go to the accepted list. For more info about gated repos, see https://huggingface.co/docs/hub/models-gated. Args: repo_id (`str`): The id of the repo to reject access request for. user (`str`): The username of the user which access request should be rejected. repo_type (`str`, *optional*): The type of the repo to reject access request for. Must be one of `model`, `dataset` or `space`. Defaults to `model`. 
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Raises:
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 400 if the repo is not gated.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have
                `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 404 if the user does not exist on the Hub.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 404 if the user access request cannot be found.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 404 if the user access request is already in the rejected list.
        """
        self._handle_access_request(repo_id, user, "rejected", repo_type=repo_type, token=token)

    @validate_hf_hub_args
    def _handle_access_request(
        self,
        repo_id: str,
        user: str,
        status: Literal["accepted", "rejected", "pending"],
        repo_type: Optional[str] = None,
        token: Union[bool, str, None] = None,
    ) -> None:
        if repo_type not in constants.REPO_TYPES:
            raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}")
        if repo_type is None:
            repo_type = constants.REPO_TYPE_MODEL

        response = get_session().post(
            f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/handle",
            headers=self._build_hf_headers(token=token),
            json={"user": user, "status": status},
        )
        hf_raise_for_status(response)

    @validate_hf_hub_args
    def grant_access(
        self, repo_id: str, user: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None
    ) -> None:
        """
        Grant access to a user for a given gated repo.

        Granting access doesn't require the user to send an access request themselves. The user is automatically
        added to the accepted list, meaning they can download the files of the repo. You can revoke the granted
        access at any time using [`cancel_access_request`] or [`reject_access_request`].

        For more info about gated repos, see https://huggingface.co/docs/hub/models-gated.

        Args:
            repo_id (`str`):
                The id of the repo to grant access to.
            user (`str`):
                The username of the user to grant access.
            repo_type (`str`, *optional*):
                The type of the repo to grant access to. Must be one of `model`, `dataset` or `space`.
                Defaults to `model`.
            token (Union[bool, str, None], optional):
                A valid user access token (string). Defaults to the locally saved token, which is the recommended
                method for authentication (see
                https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
                To disable authentication, pass `False`.

        Raises:
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 400 if the repo is not gated.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 400 if the user already has access to the repo.
            [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError):
                HTTP 403 if you only have read-only access to the repo. This can be the case if you don't have
                `write` or `admin` role in the organization the repo belongs to or if you passed a `read` token.
[`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 if the user does not exist on the Hub. """ if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") if repo_type is None: repo_type = constants.REPO_TYPE_MODEL response = get_session().post( f"{constants.ENDPOINT}/api/{repo_type}s/{repo_id}/user-access-request/grant", headers=self._build_hf_headers(token=token), json={"user": user}, ) hf_raise_for_status(response) return response.json() ################### # Manage webhooks # ################### @validate_hf_hub_args def get_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo: """Get a webhook by its id. Args: webhook_id (`str`): The unique identifier of the webhook to get. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`WebhookInfo`]: Info about the webhook. Example: ```python >>> from huggingface_hub import get_webhook >>> webhook = get_webhook("654bbbc16f2ec14d77f109cc") >>> print(webhook) WebhookInfo( id="654bbbc16f2ec14d77f109cc", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", secret="my-secret", domains=["repo", "discussion"], disabled=False, ) ``` """ response = get_session().get( f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhook_data = response.json()["webhook"] watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] webhook = WebhookInfo( id=webhook_data["id"], url=webhook_data["url"], watched=watched_items, domains=webhook_data["domains"], secret=webhook_data.get("secret"), disabled=webhook_data["disabled"], ) return webhook @validate_hf_hub_args def list_webhooks(self, *, token: Union[bool, str, None] = None) -> List[WebhookInfo]: """List all configured webhooks. Args: token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `List[WebhookInfo]`: List of webhook info objects. 
Example: ```python >>> from huggingface_hub import list_webhooks >>> webhooks = list_webhooks() >>> len(webhooks) 2 >>> webhooks[0] WebhookInfo( id="654bbbc16f2ec14d77f109cc", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", secret="my-secret", domains=["repo", "discussion"], disabled=False, ) ``` """ response = get_session().get( f"{constants.ENDPOINT}/api/settings/webhooks", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhooks_data = response.json() return [ WebhookInfo( id=webhook["id"], url=webhook["url"], watched=[WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook["watched"]], domains=webhook["domains"], secret=webhook.get("secret"), disabled=webhook["disabled"], ) for webhook in webhooks_data ] @validate_hf_hub_args def create_webhook( self, *, url: str, watched: List[Union[Dict, WebhookWatchedItem]], domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None, secret: Optional[str] = None, token: Union[bool, str, None] = None, ) -> WebhookInfo: """Create a new webhook. Args: url (`str`): URL to send the payload to. watched (`List[WebhookWatchedItem]`): List of [`WebhookWatchedItem`] to be watched by the webhook. It can be users, orgs, models, datasets or spaces. Watched items can also be provided as plain dictionaries. domains (`List[Literal["repo", "discussion"]]`, optional): List of domains to watch. It can be "repo", "discussion" or both. secret (`str`, optional): A secret to sign the payload with. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`WebhookInfo`]: Info about the newly created webhook. Example: ```python >>> from huggingface_hub import create_webhook >>> payload = create_webhook( ... watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}], ... url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", ... domains=["repo", "discussion"], ... secret="my-secret", ... 
) >>> print(payload) WebhookInfo( id="654bbbc16f2ec14d77f109cc", url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], domains=["repo", "discussion"], secret="my-secret", disabled=False, ) ``` """ watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched] response = get_session().post( f"{constants.ENDPOINT}/api/settings/webhooks", json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret}, headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhook_data = response.json()["webhook"] watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] webhook = WebhookInfo( id=webhook_data["id"], url=webhook_data["url"], watched=watched_items, domains=webhook_data["domains"], secret=webhook_data.get("secret"), disabled=webhook_data["disabled"], ) return webhook @validate_hf_hub_args def update_webhook( self, webhook_id: str, *, url: Optional[str] = None, watched: Optional[List[Union[Dict, WebhookWatchedItem]]] = None, domains: Optional[List[constants.WEBHOOK_DOMAIN_T]] = None, secret: Optional[str] = None, token: Union[bool, str, None] = None, ) -> WebhookInfo: """Update an existing webhook. Args: webhook_id (`str`): The unique identifier of the webhook to be updated. url (`str`, optional): The URL to which the payload will be sent. watched (`List[WebhookWatchedItem]`, optional): List of items to watch. It can be users, orgs, models, datasets, or spaces. Refer to [`WebhookWatchedItem`] for more details. Watched items can also be provided as plain dictionaries. domains (`List[Literal["repo", "discussion"]]`, optional): The domains to watch. This can include "repo", "discussion", or both. secret (`str`, optional): A secret to sign the payload with, providing an additional layer of security. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`WebhookInfo`]: Info about the updated webhook. Example: ```python >>> from huggingface_hub import update_webhook >>> updated_payload = update_webhook( ... webhook_id="654bbbc16f2ec14d77f109cc", ... url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", ... watched=[{"type": "user", "name": "julien-c"}, {"type": "org", "name": "HuggingFaceH4"}], ... domains=["repo"], ... secret="my-secret", ... 
) >>> print(updated_payload) WebhookInfo( id="654bbbc16f2ec14d77f109cc", url="https://new.webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], domains=["repo"], secret="my-secret", disabled=False, ``` """ if watched is None: watched = [] watched_dicts = [asdict(item) if isinstance(item, WebhookWatchedItem) else item for item in watched] response = get_session().post( f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}", json={"watched": watched_dicts, "url": url, "domains": domains, "secret": secret}, headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhook_data = response.json()["webhook"] watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] webhook = WebhookInfo( id=webhook_data["id"], url=webhook_data["url"], watched=watched_items, domains=webhook_data["domains"], secret=webhook_data.get("secret"), disabled=webhook_data["disabled"], ) return webhook @validate_hf_hub_args def enable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo: """Enable a webhook (makes it "active"). Args: webhook_id (`str`): The unique identifier of the webhook to enable. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`WebhookInfo`]: Info about the enabled webhook. Example: ```python >>> from huggingface_hub import enable_webhook >>> enabled_webhook = enable_webhook("654bbbc16f2ec14d77f109cc") >>> enabled_webhook WebhookInfo( id="654bbbc16f2ec14d77f109cc", url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], domains=["repo", "discussion"], secret="my-secret", disabled=False, ) ``` """ response = get_session().post( f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/enable", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhook_data = response.json()["webhook"] watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] webhook = WebhookInfo( id=webhook_data["id"], url=webhook_data["url"], watched=watched_items, domains=webhook_data["domains"], secret=webhook_data.get("secret"), disabled=webhook_data["disabled"], ) return webhook @validate_hf_hub_args def disable_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> WebhookInfo: """Disable a webhook (makes it "disabled"). Args: webhook_id (`str`): The unique identifier of the webhook to disable. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: [`WebhookInfo`]: Info about the disabled webhook. 
Example: ```python >>> from huggingface_hub import disable_webhook >>> disabled_webhook = disable_webhook("654bbbc16f2ec14d77f109cc") >>> disabled_webhook WebhookInfo( id="654bbbc16f2ec14d77f109cc", url="https://webhook.site/a2176e82-5720-43ee-9e06-f91cb4c91548", watched=[WebhookWatchedItem(type="user", name="julien-c"), WebhookWatchedItem(type="org", name="HuggingFaceH4")], domains=["repo", "discussion"], secret="my-secret", disabled=True, ) ``` """ response = get_session().post( f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}/disable", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) webhook_data = response.json()["webhook"] watched_items = [WebhookWatchedItem(type=item["type"], name=item["name"]) for item in webhook_data["watched"]] webhook = WebhookInfo( id=webhook_data["id"], url=webhook_data["url"], watched=watched_items, domains=webhook_data["domains"], secret=webhook_data.get("secret"), disabled=webhook_data["disabled"], ) return webhook @validate_hf_hub_args def delete_webhook(self, webhook_id: str, *, token: Union[bool, str, None] = None) -> None: """Delete a webhook. Args: webhook_id (`str`): The unique identifier of the webhook to delete. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `None` Example: ```python >>> from huggingface_hub import delete_webhook >>> delete_webhook("654bbbc16f2ec14d77f109cc") ``` """ response = get_session().delete( f"{constants.ENDPOINT}/api/settings/webhooks/{webhook_id}", headers=self._build_hf_headers(token=token), ) hf_raise_for_status(response) ############# # Internals # ############# def _build_hf_headers( self, token: Union[bool, str, None] = None, library_name: Optional[str] = None, library_version: Optional[str] = None, user_agent: Union[Dict, str, None] = None, ) -> Dict[str, str]: """ Alias for [`build_hf_headers`] that uses the token from [`HfApi`] client when `token` is not provided. """ if token is None: # Cannot do `token = token or self.token` as token can be `False`. token = self.token return build_hf_headers( token=token, library_name=library_name or self.library_name, library_version=library_version or self.library_version, user_agent=user_agent or self.user_agent, headers=self.headers, ) def _prepare_folder_deletions( self, repo_id: str, repo_type: Optional[str], revision: Optional[str], path_in_repo: str, delete_patterns: Optional[Union[List[str], str]], token: Union[bool, str, None] = None, ) -> List[CommitOperationDelete]: """Generate the list of Delete operations for a commit to delete files from a repo. List remote files and match them against the `delete_patterns` constraints. Returns a list of [`CommitOperationDelete`] with the matching items. Note: `.gitattributes` file is essential to make a repo work properly on the Hub. This file will always be kept even if it matches the `delete_patterns` constraints. 
""" if delete_patterns is None: # If no delete patterns, no need to list and filter remote files return [] # List remote files filenames = self.list_repo_files(repo_id=repo_id, revision=revision, repo_type=repo_type, token=token) # Compute relative path in repo if path_in_repo and path_in_repo not in (".", "./"): path_in_repo = path_in_repo.strip("/") + "/" # harmonize relpath_to_abspath = { file[len(path_in_repo) :]: file for file in filenames if file.startswith(path_in_repo) } else: relpath_to_abspath = {file: file for file in filenames} # Apply filter on relative paths and return return [ CommitOperationDelete(path_in_repo=relpath_to_abspath[relpath], is_folder=False) for relpath in filter_repo_objects(relpath_to_abspath.keys(), allow_patterns=delete_patterns) if relpath_to_abspath[relpath] != ".gitattributes" ] def _prepare_upload_folder_additions( self, folder_path: Union[str, Path], path_in_repo: str, allow_patterns: Optional[Union[List[str], str]] = None, ignore_patterns: Optional[Union[List[str], str]] = None, repo_type: Optional[str] = None, token: Union[bool, str, None] = None, ) -> List[CommitOperationAdd]: """Generate the list of Add operations for a commit to upload a folder. Files not matching the `allow_patterns` (allowlist) and `ignore_patterns` (denylist) constraints are discarded. """ folder_path = Path(folder_path).expanduser().resolve() if not folder_path.is_dir(): raise ValueError(f"Provided path: '{folder_path}' is not a directory") # List files from folder relpath_to_abspath = { path.relative_to(folder_path).as_posix(): path for path in sorted(folder_path.glob("**/*")) # sorted to be deterministic if path.is_file() } # Filter files # Patterns are applied on the path relative to `folder_path`. `path_in_repo` is prefixed after the filtering. filtered_repo_objects = list( filter_repo_objects( relpath_to_abspath.keys(), allow_patterns=allow_patterns, ignore_patterns=ignore_patterns ) ) prefix = f"{path_in_repo.strip('/')}/" if path_in_repo else "" # If updating a README.md file, make sure the metadata format is valid # It's better to fail early than to fail after all the files have been hashed. if "README.md" in filtered_repo_objects: self._validate_yaml( content=relpath_to_abspath["README.md"].read_text(encoding="utf8"), repo_type=repo_type, token=token, ) if len(filtered_repo_objects) > 30: log = logger.warning if len(filtered_repo_objects) > 200 else logger.info log( "It seems you are trying to upload a large folder at once. This might take some time and then fail if " "the folder is too large. For such cases, it is recommended to upload in smaller batches or to use " "`HfApi().upload_large_folder(...)`/`huggingface-cli upload-large-folder` instead. For more details, " "check out https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#upload-a-large-folder." ) logger.info(f"Start hashing {len(filtered_repo_objects)} files.") operations = [ CommitOperationAdd( path_or_fileobj=relpath_to_abspath[relpath], # absolute path on disk path_in_repo=prefix + relpath, # "absolute" path in repo ) for relpath in filtered_repo_objects ] logger.info(f"Finished hashing {len(filtered_repo_objects)} files.") return operations def _validate_yaml(self, content: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None): """ Validate YAML from `README.md`, used before file hashing and upload. Args: content (`str`): Content of `README.md` to validate. repo_type (`str`, *optional*): The type of the repo to grant access to. 
Must be one of `model`, `dataset` or `space`. Defaults to `model`. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Raises: - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError) if YAML is invalid """ repo_type = repo_type if repo_type is not None else constants.REPO_TYPE_MODEL headers = self._build_hf_headers(token=token) response = get_session().post( f"{self.endpoint}/api/validate-yaml", json={"content": content, "repoType": repo_type}, headers=headers, ) # Handle warnings (example: empty metadata) response_content = response.json() message = "\n".join([f"- {warning.get('message')}" for warning in response_content.get("warnings", [])]) if message: warnings.warn(f"Warnings while validating metadata in README.md:\n{message}") # Raise on errors try: hf_raise_for_status(response) except BadRequestError as e: errors = response_content.get("errors", []) message = "\n".join([f"- {error.get('message')}" for error in errors]) raise ValueError(f"Invalid metadata in README.md.\n{message}") from e def get_user_overview(self, username: str, token: Union[bool, str, None] = None) -> User: """ Get an overview of a user on the Hub. Args: username (`str`): Username of the user to get an overview of. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `User`: A [`User`] object with the user's overview. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 If the user does not exist on the Hub. """ r = get_session().get( f"{constants.ENDPOINT}/api/users/{username}/overview", headers=self._build_hf_headers(token=token) ) hf_raise_for_status(r) return User(**r.json()) def list_organization_members(self, organization: str, token: Union[bool, str, None] = None) -> Iterable[User]: """ List of members of an organization on the Hub. Args: organization (`str`): Name of the organization to get the members of. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[User]`: A list of [`User`] objects with the members of the organization. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 If the organization does not exist on the Hub. """ for member in paginate( path=f"{constants.ENDPOINT}/api/organizations/{organization}/members", params={}, headers=self._build_hf_headers(token=token), ): yield User(**member) def list_user_followers(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]: """ Get the list of followers of a user on the Hub. Args: username (`str`): Username of the user to get the followers of. token (Union[bool, str, None], optional): A valid user access token (string). 
Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[User]`: A list of [`User`] objects with the followers of the user. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 If the user does not exist on the Hub. """ for follower in paginate( path=f"{constants.ENDPOINT}/api/users/{username}/followers", params={}, headers=self._build_hf_headers(token=token), ): yield User(**follower) def list_user_following(self, username: str, token: Union[bool, str, None] = None) -> Iterable[User]: """ Get the list of users followed by a user on the Hub. Args: username (`str`): Username of the user to get the users followed by. token (Union[bool, str, None], optional): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[User]`: A list of [`User`] objects with the users followed by the user. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 If the user does not exist on the Hub. """ for followed_user in paginate( path=f"{constants.ENDPOINT}/api/users/{username}/following", params={}, headers=self._build_hf_headers(token=token), ): yield User(**followed_user) def list_papers( self, *, query: Optional[str] = None, token: Union[bool, str, None] = None, ) -> Iterable[PaperInfo]: """ List daily papers on the Hugging Face Hub given a search query. Args: query (`str`, *optional*): A search query string to find papers. If provided, returns papers that match the query. token (Union[bool, str, None], *optional*): A valid user access token (string). Defaults to the locally saved token, which is the recommended method for authentication (see https://huggingface.co/docs/huggingface_hub/quick-start#authentication). To disable authentication, pass `False`. Returns: `Iterable[PaperInfo]`: an iterable of [`huggingface_hub.hf_api.PaperInfo`] objects. Example: ```python >>> from huggingface_hub import HfApi >>> api = HfApi() # List all papers with "attention" in their title >>> api.list_papers(query="attention") ``` """ path = f"{self.endpoint}/api/papers/search" params = {} if query: params["q"] = query r = get_session().get( path, params=params, headers=self._build_hf_headers(token=token), ) hf_raise_for_status(r) for paper in r.json(): yield PaperInfo(**paper) def paper_info(self, id: str) -> PaperInfo: """ Get information for a paper on the Hub. Args: id (`str`, **optional**): ArXiv id of the paper. Returns: `PaperInfo`: A `PaperInfo` object. Raises: [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError): HTTP 404 If the paper does not exist on the Hub. """ path = f"{self.endpoint}/api/papers/{id}" r = get_session().get(path) hf_raise_for_status(r) return PaperInfo(**r.json()) def auth_check( self, repo_id: str, *, repo_type: Optional[str] = None, token: Union[bool, str, None] = None ) -> None: """ Check if the provided user token has access to a specific repository on the Hugging Face Hub. This method verifies whether the user, authenticated via the provided token, has access to the specified repository. 
If the repository is not found or if the user lacks the required permissions to access it, the method raises an appropriate exception. Args: repo_id (`str`): The repository to check for access. Format should be `"user/repo_name"`. Example: `"user/my-cool-model"`. repo_type (`str`, *optional*): The type of the repository. Should be one of `"model"`, `"dataset"`, or `"space"`. If not specified, the default is `"model"`. token `(Union[bool, str, None]`, *optional*): A valid user access token. If not provided, the locally saved token will be used, which is the recommended authentication method. Set to `False` to disable authentication. Refer to: https://huggingface.co/docs/huggingface_hub/quick-start#authentication. Raises: [`~utils.RepositoryNotFoundError`]: Raised if the repository does not exist, is private, or the user does not have access. This can occur if the `repo_id` or `repo_type` is incorrect or if the repository is private but the user is not authenticated. [`~utils.GatedRepoError`]: Raised if the repository exists but is gated and the user is not authorized to access it. Example: Check if the user has access to a repository: ```python >>> from huggingface_hub import auth_check >>> from huggingface_hub.utils import GatedRepoError, RepositoryNotFoundError try: auth_check("user/my-cool-model") except GatedRepoError: # Handle gated repository error print("You do not have permission to access this gated repository.") except RepositoryNotFoundError: # Handle repository not found error print("The repository was not found or you do not have access.") ``` In this example: - If the user has access, the method completes successfully. - If the repository is gated or does not exist, appropriate exceptions are raised, allowing the user to handle them accordingly. """ headers = self._build_hf_headers(token=token) if repo_type is None: repo_type = constants.REPO_TYPE_MODEL if repo_type not in constants.REPO_TYPES: raise ValueError(f"Invalid repo type, must be one of {constants.REPO_TYPES}") path = f"{self.endpoint}/api/{repo_type}s/{repo_id}/auth-check" r = get_session().get(path, headers=headers) hf_raise_for_status(r)
class_definition
55,944
417,565
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/hf_api.py
null
74
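The access-request and webhook methods of `HfApi` above compose naturally. Below is a minimal usage sketch, not the library's documented example: the repo id `your-org/your-model`, the usernames, and the webhook URL are placeholders, and a locally saved token with `write` access to the repo is assumed.

```python
from huggingface_hub import HfApi
from huggingface_hub.errors import HfHubHTTPError

api = HfApi()  # uses the locally saved token by default

# Gated-repo access management (repo id and usernames are placeholders).
try:
    api.grant_access("your-org/your-model", "some-user")            # no prior request needed
    api.reject_access_request("your-org/your-model", "other-user")  # move an existing request to "rejected"
except HfHubHTTPError as e:
    print(f"Hub refused the operation: {e}")

# Webhook lifecycle: create, inspect, disable, delete.
webhook = api.create_webhook(
    url="https://example.com/hf-webhook",                # placeholder endpoint
    watched=[{"type": "org", "name": "your-org"}],
    domains=["repo", "discussion"],
    secret="my-secret",
)
print(webhook.id, webhook.disabled)
api.disable_webhook(webhook.id)
api.delete_webhook(webhook.id)

# Fail fast if the token cannot access a repo before doing heavier work.
api.auth_check("your-org/your-model")
```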
class CacheNotFound(Exception):
    """Exception thrown when the Hugging Face cache is not found."""

    cache_dir: Union[str, Path]

    def __init__(self, msg: str, cache_dir: Union[str, Path], *args, **kwargs):
        super().__init__(msg, *args, **kwargs)
        self.cache_dir = cache_dir
class_definition
156
451
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
75
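As a usage note, `CacheNotFound` carries the offending `cache_dir` so callers can report it. A small sketch follows, assuming the cache-scanning helper `scan_cache_dir` is one of the places that raises it (treat the exact raiser as an assumption); the path is a placeholder.

```python
from huggingface_hub import scan_cache_dir
from huggingface_hub.errors import CacheNotFound

try:
    report = scan_cache_dir(cache_dir="/tmp/does-not-exist")  # placeholder path
except CacheNotFound as e:
    # The exception keeps the directory it looked for, handy for error messages.
    print(f"No Hugging Face cache found at: {e.cache_dir}")
```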
class CorruptedCacheException(Exception):
    """Exception for any unexpected structure in the Hugging Face cache-system."""
class_definition
454
577
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
76
class LocalTokenNotFoundError(EnvironmentError): """Raised if local token is required but not found."""
class_definition
599
706
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
77
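A sketch of when this error can surface, assuming `build_hf_headers(token=True)` is one of the call sites that requires a locally saved token (that assumption is mine, not stated in the source):

```python
from huggingface_hub.errors import LocalTokenNotFoundError
from huggingface_hub.utils import build_hf_headers

try:
    # token=True means "a token is mandatory": if none is saved locally, this should raise.
    headers = build_hf_headers(token=True)
except LocalTokenNotFoundError:
    print("No token found. Run `huggingface-cli login` or set HF_TOKEN first.")
```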
class OfflineModeIsEnabled(ConnectionError): """Raised when a request is made but `HF_HUB_OFFLINE=1` is set as environment variable."""
class_definition
725
864
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
78
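A sketch of how offline mode typically manifests; whether a given call raises this exact error depends on library internals, so treat the example as illustrative rather than guaranteed behavior.

```python
import os

# Must be set before importing huggingface_hub, since the flag is read at import time.
os.environ["HF_HUB_OFFLINE"] = "1"

from huggingface_hub import HfApi
from huggingface_hub.errors import OfflineModeIsEnabled

try:
    HfApi().model_info("gpt2")
except OfflineModeIsEnabled:
    print("Offline mode is on: no HTTP request was sent.")
```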
class HfHubHTTPError(HTTPError):
    """
    HTTPError to inherit from for any custom HTTP Error raised in HF Hub.

    Any HTTPError is converted at least into a `HfHubHTTPError`. If some information is sent back by the server,
    it will be added to the error message.

    Added details:
    - Request id from the "X-Request-Id" header if it exists. If not, fall back to the "X-Amzn-Trace-Id" header if it exists.
    - Server error message from the header "X-Error-Message".
    - Server error message if we can find one in the response body.

    Example:
    ```py
        import requests
        from huggingface_hub.utils import get_session, hf_raise_for_status, HfHubHTTPError

        response = get_session().post(...)
        try:
            hf_raise_for_status(response)
        except HfHubHTTPError as e:
            print(str(e)) # formatted message
            e.request_id, e.server_message # details returned by server

            # Complete the error message with additional information once it's raised
            e.append_to_message("\n`create_commit` expects the repository to exist.")
            raise
    ```
    """

    def __init__(self, message: str, response: Optional[Response] = None, *, server_message: Optional[str] = None):
        self.request_id = (
            response.headers.get("x-request-id") or response.headers.get("X-Amzn-Trace-Id")
            if response is not None
            else None
        )
        self.server_message = server_message
        super().__init__(
            message,
            response=response,  # type: ignore [arg-type]
            request=response.request if response is not None else None,  # type: ignore [arg-type]
        )

    def append_to_message(self, additional_message: str) -> None:
        """Append additional information to the `HfHubHTTPError` initial message."""
        self.args = (self.args[0] + additional_message,) + self.args[1:]
class_definition
867
2,789
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
79
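Beyond the `hf_raise_for_status` pattern shown in the docstring, the same fields are useful when catching the error around higher-level calls. A short sketch with a placeholder repo id:

```python
from huggingface_hub import HfApi
from huggingface_hub.errors import HfHubHTTPError

try:
    HfApi().model_info("your-org/possibly-missing-model")  # placeholder repo id
except HfHubHTTPError as e:
    # request_id and server_message are filled from the response headers/body when available.
    print("Request id:", e.request_id)
    print("Server said:", e.server_message)
    raise
```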
class InferenceTimeoutError(HTTPError, TimeoutError): """Error raised when a model is unavailable or the request times out."""
class_definition
2,820
2,950
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
80
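A sketch of guarding an inference call with a client-side timeout; the model id is a placeholder and the call is illustrative only.

```python
from huggingface_hub import InferenceClient
from huggingface_hub.errors import InferenceTimeoutError

client = InferenceClient(timeout=10)  # seconds to wait for the model to respond

try:
    out = client.text_generation("Hello, my name is", model="your-org/your-model")  # placeholder
except InferenceTimeoutError:
    print("Model unavailable or request timed out; retry later or increase `timeout`.")
```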
class InferenceEndpointError(Exception): """Generic exception when dealing with Inference Endpoints."""
class_definition
2,983
3,090
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
81
class InferenceEndpointTimeoutError(InferenceEndpointError, TimeoutError): """Exception for timeouts while waiting for Inference Endpoint."""
class_definition
3,093
3,238
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
82
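A sketch of where the timeout variant typically appears, assuming `get_inference_endpoint(...).wait(timeout=...)` is the relevant call site (an assumption on my part); the endpoint name is a placeholder.

```python
from huggingface_hub import get_inference_endpoint
from huggingface_hub.errors import InferenceEndpointError, InferenceEndpointTimeoutError

endpoint = get_inference_endpoint("my-endpoint-name")  # placeholder name

try:
    endpoint.wait(timeout=300)  # block until the endpoint is running, at most 5 minutes
except InferenceEndpointTimeoutError:
    print("Endpoint did not become ready in time.")
except InferenceEndpointError as e:
    print(f"Endpoint problem: {e}")
```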
class SafetensorsParsingError(Exception): """Raised when failing to parse a safetensors file metadata. This can be the case if the file is not a safetensors file or does not respect the specification. """
class_definition
3,264
3,481
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
83
class NotASafetensorsRepoError(Exception): """Raised when a repo is not a Safetensors repo i.e. doesn't have either a `model.safetensors` or a `model.safetensors.index.json` file. """
class_definition
3,484
3,679
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
84
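These two errors are what you would typically handle around the safetensors metadata helpers. A hedged sketch, assuming `HfApi.get_safetensors_metadata` is the raiser and using a placeholder repo id:

```python
from huggingface_hub import HfApi
from huggingface_hub.errors import NotASafetensorsRepoError, SafetensorsParsingError

api = HfApi()

try:
    metadata = api.get_safetensors_metadata("your-org/your-model")  # placeholder repo id
    print(metadata)
except NotASafetensorsRepoError:
    print("Repo has neither model.safetensors nor model.safetensors.index.json.")
except SafetensorsParsingError:
    print("A safetensors file exists but its metadata could not be parsed.")
```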
class TextGenerationError(HTTPError): """Generic error raised if text-generation went wrong."""
class_definition
3,709
3,808
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
85
class ValidationError(TextGenerationError): """Server-side validation error."""
class_definition
3,846
3,929
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
86
class GenerationError(TextGenerationError): pass
class_definition
3,932
3,984
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
87
class OverloadedError(TextGenerationError): pass
class_definition
3,987
4,039
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
88
class IncompleteGenerationError(TextGenerationError): pass
class_definition
4,042
4,104
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
89
class UnknownError(TextGenerationError): pass
class_definition
4,107
4,156
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
90
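The text-generation error family above mirrors error types reported by the generation backend. A hedged sketch of handling them around `InferenceClient.text_generation`; the mapping of server errors to these classes is assumed here, not verified, and the model id is a placeholder.

```python
from huggingface_hub import InferenceClient
from huggingface_hub.errors import OverloadedError, TextGenerationError, ValidationError

client = InferenceClient()

try:
    text = client.text_generation(
        "Explain HTTP 429 in one sentence.",
        model="your-org/your-text-gen-model",  # placeholder model id
        max_new_tokens=50,
    )
except ValidationError as e:
    print(f"The server rejected the request parameters: {e}")
except OverloadedError:
    print("The model is overloaded; back off and retry.")
except TextGenerationError as e:
    print(f"Generation failed: {e}")
```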
class HFValidationError(ValueError): """Generic exception thrown by `huggingface_hub` validators. Inherits from [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError). """
class_definition
4,181
4,387
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
91
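A short sketch of a validator that raises it, assuming `validate_repo_id` from `huggingface_hub.utils` is such a validator (label that as an assumption):

```python
from huggingface_hub.errors import HFValidationError
from huggingface_hub.utils import validate_repo_id

for repo_id in ["bert-base-cased", "user/model", "too/many/parts"]:
    try:
        validate_repo_id(repo_id)
        print(f"{repo_id!r} is a valid repo_id")
    except HFValidationError as e:
        print(f"{repo_id!r} rejected: {e}")
```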
class FileMetadataError(OSError): """Error triggered when the metadata of a file on the Hub cannot be retrieved (missing ETag or commit_hash). Inherits from `OSError` for backward compatibility. """
class_definition
4,415
4,626
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
92
class RepositoryNotFoundError(HfHubHTTPError):
    """
    Raised when trying to access a hf.co URL with an invalid repository name, or with a private repo name the user
    does not have access to.

    Example:

    ```py
    >>> from huggingface_hub import model_info
    >>> model_info("<non_existent_repository>")
    (...)
    huggingface_hub.utils._errors.RepositoryNotFoundError: 401 Client Error. (Request ID: PvMw_VjBMjVdMz53WKIzP)
    Repository Not Found for url: https://huggingface.co/api/models/%3Cnon_existent_repository%3E.
    Please make sure you specified the correct `repo_id` and `repo_type`.
    If the repo is private, make sure you are authenticated.
    Invalid username or password.
    ```
    """
class_definition
4,651
5,377
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
93
class GatedRepoError(RepositoryNotFoundError):
    """
    Raised when trying to access a gated repository for which the user is not on the authorized list.

    Note: derives from `RepositoryNotFoundError` to ensure backward compatibility.

    Example:

    ```py
    >>> from huggingface_hub import model_info
    >>> model_info("<gated_repository>")
    (...)
    huggingface_hub.utils._errors.GatedRepoError: 403 Client Error. (Request ID: ViT1Bf7O_026LGSQuVqfa)
    Cannot access gated repo for url https://huggingface.co/api/models/ardent-figment/gated-model.
    Access to model ardent-figment/gated-model is restricted and you are not in the authorized list.
    Visit https://huggingface.co/ardent-figment/gated-model to ask for access.
    ```
    """
class_definition
5,380
6,147
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
94
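Because `GatedRepoError` subclasses `RepositoryNotFoundError`, the more specific except clause has to come first when handling both. A small sketch around `hf_hub_download` with a placeholder repo id:

```python
from huggingface_hub import hf_hub_download
from huggingface_hub.errors import GatedRepoError, RepositoryNotFoundError

try:
    path = hf_hub_download("your-org/your-gated-model", "config.json")  # placeholder repo id
except GatedRepoError:
    # Must be caught before RepositoryNotFoundError since it is a subclass of it.
    print("Repo is gated: request access on the Hub or pass a token that was granted access.")
except RepositoryNotFoundError:
    print("Repo does not exist, is private, or the token cannot see it.")
```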
class DisabledRepoError(HfHubHTTPError):
    """
    Raised when trying to access a repository that has been disabled by its author.

    Example:

    ```py
    >>> from huggingface_hub import dataset_info
    >>> dataset_info("laion/laion-art")
    (...)
    huggingface_hub.utils._errors.DisabledRepoError: 403 Client Error. (Request ID: Root=1-659fc3fa-3031673e0f92c71a2260dbe2;bc6f4dfb-b30a-4862-af0a-5cfe827610d8)
    Cannot access repository for url https://huggingface.co/api/datasets/laion/laion-art.
    Access to this resource is disabled.
    ```
    """
class_definition
6,150
6,717
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
95
class RevisionNotFoundError(HfHubHTTPError):
    """
    Raised when trying to access a hf.co URL with a valid repository but an invalid revision.

    Example:

    ```py
    >>> from huggingface_hub import hf_hub_download
    >>> hf_hub_download('bert-base-cased', 'config.json', revision='<non-existent-revision>')
    (...)
    huggingface_hub.utils._errors.RevisionNotFoundError: 404 Client Error. (Request ID: Mwhe_c3Kt650GcdKEFomX)
    Revision Not Found for url: https://huggingface.co/bert-base-cased/resolve/%3Cnon-existent-revision%3E/config.json.
    ```
    """
class_definition
6,739
7,318
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
96
class EntryNotFoundError(HfHubHTTPError):
    """
    Raised when trying to access a hf.co URL with a valid repository and revision but an invalid filename.

    Example:

    ```py
    >>> from huggingface_hub import hf_hub_download
    >>> hf_hub_download('bert-base-cased', '<non-existent-file>')
    (...)
    huggingface_hub.utils._errors.EntryNotFoundError: 404 Client Error. (Request ID: 53pNl6M0MxsnG5Sw8JA6x)
    Entry Not Found for url: https://huggingface.co/bert-base-cased/resolve/main/%3Cnon-existent-file%3E.
    ```
    """
class_definition
7,336
7,880
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
97
class LocalEntryNotFoundError(EntryNotFoundError, FileNotFoundError, ValueError):
    """
    Raised when trying to access a file or snapshot that is not on the disk when network is
    disabled or unavailable (connection issue). The entry may exist on the Hub.

    Note: `ValueError` type is to ensure backward compatibility.
    Note: `LocalEntryNotFoundError` derives from `HTTPError` because of `EntryNotFoundError`
          even when it is not a network issue.

    Example:

    ```py
    >>> from huggingface_hub import hf_hub_download
    >>> hf_hub_download('bert-base-cased', '<non-cached-file>', local_files_only=True)
    (...)
    huggingface_hub.utils._errors.LocalEntryNotFoundError: Cannot find the requested files in the disk cache and outgoing traffic has been disabled. To enable hf.co look-ups and downloads online, set 'local_files_only' to False.
    ```
    """

    def __init__(self, message: str):
        super().__init__(message, response=None)
class_definition
7,883
8,858
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
98
class BadRequestError(HfHubHTTPError, ValueError):
    """
    Raised by `hf_raise_for_status` when the server returns a HTTP 400 error.

    Example:

    ```py
    >>> resp = requests.post("hf.co/api/check", ...)
    >>> hf_raise_for_status(resp, endpoint_name="check")
    huggingface_hub.utils._errors.BadRequestError: Bad request for check endpoint: {details} (Request ID: XXX)
    ```
    """
class_definition
8,877
9,275
0
/Users/nielsrogge/Documents/python_projecten/huggingface_hub/src/huggingface_hub/errors.py
null
99