|
--- |
|
language: |
|
- en |
|
tags: |
|
- ColBERT |
|
- PyLate |
|
- sentence-transformers |
|
- sentence-similarity |
|
- feature-extraction |
|
- generated_from_trainer |
|
- dataset_size:443147 |
|
- loss:Distillation |
|
base_model: artiwise-ai/modernbert-base-tr-uncased |
|
datasets: |
|
- Speedsy/msmarco-cleaned-gemini-bge-tr-uncased |
|
pipeline_tag: sentence-similarity |
|
library_name: PyLate |
|
metrics: |
|
- MaxSim_accuracy@1 |
|
- MaxSim_accuracy@3 |
|
- MaxSim_accuracy@5 |
|
- MaxSim_accuracy@10 |
|
- MaxSim_precision@1 |
|
- MaxSim_precision@3 |
|
- MaxSim_precision@5 |
|
- MaxSim_precision@10 |
|
- MaxSim_recall@1 |
|
- MaxSim_recall@3 |
|
- MaxSim_recall@5 |
|
- MaxSim_recall@10 |
|
- MaxSim_ndcg@10 |
|
- MaxSim_mrr@10 |
|
- MaxSim_map@100 |
|
model-index: |
|
- name: PyLate model based on artiwise-ai/modernbert-base-tr-uncased |
|
results: |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoDBPedia |
|
type: NanoDBPedia |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.8 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.92 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.96 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 1.0 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.8 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.6733333333333333 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.6 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.548 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.08578717061354299 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.1830130267260073 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.2593375700877878 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.39135854315858964 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.6725979752170759 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.8711111111111113 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.5248067100703537 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoFiQA2018 |
|
type: NanoFiQA2018 |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.46 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.68 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.72 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 0.72 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.46 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.3 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.22399999999999998 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.128 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.23257936507936505 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.4590714285714285 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.5128174603174602 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.5457063492063492 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.4798674129130085 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.5623333333333332 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.4143816306136937 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoHotpotQA |
|
type: NanoHotpotQA |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.9 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 1.0 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 1.0 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 1.0 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.9 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.5133333333333333 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.32799999999999996 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.16799999999999998 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.45 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.77 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.82 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.84 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.8249212341756258 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.9466666666666668 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.7682039396944715 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoMSMARCO |
|
type: NanoMSMARCO |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.46 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.62 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.7 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 0.82 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.46 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.20666666666666667 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.14 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.08199999999999999 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.46 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.62 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.7 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.82 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.6299271879198127 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.5706666666666667 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.5763825115906536 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoNQ |
|
type: NanoNQ |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.58 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.68 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.78 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 0.82 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.58 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.2333333333333333 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.16399999999999998 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.088 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.57 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.67 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.75 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.8 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.6865185478036829 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.6540238095238096 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.6518842133610925 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: py-late-information-retrieval |
|
name: Py Late Information Retrieval |
|
dataset: |
|
name: NanoSCIDOCS |
|
type: NanoSCIDOCS |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.42 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.6 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.64 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 0.8 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.42 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.2866666666666666 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.22399999999999998 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.158 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.08866666666666667 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.17766666666666667 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.2306666666666667 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.32466666666666666 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.3241741723269819 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.5367777777777778 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.24410449875234425 |
|
name: Maxsim Map@100 |
|
- task: |
|
type: pylate-custom-nano-beir |
|
name: Pylate Custom Nano BEIR |
|
dataset: |
|
name: NanoBEIR mean |
|
type: NanoBEIR_mean |
|
metrics: |
|
- type: MaxSim_accuracy@1 |
|
value: 0.6033333333333334 |
|
name: Maxsim Accuracy@1 |
|
- type: MaxSim_accuracy@3 |
|
value: 0.75 |
|
name: Maxsim Accuracy@3 |
|
- type: MaxSim_accuracy@5 |
|
value: 0.7999999999999999 |
|
name: Maxsim Accuracy@5 |
|
- type: MaxSim_accuracy@10 |
|
value: 0.8599999999999999 |
|
name: Maxsim Accuracy@10 |
|
- type: MaxSim_precision@1 |
|
value: 0.6033333333333334 |
|
name: Maxsim Precision@1 |
|
- type: MaxSim_precision@3 |
|
value: 0.3688888888888889 |
|
name: Maxsim Precision@3 |
|
- type: MaxSim_precision@5 |
|
value: 0.27999999999999997 |
|
name: Maxsim Precision@5 |
|
- type: MaxSim_precision@10 |
|
value: 0.19533333333333333 |
|
name: Maxsim Precision@10 |
|
- type: MaxSim_recall@1 |
|
value: 0.3145055337265958 |
|
name: Maxsim Recall@1 |
|
- type: MaxSim_recall@3 |
|
value: 0.4799585203273504 |
|
name: Maxsim Recall@3 |
|
- type: MaxSim_recall@5 |
|
value: 0.5454702828453192 |
|
name: Maxsim Recall@5 |
|
- type: MaxSim_recall@10 |
|
value: 0.6202885931719342 |
|
name: Maxsim Recall@10 |
|
- type: MaxSim_ndcg@10 |
|
value: 0.6030010883926978 |
|
name: Maxsim Ndcg@10 |
|
- type: MaxSim_mrr@10 |
|
value: 0.6902632275132276 |
|
name: Maxsim Mrr@10 |
|
- type: MaxSim_map@100 |
|
value: 0.5299605840137683 |
|
name: Maxsim Map@100 |
|
--- |
|
|
|
# PyLate model based on artiwise-ai/modernbert-base-tr-uncased |
|
|
|
This is a [PyLate](https://github.com/lightonai/pylate) model fine-tuned from [artiwise-ai/modernbert-base-tr-uncased](https://huggingface.co/artiwise-ai/modernbert-base-tr-uncased) on the [train](https://huggingface.co/datasets/Speedsy/msmarco-cleaned-gemini-bge-tr-uncased) dataset. It maps sentences and paragraphs to sequences of 128-dimensional dense token vectors and can be used for semantic textual similarity with the MaxSim operator.
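
For intuition, here is a minimal sketch of the MaxSim operator itself (an illustration, not PyLate's internal implementation): each query token is matched against its most similar document token, and the per-token maxima are summed.

```python
# Minimal MaxSim sketch. Assumes L2-normalized token embeddings,
# as produced by ColBERT-style encoders.
import torch

def maxsim(query_emb: torch.Tensor, doc_emb: torch.Tensor) -> torch.Tensor:
    """query_emb: (num_query_tokens, 128), doc_emb: (num_doc_tokens, 128)."""
    token_similarities = query_emb @ doc_emb.T           # (q_tokens, d_tokens)
    best_per_query_token = token_similarities.max(dim=1).values
    return best_per_query_token.sum()

# Toy example with random unit vectors
q = torch.nn.functional.normalize(torch.randn(32, 128), dim=-1)
d = torch.nn.functional.normalize(torch.randn(180, 128), dim=-1)
print(maxsim(q, d))
```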
|
|
|
## Model Details |
|
|
|
### Model Description |
|
- **Model Type:** PyLate model |
|
- **Base model:** [artiwise-ai/modernbert-base-tr-uncased](https://huggingface.co/artiwise-ai/modernbert-base-tr-uncased) <!-- at revision fe2ec5fcfd7afd1e0378d295dfd7fadfb55ea965 --> |
|
- **Document Length:** 180 tokens |
|
- **Query Length:** 32 tokens |
|
- **Output Dimensionality:** 128 dimensions per token
|
- **Similarity Function:** MaxSim |
|
- **Training Dataset:** |
|
- [train](https://huggingface.co/datasets/Speedsy/msmarco-cleaned-gemini-bge-tr-uncased) |
|
- **Language:** en |
|
<!-- - **License:** Unknown --> |
|
|
|
### Model Sources |
|
|
|
- **Documentation:** [PyLate Documentation](https://lightonai.github.io/pylate/) |
|
- **Repository:** [PyLate on GitHub](https://github.com/lightonai/pylate) |
|
- **Hugging Face:** [PyLate models on Hugging Face](https://huggingface.co/models?library=PyLate) |
|
|
|
### Full Model Architecture |
|
|
|
``` |
|
ColBERT( |
|
(0): Transformer({'max_seq_length': 179, 'do_lower_case': False}) with Transformer model: ModernBertModel |
|
(1): Dense({'in_features': 768, 'out_features': 128, 'bias': False, 'activation_function': 'torch.nn.modules.linear.Identity'}) |
|
) |
|
``` |
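
For intuition, the Dense head above amounts to a bias-free linear projection (with an identity activation) applied to every token embedding coming out of the ModernBERT backbone. A stand-alone sketch of that step, not the PyLate module itself:

```python
import torch

# Project each 768-dimensional ModernBERT token embedding down to the
# 128-dimensional ColBERT space, with no bias and no activation.
projection = torch.nn.Linear(in_features=768, out_features=128, bias=False)

token_embeddings = torch.randn(180, 768)   # one document, up to 180 tokens
colbert_embeddings = projection(token_embeddings)
print(colbert_embeddings.shape)            # torch.Size([180, 128])
```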
|
|
|
## Usage |
|
First install the PyLate library: |
|
|
|
```bash |
|
pip install -U pylate |
|
``` |
|
|
|
### Retrieval |
|
|
|
PyLate provides a streamlined interface to index and retrieve documents using ColBERT models. It relies on the Voyager HNSW index to handle document embeddings efficiently and enable fast retrieval.
|
|
|
#### Indexing documents |
|
|
|
First, load the ColBERT model and initialize the Voyager index, then encode and index your documents: |
|
|
|
```python |
|
from pylate import indexes, models, retrieve |
|
|
|
# Step 1: Load the ColBERT model (pylate_model_id should point to this model's Hugging Face repo id or a local path)
|
model = models.ColBERT( |
|
model_name_or_path=pylate_model_id, |
|
) |
|
|
|
# Step 2: Initialize the Voyager index |
|
index = indexes.Voyager( |
|
index_folder="pylate-index", |
|
index_name="index", |
|
override=True, # This overwrites the existing index if any |
|
) |
|
|
|
# Step 3: Encode the documents |
|
documents_ids = ["1", "2", "3"] |
|
documents = ["document 1 text", "document 2 text", "document 3 text"] |
|
|
|
documents_embeddings = model.encode( |
|
documents, |
|
batch_size=32, |
|
is_query=False, # Ensure that it is set to False to indicate that these are documents, not queries |
|
show_progress_bar=True, |
|
) |
|
|
|
# Step 4: Add document embeddings to the index by providing embeddings and corresponding ids |
|
index.add_documents( |
|
documents_ids=documents_ids, |
|
documents_embeddings=documents_embeddings, |
|
) |
|
``` |
|
|
|
Note that you do not have to recreate the index and encode the documents every time. Once you have created an index and added the documents, you can re-use the index later by loading it: |
|
|
|
```python |
|
# To load an index, simply instantiate it with the correct folder/name and without overriding it |
|
index = indexes.Voyager( |
|
index_folder="pylate-index", |
|
index_name="index", |
|
) |
|
``` |
|
|
|
#### Retrieving top-k documents for queries |
|
|
|
Once the documents are indexed, you can retrieve the top-k most relevant documents for a given set of queries. |
|
To do so, initialize the ColBERT retriever with the index you want to search in, encode the queries and then retrieve the top-k documents to get the ids and relevance scores of the top matches:
|
|
|
```python |
|
# Step 1: Initialize the ColBERT retriever |
|
retriever = retrieve.ColBERT(index=index) |
|
|
|
# Step 2: Encode the queries |
|
queries_embeddings = model.encode( |
|
["query for document 3", "query for document 1"], |
|
batch_size=32, |
|
    is_query=True, # Ensure that it is set to True to indicate that these are queries
|
show_progress_bar=True, |
|
) |
|
|
|
# Step 3: Retrieve top-k documents |
|
scores = retriever.retrieve( |
|
queries_embeddings=queries_embeddings, |
|
k=10, # Retrieve the top 10 matches for each query |
|
) |
|
``` |
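
With these settings, `scores` holds one result list per query: the ids of its top-10 documents together with their MaxSim relevance scores, best match first.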
|
|
|
### Reranking |
|
If you only want to use the ColBERT model to perform reranking on top of your first-stage retrieval pipeline without building an index, you can simply use the rank function and pass the queries and documents to rerank:
|
|
|
```python |
|
from pylate import rank, models |
|
|
|
queries = [ |
|
"query A", |
|
"query B", |
|
] |
|
|
|
documents = [ |
|
["document A", "document B"], |
|
["document 1", "document C", "document B"], |
|
] |
|
|
|
documents_ids = [ |
|
[1, 2], |
|
[1, 3, 2], |
|
] |
|
|
|
model = models.ColBERT( |
|
model_name_or_path=pylate_model_id, |
|
) |
|
|
|
queries_embeddings = model.encode( |
|
queries, |
|
is_query=True, |
|
) |
|
|
|
documents_embeddings = model.encode( |
|
documents, |
|
is_query=False, |
|
) |
|
|
|
reranked_documents = rank.rerank( |
|
documents_ids=documents_ids, |
|
queries_embeddings=queries_embeddings, |
|
documents_embeddings=documents_embeddings, |
|
) |
|
``` |
|
|
|
<!-- |
|
### Direct Usage (Transformers) |
|
|
|
<details><summary>Click to see the direct usage in Transformers</summary> |
|
|
|
</details> |
|
--> |
|
|
|
<!-- |
|
### Downstream Usage (Sentence Transformers) |
|
|
|
You can finetune this model on your own dataset. |
|
|
|
<details><summary>Click to expand</summary> |
|
|
|
</details> |
|
--> |
|
|
|
<!-- |
|
### Out-of-Scope Use |
|
|
|
*List how the model may foreseeably be misused and address what users ought not to do with the model.* |
|
--> |
|
|
|
## Evaluation |
|
|
|
### Metrics |
|
|
|
#### Py Late Information Retrieval |
|
* Dataset: `['NanoDBPedia', 'NanoFiQA2018', 'NanoHotpotQA', 'NanoMSMARCO', 'NanoNQ', 'NanoSCIDOCS']` |
|
* Evaluated with <code>pylate.evaluation.pylate_information_retrieval_evaluator.PyLateInformationRetrievalEvaluator</code> |
|
|
|
| Metric | NanoDBPedia | NanoFiQA2018 | NanoHotpotQA | NanoMSMARCO | NanoNQ | NanoSCIDOCS | |
|
|:--------------------|:------------|:-------------|:-------------|:------------|:-----------|:------------| |
|
| MaxSim_accuracy@1 | 0.8 | 0.46 | 0.9 | 0.46 | 0.58 | 0.42 | |
|
| MaxSim_accuracy@3 | 0.92 | 0.68 | 1.0 | 0.62 | 0.68 | 0.6 | |
|
| MaxSim_accuracy@5 | 0.96 | 0.72 | 1.0 | 0.7 | 0.78 | 0.64 | |
|
| MaxSim_accuracy@10 | 1.0 | 0.72 | 1.0 | 0.82 | 0.82 | 0.8 | |
|
| MaxSim_precision@1 | 0.8 | 0.46 | 0.9 | 0.46 | 0.58 | 0.42 | |
|
| MaxSim_precision@3 | 0.6733 | 0.3 | 0.5133 | 0.2067 | 0.2333 | 0.2867 | |
|
| MaxSim_precision@5 | 0.6 | 0.224 | 0.328 | 0.14 | 0.164 | 0.224 | |
|
| MaxSim_precision@10 | 0.548 | 0.128 | 0.168 | 0.082 | 0.088 | 0.158 | |
|
| MaxSim_recall@1 | 0.0858 | 0.2326 | 0.45 | 0.46 | 0.57 | 0.0887 | |
|
| MaxSim_recall@3 | 0.183 | 0.4591 | 0.77 | 0.62 | 0.67 | 0.1777 | |
|
| MaxSim_recall@5 | 0.2593 | 0.5128 | 0.82 | 0.7 | 0.75 | 0.2307 | |
|
| MaxSim_recall@10 | 0.3914 | 0.5457 | 0.84 | 0.82 | 0.8 | 0.3247 | |
|
| **MaxSim_ndcg@10** | **0.6726** | **0.4799** | **0.8249** | **0.6299** | **0.6865** | **0.3242** | |
|
| MaxSim_mrr@10 | 0.8711 | 0.5623 | 0.9467 | 0.5707 | 0.654 | 0.5368 | |
|
| MaxSim_map@100 | 0.5248 | 0.4144 | 0.7682 | 0.5764 | 0.6519 | 0.2441 | |
|
|
|
#### Pylate Custom Nano BEIR |
|
* Dataset: `NanoBEIR_mean` |
|
* Evaluated with <code>pylate_nano_beir_evaluator.PylateCustomNanoBEIREvaluator</code> |
|
|
|
| Metric | Value | |
|
|:--------------------|:----------| |
|
| MaxSim_accuracy@1 | 0.6033 | |
|
| MaxSim_accuracy@3 | 0.75 | |
|
| MaxSim_accuracy@5 | 0.8 | |
|
| MaxSim_accuracy@10 | 0.86 | |
|
| MaxSim_precision@1 | 0.6033 | |
|
| MaxSim_precision@3 | 0.3689 | |
|
| MaxSim_precision@5 | 0.28 | |
|
| MaxSim_precision@10 | 0.1953 | |
|
| MaxSim_recall@1 | 0.3145 | |
|
| MaxSim_recall@3 | 0.48 | |
|
| MaxSim_recall@5 | 0.5455 | |
|
| MaxSim_recall@10 | 0.6203 | |
|
| **MaxSim_ndcg@10** | **0.603** | |
|
| MaxSim_mrr@10 | 0.6903 | |
|
| MaxSim_map@100 | 0.53 | |
|
|
|
<!-- |
|
## Bias, Risks and Limitations |
|
|
|
*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.* |
|
--> |
|
|
|
<!-- |
|
### Recommendations |
|
|
|
*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.* |
|
--> |
|
|
|
## Training Details |
|
|
|
### Training Dataset |
|
|
|
#### train |
|
|
|
* Dataset: [train](https://huggingface.co/datasets/Speedsy/msmarco-cleaned-gemini-bge-tr-uncased) at [bd034f5](https://huggingface.co/datasets/Speedsy/msmarco-cleaned-gemini-bge-tr-uncased/tree/bd034f56291b3b7a7dcde55ab0bd933977cc233e) |
|
* Size: 443,147 training samples |
|
* Columns: <code>query_id</code>, <code>document_ids</code>, and <code>scores</code> |
|
* Approximate statistics based on the first 1000 samples: |
|
| | query_id | document_ids | scores | |
|
|:--------|:--------------------------------------------------------------------------------|:------------------------------------|:------------------------------------| |
|
| type | string | list | list | |
|
| details | <ul><li>min: 5 tokens</li><li>mean: 6.21 tokens</li><li>max: 8 tokens</li></ul> | <ul><li>size: 32 elements</li></ul> | <ul><li>size: 32 elements</li></ul> | |
|
* Samples: |
|
| query_id | document_ids | scores | |
|
|:---------------------|:--------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| |
|
| <code>817836</code> | <code>['2716076', '6741935', '2681109', '5562684', '3507339', ...]</code> | <code>[1.0, 0.7059561610221863, 0.21702419221401215, 0.38270196318626404, 0.20812414586544037, ...]</code> | |
|
| <code>1045170</code> | <code>['5088671', '2953295', '8783471', '4268439', '6339935', ...]</code> | <code>[1.0, 0.6493034362792969, 0.0692221149802208, 0.17963139712810516, 0.6697239875793457, ...]</code> | |
|
| <code>1069432</code> | <code>['3724008', '314949', '8657336', '7420456', '879004', ...]</code> | <code>[1.0, 0.3706032931804657, 0.3508036434650421, 0.2823200523853302, 0.17563475668430328, ...]</code> | |
|
* Loss: <code>pylate.losses.distillation.Distillation</code> |
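
Each sample therefore pairs a query with 32 candidate documents and their teacher relevance scores, and the distillation loss trains the model so that its own MaxSim scores over those candidates match the teacher's. Below is a minimal sketch of one such objective, a KL divergence between softmax-normalized score distributions; the exact formulation used by <code>pylate.losses.distillation.Distillation</code> may differ.

```python
import torch
import torch.nn.functional as F

def distillation_loss(student_scores: torch.Tensor, teacher_scores: torch.Tensor) -> torch.Tensor:
    """Both tensors have shape (batch_size, num_candidates), e.g. (8, 32).

    student_scores: MaxSim scores produced by the model being trained.
    teacher_scores: precomputed relevance scores stored in the dataset.
    """
    student_log_probs = F.log_softmax(student_scores, dim=-1)
    teacher_probs = F.softmax(teacher_scores, dim=-1)
    return F.kl_div(student_log_probs, teacher_probs, reduction="batchmean")

# Toy batch: 8 queries, 32 scored candidate documents each
loss = distillation_loss(torch.randn(8, 32), torch.rand(8, 32))
print(loss)
```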
|
|
|
### Training Hyperparameters |
|
#### Non-Default Hyperparameters |
|
|
|
- `eval_strategy`: steps |
|
- `gradient_accumulation_steps`: 2 |
|
- `learning_rate`: 3e-05 |
|
- `num_train_epochs`: 1 |
|
- `bf16`: True |
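
The overrides above map directly onto trainer arguments. A hedged sketch of setting them with sentence-transformers' `SentenceTransformerTrainingArguments` (the output directory is a placeholder, and the original training script may have been configured differently):

```python
from sentence_transformers import SentenceTransformerTrainingArguments

# Reproduces only the non-default values listed above; everything else keeps
# its library default. The output_dir value is a placeholder.
training_args = SentenceTransformerTrainingArguments(
    output_dir="output/colbert-modernbert-tr",
    eval_strategy="steps",
    gradient_accumulation_steps=2,
    learning_rate=3e-5,
    num_train_epochs=1,
    bf16=True,
)
```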
|
|
|
#### All Hyperparameters |
|
<details><summary>Click to expand</summary> |
|
|
|
- `overwrite_output_dir`: False |
|
- `do_predict`: False |
|
- `eval_strategy`: steps |
|
- `prediction_loss_only`: True |
|
- `per_device_train_batch_size`: 8 |
|
- `per_device_eval_batch_size`: 8 |
|
- `per_gpu_train_batch_size`: None |
|
- `per_gpu_eval_batch_size`: None |
|
- `gradient_accumulation_steps`: 2 |
|
- `eval_accumulation_steps`: None |
|
- `torch_empty_cache_steps`: None |
|
- `learning_rate`: 3e-05 |
|
- `weight_decay`: 0.0 |
|
- `adam_beta1`: 0.9 |
|
- `adam_beta2`: 0.999 |
|
- `adam_epsilon`: 1e-08 |
|
- `max_grad_norm`: 1.0 |
|
- `num_train_epochs`: 1 |
|
- `max_steps`: -1 |
|
- `lr_scheduler_type`: linear |
|
- `lr_scheduler_kwargs`: {} |
|
- `warmup_ratio`: 0.0 |
|
- `warmup_steps`: 0 |
|
- `log_level`: passive |
|
- `log_level_replica`: warning |
|
- `log_on_each_node`: True |
|
- `logging_nan_inf_filter`: True |
|
- `save_safetensors`: True |
|
- `save_on_each_node`: False |
|
- `save_only_model`: False |
|
- `restore_callback_states_from_checkpoint`: False |
|
- `no_cuda`: False |
|
- `use_cpu`: False |
|
- `use_mps_device`: False |
|
- `seed`: 42 |
|
- `data_seed`: None |
|
- `jit_mode_eval`: False |
|
- `use_ipex`: False |
|
- `bf16`: True |
|
- `fp16`: False |
|
- `fp16_opt_level`: O1 |
|
- `half_precision_backend`: auto |
|
- `bf16_full_eval`: False |
|
- `fp16_full_eval`: False |
|
- `tf32`: None |
|
- `local_rank`: 0 |
|
- `ddp_backend`: None |
|
- `tpu_num_cores`: None |
|
- `tpu_metrics_debug`: False |
|
- `debug`: [] |
|
- `dataloader_drop_last`: False |
|
- `dataloader_num_workers`: 0 |
|
- `dataloader_prefetch_factor`: None |
|
- `past_index`: -1 |
|
- `disable_tqdm`: False |
|
- `remove_unused_columns`: True |
|
- `label_names`: None |
|
- `load_best_model_at_end`: False |
|
- `ignore_data_skip`: False |
|
- `fsdp`: [] |
|
- `fsdp_min_num_params`: 0 |
|
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False} |
|
- `fsdp_transformer_layer_cls_to_wrap`: None |
|
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None} |
|
- `deepspeed`: None |
|
- `label_smoothing_factor`: 0.0 |
|
- `optim`: adamw_torch |
|
- `optim_args`: None |
|
- `adafactor`: False |
|
- `group_by_length`: False |
|
- `length_column_name`: length |
|
- `ddp_find_unused_parameters`: None |
|
- `ddp_bucket_cap_mb`: None |
|
- `ddp_broadcast_buffers`: False |
|
- `dataloader_pin_memory`: True |
|
- `dataloader_persistent_workers`: False |
|
- `skip_memory_metrics`: True |
|
- `use_legacy_prediction_loop`: False |
|
- `push_to_hub`: False |
|
- `resume_from_checkpoint`: None |
|
- `hub_model_id`: None |
|
- `hub_strategy`: every_save |
|
- `hub_private_repo`: None |
|
- `hub_always_push`: False |
|
- `gradient_checkpointing`: False |
|
- `gradient_checkpointing_kwargs`: None |
|
- `include_inputs_for_metrics`: False |
|
- `include_for_metrics`: [] |
|
- `eval_do_concat_batches`: True |
|
- `fp16_backend`: auto |
|
- `push_to_hub_model_id`: None |
|
- `push_to_hub_organization`: None |
|
- `mp_parameters`: |
|
- `auto_find_batch_size`: False |
|
- `full_determinism`: False |
|
- `torchdynamo`: None |
|
- `ray_scope`: last |
|
- `ddp_timeout`: 1800 |
|
- `torch_compile`: False |
|
- `torch_compile_backend`: None |
|
- `torch_compile_mode`: None |
|
- `dispatch_batches`: None |
|
- `split_batches`: None |
|
- `include_tokens_per_second`: False |
|
- `include_num_input_tokens_seen`: False |
|
- `neftune_noise_alpha`: None |
|
- `optim_target_modules`: None |
|
- `batch_eval_metrics`: False |
|
- `eval_on_start`: False |
|
- `use_liger_kernel`: False |
|
- `eval_use_gather_object`: False |
|
- `average_tokens_across_devices`: False |
|
- `prompts`: None |
|
- `batch_sampler`: batch_sampler |
|
- `multi_dataset_batch_sampler`: proportional |
|
|
|
</details> |
|
|
|
### Training Logs |
|
<details><summary>Click to expand</summary> |
|
|
|
| Epoch | Step | Training Loss | NanoDBPedia_MaxSim_ndcg@10 | NanoFiQA2018_MaxSim_ndcg@10 | NanoHotpotQA_MaxSim_ndcg@10 | NanoMSMARCO_MaxSim_ndcg@10 | NanoNQ_MaxSim_ndcg@10 | NanoSCIDOCS_MaxSim_ndcg@10 | NanoBEIR_mean_MaxSim_ndcg@10 | |
|
|:------:|:-----:|:-------------:|:--------------------------:|:---------------------------:|:---------------------------:|:--------------------------:|:---------------------:|:--------------------------:|:----------------------------:| |
|
| 0.0036 | 100 | 0.0649 | - | - | - | - | - | - | - | |
|
| 0.0072 | 200 | 0.0559 | - | - | - | - | - | - | - | |
|
| 0.0108 | 300 | 0.0518 | - | - | - | - | - | - | - | |
|
| 0.0144 | 400 | 0.051 | - | - | - | - | - | - | - | |
|
| 0.0181 | 500 | 0.0492 | 0.6421 | 0.3808 | 0.7993 | 0.5565 | 0.5826 | 0.3050 | 0.5444 | |
|
| 0.0217 | 600 | 0.0467 | - | - | - | - | - | - | - | |
|
| 0.0253 | 700 | 0.0451 | - | - | - | - | - | - | - | |
|
| 0.0289 | 800 | 0.0443 | - | - | - | - | - | - | - | |
|
| 0.0325 | 900 | 0.0443 | - | - | - | - | - | - | - | |
|
| 0.0361 | 1000 | 0.0437 | 0.6449 | 0.4015 | 0.8003 | 0.5437 | 0.6092 | 0.3134 | 0.5522 | |
|
| 0.0397 | 1100 | 0.0433 | - | - | - | - | - | - | - | |
|
| 0.0433 | 1200 | 0.0427 | - | - | - | - | - | - | - | |
|
| 0.0469 | 1300 | 0.0414 | - | - | - | - | - | - | - | |
|
| 0.0505 | 1400 | 0.0417 | - | - | - | - | - | - | - | |
|
| 0.0542 | 1500 | 0.0418 | 0.6412 | 0.4285 | 0.8154 | 0.5866 | 0.6181 | 0.3219 | 0.5686 | |
|
| 0.0578 | 1600 | 0.0404 | - | - | - | - | - | - | - | |
|
| 0.0614 | 1700 | 0.0417 | - | - | - | - | - | - | - | |
|
| 0.0650 | 1800 | 0.0407 | - | - | - | - | - | - | - | |
|
| 0.0686 | 1900 | 0.0398 | - | - | - | - | - | - | - | |
|
| 0.0722 | 2000 | 0.0401 | 0.6499 | 0.4354 | 0.8150 | 0.5610 | 0.6445 | 0.3152 | 0.5702 | |
|
| 0.0758 | 2100 | 0.0404 | - | - | - | - | - | - | - | |
|
| 0.0794 | 2200 | 0.0395 | - | - | - | - | - | - | - | |
|
| 0.0830 | 2300 | 0.0404 | - | - | - | - | - | - | - | |
|
| 0.0867 | 2400 | 0.0393 | - | - | - | - | - | - | - | |
|
| 0.0903 | 2500 | 0.0387 | 0.6571 | 0.4435 | 0.8112 | 0.5786 | 0.6809 | 0.3232 | 0.5824 | |
|
| 0.0939 | 2600 | 0.0397 | - | - | - | - | - | - | - | |
|
| 0.0975 | 2700 | 0.0393 | - | - | - | - | - | - | - | |
|
| 0.1011 | 2800 | 0.0384 | - | - | - | - | - | - | - | |
|
| 0.1047 | 2900 | 0.0382 | - | - | - | - | - | - | - | |
|
| 0.1083 | 3000 | 0.0381 | 0.6437 | 0.4751 | 0.8175 | 0.5711 | 0.6422 | 0.3203 | 0.5783 | |
|
| 0.1119 | 3100 | 0.0382 | - | - | - | - | - | - | - | |
|
| 0.1155 | 3200 | 0.0381 | - | - | - | - | - | - | - | |
|
| 0.1191 | 3300 | 0.0385 | - | - | - | - | - | - | - | |
|
| 0.1228 | 3400 | 0.0374 | - | - | - | - | - | - | - | |
|
| 0.1264 | 3500 | 0.0382 | 0.6437 | 0.4833 | 0.8282 | 0.5955 | 0.6436 | 0.3190 | 0.5856 | |
|
| 0.1300 | 3600 | 0.0365 | - | - | - | - | - | - | - | |
|
| 0.1336 | 3700 | 0.0379 | - | - | - | - | - | - | - | |
|
| 0.1372 | 3800 | 0.0376 | - | - | - | - | - | - | - | |
|
| 0.1408 | 3900 | 0.0376 | - | - | - | - | - | - | - | |
|
| 0.1444 | 4000 | 0.0378 | 0.6511 | 0.4760 | 0.8151 | 0.5806 | 0.6874 | 0.3140 | 0.5874 | |
|
| 0.1480 | 4100 | 0.0365 | - | - | - | - | - | - | - | |
|
| 0.1516 | 4200 | 0.0362 | - | - | - | - | - | - | - | |
|
| 0.1553 | 4300 | 0.0374 | - | - | - | - | - | - | - | |
|
| 0.1589 | 4400 | 0.0359 | - | - | - | - | - | - | - | |
|
| 0.1625 | 4500 | 0.0368 | 0.6530 | 0.4458 | 0.8122 | 0.6101 | 0.6896 | 0.3174 | 0.5880 | |
|
| 0.1661 | 4600 | 0.0356 | - | - | - | - | - | - | - | |
|
| 0.1697 | 4700 | 0.0364 | - | - | - | - | - | - | - | |
|
| 0.1733 | 4800 | 0.0352 | - | - | - | - | - | - | - | |
|
| 0.1769 | 4900 | 0.0357 | - | - | - | - | - | - | - | |
|
| 0.1805 | 5000 | 0.0366 | 0.6611 | 0.4680 | 0.8152 | 0.6260 | 0.6715 | 0.3252 | 0.5945 | |
|
| 0.1841 | 5100 | 0.0358 | - | - | - | - | - | - | - | |
|
| 0.1877 | 5200 | 0.0366 | - | - | - | - | - | - | - | |
|
| 0.1914 | 5300 | 0.0348 | - | - | - | - | - | - | - | |
|
| 0.1950 | 5400 | 0.036 | - | - | - | - | - | - | - | |
|
| 0.1986 | 5500 | 0.0337 | 0.6595 | 0.4823 | 0.8162 | 0.6241 | 0.6620 | 0.3216 | 0.5943 | |
|
| 0.2022 | 5600 | 0.0347 | - | - | - | - | - | - | - | |
|
| 0.2058 | 5700 | 0.0361 | - | - | - | - | - | - | - | |
|
| 0.2094 | 5800 | 0.0356 | - | - | - | - | - | - | - | |
|
| 0.2130 | 5900 | 0.0359 | - | - | - | - | - | - | - | |
|
| 0.2166 | 6000 | 0.0359 | 0.6560 | 0.4820 | 0.8121 | 0.6457 | 0.6587 | 0.3181 | 0.5954 | |
|
| 0.2202 | 6100 | 0.0347 | - | - | - | - | - | - | - | |
|
| 0.2239 | 6200 | 0.0355 | - | - | - | - | - | - | - | |
|
| 0.2275 | 6300 | 0.0356 | - | - | - | - | - | - | - | |
|
| 0.2311 | 6400 | 0.0351 | - | - | - | - | - | - | - | |
|
| 0.2347 | 6500 | 0.0351 | 0.6650 | 0.4658 | 0.8291 | 0.6167 | 0.6742 | 0.3146 | 0.5942 | |
|
| 0.2383 | 6600 | 0.0361 | - | - | - | - | - | - | - | |
|
| 0.2419 | 6700 | 0.0352 | - | - | - | - | - | - | - | |
|
| 0.2455 | 6800 | 0.0358 | - | - | - | - | - | - | - | |
|
| 0.2491 | 6900 | 0.0339 | - | - | - | - | - | - | - | |
|
| 0.2527 | 7000 | 0.0345 | 0.6600 | 0.4700 | 0.8413 | 0.6449 | 0.6862 | 0.3163 | 0.6031 | |
|
| 0.2563 | 7100 | 0.0347 | - | - | - | - | - | - | - | |
|
| 0.2600 | 7200 | 0.0346 | - | - | - | - | - | - | - | |
|
| 0.2636 | 7300 | 0.0342 | - | - | - | - | - | - | - | |
|
| 0.2672 | 7400 | 0.0346 | - | - | - | - | - | - | - | |
|
| 0.2708 | 7500 | 0.0339 | 0.6583 | 0.4792 | 0.8295 | 0.6257 | 0.6788 | 0.3204 | 0.5986 | |
|
| 0.2744 | 7600 | 0.0344 | - | - | - | - | - | - | - | |
|
| 0.2780 | 7700 | 0.0323 | - | - | - | - | - | - | - | |
|
| 0.2816 | 7800 | 0.0333 | - | - | - | - | - | - | - | |
|
| 0.2852 | 7900 | 0.0334 | - | - | - | - | - | - | - | |
|
| 0.2888 | 8000 | 0.0333 | 0.6633 | 0.4660 | 0.8257 | 0.6251 | 0.6847 | 0.3229 | 0.5979 | |
|
| 0.2925 | 8100 | 0.0337 | - | - | - | - | - | - | - | |
|
| 0.2961 | 8200 | 0.0339 | - | - | - | - | - | - | - | |
|
| 0.2997 | 8300 | 0.0332 | - | - | - | - | - | - | - | |
|
| 0.3033 | 8400 | 0.0334 | - | - | - | - | - | - | - | |
|
| 0.3069 | 8500 | 0.0334 | 0.6744 | 0.4791 | 0.8204 | 0.6139 | 0.6654 | 0.3130 | 0.5944 | |
|
| 0.3105 | 8600 | 0.032 | - | - | - | - | - | - | - | |
|
| 0.3141 | 8700 | 0.0342 | - | - | - | - | - | - | - | |
|
| 0.3177 | 8800 | 0.0337 | - | - | - | - | - | - | - | |
|
| 0.3213 | 8900 | 0.0343 | - | - | - | - | - | - | - | |
|
| 0.3249 | 9000 | 0.0342 | 0.6643 | 0.4395 | 0.8270 | 0.6252 | 0.6828 | 0.3146 | 0.5922 | |
|
| 0.3286 | 9100 | 0.0332 | - | - | - | - | - | - | - | |
|
| 0.3322 | 9200 | 0.0337 | - | - | - | - | - | - | - | |
|
| 0.3358 | 9300 | 0.033 | - | - | - | - | - | - | - | |
|
| 0.3394 | 9400 | 0.0327 | - | - | - | - | - | - | - | |
|
| 0.3430 | 9500 | 0.0332 | 0.6676 | 0.4530 | 0.8400 | 0.6220 | 0.6753 | 0.3139 | 0.5953 | |
|
| 0.3466 | 9600 | 0.0315 | - | - | - | - | - | - | - | |
|
| 0.3502 | 9700 | 0.033 | - | - | - | - | - | - | - | |
|
| 0.3538 | 9800 | 0.0331 | - | - | - | - | - | - | - | |
|
| 0.3574 | 9900 | 0.0341 | - | - | - | - | - | - | - | |
|
| 0.3610 | 10000 | 0.0327 | 0.6602 | 0.4887 | 0.8308 | 0.6267 | 0.6806 | 0.3241 | 0.6018 | |
|
| 0.3647 | 10100 | 0.0338 | - | - | - | - | - | - | - | |
|
| 0.3683 | 10200 | 0.0327 | - | - | - | - | - | - | - | |
|
| 0.3719 | 10300 | 0.0325 | - | - | - | - | - | - | - | |
|
| 0.3755 | 10400 | 0.0342 | - | - | - | - | - | - | - | |
|
| 0.3791 | 10500 | 0.034 | 0.6659 | 0.4723 | 0.8313 | 0.6156 | 0.6803 | 0.3240 | 0.5982 | |
|
| 0.3827 | 10600 | 0.0323 | - | - | - | - | - | - | - | |
|
| 0.3863 | 10700 | 0.0329 | - | - | - | - | - | - | - | |
|
| 0.3899 | 10800 | 0.0328 | - | - | - | - | - | - | - | |
|
| 0.3935 | 10900 | 0.0324 | - | - | - | - | - | - | - | |
|
| 0.3972 | 11000 | 0.0321 | 0.6628 | 0.4937 | 0.8340 | 0.6373 | 0.6945 | 0.3268 | 0.6082 | |
|
| 0.4008 | 11100 | 0.0329 | - | - | - | - | - | - | - | |
|
| 0.4044 | 11200 | 0.0329 | - | - | - | - | - | - | - | |
|
| 0.4080 | 11300 | 0.0325 | - | - | - | - | - | - | - | |
|
| 0.4116 | 11400 | 0.0321 | - | - | - | - | - | - | - | |
|
| 0.4152 | 11500 | 0.0325 | 0.6617 | 0.4698 | 0.8419 | 0.6231 | 0.6853 | 0.3191 | 0.6002 | |
|
| 0.4188 | 11600 | 0.0327 | - | - | - | - | - | - | - | |
|
| 0.4224 | 11700 | 0.0327 | - | - | - | - | - | - | - | |
|
| 0.4260 | 11800 | 0.0326 | - | - | - | - | - | - | - | |
|
| 0.4296 | 11900 | 0.0329 | - | - | - | - | - | - | - | |
|
| 0.4333 | 12000 | 0.0332 | 0.6559 | 0.4860 | 0.8324 | 0.6160 | 0.6966 | 0.3219 | 0.6015 | |
|
| 0.4369 | 12100 | 0.0323 | - | - | - | - | - | - | - | |
|
| 0.4405 | 12200 | 0.0327 | - | - | - | - | - | - | - | |
|
| 0.4441 | 12300 | 0.0321 | - | - | - | - | - | - | - | |
|
| 0.4477 | 12400 | 0.0321 | - | - | - | - | - | - | - | |
|
| 0.4513 | 12500 | 0.0319 | 0.6630 | 0.4877 | 0.8310 | 0.6197 | 0.6943 | 0.3296 | 0.6042 | |
|
| 0.4549 | 12600 | 0.0326 | - | - | - | - | - | - | - | |
|
| 0.4585 | 12700 | 0.032 | - | - | - | - | - | - | - | |
|
| 0.4621 | 12800 | 0.032 | - | - | - | - | - | - | - | |
|
| 0.4658 | 12900 | 0.0302 | - | - | - | - | - | - | - | |
|
| 0.4694 | 13000 | 0.0311 | 0.6687 | 0.4726 | 0.8305 | 0.6191 | 0.6929 | 0.3233 | 0.6012 | |
|
| 0.4730 | 13100 | 0.0321 | - | - | - | - | - | - | - | |
|
| 0.4766 | 13200 | 0.0318 | - | - | - | - | - | - | - | |
|
| 0.4802 | 13300 | 0.032 | - | - | - | - | - | - | - | |
|
| 0.4838 | 13400 | 0.0315 | - | - | - | - | - | - | - | |
|
| 0.4874 | 13500 | 0.0317 | 0.6628 | 0.4781 | 0.8257 | 0.6153 | 0.6795 | 0.3172 | 0.5964 | |
|
| 0.4910 | 13600 | 0.0316 | - | - | - | - | - | - | - | |
|
| 0.4946 | 13700 | 0.0335 | - | - | - | - | - | - | - | |
|
| 0.4982 | 13800 | 0.0313 | - | - | - | - | - | - | - | |
|
| 0.5019 | 13900 | 0.0317 | - | - | - | - | - | - | - | |
|
| 0.5055 | 14000 | 0.0321 | 0.6579 | 0.4676 | 0.8351 | 0.6088 | 0.6774 | 0.3211 | 0.5946 | |
|
| 0.5091 | 14100 | 0.0318 | - | - | - | - | - | - | - | |
|
| 0.5127 | 14200 | 0.0328 | - | - | - | - | - | - | - | |
|
| 0.5163 | 14300 | 0.0307 | - | - | - | - | - | - | - | |
|
| 0.5199 | 14400 | 0.0326 | - | - | - | - | - | - | - | |
|
| 0.5235 | 14500 | 0.0322 | 0.6558 | 0.5042 | 0.8344 | 0.6093 | 0.6963 | 0.3244 | 0.6041 | |
|
| 0.5271 | 14600 | 0.0321 | - | - | - | - | - | - | - | |
|
| 0.5307 | 14700 | 0.0308 | - | - | - | - | - | - | - | |
|
| 0.5344 | 14800 | 0.0315 | - | - | - | - | - | - | - | |
|
| 0.5380 | 14900 | 0.0324 | - | - | - | - | - | - | - | |
|
| 0.5416 | 15000 | 0.0305 | 0.6598 | 0.4898 | 0.8402 | 0.6081 | 0.6945 | 0.3207 | 0.6022 | |
|
| 0.5452 | 15100 | 0.0324 | - | - | - | - | - | - | - | |
|
| 0.5488 | 15200 | 0.0315 | - | - | - | - | - | - | - | |
|
| 0.5524 | 15300 | 0.0311 | - | - | - | - | - | - | - | |
|
| 0.5560 | 15400 | 0.0317 | - | - | - | - | - | - | - | |
|
| 0.5596 | 15500 | 0.0309 | 0.6541 | 0.4770 | 0.8309 | 0.6234 | 0.6946 | 0.3282 | 0.6014 | |
|
| 0.5632 | 15600 | 0.0322 | - | - | - | - | - | - | - | |
|
| 0.5668 | 15700 | 0.0314 | - | - | - | - | - | - | - | |
|
| 0.5705 | 15800 | 0.0312 | - | - | - | - | - | - | - | |
|
| 0.5741 | 15900 | 0.0301 | - | - | - | - | - | - | - | |
|
| 0.5777 | 16000 | 0.0316 | 0.6699 | 0.4869 | 0.8348 | 0.6061 | 0.7020 | 0.3182 | 0.6030 | |
|
| 0.5813 | 16100 | 0.0309 | - | - | - | - | - | - | - | |
|
| 0.5849 | 16200 | 0.0297 | - | - | - | - | - | - | - | |
|
| 0.5885 | 16300 | 0.0319 | - | - | - | - | - | - | - | |
|
| 0.5921 | 16400 | 0.0305 | - | - | - | - | - | - | - | |
|
| 0.5957 | 16500 | 0.0309 | 0.6725 | 0.4863 | 0.8270 | 0.6131 | 0.6957 | 0.3254 | 0.6033 | |
|
| 0.5993 | 16600 | 0.0312 | - | - | - | - | - | - | - | |
|
| 0.6030 | 16700 | 0.0305 | - | - | - | - | - | - | - | |
|
| 0.6066 | 16800 | 0.0306 | - | - | - | - | - | - | - | |
|
| 0.6102 | 16900 | 0.0314 | - | - | - | - | - | - | - | |
|
| 0.6138 | 17000 | 0.0308 | 0.6720 | 0.4886 | 0.8269 | 0.6115 | 0.6809 | 0.3239 | 0.6006 | |
|
| 0.6174 | 17100 | 0.0307 | - | - | - | - | - | - | - | |
|
| 0.6210 | 17200 | 0.03 | - | - | - | - | - | - | - | |
|
| 0.6246 | 17300 | 0.0315 | - | - | - | - | - | - | - | |
|
| 0.6282 | 17400 | 0.0304 | - | - | - | - | - | - | - | |
|
| 0.6318 | 17500 | 0.0313 | 0.6646 | 0.4817 | 0.8216 | 0.6176 | 0.6967 | 0.3257 | 0.6013 | |
|
| 0.6354 | 17600 | 0.03 | - | - | - | - | - | - | - | |
|
| 0.6391 | 17700 | 0.0323 | - | - | - | - | - | - | - | |
|
| 0.6427 | 17800 | 0.0311 | - | - | - | - | - | - | - | |
|
| 0.6463 | 17900 | 0.0295 | - | - | - | - | - | - | - | |
|
| 0.6499 | 18000 | 0.0307 | 0.6726 | 0.4799 | 0.8249 | 0.6299 | 0.6865 | 0.3242 | 0.6030 | |
|
|
|
</details> |
|
|
|
### Framework Versions |
|
- Python: 3.11.12 |
|
- Sentence Transformers: 4.0.2 |
|
- PyLate: 1.2.0 |
|
- Transformers: 4.48.2 |
|
- PyTorch: 2.6.0+cu124 |
|
- Accelerate: 1.6.0 |
|
- Datasets: 3.6.0 |
|
- Tokenizers: 0.21.1 |
|
|
|
|
|
## Citation |
|
|
|
### BibTeX |
|
|
|
#### Sentence Transformers |
|
```bibtex |
|
@inproceedings{reimers-2019-sentence-bert, |
|
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks", |
|
author = "Reimers, Nils and Gurevych, Iryna", |
|
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing", |
|
month = "11", |
|
year = "2019", |
|
publisher = "Association for Computational Linguistics", |
|
url = "https://arxiv.org/abs/1908.10084" |
|
} |
|
``` |
|
|
|
#### PyLate |
|
```bibtex |
|
@misc{PyLate, |
|
title={PyLate: Flexible Training and Retrieval for Late Interaction Models}, |
|
author={Chaffin, Antoine and Sourty, Raphaël}, |
|
url={https://github.com/lightonai/pylate}, |
|
year={2024} |
|
} |
|
``` |
|
|
|
<!-- |
|
## Glossary |
|
|
|
*Clearly define terms in order to be accessible across audiences.* |
|
--> |
|
|
|
<!-- |
|
## Model Card Authors |
|
|
|
*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.* |
|
--> |
|
|
|
<!-- |
|
## Model Card Contact |
|
|
|
*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.* |
|
--> |