
mistral-indo-7b

Mistral 7B v0.1 fine-tuned on an Indonesian instruction dataset.

Prompt template:

### Human: {Instruction}### Assistant: {response}
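
For example, the helper function in the usage section below wraps its sample instruction into this single-turn prompt (with a space before the assistant tag, as in create_instruction):

### Human: Sebutkan lima macam makanan khas Indonesia. ### Assistant: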

Example of Usage

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "sarahlintang/mistral-indo-7b"

# Load the fine-tuned model onto the GPU and its matching tokenizer.
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).to("cuda")
tokenizer = AutoTokenizer.from_pretrained(model_id)

def create_instruction(instruction):
    # Wrap the raw instruction in the prompt template the model was trained on.
    prompt = f"### Human: {instruction} ### Assistant: "
    return prompt


def generate(
        instruction,
        max_new_tokens=128,
        temperature=0.1,
        top_p=0.75,
        top_k=40,
        num_beams=4,
        **kwargs
):
    
    # Build the prompt and move the tokenized inputs to the GPU.
    prompt = create_instruction(instruction)
    inputs = tokenizer(prompt, return_tensors="pt")
    input_ids = inputs["input_ids"].to("cuda")
    attention_mask = inputs["attention_mask"].to("cuda")
    generation_config = GenerationConfig(
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        num_beams=num_beams,
        **kwargs,
    )
    with torch.no_grad():
        generation_output = model.generate(
            input_ids=input_ids,
            attention_mask=attention_mask,
            generation_config=generation_config,
            return_dict_in_generate=True,
            output_scores=True,
            max_new_tokens=max_new_tokens,
            early_stopping=True
        )
    # Decode the generated sequence and return only the assistant's reply.
    s = generation_output.sequences[0]
    output = tokenizer.decode(s, skip_special_tokens=True)
    return output.split("### Assistant:")[1].strip()

instruction = "Sebutkan lima macam makanan khas Indonesia."
print(generate(instruction))
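
Optional: a minimal loading sketch, not from the original card. Assuming the accelerate package is installed, the standard transformers arguments torch_dtype and device_map can be used to load the weights in bfloat16 (matching the BF16 checkpoint) and place layers automatically, which roughly halves the memory of a float32 load.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "sarahlintang/mistral-indo-7b"

# Load weights in bfloat16 and let accelerate distribute layers across the
# available GPU(s) and CPU.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained(model_id)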