bpavlsh committed · verified
Commit 934682e · 1 Parent(s): ac5b12e

Update README.md

Files changed (1)
  1. README.md +16 -6
README.md CHANGED
@@ -39,19 +39,29 @@ login("Huggingface access token")
  model_id = "mistralai/Mistral-7B-Instruct-v0.1"
  peft_model_name="bpavlsh/Mistral-crypto-news"

- tokenizer = AutoTokenizer.from_pretrained(model_id)
+ #Choose prompt query
+ prompt_query_1="Generate a knowledge graph from cryptocurrency news:"
+ prompt_query_2="Generate summaries of cryptocurrency news and detect sentiment signals:"
+ prompt_query_3="Create a JSON representation of the summary of cryptocurrency news:"
+
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
  base_model = AutoModelForCausalLM.from_pretrained( model_id, load_in_4bit=True,
  device_map="auto", torch_dtype="auto")
  model = PeftModel.from_pretrained(base_model, peft_model_name)

- text=""" News text for analysis, from 1Kb to 10Kb """
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
+ base_model = AutoModelForCausalLM.from_pretrained( model_id, load_in_4bit=True,
+ device_map="auto", torch_dtype="auto")
+ model = PeftModel.from_pretrained(base_model, peft_model_name)

- prompt = f"""<s>[INST] <<SYS>>
- You are an expert in analyzing news for fake content, propaganda, and offensive language.
- <</SYS>>
+ text=""" News text for analysis, from 1Kb to 10Kb """

- Please analyze the following text: {text} [/INST]"""
+ prompt = f"""<s>[INST] <<SYS>>
+ You are an expert in analyzing cryptocurrency news.
+ <</SYS>>

+ {prompt_query_1}
+ {text} [/INST]"""

  inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
  output = model.generate(**inputs, max_new_tokens=1500)
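
For reference, below is a consolidated sketch of how the README example reads after this commit. It is not part of the diff itself: the imports, the login placeholder, and the final decoding step are added here for completeness, the duplicated model-loading block is collapsed into a single load, and prompt_query_1 is used as the example query. It assumes the transformers, peft, and bitsandbytes packages are installed and a CUDA GPU is available.

```python
# Minimal sketch of the post-commit snippet; not the diff itself.
from huggingface_hub import login
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

login("Huggingface access token")  # replace with your own token

model_id = "mistralai/Mistral-7B-Instruct-v0.1"
peft_model_name = "bpavlsh/Mistral-crypto-news"

# Choose one of the prompt queries introduced by this commit
prompt_query_1 = "Generate a knowledge graph from cryptocurrency news:"
prompt_query_2 = "Generate summaries of cryptocurrency news and detect sentiment signals:"
prompt_query_3 = "Create a JSON representation of the summary of cryptocurrency news:"

# Load the 4-bit base model once and attach the PEFT adapter
tokenizer = AutoTokenizer.from_pretrained(model_id)
base_model = AutoModelForCausalLM.from_pretrained(
    model_id, load_in_4bit=True, device_map="auto", torch_dtype="auto"
)
model = PeftModel.from_pretrained(base_model, peft_model_name)

text = """ News text for analysis, from 1Kb to 10Kb """

# Mistral-instruct style prompt wrapping the selected query and the news text
prompt = f"""<s>[INST] <<SYS>>
You are an expert in analyzing cryptocurrency news.
<</SYS>>

{prompt_query_1}
{text} [/INST]"""

inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
output = model.generate(**inputs, max_new_tokens=1500)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

Swapping prompt_query_1 for prompt_query_2 or prompt_query_3 switches between the knowledge-graph, summary-plus-sentiment, and JSON-summary outputs added in this change.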