AndreaAlessandrelli4 committed on
Commit
4473b6c
·
verified ·
1 Parent(s): da8eb3e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -7
app.py CHANGED
@@ -24,13 +24,7 @@ if torch.cuda.is_available():
24
  model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
25
  tokenizer = AutoTokenizer.from_pretrained(model_id)
26
  tokenizer.use_default_system_prompt = False
27
- query_pipeline1 = Pipeline()
28
- query_pipeline1.add_component(
29
- "text_embedder",
30
- SentenceTransformersTextEmbedder(
31
- model="intfloat/multilingual-e5-large",
32
- prefix="query:",
33
- ))
34
 
35
 
36
 
@@ -92,6 +86,13 @@ def generate(
92
  Firmati alla fine di ogni risposta '-AvvoChat'.'''})
93
  for user, assistant in chat_history:
94
  conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
 
 
 
 
 
 
 
95
  materiali = richiamo_materiali(message, alpha=1.0, n_items=5)
96
  documenti = ''
97
  for idx, d in enumerate(materiali):
 
24
  model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
25
  tokenizer = AutoTokenizer.from_pretrained(model_id)
26
  tokenizer.use_default_system_prompt = False
27
+
 
 
 
 
 
 
28
 
29
 
30
 
 
86
  Firmati alla fine di ogni risposta '-AvvoChat'.'''})
87
  for user, assistant in chat_history:
88
  conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
89
+ query_pipeline1 = Pipeline()
90
+ query_pipeline1.add_component(
91
+ "text_embedder",
92
+ SentenceTransformersTextEmbedder(
93
+ model="intfloat/multilingual-e5-large",
94
+ prefix="query:",
95
+ ))
96
  materiali = richiamo_materiali(message, alpha=1.0, n_items=5)
97
  documenti = ''
98
  for idx, d in enumerate(materiali):