Kevin Hu committed · Commit 40df4d9 · 1 parent: d048400

fix bedrock issue (#2776)


### What problem does this PR solve?

Fixes #2722: `BedrockChat` passed `system=None` to the Bedrock Converse API when no system prompt was configured (and an empty system text block in the streaming path); both paths now fall back to a default system prompt.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Files changed (1):

1. rag/llm/chat_model.py (+2 −2)
```diff
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -661,7 +661,7 @@ class BedrockChat(Base):
             modelId=self.model_name,
             messages=history,
             inferenceConfig=gen_conf,
-            system=[{"text": system}] if system else None,
+            system=[{"text": (system if system else "Answer the user's message.")}]
         )
         ans = response["output"]["message"]["content"][0]["text"]
         return ans, num_tokens_from_string(ans)
@@ -676,7 +676,7 @@ class BedrockChat(Base):
             modelId=self.model_name,
             messages=history,
             inferenceConfig=gen_conf,
-            system=[{"text": system if system else ""}],
+            system=[{"text": (system if system else "Answer the user's message.")}]
         )
 
         # Extract and print the streamed response text in real-time.
```
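
For context, here is a minimal, self-contained sketch of the Converse call pattern the patched lines sit in, using boto3's `bedrock-runtime` client. The region, model ID, message history, and inference config are illustrative placeholders, not values from the PR; only the `system=[{"text": ...}]` fallback mirrors the actual fix.

```python
# Illustrative sketch only; region, model ID, and messages are assumptions.
import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

system = None  # e.g. no system prompt was configured for this assistant
history = [{"role": "user", "content": [{"text": "Hello"}]}]
gen_conf = {"temperature": 0.3, "maxTokens": 512}

# Non-streaming path (first hunk): system=None fails botocore's parameter
# validation, since "system" must be a list of content blocks, so the fix
# substitutes a default prompt instead.
response = client.converse(
    modelId="anthropic.claude-3-haiku-20240307-v1:0",
    messages=history,
    inferenceConfig=gen_conf,
    system=[{"text": (system if system else "Answer the user's message.")}],
)
print(response["output"]["message"]["content"][0]["text"])

# Streaming path (second hunk): previously sent an empty system text block;
# the PR applies the same default-prompt fallback here.
streamed = client.converse_stream(
    modelId="anthropic.claude-3-haiku-20240307-v1:0",
    messages=history,
    inferenceConfig=gen_conf,
    system=[{"text": (system if system else "Answer the user's message.")}],
)
# Extract and print the streamed response text in real time.
for event in streamed["stream"]:
    if "contentBlockDelta" in event:
        print(event["contentBlockDelta"]["delta"]["text"], end="")
```

The design point of the fix: `system` must always be a well-formed list of non-empty text blocks, so a default prompt stands in whenever the caller supplies none.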