bstraehle committed
Commit 75e5169 · verified · 1 Parent(s): 0f04474

Update app.py

Files changed (1):
  1. app.py +14 -15
app.py CHANGED

@@ -9,10 +9,10 @@ aws_secret_access_key = os.environ["AWS_SECRET_ACCESS_KEY"]
 
 config = {
     "max_tokens": 1000,
-    "model": "anthropic.claude-3-opus-20240229-v1:0", #"amazon.nova-pro-v1:0",
+    "model": "anthropic.claude-3-opus-20240229-v1:0",
     "temperature": 0,
     "top_k": 250,
-    #"top_p": 0.999,
+    "top_p": 0.999,
 }
 
 bedrock_runtime = boto3.client(
@@ -22,7 +22,7 @@ bedrock_runtime = boto3.client(
     region_name = "us-west-2"
 )
 
-def invoke(prompt, model):
+def invoke(prompt):
     if not prompt:
         raise gr.Error("Prompt is required.")
 
@@ -32,21 +32,21 @@ def invoke(prompt, model):
 
     try:
         body = {
-            #"anthropic_version": "bedrock-2023-05-31",
+            "anthropic_version": "bedrock-2023-05-31",
             "messages": [
                 {"role": "user", "content": [{"type": "text", "text": prompt}]},
             ],
             "system": "You are a honest, helpful, and harmless bot."
         }
-        model_id = model #config["model"]
-        #model_kwargs = {
-            #"max_tokens": config["max_tokens"],
-            #"stop_sequences": ["\n\nHuman"],
-            #"temperature": config["temperature"],
-            #"top_k": config["top_k"],
-            #"top_p": config["top_p"]
-        #}
-        #body.update(model_kwargs)
+        model_id = config["model"]
+        model_kwargs = {
+            "max_tokens": config["max_tokens"],
+            "stop_sequences": ["\n\nHuman"],
+            "temperature": config["temperature"],
+            "top_k": config["top_k"],
+            "top_p": config["top_p"]
+        }
+        body.update(model_kwargs)
 
         response = bedrock_runtime.invoke_model(modelId=model_id,
                                                 body=json.dumps(body))
@@ -66,8 +66,7 @@ description = """<a href='https://www.gradio.app/'>Gradio</a> UI using the <a hr
 gr.close_all()
 
 demo = gr.Interface(fn = invoke,
-                    inputs = [gr.Textbox(label = "Prompt", value = "If I dry one shirt in the sun, it takes 1 hour. How long do 3 shirts take?", lines = 1),
-                              gr.Dropdown(["arn:aws:bedrock:us-west-2:819097794827:inference-profile/us.amazon.nova-pro-v1:0", "anthropic.claude-3-opus-20240229-v1:0"], label="Model")],
+                    inputs = [gr.Textbox(label = "Prompt", value = "If I dry one shirt in the sun, it takes 1 hour. How long do 3 shirts take?", lines = 1)],
                     outputs = [gr.Textbox(label = "Completion", lines = 1)],
                     description = description)
 
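
For orientation, the sketch below restates the post-commit call pattern as standalone Python. It mirrors the "+" side of the diff (single hard-coded model, full model_kwargs merged into the request body) and then reads the completion back out of the Bedrock response. The response-parsing lines are not part of the hunks above; they are an assumption based on the standard Anthropic Messages response shape on Bedrock, not code from this commit.

# Standalone sketch of the post-commit call pattern. Mirrors the "+" side of
# the diff; the response parsing at the end is an assumption (standard
# Anthropic Messages shape on Bedrock), not code taken from this commit.
import json
import boto3

bedrock_runtime = boto3.client("bedrock-runtime", region_name="us-west-2")

config = {
    "max_tokens": 1000,
    "model": "anthropic.claude-3-opus-20240229-v1:0",
    "temperature": 0,
    "top_k": 250,
    "top_p": 0.999,
}

body = {
    "anthropic_version": "bedrock-2023-05-31",
    "messages": [
        {"role": "user", "content": [{"type": "text", "text": "Hello"}]},
    ],
    "system": "You are an honest, helpful, and harmless bot.",
    "max_tokens": config["max_tokens"],
    "stop_sequences": ["\n\nHuman"],
    "temperature": config["temperature"],
    "top_k": config["top_k"],
    "top_p": config["top_p"],
}

response = bedrock_runtime.invoke_model(modelId=config["model"],
                                        body=json.dumps(body))

# response["body"] is a botocore StreamingBody; the Messages API returns the
# completion text under content[0].text.
result = json.loads(response["body"].read())
print(result["content"][0]["text"])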