BAJUKA committed
Commit 1c8864c · verified · 1 Parent(s): dcc653f

Update inference code

Files changed (1)
  1. README.md +4 -2
README.md CHANGED
@@ -99,6 +99,8 @@ import torch
 from transformers import Qwen2_5_VLForConditionalGeneration, AutoTokenizer, AutoProcessor
 from PIL import Image
 import groundcua
+import io
+from urllib.request import urlopen
 
 model_name = "ServiceNow/GroundNext-7B-V0"
 
@@ -120,12 +122,12 @@ model.generation_config.do_sample = False
 model.generation_config.use_cache = True
 
 # Load and prepare image
-url = "https://huggingface.co/datasets/ServiceNow/GroundCUA/resolve/main/images/LibreOffice Writer/00c4bac63f95985ccd9a4210fa752e8a5148a5f69ecb8bcfb3e499f5a3becc0d.png"
+url = "https://huggingface.co/datasets/ServiceNow/GroundCUA/resolve/main/images/7-Zip/001f0079a489909eb94e47c2374b7bf36ab1842e314592ce30a34d18a54eb1df.png"
 image = Image.open(io.BytesIO(urlopen(url).read()))
 image, (width, height) = groundcua.prepare_image(image)
 
 # Create messages and generate
-instruction = "Click on the 'Save' icon"
+instruction = "Click on the 'File' button"
 messages = groundcua.create_messages(instruction, image, width, height)
 
 input_text = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
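For reference, here is the updated snippet assembled into one runnable piece. This is a minimal sketch, not the full README: only the imports, example URL, and instruction shown in the hunks above come from this commit, while the model-loading and generation lines (from_pretrained, the processor call, generate, decoding) fall outside the diff and are assumed to follow the standard transformers flow for Qwen2.5-VL models.

```python
# Sketch of the updated inference snippet. Lines marked "assumed" are not
# part of this diff; they follow standard transformers usage for Qwen2.5-VL.
import io
from urllib.request import urlopen

import torch
from PIL import Image
from transformers import Qwen2_5_VLForConditionalGeneration, AutoTokenizer, AutoProcessor

import groundcua  # helper module referenced by the README

model_name = "ServiceNow/GroundNext-7B-V0"

# Assumed: model/tokenizer/processor loading is not shown in this diff.
model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    model_name, torch_dtype=torch.bfloat16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
processor = AutoProcessor.from_pretrained(model_name)

model.generation_config.do_sample = False
model.generation_config.use_cache = True

# Load and prepare image (URL updated by this commit)
url = "https://huggingface.co/datasets/ServiceNow/GroundCUA/resolve/main/images/7-Zip/001f0079a489909eb94e47c2374b7bf36ab1842e314592ce30a34d18a54eb1df.png"
image = Image.open(io.BytesIO(urlopen(url).read()))
image, (width, height) = groundcua.prepare_image(image)

# Create messages and generate (instruction updated by this commit)
instruction = "Click on the 'File' button"
messages = groundcua.create_messages(instruction, image, width, height)

input_text = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)

# Assumed: generation step via the processor, also outside this diff.
inputs = processor(text=[input_text], images=[image], return_tensors="pt").to(model.device)
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```

The commit itself only adds the two missing imports (`io` and `urllib.request.urlopen`, which the image-loading line already depended on) and swaps the example screenshot and instruction from a LibreOffice Writer "Save" target to a 7-Zip "File" target.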