alvarobartt (HF Staff) committed · verified
Commit 74b74f6 · 1 parent: 0549f55

Update handler.py

Files changed (1): handler.py (+14 -6)
handler.py CHANGED
@@ -1,5 +1,4 @@
 # Adapted from https://huggingface.co/nvidia/NVLM-D-72B#inference
-
 import math
 from typing import Any, Dict, List
 
@@ -152,15 +151,24 @@ class EndpointHandler:
         )
 
     def __call__(self, data: Dict[str, Any]) -> Dict[str, List[Any]]:
-        if "instances" not in data:
+        if "instances" in data:
+            logger.warning("Using `instances` instead of `inputs` is deprecated.")
+            data["inputs"] = data.pop("instances")
+
+        if "inputs" not in data:
             raise ValueError(
-                "The request body must contain a key 'instances' with a list of instances."
+                "The request body must contain a key 'inputs' with a list of inputs."
             )
+
         logger.debug(f"Received incoming request with {data=}")
 
         predictions = []
-        for input in data["instances"]:
+        for input in data["inputs"]:
+            if "prompt" not in input:
+                raise ValueError(
+                    "The request input body must contain a key 'prompt' with the prompt to use."
+                )
+
             generation_config = input.get("generation_config", dict(max_new_tokens=1024, do_sample=False))
 
             if "image_url" not in input:
@@ -186,4 +194,4 @@ class EndpointHandler:
             )
 
             predictions.append(response)
-        return {"predictions": predictions}
+        return {"predictions": predictions}
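This commit changes the handler's request contract: the top-level key moves from "instances" to "inputs" (with a deprecation shim that remaps the old key), and every input item must now include a "prompt". Below is a minimal sketch of a matching request, assuming the handler is served behind an HTTP Inference Endpoint; the URL, token, prompt, image URL, and the `requests` call are illustrative placeholders, and the handler's behaviour when "image_url" is missing is not shown in this diff.

# Sketch of a request body matching the validation added in this commit
# (assumption: the handler is reached over HTTP with a JSON payload).
import requests

API_URL = "https://<endpoint-name>.endpoints.huggingface.cloud"  # placeholder
HEADERS = {"Authorization": "Bearer <HF_TOKEN>"}                 # placeholder token

payload = {
    "inputs": [  # top-level key required since this commit ("instances" is deprecated)
        {
            "prompt": "Describe this image.",            # now mandatory per input item
            "image_url": "https://example.com/cat.png",  # the handler branches on its presence
            # falls back to dict(max_new_tokens=1024, do_sample=False) when omitted
            "generation_config": {"max_new_tokens": 1024, "do_sample": False},
        }
    ]
}

response = requests.post(API_URL, headers=HEADERS, json=payload)
print(response.json()["predictions"])

# The deprecated key is still accepted: the handler logs a warning and
# remaps it to "inputs" before validation.
legacy_payload = {"instances": payload["inputs"]}

Remapping "instances" to "inputs" with a warning, rather than rejecting it outright, keeps existing clients working while signalling the rename.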