zhichyu committed on
Commit
be98b1d
·
1 Parent(s): 1a8a214

Rename page_num_list, top_list, position_list (#3940)

Browse files

### What problem does this PR solve?

Rename page_num_list, top_list, position_list to page_num_int, top_int,
position_int

### Type of change

- [x] Refactoring

Dockerfile CHANGED
@@ -34,9 +34,15 @@ RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps
34
  cp /deps/cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
35
 
36
  ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.0.0.jar"
 
37
 
38
  # Setup apt
39
- # cv2 requires libGL.so.1
 
 
 
 
 
40
  RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
41
  if [ "$NEED_MIRROR" == "1" ]; then \
42
  sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
@@ -47,8 +53,12 @@ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
47
  apt update && \
48
  apt --no-install-recommends install -y ca-certificates && \
49
  apt update && \
50
- DEBIAN_FRONTEND=noninteractive apt install -y curl libpython3-dev nginx libglib2.0-0 libglx-mesa0 pkg-config libicu-dev libgdiplus default-jdk python3-pip pipx \
51
- libatk-bridge2.0-0 libgtk-4-1 libnss3 xdg-utils unzip libgbm-dev wget git nginx libgl1 vim less
 
 
 
 
52
 
53
  RUN if [ "$NEED_MIRROR" == "1" ]; then \
54
  pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
 
34
  cp /deps/cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
35
 
36
  ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.0.0.jar"
37
+ ENV DEBIAN_FRONTEND=noninteractive
38
 
39
  # Setup apt
40
+ # Python package and implicit dependencies:
41
+ # opencv-python: libglib2.0-0 libglx-mesa0 libgl1
42
+ # aspose-slides: pkg-config libicu-dev libgdiplus libssl1.1_1.1.1f-1ubuntu2_amd64.deb
43
+ # python-pptx: default-jdk tika-server-standard-3.0.0.jar
44
+ # selenium: libatk-bridge2.0-0 chrome-linux64-121-0-6167-85
45
+ # Building C extensions: libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev
46
  RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
47
  if [ "$NEED_MIRROR" == "1" ]; then \
48
  sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
 
53
  apt update && \
54
  apt --no-install-recommends install -y ca-certificates && \
55
  apt update && \
56
+ apt install -y libglib2.0-0 libglx-mesa0 libgl1 && \
57
+ apt install -y pkg-config libicu-dev libgdiplus && \
58
+ apt install -y default-jdk && \
59
+ apt install -y libatk-bridge2.0-0 && \
60
+ apt install -y libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev && \
61
+ apt install -y python3-pip pipx nginx unzip curl wget git vim less
62
 
63
  RUN if [ "$NEED_MIRROR" == "1" ]; then \
64
  pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
api/apps/chunk_app.py CHANGED
@@ -71,7 +71,7 @@ def list_chunk():
71
  "question_kwd": sres.field[id].get("question_kwd", []),
72
  "image_id": sres.field[id].get("img_id", ""),
73
  "available_int": int(sres.field[id].get("available_int", 1)),
74
- "positions": json.loads(sres.field[id].get("position_list", "[]")),
75
  }
76
  assert isinstance(d["positions"], list)
77
  assert len(d["positions"]) == 0 or (isinstance(d["positions"][0], list) and len(d["positions"][0]) == 5)
 
71
  "question_kwd": sres.field[id].get("question_kwd", []),
72
  "image_id": sres.field[id].get("img_id", ""),
73
  "available_int": int(sres.field[id].get("available_int", 1)),
74
+ "positions": sres.field[id].get("position_int", []),
75
  }
76
  assert isinstance(d["positions"], list)
77
  assert len(d["positions"]) == 0 or (isinstance(d["positions"][0], list) and len(d["positions"][0]) == 5)
api/apps/sdk/doc.py CHANGED
@@ -846,7 +846,7 @@ def list_chunks(tenant_id, dataset_id, document_id):
846
  "question_kwd": sres.field[id].get("question_kwd", []),
847
  "img_id": sres.field[id].get("img_id", ""),
848
  "available_int": sres.field[id].get("available_int", 1),
849
- "positions": sres.field[id].get("position_int", "").split("\t"),
850
  }
851
  if len(d["positions"]) % 5 == 0:
852
  poss = []
 
846
  "question_kwd": sres.field[id].get("question_kwd", []),
847
  "img_id": sres.field[id].get("img_id", ""),
848
  "available_int": sres.field[id].get("available_int", 1),
849
+ "positions": sres.field[id].get("position_int", []),
850
  }
851
  if len(d["positions"]) % 5 == 0:
852
  poss = []
conf/infinity_mapping.json CHANGED
@@ -16,9 +16,9 @@
16
  "content_with_weight": {"type": "varchar", "default": ""},
17
  "content_ltks": {"type": "varchar", "default": ""},
18
  "content_sm_ltks": {"type": "varchar", "default": ""},
19
- "page_num_list": {"type": "varchar", "default": ""},
20
- "top_list": {"type": "varchar", "default": ""},
21
- "position_list": {"type": "varchar", "default": ""},
22
  "weight_int": {"type": "integer", "default": 0},
23
  "weight_flt": {"type": "float", "default": 0.0},
24
  "rank_int": {"type": "integer", "default": 0},
 
16
  "content_with_weight": {"type": "varchar", "default": ""},
17
  "content_ltks": {"type": "varchar", "default": ""},
18
  "content_sm_ltks": {"type": "varchar", "default": ""},
19
+ "page_num_int": {"type": "varchar", "default": ""},
20
+ "top_int": {"type": "varchar", "default": ""},
21
+ "position_int": {"type": "varchar", "default": ""},
22
  "weight_int": {"type": "integer", "default": 0},
23
  "weight_flt": {"type": "float", "default": 0.0},
24
  "rank_int": {"type": "integer", "default": 0},
graphrag/search.py CHANGED
@@ -58,7 +58,7 @@ class KGSearch(Dealer):
58
  matchDense = self.get_vector(qst, emb_mdl, 1024, req.get("similarity", 0.1))
59
  q_vec = matchDense.embedding_data
60
  src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
61
- "doc_id", f"q_{len(q_vec)}_vec", "position_list", "name_kwd",
62
  "available_int", "content_with_weight",
63
  "weight_int", "weight_flt"
64
  ])
 
58
  matchDense = self.get_vector(qst, emb_mdl, 1024, req.get("similarity", 0.1))
59
  q_vec = matchDense.embedding_data
60
  src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
61
+ "doc_id", f"q_{len(q_vec)}_vec", "position_int", "name_kwd",
62
  "available_int", "content_with_weight",
63
  "weight_int", "weight_flt"
64
  ])
rag/app/presentation.py CHANGED
@@ -20,7 +20,6 @@ from rag.nlp import tokenize, is_english
20
  from rag.nlp import rag_tokenizer
21
  from deepdoc.parser import PdfParser, PptParser, PlainParser
22
  from PyPDF2 import PdfReader as pdf2_read
23
- import json
24
 
25
 
26
  class Ppt(PptParser):
@@ -109,9 +108,9 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
109
  d = copy.deepcopy(doc)
110
  pn += from_page
111
  d["image"] = img
112
- d["page_num_list"] = json.dumps([pn + 1])
113
- d["top_list"] = json.dumps([0])
114
- d["position_list"] = json.dumps([(pn + 1, 0, img.size[0], 0, img.size[1])])
115
  tokenize(d, txt, eng)
116
  res.append(d)
117
  return res
@@ -125,10 +124,9 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
125
  pn += from_page
126
  if img:
127
  d["image"] = img
128
- d["page_num_list"] = json.dumps([pn + 1])
129
- d["top_list"] = json.dumps([0])
130
- d["position_list"] = json.dumps([
131
- (pn + 1, 0, img.size[0] if img else 0, 0, img.size[1] if img else 0)])
132
  tokenize(d, txt, eng)
133
  res.append(d)
134
  return res
 
20
  from rag.nlp import rag_tokenizer
21
  from deepdoc.parser import PdfParser, PptParser, PlainParser
22
  from PyPDF2 import PdfReader as pdf2_read
 
23
 
24
 
25
  class Ppt(PptParser):
 
108
  d = copy.deepcopy(doc)
109
  pn += from_page
110
  d["image"] = img
111
+ d["page_num_int"] = [pn + 1]
112
+ d["top_int"] = [0]
113
+ d["position_int"] = [(pn + 1, 0, img.size[0], 0, img.size[1])]
114
  tokenize(d, txt, eng)
115
  res.append(d)
116
  return res
 
124
  pn += from_page
125
  if img:
126
  d["image"] = img
127
+ d["page_num_int"] = [pn + 1]
128
+ d["top_int"] = [0]
129
+ d["position_int"] = [(pn + 1, 0, img.size[0] if img else 0, 0, img.size[1] if img else 0)]
 
130
  tokenize(d, txt, eng)
131
  res.append(d)
132
  return res
rag/nlp/__init__.py CHANGED
@@ -22,7 +22,6 @@ from rag.utils import num_tokens_from_string
22
  from . import rag_tokenizer
23
  import re
24
  import copy
25
- import json
26
  import roman_numbers as r
27
  from word2number import w2n
28
  from cn2an import cn2an
@@ -311,16 +310,16 @@ def tokenize_table(tbls, doc, eng, batch_size=10):
311
  def add_positions(d, poss):
312
  if not poss:
313
  return
314
- page_num_list = []
315
- position_list = []
316
- top_list = []
317
  for pn, left, right, top, bottom in poss:
318
- page_num_list.append(int(pn + 1))
319
- top_list.append(int(top))
320
- position_list.append((int(pn + 1), int(left), int(right), int(top), int(bottom)))
321
- d["page_num_list"] = json.dumps(page_num_list)
322
- d["position_list"] = json.dumps(position_list)
323
- d["top_list"] = json.dumps(top_list)
324
 
325
 
326
  def remove_contents_table(sections, eng=False):
 
22
  from . import rag_tokenizer
23
  import re
24
  import copy
 
25
  import roman_numbers as r
26
  from word2number import w2n
27
  from cn2an import cn2an
 
310
  def add_positions(d, poss):
311
  if not poss:
312
  return
313
+ page_num_int = []
314
+ position_int = []
315
+ top_int = []
316
  for pn, left, right, top, bottom in poss:
317
+ page_num_int.append(int(pn + 1))
318
+ top_int.append(int(top))
319
+ position_int.append((int(pn + 1), int(left), int(right), int(top), int(bottom)))
320
+ d["page_num_int"] = page_num_int
321
+ d["position_int"] = position_int
322
+ d["top_int"] = top_int
323
 
324
 
325
  def remove_contents_table(sections, eng=False):
rag/nlp/search.py CHANGED
@@ -16,7 +16,6 @@
16
 
17
  import logging
18
  import re
19
- import json
20
  from dataclasses import dataclass
21
 
22
  from rag.utils import rmSpace
@@ -74,7 +73,7 @@ class Dealer:
74
  offset, limit = pg * ps, (pg + 1) * ps
75
 
76
  src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
77
- "doc_id", "position_list", "knowledge_graph_kwd", "question_kwd", "question_tks",
78
  "available_int", "content_with_weight", "pagerank_fea"])
79
  kwds = set([])
80
 
@@ -82,6 +81,8 @@ class Dealer:
82
  q_vec = []
83
  if not qst:
84
  if req.get("sort"):
 
 
85
  orderBy.desc("create_timestamp_flt")
86
  res = self.dataStore.search(src, [], filters, [], orderBy, offset, limit, idx_names, kb_ids)
87
  total=self.dataStore.getTotal(res)
@@ -340,7 +341,7 @@ class Dealer:
340
  chunk = sres.field[id]
341
  dnm = chunk["docnm_kwd"]
342
  did = chunk["doc_id"]
343
- position_list = chunk.get("position_list", "[]")
344
  d = {
345
  "chunk_id": id,
346
  "content_ltks": chunk["content_ltks"],
@@ -354,7 +355,7 @@ class Dealer:
354
  "vector_similarity": vsim[i],
355
  "term_similarity": tsim[i],
356
  "vector": chunk.get(vector_column, zero_vector),
357
- "positions": json.loads(position_list)
358
  }
359
  if highlight and sres.highlight:
360
  if id in sres.highlight:
 
16
 
17
  import logging
18
  import re
 
19
  from dataclasses import dataclass
20
 
21
  from rag.utils import rmSpace
 
73
  offset, limit = pg * ps, (pg + 1) * ps
74
 
75
  src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
76
+ "doc_id", "page_num_int", "top_int", "create_timestamp_flt", "knowledge_graph_kwd", "question_kwd", "question_tks",
77
  "available_int", "content_with_weight", "pagerank_fea"])
78
  kwds = set([])
79
 
 
81
  q_vec = []
82
  if not qst:
83
  if req.get("sort"):
84
+ orderBy.asc("page_num_int")
85
+ orderBy.asc("top_int")
86
  orderBy.desc("create_timestamp_flt")
87
  res = self.dataStore.search(src, [], filters, [], orderBy, offset, limit, idx_names, kb_ids)
88
  total=self.dataStore.getTotal(res)
 
341
  chunk = sres.field[id]
342
  dnm = chunk["docnm_kwd"]
343
  did = chunk["doc_id"]
344
+ position_int = chunk.get("position_int", [])
345
  d = {
346
  "chunk_id": id,
347
  "content_ltks": chunk["content_ltks"],
 
355
  "vector_similarity": vsim[i],
356
  "term_similarity": tsim[i],
357
  "vector": chunk.get(vector_column, zero_vector),
358
+ "positions": position_int,
359
  }
360
  if highlight and sres.highlight:
361
  if id in sres.highlight:
rag/svr/task_executor.py CHANGED
@@ -211,9 +211,9 @@ def build_chunks(task, progress_callback):
211
  if not d.get("image"):
212
  _ = d.pop("image", None)
213
  d["img_id"] = ""
214
- d["page_num_list"] = json.dumps([])
215
- d["position_list"] = json.dumps([])
216
- d["top_list"] = json.dumps([])
217
  docs.append(d)
218
  continue
219
 
 
211
  if not d.get("image"):
212
  _ = d.pop("image", None)
213
  d["img_id"] = ""
214
+ d["page_num_int"] = []
215
+ d["position_int"] = []
216
+ d["top_int"] = []
217
  docs.append(d)
218
  continue
219
 
rag/utils/es_conn.py CHANGED
@@ -185,8 +185,14 @@ class ESConnection(DocStoreConnection):
185
  orders = list()
186
  for field, order in orderBy.fields:
187
  order = "asc" if order == 0 else "desc"
188
- orders.append({field: {"order": order, "unmapped_type": "float",
189
- "mode": "avg", "numeric_type": "double"}})
 
 
 
 
 
 
190
  s = s.sort(*orders)
191
 
192
  if limit > 0:
 
185
  orders = list()
186
  for field, order in orderBy.fields:
187
  order = "asc" if order == 0 else "desc"
188
+ if field in ["page_num_int", "top_int"]:
189
+ order_info = {"order": order, "unmapped_type": "float",
190
+ "mode": "avg", "numeric_type": "double"}
191
+ elif field.endswith("_int") or field.endswith("_flt"):
192
+ order_info = {"order": order, "unmapped_type": "float"}
193
+ else:
194
+ order_info = {"order": order, "unmapped_type": "text"}
195
+ orders.append({field: order_info})
196
  s = s.sort(*orders)
197
 
198
  if limit > 0:
rag/utils/infinity_conn.py CHANGED
@@ -297,7 +297,7 @@ class InfinityConnection(DocStoreConnection):
297
  df_list.append(kb_res)
298
  self.connPool.release_conn(inf_conn)
299
  res = concat_dataframes(df_list, selectFields)
300
- logger.debug("INFINITY search tables: " + str(table_list))
301
  return res
302
 
303
  def get(
@@ -307,8 +307,10 @@ class InfinityConnection(DocStoreConnection):
307
  db_instance = inf_conn.get_database(self.dbName)
308
  df_list = list()
309
  assert isinstance(knowledgebaseIds, list)
 
310
  for knowledgebaseId in knowledgebaseIds:
311
  table_name = f"{indexName}_{knowledgebaseId}"
 
312
  table_instance = db_instance.get_table(table_name)
313
  kb_res = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_pl()
314
  if len(kb_res) != 0 and kb_res.shape[0] > 0:
@@ -316,6 +318,7 @@ class InfinityConnection(DocStoreConnection):
316
 
317
  self.connPool.release_conn(inf_conn)
318
  res = concat_dataframes(df_list, ["id"])
 
319
  res_fields = self.getFields(res, res.columns)
320
  return res_fields.get(chunkId, None)
321
 
@@ -349,15 +352,22 @@ class InfinityConnection(DocStoreConnection):
349
  for k, v in d.items():
350
  if k.endswith("_kwd") and isinstance(v, list):
351
  d[k] = " ".join(v)
352
- if k == 'kb_id':
353
  if isinstance(d[k], list):
354
  d[k] = d[k][0] # since d[k] is a list, but we need a str
 
 
 
 
 
 
 
355
  ids = ["'{}'".format(d["id"]) for d in documents]
356
  str_ids = ", ".join(ids)
357
  str_filter = f"id IN ({str_ids})"
358
  table_instance.delete(str_filter)
359
  # for doc in documents:
360
- # logger.info(f"insert position_list: {doc['position_list']}")
361
  # logger.info(f"InfinityConnection.insert {json.dumps(documents)}")
362
  table_instance.insert(documents)
363
  self.connPool.release_conn(inf_conn)
@@ -367,8 +377,8 @@ class InfinityConnection(DocStoreConnection):
367
  def update(
368
  self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str
369
  ) -> bool:
370
- # if 'position_list' in newValue:
371
- # logger.info(f"upsert position_list: {newValue['position_list']}")
372
  inf_conn = self.connPool.get_conn()
373
  db_instance = inf_conn.get_database(self.dbName)
374
  table_name = f"{indexName}_{knowledgebaseId}"
@@ -377,6 +387,16 @@ class InfinityConnection(DocStoreConnection):
377
  for k, v in newValue.items():
378
  if k.endswith("_kwd") and isinstance(v, list):
379
  newValue[k] = " ".join(v)
 
 
 
 
 
 
 
 
 
 
380
  table_instance.update(filter, newValue)
381
  self.connPool.release_conn(inf_conn)
382
  return True
@@ -423,9 +443,22 @@ class InfinityConnection(DocStoreConnection):
423
  v = res[fieldnm][i]
424
  if isinstance(v, Series):
425
  v = list(v)
426
- elif fieldnm == "important_kwd":
427
  assert isinstance(v, str)
428
  v = v.split()
 
 
 
 
 
 
 
 
 
 
 
 
 
429
  else:
430
  if not isinstance(v, str):
431
  v = str(v)
 
297
  df_list.append(kb_res)
298
  self.connPool.release_conn(inf_conn)
299
  res = concat_dataframes(df_list, selectFields)
300
+ logger.debug(f"INFINITY search tables: {str(table_list)}, result: {str(res)}")
301
  return res
302
 
303
  def get(
 
307
  db_instance = inf_conn.get_database(self.dbName)
308
  df_list = list()
309
  assert isinstance(knowledgebaseIds, list)
310
+ table_list = list()
311
  for knowledgebaseId in knowledgebaseIds:
312
  table_name = f"{indexName}_{knowledgebaseId}"
313
+ table_list.append(table_name)
314
  table_instance = db_instance.get_table(table_name)
315
  kb_res = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_pl()
316
  if len(kb_res) != 0 and kb_res.shape[0] > 0:
 
318
 
319
  self.connPool.release_conn(inf_conn)
320
  res = concat_dataframes(df_list, ["id"])
321
+ logger.debug(f"INFINITY get tables: {str(table_list)}, result: {str(res)}")
322
  res_fields = self.getFields(res, res.columns)
323
  return res_fields.get(chunkId, None)
324
 
 
352
  for k, v in d.items():
353
  if k.endswith("_kwd") and isinstance(v, list):
354
  d[k] = " ".join(v)
355
+ elif k == 'kb_id':
356
  if isinstance(d[k], list):
357
  d[k] = d[k][0] # since d[k] is a list, but we need a str
358
+ elif k == "position_int":
359
+ assert isinstance(v, list)
360
+ arr = [num for row in v for num in row]
361
+ d[k] = "_".join(f"{num:08x}" for num in arr)
362
+ elif k in ["page_num_int", "top_int", "position_int"]:
363
+ assert isinstance(v, list)
364
+ d[k] = "_".join(f"{num:08x}" for num in v)
365
  ids = ["'{}'".format(d["id"]) for d in documents]
366
  str_ids = ", ".join(ids)
367
  str_filter = f"id IN ({str_ids})"
368
  table_instance.delete(str_filter)
369
  # for doc in documents:
370
+ # logger.info(f"insert position_int: {doc['position_int']}")
371
  # logger.info(f"InfinityConnection.insert {json.dumps(documents)}")
372
  table_instance.insert(documents)
373
  self.connPool.release_conn(inf_conn)
 
377
  def update(
378
  self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str
379
  ) -> bool:
380
+ # if 'position_int' in newValue:
381
+ # logger.info(f"update position_int: {newValue['position_int']}")
382
  inf_conn = self.connPool.get_conn()
383
  db_instance = inf_conn.get_database(self.dbName)
384
  table_name = f"{indexName}_{knowledgebaseId}"
 
387
  for k, v in newValue.items():
388
  if k.endswith("_kwd") and isinstance(v, list):
389
  newValue[k] = " ".join(v)
390
+ elif k == 'kb_id':
391
+ if isinstance(newValue[k], list):
392
+ newValue[k] = newValue[k][0] # since d[k] is a list, but we need a str
393
+ elif k == "position_int":
394
+ assert isinstance(v, list)
395
+ arr = [num for row in v for num in row]
396
+ newValue[k] = "_".join(f"{num:08x}" for num in arr)
397
+ elif k in ["page_num_int", "top_int"]:
398
+ assert isinstance(v, list)
399
+ newValue[k] = "_".join(f"{num:08x}" for num in v)
400
  table_instance.update(filter, newValue)
401
  self.connPool.release_conn(inf_conn)
402
  return True
 
443
  v = res[fieldnm][i]
444
  if isinstance(v, Series):
445
  v = list(v)
446
+ elif fieldnm.endswith("_kwd"):
447
  assert isinstance(v, str)
448
  v = v.split()
449
+ elif fieldnm == "position_int":
450
+ assert isinstance(v, str)
451
+ if v:
452
+ arr = [int(hex_val, 16) for hex_val in v.split('_')]
453
+ v = [arr[i:i + 4] for i in range(0, len(arr), 4)]
454
+ else:
455
+ v = []
456
+ elif fieldnm in ["page_num_int", "top_int"]:
457
+ assert isinstance(v, str)
458
+ if v:
459
+ v = [int(hex_val, 16) for hex_val in v.split('_')]
460
+ else:
461
+ v = []
462
  else:
463
  if not isinstance(v, str):
464
  v = str(v)