Error while importing the model

#1
by Aggshourya - opened

in <cell line: 0>()
2 import torch
3 path="dle666/R-CoT-8B"
----> 4 model = AutoModel.from_pretrained(path, torch_dtype=torch.bfloat16, low_cpu_mem_usage=True, trust_remote_code=True).eval().cuda()
5 tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True, use_fast=False)

7 frames
~/.cache/huggingface/modules/transformers_modules/dle666/R-CoT-8B/77557a107f5600c8fde22713889d58f95776a3f4/configuration_internvl_chat.py in __init__(self, vision_config, llm_config, use_backbone_lora, use_llm_lora, select_layer, force_image_size, downsample_ratio, template, dynamic_image_size, use_thumbnail, ps_version, min_dynamic_patch, max_dynamic_patch, **kwargs)
48
49 self.vision_config = InternVisionConfig(**vision_config)
---> 50 if llm_config['architectures'][0] == 'LlamaForCausalLM':
51 self.llm_config = LlamaConfig(**llm_config)
52 elif llm_config['architectures'][0] == 'InternLM2ForCausalLM':

KeyError: 'architectures'

Sign up or log in to comment