fix compatibility issue for transformers 4.46+
configuration_internvl_chat.py CHANGED

@@ -47,12 +47,12 @@ class InternVLChatConfig(PretrainedConfig):
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')
 
         self.vision_config = InternVisionConfig(**vision_config)
-        if llm_config.get(
+        if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
-        elif llm_config.get(
+        elif llm_config.get('architectures')[0] == 'InternLM2ForCausalLM':
             self.llm_config = InternLM2Config(**llm_config)
         else:
-            raise ValueError('Unsupported architecture: {}'.format(llm_config.get(
+            raise ValueError('Unsupported architecture: {}'.format(llm_config.get('architectures')[0]))
         self.use_backbone_lora = use_backbone_lora
         self.use_llm_lora = use_llm_lora
         self.select_layer = select_layer
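For reference, a quick way to confirm the patched config path under transformers 4.46+. This is a minimal sketch, not part of the commit: the repo id below is an assumption, and any InternVL chat checkpoint that ships this configuration_internvl_chat.py should behave the same way.

from transformers import AutoConfig

# Assumption: an InternVL chat checkpoint that includes this config file;
# substitute the repo id you are actually using.
config = AutoConfig.from_pretrained(
    'OpenGVLab/InternVL2-8B',
    trust_remote_code=True,  # InternVLChatConfig lives in the repo, not in transformers
)

# With the change above, llm_config is dispatched on architectures[0], so this
# should print 'InternLM2Config' (or 'LlamaConfig' for Llama-based variants).
print(type(config.llm_config).__name__)

One design note: dict.get('architectures') returns None when the key is missing, and None[0] raises a TypeError, so the new branch still relies on the llm_config-is-None default initialized just above this hunk to populate that key.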