dyang39 committed
Commit e5e1c6f · verified · 1 Parent(s): ceff765

Upload configuration_internvl_chat.py with huggingface_hub

Files changed (1)
  1. configuration_internvl_chat.py +4 -5
configuration_internvl_chat.py CHANGED
@@ -6,12 +6,11 @@
 
 import copy
 
-from transformers import AutoConfig, LlamaConfig
+from transformers import AutoConfig, LlamaConfig, Qwen2Config
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
 
 from .configuration_intern_vit import InternVisionConfig
-from .configuration_internlm2 import InternLM2Config
 
 logger = logging.get_logger(__name__)
 
@@ -43,14 +42,14 @@ class InternVLChatConfig(PretrainedConfig):
             logger.info('vision_config is None. Initializing the InternVisionConfig with default values.')
 
         if llm_config is None:
-            llm_config = {'architectures': ['InternLM2ForCausalLM']}
+            llm_config = {'architectures': ['Qwen2ForCausalLM']}
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')
 
         self.vision_config = InternVisionConfig(**vision_config)
         if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
-        elif llm_config.get('architectures')[0] == 'InternLM2ForCausalLM':
-            self.llm_config = InternLM2Config(**llm_config)
+        elif llm_config.get('architectures')[0] == 'Qwen2ForCausalLM':
+            self.llm_config = Qwen2Config(**llm_config)
         else:
             raise ValueError('Unsupported architecture: {}'.format(llm_config.get('architectures')[0]))
         self.use_backbone_lora = use_backbone_lora
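
For reference, below is a minimal sketch of the architecture dispatch this commit switches from InternLM2 to Qwen2. build_llm_config is a hypothetical standalone helper, not part of the repo; it mirrors the updated branch in InternVLChatConfig.__init__ using only public transformers classes (LlamaConfig, Qwen2Config), so it runs without the repo's remote code.

# Hypothetical helper mirroring the post-commit dispatch in
# InternVLChatConfig.__init__; not part of the repository itself.
from transformers import LlamaConfig, Qwen2Config

def build_llm_config(llm_config=None):
    # New default after this commit: fall back to Qwen2ForCausalLM
    # (previously InternLM2ForCausalLM).
    if llm_config is None:
        llm_config = {'architectures': ['Qwen2ForCausalLM']}
    arch = llm_config.get('architectures')[0]
    if arch == 'LlamaForCausalLM':
        return LlamaConfig(**llm_config)
    elif arch == 'Qwen2ForCausalLM':
        return Qwen2Config(**llm_config)
    raise ValueError('Unsupported architecture: {}'.format(arch))

print(type(build_llm_config()).__name__)  # -> Qwen2Config

The practical effect: a checkpoint whose config.json lists Qwen2ForCausalLM under llm_config.architectures now parses into a Qwen2Config, while InternLM2-based configs fall through to the ValueError branch.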