hmellor HF Staff commited on
Commit
ad0bfce
·
verified ·
1 Parent(s): deac083

Make remote code compatible with vLLM x Transformers v5

Browse files

This config class cannot be instantiated on Transformers v5: the superclass `__init__` now performs validation, which fails when it runs before any of this class's attributes have been initialised.

This change should enable the current state of the remote code to work with vLLM with Transformers v5.

Files changed (1) hide show
  1. configuration_ernie4_5_vl.py +1 -2
configuration_ernie4_5_vl.py CHANGED
@@ -80,8 +80,6 @@ class DFNRopeVisionTransformerConfig(PretrainedConfig):
80
  vit_first_fwd_bsz (int): First forward batch size for ViT. Defaults to 128.
81
  vit_num_recompute_layers (int): Number of recomputed layers for ViT. Defaults to
82
  """
83
- super().__init__(**kwargs)
84
-
85
  self.depth = depth
86
  self.embed_dim = embed_dim
87
  self.hidden_size = hidden_size
@@ -97,6 +95,7 @@ class DFNRopeVisionTransformerConfig(PretrainedConfig):
97
  self.attn_sep = attn_sep
98
  self.vit_first_fwd_bsz = vit_first_fwd_bsz
99
  self.vit_num_recompute_layers = vit_num_recompute_layers
 
100
 
101
  def get(self, key, default=None):
102
  """get config value by key"""
 
80
  vit_first_fwd_bsz (int): First forward batch size for ViT. Defaults to 128.
81
  vit_num_recompute_layers (int): Number of recomputed layers for ViT. Defaults to
82
  """
 
 
83
  self.depth = depth
84
  self.embed_dim = embed_dim
85
  self.hidden_size = hidden_size
 
95
  self.attn_sep = attn_sep
96
  self.vit_first_fwd_bsz = vit_first_fwd_bsz
97
  self.vit_num_recompute_layers = vit_num_recompute_layers
98
+ super().__init__(**kwargs)
99
 
100
  def get(self, key, default=None):
101
  """get config value by key"""