maciej.karasek committed
Commit 136522f · 1 Parent(s): 556fe40

style correction

Files changed (1):
  1. src/axolotl/utils/models.py  +9 -3
src/axolotl/utils/models.py CHANGED

@@ -252,11 +252,16 @@ def load_model(
         )
     # Shouldn't be a problem most of the time. will obviously error if the model doesn't support this
     # when training starts
-    if hasattr(config, "max_seq_len") and config.max_seq_len and cfg.sequence_len > config.max_seq_len:
+    if (
+        hasattr(config, "max_seq_len")
+        and config.max_seq_len
+        and cfg.sequence_len > config.max_seq_len
+    ):
         config.max_seq_len = cfg.sequence_len
         logging.warning(f"increasing context length to {cfg.sequence_len}")
     elif (
-        hasattr(config, "max_sequence_length") and config.max_sequence_length
+        hasattr(config, "max_sequence_length")
+        and config.max_sequence_length
         and cfg.sequence_len > config.max_sequence_length
     ):
         config.max_sequence_length = cfg.sequence_len
@@ -289,7 +294,8 @@ def load_model(
     model.resize_token_embeddings(embeddings_len)

     if (
-        hasattr(model.config, "max_position_embeddings") and model.config.max_position_embeddings
+        hasattr(model.config, "max_position_embeddings")
+        and model.config.max_position_embeddings
         and cfg.sequence_len >= model.config.max_position_embeddings
     ):
         logging.warning(
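
For reference, a minimal runnable sketch of the logic these reformatted conditionals implement. The SimpleNamespace stand-ins for cfg and config below are illustrative only and are not axolotl's actual objects; only the first branch shown in the diff is exercised here.

import logging
from types import SimpleNamespace

# Hypothetical stand-ins for axolotl's training config and model config.
cfg = SimpleNamespace(sequence_len=4096)
config = SimpleNamespace(max_seq_len=2048)

# Same check as in the diff: each clause on its own line inside parentheses,
# which is the style correction this commit applies.
if (
    hasattr(config, "max_seq_len")
    and config.max_seq_len
    and cfg.sequence_len > config.max_seq_len
):
    config.max_seq_len = cfg.sequence_len
    logging.warning(f"increasing context length to {cfg.sequence_len}")
elif (
    hasattr(config, "max_sequence_length")
    and config.max_sequence_length
    and cfg.sequence_len > config.max_sequence_length
):
    config.max_sequence_length = cfg.sequence_len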