Crystalcareai committed on
Commit
e044dea
·
verified ·
1 Parent(s): 39bf6e4

Update modeling_quiet.py

Browse files
Files changed (1) hide show
  1. modeling_quiet.py +9 -1
modeling_quiet.py CHANGED
@@ -1454,10 +1454,11 @@ class QuietForCausalLM(QuietPreTrainedModel, GenerationMixin):
1454
  inputs_embeds: Optional[torch.FloatTensor] = None,
1455
  labels: Optional[torch.LongTensor] = None,
1456
  use_cache: Optional[bool] = None,
1457
- # output_router_logits: Optional[bool] = None,
1458
  output_attentions: Optional[bool] = None,
1459
  output_hidden_states: Optional[bool] = None,
1460
  return_dict: Optional[bool] = None,
 
 
1461
  temperature_last: Optional[float] = None,
1462
  dynamic_temperature: Optional[float] = None,
1463
  dynatemp_low: Optional[float] = None,
@@ -1483,6 +1484,13 @@ class QuietForCausalLM(QuietPreTrainedModel, GenerationMixin):
1483
  do_sample: Optional[bool] = None,
1484
  encoder_repetition_penalty: Optional[float] = None,
1485
  no_repeat_ngram_size: Optional[int] = None,
 
 
 
 
 
 
 
1486
  ) -> Union[Tuple, CausalLMOutputWithPast]:
1487
  r"""
1488
  Args:
 
1454
  inputs_embeds: Optional[torch.FloatTensor] = None,
1455
  labels: Optional[torch.LongTensor] = None,
1456
  use_cache: Optional[bool] = None,
 
1457
  output_attentions: Optional[bool] = None,
1458
  output_hidden_states: Optional[bool] = None,
1459
  return_dict: Optional[bool] = None,
1460
+ max_new_tokens: Optional[int] = None,
1461
+ temperature: Optional[float] = None,
1462
  temperature_last: Optional[float] = None,
1463
  dynamic_temperature: Optional[float] = None,
1464
  dynatemp_low: Optional[float] = None,
 
1484
  do_sample: Optional[bool] = None,
1485
  encoder_repetition_penalty: Optional[float] = None,
1486
  no_repeat_ngram_size: Optional[int] = None,
1487
+ sampler_priority: Optional[List[str]] = None,
1488
+ negative_prompt_ids: Optional[List[int]] = None,
1489
+ prompt_lookup_num_tokens: Optional[int] = None,
1490
+ epsilon_cutoff: Optional[float] = None,
1491
+ eta_cutoff: Optional[float] = None,
1492
+ suppress_tokens: Optional[List[int]] = None,
1493
+ synced_gpus: Optional[bool] = None,
1494
  ) -> Union[Tuple, CausalLMOutputWithPast]:
1495
  r"""
1496
  Args: