Minor error condition bug fix (huggingface#33781)
* Error condition bug fix

* Update error message

* Update src/transformers/models/qwen2_vl/modeling_qwen2_vl.py

Co-authored-by: Pavel Iakubovskii <qubvel@gmail.com>

* Making change in the rest of the repo

* Formatting

* Formatting with ruff

---------

Co-authored-by: Pavel Iakubovskii <qubvel@gmail.com>
2 people authored and BernardZach committed Dec 5, 2024
1 parent 94ec9cc commit c9bd851
Showing 48 changed files with 49 additions and 143 deletions.
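Why the wording changed: the XOR condition that guards these forward passes, (input_ids is None) ^ (inputs_embeds is not None), is true when neither or both of the two inputs are supplied, so the old message ("cannot specify both ... and must specify either one") read as two contradictory demands, while "exactly one" states the actual requirement. A minimal standalone sketch of the check (illustrative, not Transformers code):

    def validate_inputs(input_ids=None, inputs_embeds=None):
        # Truth table for (input_ids is None) ^ (inputs_embeds is not None):
        #   input_ids given, inputs_embeds None  -> False ^ False -> ok
        #   input_ids None,  inputs_embeds given -> True  ^ True  -> ok
        #   both None                            -> True  ^ False -> raises
        #   both given                           -> False ^ True  -> raises
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

    validate_inputs(input_ids=[1, 2, 3])  # ok: exactly one input supplied
    validate_inputs()                     # raises ValueError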
2 changes: 1 addition & 1 deletion examples/modular-transformers/modeling_dummy.py

@@ -882,7 +882,7 @@ def forward(
 
         if (input_ids is None) ^ (inputs_embeds is not None):
             raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
+                "You must specify exactly one of input_ids or inputs_embeds"
             )
 
         if self.gradient_checkpointing and self.training and use_cache:
2 changes: 1 addition & 1 deletion examples/modular-transformers/modeling_my_new_model2.py

@@ -759,7 +759,7 @@ def forward(
 
         if (input_ids is None) ^ (inputs_embeds is not None):
             raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
+                "You must specify exactly one of input_ids or inputs_embeds"
             )
 
         if self.gradient_checkpointing and self.training and use_cache:
4 changes: 1 addition & 3 deletions src/transformers/models/bloom/modeling_bloom.py

@@ -620,9 +620,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/chameleon/modeling_chameleon.py

@@ -1278,9 +1278,7 @@ def forward(
             use_cache = False
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if pixel_values is not None and inputs_embeds is not None:
             raise ValueError(
4 changes: 1 addition & 3 deletions src/transformers/models/codegen/modeling_codegen.py

@@ -459,9 +459,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
4 changes: 1 addition & 3 deletions src/transformers/models/cohere/modeling_cohere.py

@@ -842,9 +842,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/dbrx/modeling_dbrx.py

@@ -991,9 +991,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/falcon/modeling_falcon.py

@@ -964,9 +964,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
(changed file name not captured on this page)

@@ -650,9 +650,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):  # ^ is python for xor
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if inputs_embeds is None:
             inputs_embeds = self.embeddings(input_ids)
4 changes: 1 addition & 3 deletions src/transformers/models/gemma/modeling_gemma.py

@@ -754,9 +754,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/gemma/modular_gemma.py

@@ -667,9 +667,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/gemma2/modeling_gemma2.py

@@ -768,9 +768,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/gemma2/modular_gemma2.py

@@ -605,9 +605,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/gpt_neo/modeling_gpt_neo.py

@@ -674,9 +674,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
4 changes: 1 addition & 3 deletions src/transformers/models/gpt_neox/modeling_gpt_neox.py

@@ -876,9 +876,7 @@ def forward(
         use_cache = use_cache if use_cache is not None else self.config.use_cache
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
(changed file name not captured on this page)

@@ -603,9 +603,7 @@ def forward(
         use_cache = use_cache if use_cache is not None else self.config.use_cache
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if inputs_embeds is None:
             inputs_embeds = self.embed_in(input_ids)
4 changes: 1 addition & 3 deletions src/transformers/models/gptj/modeling_gptj.py

@@ -746,9 +746,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
4 changes: 1 addition & 3 deletions src/transformers/models/granite/modeling_granite.py

@@ -766,9 +766,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/granitemoe/modeling_granitemoe.py

@@ -997,9 +997,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/idefics/modeling_idefics.py

@@ -1172,9 +1172,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/jamba/modeling_jamba.py

@@ -1282,9 +1282,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/jetmoe/modeling_jetmoe.py

@@ -972,9 +972,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/llama/modeling_llama.py

@@ -878,9 +878,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/llava/modeling_llava.py

@@ -444,9 +444,7 @@ def forward(
         )
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if pixel_values is not None and inputs_embeds is not None:
             raise ValueError(
4 changes: 1 addition & 3 deletions src/transformers/models/llava_next/modeling_llava_next.py

@@ -786,9 +786,7 @@ def forward(
         )
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if pixel_values is not None and inputs_embeds is not None:
             raise ValueError(
(changed file name not captured on this page)

@@ -875,9 +875,7 @@ def forward(
         )
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if (pixel_values is not None or pixel_values_videos is not None) and inputs_embeds is not None:
             raise ValueError(
(changed file name not captured on this page)

@@ -380,9 +380,7 @@ def forward(
         )
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if (pixel_values is not None or pixel_values_videos is not None) and inputs_embeds is not None:
             raise ValueError(
(changed file name not captured on this page)

@@ -572,9 +572,7 @@ def forward(
         )
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if (pixel_values is not None or pixel_values_videos is not None) and inputs_embeds is not None:
             raise ValueError(
4 changes: 1 addition & 3 deletions src/transformers/models/mamba/modeling_mamba.py

@@ -590,9 +590,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):  # ^ is python for xor
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if inputs_embeds is None:
             inputs_embeds = self.embeddings(input_ids)
4 changes: 1 addition & 3 deletions src/transformers/models/mamba2/modeling_mamba2.py

@@ -862,9 +862,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):  # ^ is python for xor
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if inputs_embeds is None:
             inputs_embeds = self.embeddings(input_ids)
4 changes: 1 addition & 3 deletions src/transformers/models/mistral/modeling_mistral.py

@@ -749,9 +749,7 @@ def forward(
 
         # retrieve input_ids and inputs_embeds
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/mixtral/modeling_mixtral.py

@@ -959,9 +959,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training:
             if use_cache:
8 changes: 2 additions & 6 deletions src/transformers/models/mllama/modeling_mllama.py

@@ -1606,9 -1606,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(

@@ -2145,9 +2143,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if pixel_values is not None and inputs_embeds is not None:
             raise ValueError(
4 changes: 1 addition & 3 deletions src/transformers/models/nemotron/modeling_nemotron.py

@@ -771,9 +771,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
4 changes: 1 addition & 3 deletions src/transformers/models/olmo/modeling_olmo.py

@@ -799,9 +799,7 @@ def forward(
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
         if (input_ids is None) ^ (inputs_embeds is not None):
-            raise ValueError(
-                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
-            )
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
 
         if self.gradient_checkpointing and self.training and use_cache:
             logger.warning_once(
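A quick sanity check one might run after this change (not part of the commit; the tiny LlamaConfig values below are arbitrary assumptions, chosen only to keep the model small enough to instantiate):

    import pytest
    import torch
    from transformers import LlamaConfig, LlamaModel

    config = LlamaConfig(
        vocab_size=32, hidden_size=8, intermediate_size=16,
        num_hidden_layers=1, num_attention_heads=2, num_key_value_heads=2,
    )
    model = LlamaModel(config)

    # Neither input supplied -> the updated message is raised
    with pytest.raises(ValueError, match="exactly one of input_ids or inputs_embeds"):
        model()

    # Both inputs supplied -> same error
    ids = torch.ones(1, 4, dtype=torch.long)
    embeds = torch.zeros(1, 4, config.hidden_size)
    with pytest.raises(ValueError, match="exactly one of input_ids or inputs_embeds"):
        model(input_ids=ids, inputs_embeds=embeds)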
(diffs for the remaining changed files did not load on this page)
