Skip to content

Commit

Permalink
Cleanup: set shared.model_name only once
Browse files — browse the repository at this point in the history
  • Loading branch information
oobabooga committed Dec 8, 2023
1 parent 62d59a5 commit 2a335b8
Show file tree
Hide file tree
Showing 4 changed files with 3 additions and 5 deletions.
1 change: 0 additions & 1 deletion extensions/openai/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@ def _load_model(data):
setattr(shared.args, k, args[k])

shared.model, shared.tokenizer = load_model(model_name)
shared.model_name = model_name

# Update shared.settings with custom generation defaults
if settings:
Expand Down
3 changes: 1 addition & 2 deletions modules/evaluate.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,8 @@ def calculate_perplexity(models, input_dataset, stride, _max_length):
model_settings = get_model_metadata(model)
shared.settings.update({k: v for k, v in model_settings.items() if k in shared.settings}) # hijacking the interface defaults
update_model_parameters(model_settings) # hijacking the command-line arguments
shared.model_name = model
unload_model()
shared.model, shared.tokenizer = load_model(shared.model_name)
shared.model, shared.tokenizer = load_model(model)
except:
cumulative_log += f"Failed to load `{model}`. Moving on.\n\n"
yield cumulative_log
Expand Down
1 change: 1 addition & 0 deletions modules/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ def load_model(model_name, loader=None):
t0 = time.time()

shared.is_seq2seq = False
shared.model_name = model_name
load_func_map = {
'Transformers': huggingface_loader,
'AutoGPTQ': AutoGPTQ_loader,
Expand Down
3 changes: 1 addition & 2 deletions modules/ui_model_menu.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,10 +203,9 @@ def load_model_wrapper(selected_model, loader, autoload=False):
else:
try:
yield f"Loading `{selected_model}`..."
shared.model_name = selected_model
unload_model()
if selected_model != '':
shared.model, shared.tokenizer = load_model(shared.model_name, loader)
shared.model, shared.tokenizer = load_model(selected_model, loader)

if shared.model is not None:
output = f"Successfully loaded `{selected_model}`."
Expand Down

0 comments on commit 2a335b8

Please sign in to comment.