fix huggingface_hub install and bark model loader
rsxdalv committed Oct 16, 2024
1 parent 1676db5 commit 0697b69
Showing 4 changed files with 20 additions and 7 deletions.
installer_scripts/js/initializeApp.js (2 additions, 0 deletions)
@@ -193,6 +193,8 @@ async function pip_install_all() {
   // reinstall hydra-core==1.3.2 because of fairseq
   pip_install("hydra-core==1.3.2", "hydra-core fix due to fairseq");
   pip_install("nvidia-ml-py", "nvidia-ml-py");
+  // huggingface_hub==0.25.2 for gradio
+  pip_install("huggingface_hub==0.25.2", "huggingface_hub fix");
   savePipPackagesVersion(newPipPackagesVersion);
   displayMessage("");
 }
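
A quick way to confirm the pin took effect once the installer has run; a minimal sketch, assuming huggingface_hub is installed into the same Python environment the installer targets (this check is not part of the commit):

# Sanity check for the pinned version (illustrative, not from this repository).
from importlib.metadata import version

print(version("huggingface_hub"))  # expected: 0.25.2 after the reinstall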
installer_scripts/versions.json (1 addition, 1 deletion)
@@ -1,6 +1,6 @@
 {
   "version": "0.2.0",
-  "pip_packages": 5,
+  "pip_packages": 6,
   "npm_packages": 4,
   "react_ui": 4
 }
tts_webui/bark/BarkModelManager.py (17 additions, 4 deletions)
@@ -1,11 +1,10 @@
-from bark.generation import preload_models, clean_models
-
-
 class BarkModelManager:
     def __init__(self):
         self.models_loaded = False
 
     def reload_models(self, config):
+        from bark.generation import preload_models
+
         self.models_loaded = True
         c = config["model"]
 
@@ -23,15 +22,29 @@ def _yes_or_no(x: bool):
         _print_prop("Coarse-to-Fine", c["fine_use_gpu"], c["fine_use_small"])
         _print_prop("Encodec", c["codec_use_gpu"], False)
 
-        preload_models(**c, force_reload=True)
+        # preload_models(**c, force_reload=True)
+        preload_models(
+            coarse_use_gpu=c["coarse_use_gpu"],
+            coarse_use_small=c["coarse_use_small"],
+            fine_use_gpu=c["fine_use_gpu"],
+            fine_use_small=c["fine_use_small"],
+            text_use_gpu=c["text_use_gpu"],
+            text_use_small=c["text_use_small"],
+            codec_use_gpu=c["codec_use_gpu"],
+            force_reload=True,
+        )
 
     def unload_models(self):
+        from bark.generation import clean_models
+
         print("Unloading Bark models...")
         self.models_loaded = False
         clean_models()
         print("Unloaded Bark models")
 
     def unload_model(self, model_key):
+        from bark.generation import clean_models
+
         print(f"Unloading Bark model {model_key}")
         clean_models(model_key=model_key)
         print(f"Unloaded Bark model {model_key}")
tts_webui/config/load_config.py (0 additions, 2 deletions)
@@ -10,7 +10,6 @@
"fine_use_gpu": True,
"fine_use_small": True,
"codec_use_gpu": True,
"load_models_on_startup": False,
},
"gradio_interface_options": {
"inline": False,
@@ -39,7 +38,6 @@
"file_directories": None,
"_frontend": True,
},
"load_models_on_startup": False,
"extensions": {
"disabled": [],
},
