diff --git a/src/openllm/repo.py b/src/openllm/repo.py
index e3b7e16b9..be828dbdd 100644
--- a/src/openllm/repo.py
+++ b/src/openllm/repo.py
@@ -24,9 +24,7 @@ def cmd_list(verbose: bool = False):
     if verbose:
         VERBOSE_LEVEL.set(20)
-    pyaml.pprint(
-        list_repo(), sort_dicts=False, sort_keys=False
-    )
+    pyaml.pprint(list_repo(), sort_dicts=False, sort_keys=False)
 
 
 @app.command(name='remove', help='remove given repo')
@@ -106,17 +104,11 @@ def default():
     return info.path
 
 
-def list_repo(repo_name: typing.Optional[str]=None) -> typing.List[RepoInfo]:
+def list_repo(repo_name: typing.Optional[str] = None) -> typing.List[RepoInfo]:
     if TEST_REPO:
         return [
             RepoInfo(
-                name='default',
-                url='',
-                server='test',
-                owner='test',
-                repo='test',
-                branch='main',
-                path=Path(TEST_REPO),
+                name='default', url='', server='test', owner='test', repo='test', branch='main', path=Path(TEST_REPO)
             )
         ]
     config = load_config()
@@ -163,7 +155,7 @@ def ensure_repo_updated():
                 'The repo cache is never updated, do you want to update it to fetch the latest model list?'
             ).ask()
             if choice:
-                update()
+                cmd_update()
                 return
         else:
             output(
@@ -178,7 +170,7 @@ def ensure_repo_updated():
                 'The repo cache is outdated, do you want to update it to fetch the latest model list?'
            ).ask()
            if choice:
-                update()
+                cmd_update()
         else:
             output(
                 'The repo cache is outdated, please run `openllm repo update` to fetch the latest model list',