Skip to content

Commit

Permalink
Minor fix on the script (#106)
Browse files Browse the repository at this point in the history
  • Loading branch information
jinhongyii authored May 22, 2023
1 parent 911ea9b commit e1e2e0a
Show file tree
Hide file tree
Showing 5 changed files with 7 additions and 8 deletions.
2 changes: 1 addition & 1 deletion scripts/gh_deploy_site.sh
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/bin/bash
set -euxo pipefail

scripts/build_site.sh web/global_config.json
scripts/build_site.sh web/gh-page-config.json

git fetch
git checkout -B gh-pages origin/gh-pages
Expand Down
7 changes: 3 additions & 4 deletions scripts/local_deploy_site.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,27 +6,26 @@ MLC_LLM_HOME_SET="${MLC_LLM_HOME:-}"
if [ -z ${MLC_LLM_HOME_SET} ]; then
export MLC_LLM_HOME="${MLC_LLM_HOME:-mlc-llm}"
fi

scripts/build_site.sh web/local-config.json

echo "symlink parameter location to site.."

if [ -d "$MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params" ]; then
rm -rf site/_site/dist/vicuna-v1-7b-q4f32_0
mkdir -p site/_site/dist/vicuna-v1-7b-q4f32_0
ln -s $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params site/_site/dist/vicuna-v1-7b-q4f32_0/params
ln -s "$(cd $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/params && pwd)" site/_site/dist/vicuna-v1-7b-q4f32_0/params
cp -rf $MLC_LLM_HOME/dist/vicuna-v1-7b-q4f32_0/vicuna-v1-7b-q4f32_0-webgpu.wasm site/_site/dist/vicuna-v1-7b-q4f32_0/
fi
if [ -d "$MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params" ]; then
rm -rf site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0
mkdir -p site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0
ln -s $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params
ln -s "$(cd $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params && pwd)" site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/params
cp -rf $MLC_LLM_HOME/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/RedPajama-INCITE-Chat-3B-v1-q4f32_0-webgpu.wasm site/_site/dist/RedPajama-INCITE-Chat-3B-v1-q4f32_0/
fi
if [ -d "$MLC_LLM_HOME/dist/wizardlm-7b/params" ]; then
rm -rf site/_site/dist/wizardlm-7b
mkdir -p site/_site/dist/wizardlm-7b
ln -s $MLC_LLM_HOME/dist/wizardlm-7b/params site/_site/dist/wizardlm-7b/params
ln -s "$(cd $MLC_LLM_HOME/dist/wizardlm-7b/params && pwd)" site/_site/dist/wizardlm-7b/params
cp -rf $MLC_LLM_HOME/dist/wizardlm-7b/wizardlm-7b-webgpu.wasm site/_site/dist/wizardlm-7b/
fi

Expand Down
2 changes: 1 addition & 1 deletion site/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ If you have a Mac computer with Apple silicon, here are the instructions for you

## Chat Demo

The chat demo is based on [vicuna-7b-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) model. More model support are on the way.
The chat demo is based on the [vicuna-7b-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) model and the [RedPajama-INCITE-Chat-3B-v1](https://huggingface.co/togethercomputer/RedPajama-INCITE-Chat-3B-v1) model. More model support is on the way.

{% include llm_chat.html %}

Expand Down
2 changes: 1 addition & 1 deletion web/llm_chat.js
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ class LLMChatPipeline {
this.vm.getFunction("prefill")
);
this.decoding = this.tvm.detachFromCurrentScope(
this.vm.getFunction("decoding")
this.vm.getFunction("decode")
);
this.params = this.tvm.detachFromCurrentScope(
this.tvm.getParamsFromCache("param", cacheMetadata.ParamSize)
Expand Down

0 comments on commit e1e2e0a

Please sign in to comment.