commit a6f800b741
parent a003d1fa1e
Author: hiyouga
Date:   2023-10-15 18:28:45 +08:00

    Former-commit-id: 5dbc9b355e85b203cb43ff72589374f0e04be391

9 changed files with 40 additions and 57 deletions


@@ -17,10 +17,7 @@ def create_infer_tab(engine: "Engine") -> Dict[str, "Component"]:
         unload_btn = gr.Button()
 
     info_box = gr.Textbox(show_label=False, interactive=False)
-
-    elem_dict.update(dict(
-        info_box=info_box, load_btn=load_btn, unload_btn=unload_btn
-    ))
+    elem_dict.update(dict(load_btn=load_btn, unload_btn=unload_btn, info_box=info_box))
 
     chat_box, chatbot, history, chat_elems = create_chat_box(engine, visible=False)
     elem_dict.update(dict(chat_box=chat_box, **chat_elems))


@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Dict
 
 from llmtuner.extras.constants import METHODS, SUPPORTED_MODELS
 from llmtuner.extras.template import templates
-from llmtuner.webui.common import get_model_path, get_template, list_checkpoint, load_config, save_config
+from llmtuner.webui.common import get_model_path, get_template, list_checkpoint, save_config
 from llmtuner.webui.utils import can_quantize
 
 if TYPE_CHECKING:
@@ -12,7 +12,6 @@ if TYPE_CHECKING:
 
 def create_top() -> Dict[str, "Component"]:
     available_models = list(SUPPORTED_MODELS.keys()) + ["Custom"]
-    config = gr.State(value=load_config())
 
     with gr.Row():
         lang = gr.Dropdown(choices=["en", "zh"], scale=1)
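
Note on the change above: with the config gr.State removed, the user settings presumably have to be loaded and saved inside the llmtuner.webui.common helpers themselves rather than threaded through every callback. The sketch below illustrates one way that could look. The cache path, JSON layout, and path_dict key are assumptions for illustration, not the project's actual implementation, but the signatures match the ones used in the events of the next hunk, get_model_path(model_name) and save_config(lang, model_name, model_path).

# Sketch only: the cache location and JSON layout are illustrative assumptions.
import json
import os

CONFIG_PATH = os.path.join("cache", "user.config")  # hypothetical cache file


def load_config() -> dict:
    # Read the stored settings, falling back to an empty config on first run.
    try:
        with open(CONFIG_PATH, "r", encoding="utf-8") as f:
            return json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return {"lang": None, "last_model": None, "path_dict": {}}


def save_config(lang: str, model_name: str, model_path: str) -> None:
    # Persist the latest choices; wired directly to the model_path.change event.
    user_config = load_config()
    user_config["lang"] = lang or user_config["lang"]
    user_config["last_model"] = model_name
    user_config["path_dict"][model_name] = model_path
    os.makedirs(os.path.dirname(CONFIG_PATH), exist_ok=True)
    with open(CONFIG_PATH, "w", encoding="utf-8") as f:
        json.dump(user_config, f, ensure_ascii=False, indent=2)


def get_model_path(model_name: str) -> str:
    # Look up a previously saved path on demand instead of receiving a config gr.State.
    return load_config()["path_dict"].get(model_name, "")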
@@ -39,17 +38,17 @@ def create_top() -> Dict[str, "Component"]:
     model_name.change(
         list_checkpoint, [model_name, finetuning_type], [checkpoints], queue=False
     ).then(
-        get_model_path, [config, model_name], [model_path], queue=False
+        get_model_path, [model_name], [model_path], queue=False
     ).then(
         get_template, [model_name], [template], queue=False
     ) # do not save config since the below line will save
 
-    model_path.change(save_config, inputs=[config, lang, model_name, model_path])
+    model_path.change(save_config, inputs=[lang, model_name, model_path], queue=False)
 
     finetuning_type.change(
-        list_checkpoint, [model_name, finetuning_type], [checkpoints]
+        list_checkpoint, [model_name, finetuning_type], [checkpoints], queue=False
     ).then(
-        can_quantize, [finetuning_type], [quantization_bit]
+        can_quantize, [finetuning_type], [quantization_bit], queue=False
     )
 
     refresh_btn.click(
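
The remaining edits in this hunk pass queue=False to the chained events so that these lightweight lookups bypass the request queue instead of waiting behind long-running jobs. Below is a self-contained sketch of the same .change(...).then(...) chaining pattern in Gradio; the component choices and callback bodies are placeholders, not the project's real UI.

# Minimal Gradio sketch of chained events with queue=False; names are illustrative.
import gradio as gr


def fake_get_model_path(model_name: str) -> str:
    # Placeholder for the real get_model_path helper.
    return f"models/{model_name}"


def fake_get_template(model_name: str) -> str:
    # Placeholder for the real get_template helper.
    return "default"


with gr.Blocks() as demo:
    model_name = gr.Dropdown(choices=["demo-model-a", "demo-model-b"], label="Model name")
    model_path = gr.Textbox(label="Model path")
    template = gr.Dropdown(choices=["default", "vanilla"], label="Prompt template")

    # Each .then() step runs after the previous callback returns; queue=False lets
    # these quick lookups skip the queue instead of waiting behind long jobs.
    model_name.change(
        fake_get_model_path, [model_name], [model_path], queue=False
    ).then(
        fake_get_template, [model_name], [template], queue=False
    )

if __name__ == "__main__":
    demo.launch()

The .then() steps run sequentially, so a single dropdown change updates the path and template fields in a fixed order.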
@@ -57,7 +56,6 @@ def create_top() -> Dict[str, "Component"]:
     )
 
     return dict(
-        config=config,
         lang=lang,
         model_name=model_name,
         model_path=model_path,