fix ChatGLM lm_head #494

Former-commit-id: bf0048abdaeb2b9592d38ac991704ad014370b47
This commit is contained in:
hiyouga
2023-08-14 14:14:48 +08:00
parent 0bfeed3a7e
commit bceaba551d
3 changed files with 12 additions and 8 deletions

View File

@@ -45,7 +45,7 @@ def create_eval_tab(top_elems: Dict[str, "Component"], runner: "Runner") -> Dict
with gr.Box():
output_box = gr.Markdown()
- input_list = [
+ input_components = [
top_elems["lang"],
top_elems["model_name"],
top_elems["checkpoints"],
@@ -62,13 +62,13 @@ def create_eval_tab(top_elems: Dict[str, "Component"], runner: "Runner") -> Dict
predict
]
- output_list = [
+ output_components = [
output_box,
process_bar
]
- cmd_preview_btn.click(runner.preview_eval, input_list, output_list)
- start_btn.click(runner.run_eval, input_list, output_list)
+ cmd_preview_btn.click(runner.preview_eval, input_components, output_components)
+ start_btn.click(runner.run_eval, input_components, output_components)
stop_btn.click(runner.set_abort, queue=False)
return dict(