[inference] fix stop token for object detection (#6624)
* fix stop token
* update minicpm data pipeline
* fix npu qlora examples

Former-commit-id: 844919fadaa8a61dfae47020971ea80730b2346f
This commit is contained in:
@@ -133,7 +133,7 @@ class HuggingfaceEngine(BaseEngine):
|
||||
if repetition_penalty is not None
|
||||
else generating_args["repetition_penalty"],
|
||||
length_penalty=length_penalty if length_penalty is not None else generating_args["length_penalty"],
|
||||
eos_token_id=[tokenizer.eos_token_id] + tokenizer.additional_special_tokens_ids,
|
||||
eos_token_id=template.get_stop_token_ids(tokenizer),
|
||||
pad_token_id=tokenizer.pad_token_id,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -168,7 +168,7 @@ class VllmEngine(BaseEngine):
|
||||
top_p=(top_p if top_p is not None else self.generating_args["top_p"]) or 1.0, # top_p must > 0
|
||||
top_k=top_k if top_k is not None else self.generating_args["top_k"],
|
||||
stop=stop,
|
||||
stop_token_ids=[self.tokenizer.eos_token_id] + self.tokenizer.additional_special_tokens_ids,
|
||||
stop_token_ids=self.template.get_stop_token_ids(self.tokenizer),
|
||||
max_tokens=max_tokens,
|
||||
skip_special_tokens=self.generating_args["skip_special_tokens"],
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user