merge some func
Former-commit-id: 3085107c44715e4b2ca96d73b20d90c172b95219

@@ -50,6 +50,7 @@ def preprocess_supervised_dataset(
     tokenizer: "PreTrainedTokenizer",
     template: "Template",
     data_args: "DataArguments",
+    processor: "AutoProcessor" = None,
 ) -> Dict[str, List[List[int]]]:
     # build inputs with format `<bos> X Y <eos>` and labels with format `<ignore> ... <ignore> Y <eos>`
     # for multiturn examples, we only mask the prompt part in each prompt-response pair.

@@ -88,7 +89,9 @@ def preprocess_supervised_dataset(
         model_inputs["input_ids"].append(input_ids)
         model_inputs["attention_mask"].append([1] * len(input_ids))
         model_inputs["labels"].append(labels)
 
+    if processor is not None and "images" in examples:
+        pixel_values = processor.image_processor(examples["images"][0], return_tensors="pt")["pixel_values"][0]
+        model_inputs["pixel_values"].append(pixel_values)
     return model_inputs
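
Taken together, the first two hunks make `preprocess_supervised_dataset` cover the multimodal case itself: `processor` defaults to `None`, and pixel values are attached only when the batch actually carries images. A minimal sketch of that pattern, dependency-free (`DummyProcessor`, `build_batch`, and the feature shapes are illustrative stand-ins, not repository code):

```python
# Minimal sketch of the merged code path; DummyProcessor stands in for a
# Hugging Face AutoProcessor, build_batch for the function's final steps.
from typing import Any, Dict, List, Optional


class DummyImageProcessor:
    """Stands in for processor.image_processor: turns images into feature arrays."""

    def __call__(self, images: Any, return_tensors: str = "pt") -> Dict[str, List[List[float]]]:
        # A real image processor returns a batched tensor; a nested list
        # keeps this sketch dependency-free.
        return {"pixel_values": [[0.0, 0.0, 0.0, 0.0]]}


class DummyProcessor:
    image_processor = DummyImageProcessor()


def build_batch(
    examples: Dict[str, List[Any]], processor: Optional[DummyProcessor] = None
) -> Dict[str, List[Any]]:
    model_inputs: Dict[str, List[Any]] = {"input_ids": [], "attention_mask": [], "labels": []}
    # ... the tokenization loop from the real function is elided ...
    if processor is not None and "images" in examples:
        # Same shape as the hunk: featurize the batch's images and attach
        # them under a dedicated key, only when images are present.
        pixel_values = processor.image_processor(examples["images"][0], return_tensors="pt")["pixel_values"][0]
        model_inputs["pixel_values"] = [pixel_values]
    return model_inputs


print(build_batch({"images": [["cat.png"]]}, processor=DummyProcessor()))  # has pixel_values
print(build_batch({"prompt": []}))  # text-only: no processor, no pixel_values key
```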

@@ -138,55 +141,6 @@ def preprocess_packed_supervised_dataset(
     return model_inputs
 
 
-def preprocess_multimodal_supervised_dataset(
-    examples: Dict[str, List[Any]],
-    processor: "AutoProcessor",
-    template: "Template",
-    data_args: "DataArguments",
-) -> Dict[str, List[List[int]]]:
-    # build inputs with format `<bos> X Y <eos>` and labels with format `<ignore> ... <ignore> Y <eos>`
-    # for multiturn examples, we only mask the prompt part in each prompt-response pair.
-    tokenizer = processor.tokenizer
-    model_inputs = {"input_ids": [], "attention_mask": [], "labels": [], "pixel_values": []}
-
-    for i in range(len(examples["prompt"])):
-        if len(examples["prompt"][i]) % 2 != 1 or len(examples["response"][i]) != 1:
-            continue
-
-        messages = examples["prompt"][i] + examples["response"][i]
-        input_ids, labels = [], []
-        for turn_idx, (source_ids, target_ids) in enumerate(
-            template.encode_multiturn(
-                tokenizer,
-                messages,
-                examples["system"][i],
-                examples["tools"][i],
-                data_args.cutoff_len,
-                data_args.reserved_label_len,
-            )
-        ):
-            if data_args.train_on_prompt:
-                source_mask = source_ids
-            elif turn_idx != 0 and template.efficient_eos:
-                source_mask = [tokenizer.eos_token_id] + [IGNORE_INDEX] * (len(source_ids) - 1)
-            else:
-                source_mask = [IGNORE_INDEX] * len(source_ids)
-
-            input_ids += source_ids + target_ids
-            labels += source_mask + target_ids
-
-        if template.efficient_eos:
-            input_ids += [tokenizer.eos_token_id]
-            labels += [tokenizer.eos_token_id]
-
-        model_inputs["input_ids"].append(input_ids)
-        model_inputs["attention_mask"].append([1] * len(input_ids))
-        model_inputs["labels"].append(labels)
-        pixel_values = processor.image_processor(examples["images"][0], return_tensors="pt")["pixel_values"][0]
-        model_inputs["pixel_values"].append(pixel_values)
-    return model_inputs
-
-
 def preprocess_unsupervised_dataset(
     examples: Dict[str, List[Any]],
     tokenizer: "PreTrainedTokenizer",
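
The 49 deleted lines show why the merge is safe: `preprocess_multimodal_supervised_dataset` repeated the text-only function almost verbatim, and only its last two statements (the `pixel_values` extraction) were image-specific. The logic both paths share is the label mask: prompt tokens remain in `input_ids` but are replaced by `IGNORE_INDEX` in `labels`, so the loss covers responses only. A toy sketch of that scheme, assuming the usual `-100` sentinel (`mask_prompts` is an illustrative helper standing in for the loop over `template.encode_multiturn`):

```python
# Prompt masking in miniature: inputs keep every token, labels hide the
# prompt so only response tokens contribute to the loss.
from typing import Iterable, List, Tuple

IGNORE_INDEX = -100  # sentinel ignored by the cross-entropy loss


def mask_prompts(
    turns: Iterable[Tuple[List[int], List[int]]], train_on_prompt: bool = False
) -> Tuple[List[int], List[int]]:
    input_ids: List[int] = []
    labels: List[int] = []
    for source_ids, target_ids in turns:
        # Prompt tokens stay in the inputs but are hidden from the loss,
        # unless train_on_prompt is set.
        source_mask = source_ids if train_on_prompt else [IGNORE_INDEX] * len(source_ids)
        input_ids += source_ids + target_ids
        labels += source_mask + target_ids
    return input_ids, labels


# Two turns of (prompt tokens, response tokens).
turns = [([1, 2, 3], [4, 5]), ([6], [7, 8])]
input_ids, labels = mask_prompts(turns)
assert input_ids == [1, 2, 3, 4, 5, 6, 7, 8]
assert labels == [-100, -100, -100, 4, 5, -100, 7, 8]
```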

@@ -307,15 +261,14 @@ def get_preprocess_and_print_func(
             preprocess_func = partial(
                 preprocess_packed_supervised_dataset, tokenizer=tokenizer, template=template, data_args=data_args
             )
-        elif processor is not None:
-            preprocess_func = partial(
-                preprocess_multimodal_supervised_dataset, processor=processor, template=template, data_args=data_args
-            )
         else:
             preprocess_func = partial(
-                preprocess_supervised_dataset, tokenizer=tokenizer, template=template, data_args=data_args
+                preprocess_supervised_dataset,
+                tokenizer=tokenizer,
+                template=template,
+                data_args=data_args,
+                processor=processor,
             )
 
         print_function = partial(print_supervised_dataset_example, tokenizer=tokenizer)
     elif stage == "rm":
         preprocess_func = partial(
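
With the multimodal branch gone, the `sft` dispatch needs only two cases, and the `processor` keyword is forwarded unconditionally; passing `processor=None` reproduces the old text-only behaviour. A self-contained sketch of that shape (all names here are stand-ins, not the repository's):

```python
# One partial covers both modes; the optional processor selects the behaviour.
from functools import partial
from typing import Any, Optional


def preprocess(examples: Any, tokenizer: Any, template: Any, data_args: Any,
               processor: Optional[Any] = None) -> str:
    return "multimodal" if processor is not None else "text-only"


def make_preprocess_func(tokenizer: Any, template: Any, data_args: Any, processor: Optional[Any]):
    # Single branch instead of two: processor rides along either way.
    return partial(
        preprocess,
        tokenizer=tokenizer,
        template=template,
        data_args=data_args,
        processor=processor,
    )


print(make_preprocess_func("tok", "tpl", "args", None)(examples={}))         # text-only
print(make_preprocess_func("tok", "tpl", "args", "processor")(examples={}))  # multimodal
```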