Skip to content
This repository has been archived by the owner on Oct 25, 2024. It is now read-only.

Commit

Permalink
Fix undefined `unwrapped_model` variable. (#841)
Browse files Browse the repository at this point in the history
  • Loading branch information
lkk12014402 authored Dec 1, 2023
1 parent 4212741 commit c916ee0
Showing 1 changed file with 3 additions and 0 deletions.
3 changes: 3 additions & 0 deletions intel_extension_for_transformers/llm/finetuning/finetuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -598,6 +598,7 @@ class Eval_Args:
self.logger.info(results)

elif finetune_args.do_lm_eval and finetune_args.task != "summarization":
unwrapped_model = unwrap_model(model)
unwrapped_model.eval()
from intel_extension_for_transformers.llm.evaluation.lm_eval import evaluate
with training_args.main_process_first(desc="lm_eval"):
Expand All @@ -615,6 +616,8 @@ class Eval_Args:
self.logger.info(results)

if finetune_args.task == "summarization":
unwrapped_model = unwrap_model(model)
unwrapped_model.eval()
from .eval_utils import compute_rouge_metric
gen_kwargs = {
"num_beams": data_args.num_beams,
Expand Down

0 comments on commit c916ee0

Please sign in to comment.