Skip to content
Snippets Groups Projects
Unverified Commit e2342c21 authored by Sanyam Bhutani's avatar Sanyam Bhutani Committed by GitHub
Browse files

Append epoch rather than best val. loss to val_loss (#744)

parents d8b0eba7 2a94bfff
No related branches found
No related tags found
No related merge requests found
...@@ -288,7 +288,7 @@ def train(model, train_dataloader,eval_dataloader, tokenizer, optimizer, lr_sche ...@@ -288,7 +288,7 @@ def train(model, train_dataloader,eval_dataloader, tokenizer, optimizer, lr_sche
print(f"best eval loss on epoch {epoch+1} is {best_val_loss}") print(f"best eval loss on epoch {epoch+1} is {best_val_loss}")
else: else:
print(f"best eval loss on epoch {epoch+1} is {best_val_loss}") print(f"best eval loss on epoch {epoch+1} is {best_val_loss}")
val_loss.append(float(best_val_loss)) val_loss.append(float(eval_epoch_loss))
val_prep.append(float(eval_ppl)) val_prep.append(float(eval_ppl))
if train_config.enable_fsdp: if train_config.enable_fsdp:
if rank==0: if rank==0:
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment