Skip to content

Commit

Permalink
Greatly reduce training memory consumption
Browse files Browse the repository at this point in the history
  • Loading branch information
Corentin-Allaire committed Dec 7, 2023
1 parent 397cc2e commit 791498d
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def scoringBatch(batch: list[pd.DataFrame], Optimiser=0) -> tuple[int, int, floa
nb_part += 1
# Normalise the loss to the batch size
batch_loss = batch_loss / len(b_data[0])
loss += batch_loss
loss += batch_loss.item()
# Perform the gradient descent if an optimiser was specified
if Optimiser:
batch_loss.backward()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ def scoringBatch(batch: list[pd.DataFrame], Optimiser=0) -> tuple[int, int, floa
nb_part += 1
# Normalise the loss to the batch size
batch_loss = batch_loss / len(b_data[0])
loss += batch_loss
loss += batch_loss.item()
# Perform the gradient descent if an optimiser was specified
if Optimiser:
batch_loss.backward()
Expand Down

0 comments on commit 791498d

Please sign in to comment.