
Fix: utils error in mochi finetuning script (#218)
guptaaryan16 authored Jan 14, 2025
1 parent 9f0fdb5 commit f5f9cc0
Showing 2 changed files with 22 additions and 4 deletions.
4 changes: 0 additions & 4 deletions training/mochi-1/text_to_video_lora.py
@@ -40,10 +40,6 @@
 from dataset_simple import LatentEmbedDataset
 
-import sys
-
-
-sys.path.append("..")
 
 from utils import print_memory, reset_memory  # isort:skip
 
 
22 changes: 22 additions & 0 deletions training/mochi-1/utils.py
@@ -0,0 +1,22 @@
import gc
import inspect
from typing import Optional, Tuple, Union

import torch

from accelerate.logging import get_logger  # needed for the logger below; assumed source, not present in the committed hunk

logger = get_logger(__name__)


def reset_memory(device: Union[str, torch.device]) -> None:
    # Run garbage collection and clear CUDA caches and memory statistics so that
    # subsequent measurements cover only the code that follows.
    gc.collect()
    torch.cuda.empty_cache()
    torch.cuda.reset_peak_memory_stats(device)
    torch.cuda.reset_accumulated_memory_stats(device)


def print_memory(device: Union[str, torch.device]) -> None:
    # Report current allocated, peak allocated, and peak reserved CUDA memory on `device`.
    memory_allocated = torch.cuda.memory_allocated(device) / 1024**3
    max_memory_allocated = torch.cuda.max_memory_allocated(device) / 1024**3
    max_memory_reserved = torch.cuda.max_memory_reserved(device) / 1024**3
    print(f"{memory_allocated=:.3f} GB")
    print(f"{max_memory_allocated=:.3f} GB")
    print(f"{max_memory_reserved=:.3f} GB")

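For context, a minimal usage sketch of the two helpers added in utils.py. The harness below (demo_profiled_step and the dummy matmul) is a hypothetical stand-in, not code from this commit; with utils.py now living next to the training script, the plain `from utils import ...` works without any sys.path manipulation.

# Usage sketch (hypothetical harness): profile a block of GPU work with the
# helpers from training/mochi-1/utils.py, run from that same directory.
import torch

from utils import print_memory, reset_memory  # isort:skip


def demo_profiled_step() -> None:
    device = torch.device("cuda")

    # Drop cached allocations and reset peak/accumulated memory counters so the
    # numbers printed below cover only the work done after this call.
    reset_memory(device)

    # Stand-in for a training step (e.g. a forward pass of the Mochi transformer).
    x = torch.randn(2048, 2048, device=device)
    y = x @ x
    torch.cuda.synchronize()

    # Print current allocated, peak allocated, and peak reserved memory.
    print_memory(device)


if __name__ == "__main__":
    if torch.cuda.is_available():
        demo_profiled_step()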