Add a class to monitor performance of a block of code #5

Merged: 1 commit, Dec 28, 2023
Changes from all commits
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,3 +1,6 @@
# VS Code
.vscode/

local-models/
neurons/pretraining_model/
# Byte-compiled / optimized / DLL files
11 changes: 6 additions & 5 deletions requirements.txt
@@ -1,8 +1,9 @@
bittensor
torch
transformers==4.34.1
wandb
rich
matplotlib
numpy
pydantic==1.10
rich
safetensors
pydantic==1.10
torch
transformers==4.34.1
wandb
Empty file added tests/__init__.py
Empty file.
Empty file added tests/utils/__init__.py
Empty file.
33 changes: 33 additions & 0 deletions tests/utils/test_perf_monitor.py
@@ -0,0 +1,33 @@
import time
import unittest

from utils.perf_monitor import PerfMonitor


class TestPerfMonitor(unittest.TestCase):
    def test_perf_monitor(self):
        """Performs basic validation of the PerfMonitor and its summary_str."""

        tracker = PerfMonitor("TestOfTime")

        self.assertRegex(tracker.summary_str(), "TestOfTime performance: N=0")

        with tracker.sample():
            time.sleep(1)

        self.assertRegex(
            tracker.summary_str(),
            r"TestOfTime performance: N=1 \| Min=1\.[0-9]{2} s \| Max=1\.[0-9]{2} s \| Median=1\.[0-9]{2} s \| P90=1\.[0-9]{2} s",
        )

        with tracker.sample():
            time.sleep(4)

        self.assertRegex(
            tracker.summary_str(),
            r"TestOfTime performance: N=2 \| Min=1\.[0-9]{2} s \| Max=4\.[0-9]{2} s \| Median=2\.[0-9]{2} s \| P90=3\.[0-9]{2} s",
        )


if __name__ == "__main__":
    unittest.main()
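
Note: the empty tests/__init__.py and tests/utils/__init__.py added above make the tests importable as a package, so the suite should be runnable from the repository root with the standard unittest runner (the invocation below assumes utils/ sits at the repository root, as the paths in this diff suggest):

    python -m unittest tests.utils.test_perf_monitor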
Empty file added utils/__init__.py
Empty file.
69 changes: 69 additions & 0 deletions utils/perf_monitor.py
@@ -0,0 +1,69 @@
import time
import numpy as np


class PerfSample:
    """Context manager that records the duration of the block it wraps into its parent PerfMonitor."""

    def __init__(self, perf_tracker):
        self.perf_tracker = perf_tracker
        self.start_time = None

    def __enter__(self):
        self.start_time = time.monotonic_ns()
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        # Record the elapsed time even if the block raised; the exception still propagates.
        duration = time.monotonic_ns() - self.start_time
        self.perf_tracker.samples.append(duration)


class PerfMonitor:
    """Tracks the performance of a block of code by taking several samples via the context manager returned from sample().

    Example:
        tracker = PerfMonitor("MyOperation")
        for _ in range(10):
            with tracker.sample():
                ...  # Do something

        print(tracker.summary_str())
    """

    def __init__(self, name):
        self.name = name
        self.samples = []

    def sample(self) -> PerfSample:
        """Returns a context manager that will record the duration of the block it wraps."""
        return PerfSample(self)

    def summary_str(self) -> str:
        """Returns a string summarizing the performance of the tracked operation."""
        if not self.samples:
            return f"{self.name} performance: N=0"

        durations_ns = np.array(self.samples)

        return (
            f"{self.name} performance: N={len(durations_ns)} | "
            + f"Min={self._format_duration(np.min(durations_ns))} | "
            + f"Max={self._format_duration(np.max(durations_ns))} | "
            + f"Median={self._format_duration(np.median(durations_ns))} | "
            + f"P90={self._format_duration(np.percentile(durations_ns, 90))}"
        )

    def _format_duration(self, duration_ns):
        """Formats a duration in nanoseconds using the largest unit it fills."""
        units = [
            ("ns", 1),
            ("μs", 1_000),
            ("ms", 1_000_000),
            ("s", 1_000_000_000),
            ("min", 60 * 1_000_000_000),
        ]

        # Walk from the largest unit down and format with the first one that fits.
        for unit, divisor in reversed(units):
            if duration_ns >= divisor:
                return f"{duration_ns / divisor:.2f} {unit}"

        return f"{duration_ns:.2f} ns"
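
For readers skimming the diff, a minimal usage sketch of the new class (the tracker name, loop, and sleep below are illustrative, not part of this change; the printed line shows the expected shape of the output, not actual measurements):

    import time
    from utils.perf_monitor import PerfMonitor

    tracker = PerfMonitor("ForwardPass")
    for _ in range(10):
        with tracker.sample():
            time.sleep(0.05)  # stand-in for the block being measured
    print(tracker.summary_str())
    # e.g. ForwardPass performance: N=10 | Min=50.08 ms | Max=50.21 ms | Median=50.12 ms | P90=50.19 ms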