Python General

To see what modules have been imported, use:

import sys

modulenames = set(sys.modules) & set(globals())
allmodules = [sys.modules[name] for name in modulenames]
allmodules

To see if a particular module is imported:

[i for i, x in enumerate(allmodules) if "'fastai'" in str(x)] != []
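A shorter check is to look the name up in `sys.modules` directly. Note this is not quite identical to the snippet above: it tells you whether the module has been imported anywhere in the process, not whether it is bound to a name in the current namespace (`"fastai"` here is just an example module name):

import sys

# True if fastai has been imported anywhere in this process
"fastai" in sys.modules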
To see memory stats (CPU/GPU/top variables):
import sys

import psutil
import torch
def print_cpu_memory(verbose=True):
    # Get the current memory usage
    memory_info = psutil.virtual_memory()

    # Extract the memory information
    total_memory = memory_info.total
    available_memory = memory_info.available
    used_memory = memory_info.used
    percent_memory = memory_info.percent

    # Convert bytes to megabytes
    total_memory_mb = total_memory / 1024**2
    available_memory_mb = available_memory / 1024**2
    used_memory_mb = used_memory / 1024**2

    if verbose:
        # Print the memory information
        print(f"Total CPU memory: {total_memory_mb:.2f} MB")
        print(f"Available CPU memory: {available_memory_mb:.2f} MB")
        print(f"Used CPU memory: {used_memory_mb:.2f} MB")
        print(f"Percentage of used CPU memory: {percent_memory}%")
    else:
        print(f"Percentage of used CPU memory: {percent_memory}%")
def print_gpu_memory():
    # Check if CUDA is available
    if torch.cuda.is_available():
        # Get the default CUDA device
        device = torch.cuda.current_device()

        # Get the total memory and currently allocated memory on the device
        total_memory = torch.cuda.get_device_properties(device).total_memory
        allocated_memory = torch.cuda.memory_allocated(device)

        # Convert bytes to megabytes
        total_memory_mb = total_memory / 1024**2
        allocated_memory_mb = allocated_memory / 1024**2

        # Print the memory information
        print(f"Total GPU memory: {total_memory_mb:.2f} MB")
        print(f"Allocated GPU memory: {allocated_memory_mb:.2f} MB")
    else:
        print("CUDA is not available")
def print_top_memory_variables(local_vars, var_number_to_print=5):
    """Prints the top local variables in terms of memory.

    Usage: `print_top_memory_variables(locals().copy())`; locals() can't be called inside the function itself.

    Args:
        local_vars (dict): pass `locals().copy()`
        var_number_to_print (int): number of top variables to print
    """
    # Record the size of each local variable
    memory = {}
    for var_name, var_value in local_vars.items():
        var_size = sys.getsizeof(var_value)
        memory[var_name] = var_size

    # Sort by size, largest first, and keep the top entries
    memory_sorted = sorted(memory.items(), key=lambda x: x[1], reverse=True)[:var_number_to_print]

    for var_name, var_size in memory_sorted:
        print(f"Variable: {var_name}, Size: {var_size} bytes")