Commit 58d28ed

no limit for offload size

1 parent aab0e24

File tree: 1 file changed (+11, -11 lines)


comfy/model_management.py

Lines changed: 11 additions & 11 deletions
@@ -526,17 +526,17 @@ def model_unload(self, memory_to_free=None, unpatch_weights=True):
         logging.debug(f"offload_device: {self.model.offload_device}")
         available_memory = get_free_memory(self.model.offload_device)
         logging.debug(f"before unload, available_memory of offload device {self.model.offload_device}: {available_memory/(1024*1024*1024)} GB")
-        reserved_memory = 1024*1024*1024 # 1GB reserved memory for other usage
-        if available_memory < reserved_memory:
-            logging.warning(f"Not enough cpu memory to unload. Available: {available_memory/(1024*1024*1024)} GB, Reserved: {reserved_memory/(1024*1024*1024)} GB")
-            return False
-        else:
-            offload_memory = available_memory - reserved_memory
-
-            if offload_memory < memory_to_free:
-                memory_to_free = offload_memory
-                logging.info(f"Not enough cpu memory to unload. Available: {available_memory/(1024*1024*1024)} GB, Reserved: {reserved_memory/(1024*1024*1024)} GB, Offload: {offload_memory/(1024*1024*1024)} GB")
-                logging.info(f"Set memory_to_free to {memory_to_free/(1024*1024*1024)} GB")
+        # reserved_memory = 1024*1024*1024 # 1GB reserved memory for other usage
+        # if available_memory < reserved_memory:
+        #     logging.warning(f"Not enough cpu memory to unload. Available: {available_memory/(1024*1024*1024)} GB, Reserved: {reserved_memory/(1024*1024*1024)} GB")
+        #     return False
+        # else:
+        #     offload_memory = available_memory - reserved_memory
+        #
+        #     if offload_memory < memory_to_free:
+        #         memory_to_free = offload_memory
+        #         logging.info(f"Not enough cpu memory to unload. Available: {available_memory/(1024*1024*1024)} GB, Reserved: {reserved_memory/(1024*1024*1024)} GB, Offload: {offload_memory/(1024*1024*1024)} GB")
+        #         logging.info(f"Set memory_to_free to {memory_to_free/(1024*1024*1024)} GB")
         try:
             if memory_to_free is not None:
                 if memory_to_free < self.model.loaded_size():
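For context, the block commented out above capped how much could be offloaded based on free CPU memory, skipping the unload entirely when less than the 1 GB reserve was available; after this commit the requested amount passes through uncapped. Below is a minimal standalone sketch of the removed capping behavior. The helper name cap_offload_request and the use of psutil in place of comfy's get_free_memory() are illustrative assumptions, not part of the repository.

import logging
import psutil  # assumption: stands in for comfy.model_management.get_free_memory()

GB = 1024 * 1024 * 1024

def cap_offload_request(memory_to_free, reserved_memory=GB):
    """Sketch of the pre-58d28ed behavior: cap an unload request by free CPU RAM.

    Returns None when the unload should be skipped (less than the reserve is
    free), otherwise the possibly reduced number of bytes to free. After this
    commit the cap is gone and memory_to_free is used as requested.
    """
    available_memory = psutil.virtual_memory().available
    if available_memory < reserved_memory:
        logging.warning("Not enough cpu memory to unload. Available: %.2f GB, Reserved: %.2f GB",
                        available_memory / GB, reserved_memory / GB)
        return None
    offload_memory = available_memory - reserved_memory
    if offload_memory < memory_to_free:
        logging.info("Capping memory_to_free from %.2f GB to %.2f GB",
                     memory_to_free / GB, offload_memory / GB)
        return offload_memory
    return memory_to_free

With the cap removed, model_unload relies only on the comparison against self.model.loaded_size() in the try block that follows, regardless of how much CPU memory the offload device has free.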
