|
1 | 1 | from django.core.management import CommandError, BaseCommand
|
2 | 2 | from django.conf import settings
|
3 | 3 | from django.db import connection, transaction
|
4 |
| -from django.db.models import ManyToManyField |
| 4 | +from django.db.models import ManyToManyField, Max, Min, Q |
5 | 5 | from django.utils import timezone
|
6 | 6 | from django.contrib.contenttypes.models import ContentType
|
7 | 7 | from django.db.migrations.loader import MigrationLoader
|
8 | 8 |
|
9 | 9 | from dbcleanup import utils, models
|
10 | 10 |
|
11 | 11 | REQUIRED_TABLES = {'django_migrations'}
|
| 12 | +BATCH_SIZE = 5000 |
12 | 13 |
|
13 | 14 |
|
14 | 15 | class Command(BaseCommand):
|
@@ -139,21 +140,52 @@ def _clean_history(self, options):
|
139 | 140 | ct = ContentType.objects.get_by_natural_key(*model_tuple)
|
140 | 141 | # normalize model name to match against .delete() return labels (and for capitalized printing!)
|
141 | 142 | model = ct.model_class()._meta.label
|
142 |
| - q = ct.get_all_objects_for_this_type(**{f'{field}__lt': timezone.now() - timezone.timedelta(days=log_size)}) |
| 143 | + q = ct.get_all_objects_for_this_type() |
| 144 | + filtered = q.filter( |
| 145 | + **{f"{field}__lt": timezone.now() - timezone.timedelta(days=log_size)} |
| 146 | + ).aggregate(Min("id"), Max("id")) |
| 147 | +            min_id = filtered["id__min"] |
| 148 | +            max_id = filtered["id__max"] |
| 149 | +            rows_deleted = {} |
| 150 | +            # ensure `deleted` is bound even when no batch is processed |
| 151 | +            deleted = False |
143 | 152 |

144 |
- try: |
145 |
- deleted, rows_deleted = self._clean_history_intention(model, q, options) |
146 |
- except CascadeException as e: |
147 |
- _exit = 1 |
148 |
- self.stderr.write(f'{model} cleanup aborted as it would cascade to:\n') |
149 |
- self._clean_history_print(e.args[2].items(), err=True) |
150 |
- continue |
| 153 | +            # aggregate() yields None/None when nothing matches the cutoff; |
| 154 | +            # guard so we never do arithmetic on a None id |
| 155 | +            while min_id is not None: |
| 152 | + batch = q.filter( |
| 153 | +                    Q(id__lt=min_id + BATCH_SIZE), |
| 154 | + Q(id__gte=min_id), |
| 155 | + Q( |
| 156 | + **{ |
| 157 | + f"{field}__lt": timezone.now() |
| 158 | + - timezone.timedelta(days=log_size) |
| 159 | + } |
| 160 | + ), |
| 161 | + ) |
| 162 | + if batch: |
| 163 | + try: |
| 164 | + deleted, batch_rows_deleted = self._clean_history_intention( |
| 165 | + model, batch, options |
| 166 | + ) |
| 167 | +                        for k, v in batch_rows_deleted.items(): |
| 168 | +                            # accumulate per-label counts across batches; |
| 169 | +                            # .get(k, 0) also handles a stored 0 correctly |
| 170 | +                            rows_deleted[k] = rows_deleted.get(k, 0) + v |
151 | 173 |
|
| 174 | +                    except CascadeException as e: |
| 175 | +                        _exit = 1 |
| 176 | +                        self.stderr.write( |
| 177 | +                            f"{model} cleanup aborted as it would cascade to:\n" |
| 178 | +                        ) |
| 179 | +                        self._clean_history_print(e.args[2].items(), err=True) |
| 180 | +                        # abort batching for this model and skip its summary; |
| 181 | +                        # `continue` here would retry the same failing batch forever |
| 182 | +                        deleted = False |
| 183 | +                        break |
| 181 | + min_id += BATCH_SIZE |
| 182 | + if min_id > max_id: |
| 183 | + break |
152 | 184 | if deleted:
|
153 |
| - if options['force'] or options['interactive']: |
154 |
| - self.stdout.write(f'{model} cleanup deleted:\n') |
| 185 | + if options["force"] or options["interactive"]: |
| 186 | + self.stdout.write(f"{model} cleanup deleted:\n") |
155 | 187 | else:
|
156 |
| - self.stdout.write(f'{model} cleanup would delete:\n') |
| 188 | + self.stdout.write(f"{model} cleanup would delete:\n") |
157 | 189 | self._clean_history_print(rows_deleted.items())
|
158 | 190 | return _exit
|
159 | 191 |
|
|
0 commit comments