store all process_updates result files in a MapUpdate-based directory

This commit is contained in:
Laura Klünder 2024-09-17 01:42:17 +02:00
parent b643494a0f
commit 71131cc5c5
6 changed files with 32 additions and 24 deletions

View file

@@ -179,6 +179,9 @@ class MapUpdate(models.Model):
from c3nav.mapdata.utils.cache.changes import changed_geometries
changed_geometries.reset()
update_cache_key = MapUpdate.build_cache_key(*new_updates[-1].to_tuple)
(settings.CACHE_ROOT / update_cache_key).mkdir()
logger.info('Recalculating altitude areas...')
from c3nav.mapdata.models import AltitudeArea
@@ -205,7 +208,7 @@ class MapUpdate(models.Model):
logger.info('Rebuilding level render data...')
from c3nav.mapdata.render.renderdata import LevelRenderData
LevelRenderData.rebuild()
LevelRenderData.rebuild(update_cache_key)
else:
logger.info('No geometries affected.')

View file

@@ -55,7 +55,7 @@ class LevelRenderData:
darken_area: MultiPolygon | None = None
@staticmethod
def rebuild():
def rebuild(update_cache_key):
# Levels are automatically sorted by base_altitude, ascending
levels = tuple(Level.objects.prefetch_related('altitudeareas', 'buildings', 'doors', 'spaces',
'spaces__holes', 'spaces__areas', 'spaces__columns',
@@ -352,19 +352,19 @@ class LevelRenderData:
package.add_level(render_level.pk, theme, map_history, access_restriction_affected)
render_data.save(render_level.pk, theme)
render_data.save(update_cache_key, render_level.pk, theme)
package.save_all()
package.save_all(update_cache_key)
cached = LocalContext()
@staticmethod
def _level_filename(level_pk, theme_pk):
def _level_filename(update_cache_key, level_pk, theme_pk):
if theme_pk is None:
name = 'render_data_level_%d.pickle' % level_pk
else:
name = 'render_data_level_%d_theme_%d.pickle' % (level_pk, theme_pk)
return settings.CACHE_ROOT / name
return settings.CACHE_ROOT / update_cache_key / name
@classmethod
def get(cls, level, theme):
@@ -382,10 +382,10 @@ class LevelRenderData:
if result is not None:
return result
result = pickle.load(open(cls._level_filename(level_pk, theme_pk), 'rb'))
result = pickle.load(open(cls._level_filename(cache_key, level_pk, theme_pk), 'rb'))
cls.cached.data[key] = result
return result
def save(self, level_pk, theme_pk):
return pickle.dump(self, open(self._level_filename(level_pk, theme_pk), 'wb'))
def save(self, update_cache_key, level_pk, theme_pk):
return pickle.dump(self, open(self._level_filename(update_cache_key, level_pk, theme_pk), 'wb'))

View file

@@ -30,13 +30,17 @@ class CachePackage:
if theme_id not in self.theme_ids:
self.theme_ids.append(theme_id)
def save(self, filename=None, compression=None):
@staticmethod
def get_filename(update_cache_key, compression=None):
from django.conf import settings
if compression is not None:
return settings.CACHE_ROOT / update_cache_key / f'package.tar.{compression}'
else:
return settings.CACHE_ROOT / update_cache_key / 'package.tar'
def save(self, update_cache_key, filename=None, compression=None):
if filename is None:
from django.conf import settings
if compression is not None:
filename = settings.CACHE_ROOT / f'package.tar.{compression}'
else:
filename = settings.CACHE_ROOT / 'package.tar'
filename = self.get_filename(update_cache_key, compression=compression)
filemode = 'w'
fileobj = None
@@ -76,9 +80,9 @@ class CachePackage:
obj.write(data)
self._add_bytesio(f, filename, data)
def save_all(self, filename=None):
def save_all(self, update_cache_key, filename=None):
for compression in (None, 'gz', 'xz', 'zst'):
self.save(filename, compression)
self.save(update_cache_key, filename, compression)
@classmethod
def read(cls, f: BinaryIO) -> Self:
@@ -120,10 +124,11 @@ class CachePackage:
return cls(bounds, levels)
@classmethod
def open(cls, package: Optional[str | os.PathLike] = None) -> Self:
def open(cls, update_cache_key=None, package: Optional[str | os.PathLike] = None) -> Self:
if package is None:
from django.conf import settings
package = settings.CACHE_ROOT / 'package.tar'
if update_cache_key is None:
raise ValueError
package = cls.get_filename(update_cache_key)
elif not hasattr(package, 'open'):
package = Path(package)
return cls.read(package.open('rb'))
@@ -139,7 +144,7 @@ class CachePackage:
cls.cached.data = None
if cls.cached.data is None:
cls.cached.data = cls.open()
cls.cached.data = cls.open(update_cache_key=cache_key)
return cls.cached.data

View file

@@ -458,7 +458,7 @@ def get_cache_package(request, filetype):
enforce_tile_secret_auth(request)
filename = 'package.' + filetype
cache_package = settings.CACHE_ROOT / filename
cache_package = CachePackage.get_filename(MapUpdate.current_processed_cache_key())
try:
size = cache_package.stat().st_size
f = cache_package.open('rb')

View file

@@ -156,7 +156,7 @@ class Locator:
@classmethod
def build_filename(cls, update):
return settings.CACHE_ROOT / ('locator_%s.pickle' % MapUpdate.build_cache_key(*update))
return settings.CACHE_ROOT / MapUpdate.build_cache_key(*update) / 'locator.pickle'
@classmethod
def load_nocache(cls, update):

View file

@@ -275,7 +275,7 @@ class Router:
@classmethod
def build_filename(cls, update):
return settings.CACHE_ROOT / ('router_%s.pickle' % MapUpdate.build_cache_key(*update))
return settings.CACHE_ROOT / MapUpdate.build_cache_key(*update) / 'router.pickle'
@classmethod
def load_nocache(cls, update):