team-3/src/c3nav/mapdata/models/update.py

import logging
import os
import pickle
from contextlib import contextmanager, suppress
from sqlite3 import DatabaseError

from django.conf import settings
from django.core.cache import cache
from django.db import models, transaction
from django.utils.http import int_to_base36
from django.utils.timezone import make_naive
from django.utils.translation import ugettext_lazy as _

from c3nav.mapdata.tasks import process_map_updates


class MapUpdate(models.Model):
    """
    A map update. Created whenever mapdata is changed.
    """
    TYPES = (
        ('changeset', _('changeset applied')),
        ('direct_edit', _('direct edit')),
        ('control_panel', _('via control panel')),
        ('management', 'manage.py clearmapcache'),
    )

    datetime = models.DateTimeField(auto_now_add=True, db_index=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT)
    type = models.CharField(max_length=32, choices=TYPES)
    processed = models.BooleanField(default=False)
    geometries_changed = models.BooleanField()
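    # geometries_changed is left without a default: save() fills it in from the
    # currently accumulated changed_geometries if it is still None.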

    class Meta:
        verbose_name = _('Map update')
        verbose_name_plural = _('Map updates')
        default_related_name = 'mapupdates'
        get_latest_by = 'datetime'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.was_processed = self.processed
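
    # Update state is cached as a (pk, timestamp) tuple (see to_tuple below);
    # (0, 0) is used as a sentinel when no MapUpdate exists yet.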

    @classmethod
    def last_update(cls, force=False):
        if not force:
            last_update = cache.get('mapdata:last_update', None)
            if last_update is not None:
                return last_update
        try:
            with cls.lock():
                last_update = cls.objects.latest().to_tuple
                cache.set('mapdata:last_update', last_update, None)
        except cls.DoesNotExist:
            last_update = (0, 0)
            cache.set('mapdata:last_update', last_update, None)
        return last_update

    @classmethod
    def last_processed_update(cls, force=False):
        if not force:
            last_processed_update = cache.get('mapdata:last_processed_update', None)
            if last_processed_update is not None:
                return last_processed_update
        try:
            with cls.lock():
                last_processed_update = cls.objects.filter(processed=True).latest().to_tuple
                cache.set('mapdata:last_processed_update', last_processed_update, None)
        except cls.DoesNotExist:
            last_processed_update = (0, 0)
            cache.set('mapdata:last_processed_update', last_processed_update, None)
        return last_processed_update

    @property
    def to_tuple(self):
        return self.pk, int(make_naive(self.datetime).timestamp())

    @property
    def cache_key(self):
        return self.build_cache_key(self.pk, int(make_naive(self.datetime).timestamp()))

    @classmethod
    def current_cache_key(cls, request=None):
        return cls.build_cache_key(*cls.last_update())

    @classmethod
    def current_processed_cache_key(cls, request=None):
        return cls.build_cache_key(*cls.last_processed_update())

    @staticmethod
    def build_cache_key(pk, timestamp):
        return int_to_base36(pk) + '_' + int_to_base36(timestamp)
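
    # lock() takes a row lock on the earliest MapUpdate inside a transaction,
    # which serializes concurrent cache refreshes in last_update() and
    # last_processed_update().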

    @classmethod
    @contextmanager
    def lock(cls):
        with transaction.atomic():
            yield cls.objects.select_for_update().get(pk=cls.objects.earliest().pk)

    def _changed_geometries_filename(self):
        return os.path.join(settings.CACHE_ROOT, 'changed_geometries', 'update_%d.pickle' % self.pk)

    class ProcessUpdatesAlreadyRunning(Exception):
        pass
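
    # get_updates_to_process() guards against concurrent processing runs: with
    # redis available it uses a non-blocking redis lock, otherwise it relies on
    # select_for_update(nowait=True) on the unprocessed rows.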

    @classmethod
    @contextmanager
    def get_updates_to_process(cls):
        queryset = cls.objects.filter(processed=False)
        with transaction.atomic():
            if settings.HAS_REDIS:
                import redis
                lock_acquired = None
                lock = redis.Redis().lock('mapupdate:process_updates:lock')
                try:
                    lock_acquired = lock.acquire(blocking=False, blocking_timeout=1800)
                    if not lock_acquired:
                        raise cls.ProcessUpdatesAlreadyRunning
                    yield tuple(queryset)
                finally:
                    if lock_acquired:
                        lock.release()
            else:
                try:
                    yield tuple(queryset.select_for_update(nowait=True))
                except DatabaseError:
                    raise cls.ProcessUpdatesAlreadyRunning
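
    # process_updates() drives the actual map-data processing: if geometries
    # changed, it recalculates altitude areas, merges the per-update
    # changed-geometry pickles and rebuilds the level render data; it then
    # rebuilds router and locator and finally marks the updates as processed.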

    @classmethod
    def process_updates(cls):
        logger = logging.getLogger('c3nav')

        with cls.get_updates_to_process() as new_updates:
            if not new_updates:
                return ()

            if any(update.geometries_changed for update in new_updates):
                from c3nav.mapdata.utils.cache.changes import changed_geometries
                changed_geometries.reset()

                logger.info('Recalculating altitude areas...')
                from c3nav.mapdata.models import AltitudeArea
                AltitudeArea.recalculate()
                logger.info('%.3f m² of altitude areas affected.' % changed_geometries.area)

                last_processed_update = cls.last_processed_update(force=True)

                for new_update in new_updates:
                    logger.info('Applying changed geometries from MapUpdate #%(id)s (%(type)s)...' %
                                {'id': new_update.pk, 'type': new_update.type})
                    try:
                        new_changes = pickle.load(open(new_update._changed_geometries_filename(), 'rb'))
                    except FileNotFoundError:
                        logger.warning('changed_geometries pickle file not found.')
                    else:
                        logger.info('%.3f m² affected by this update.' % new_changes.area)
                        changed_geometries.combine(new_changes)

                logger.info('%.3f m² of geometries affected in total.' % changed_geometries.area)
                changed_geometries.save(last_processed_update, new_updates[-1].to_tuple)

                logger.info('Rebuilding level render data...')
                from c3nav.mapdata.render.renderdata import LevelRenderData
                LevelRenderData.rebuild()
            else:
                logger.info('No geometries affected.')

            logger.info('Rebuilding router...')
            from c3nav.routing.router import Router
            Router.rebuild(new_updates[-1].to_tuple)

            logger.info('Rebuilding locator...')
            from c3nav.routing.locator import Locator
            Locator.rebuild(new_updates[-1].to_tuple)

            for new_update in new_updates:
                new_update.processed = True
                new_update.save()

            transaction.on_commit(
                lambda: cache.set('mapdata:last_processed_update', new_updates[-1].to_tuple, None)
            )

        return new_updates
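
    # save() only supports two transitions: creating a new MapUpdate, or
    # flipping processed from False to True (done in process_updates());
    # anything else raises TypeError.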

    def save(self, **kwargs):
        new = self.pk is None
        if not new and (self.was_processed or not self.processed):
            raise TypeError

        from c3nav.mapdata.utils.cache.changes import changed_geometries

        if self.geometries_changed is None:
            self.geometries_changed = not changed_geometries.is_empty

        super().save(**kwargs)

        with suppress(FileExistsError):
            os.mkdir(os.path.dirname(self._changed_geometries_filename()))

        if self.geometries_changed:
            pickle.dump(changed_geometries, open(self._changed_geometries_filename(), 'wb'))

        if new:
            transaction.on_commit(
                lambda: cache.set('mapdata:last_update', self.to_tuple, None)
            )

            if settings.HAS_CELERY and settings.AUTO_PROCESS_UPDATES:
                transaction.on_commit(
                    lambda: process_map_updates.delay()
                )
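

# A minimal usage sketch (assuming the surrounding c3nav setup, e.g. a request
# with an authenticated user; not part of this module):
#
#     with transaction.atomic():
#         MapUpdate.objects.create(type='direct_edit', user=request.user)
#
# On commit, the 'mapdata:last_update' cache key is refreshed and, if celery
# and AUTO_PROCESS_UPDATES are enabled, process_map_updates is queued.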