reimplement dumpmap and checkmap

Laura Klünder 2016-09-24 15:36:14 +02:00
parent bca976af11
commit 6e65f8b8bd
9 changed files with 158 additions and 120 deletions

c3nav/mapdata/management/commands/checkmap.py

@@ -2,9 +2,12 @@ import os
import tempfile
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.management.base import BaseCommand, CommandError
from django.db import connections, router
from c3nav.mapdata.packageio.read import MapdataReader
from c3nav.mapdata.packageio.write import MapdataWriter
class Command(BaseCommand):
help = 'Check if there are errors in the map package files'
@@ -30,7 +33,17 @@ class Command(BaseCommand):
try:
call_command('migrate', database='tmpdb')
call_command('loadmap', yes=True)
call_command('dumpmap', prettify=options['prettify'], check_only=True)
reader = MapdataReader()
reader.read_packages()
reader.apply_to_db()
writer = MapdataWriter()
count = writer.prepare_write_packages(prettify=options['prettify'], diff=True)
if count:
raise CommandError('%s files affected.' % count)
else:
print('Everything ok!')
finally:
os.remove(tmp)
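
With this change, checkmap no longer shells out to dumpmap; it drives MapdataReader and MapdataWriter directly against the temporary database. A minimal sketch of triggering the same check from code (assuming a configured Django environment; the command itself sets up and removes the throwaway database as shown above):

from django.core.management import call_command

# Raises CommandError('<n> files affected.') if dumping the database
# back out would change any map package file.
call_command('checkmap')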

c3nav/mapdata/management/commands/dumpmap.py

@@ -1,23 +1,33 @@
from django.core.management.base import BaseCommand, CommandError
from ...packageio import write_packages
from ...packageio import MapdataWriter
class Command(BaseCommand):
help = 'Dump the map database into the map package files'
def add_arguments(self, parser):
parser.add_argument('--yes', '-y', action='store_const', const=True, default=False,
help='don\'t ask for confirmation')
parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
help='dont\'t prettify existing files')
help='don\'t prettify existing files')
parser.add_argument('--diff', action='store_const', const=True, default=False,
help='show changes as diff')
parser.add_argument('--check-only', action='store_const', const=True, default=False,
help='check if there are files to update')
def handle(self, *args, **options):
count = write_packages(prettify=options['prettify'], check_only=options['check_only'])
writer = MapdataWriter()
count = writer.prepare_write_packages(prettify=options['prettify'], diff=options['diff'])
if options['check_only']:
if count == 0:
print('No errors found!')
else:
raise CommandError('Found errors in %s file(s)' % count)
if count:
raise CommandError('Check resulted in files to update.')
print('Nothing to do.')
else:
print('%s file(s) affected' % count)
if not count:
print('Nothing to do.')
else:
if not options['yes'] and input('Confirm (y/N): ') != 'y':
raise CommandError('Aborted.')
writer.do_write_packages()
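
The rewrite splits dumping into a prepare step that computes and reports pending changes and a write step that applies them, with the confirmation prompt in between. The same two-phase API can be used directly; a minimal sketch, assuming a configured Django environment:

from c3nav.mapdata.packageio.write import MapdataWriter

writer = MapdataWriter()
# Step 1: diff the database against the package files on disk and
# queue the resulting writes and deletions.
count = writer.prepare_write_packages(prettify=True, diff=False)
if count:
    # Step 2: apply the queued writes and deletions.
    writer.do_write_packages()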

c3nav/mapdata/models/level.py

@@ -14,6 +14,9 @@ class Level(models.Model):
path_regex = r'^levels/'
def tofilename(self):
return 'levels/%s.json' % self.name
@classmethod
def fromfile(cls, data):
if 'altitude' not in data:

c3nav/mapdata/models/package.py

@@ -24,6 +24,20 @@ class Package(models.Model):
path_regex = r'^package.json$'
@property
def package(self):
return self
@property
def bounds(self):
if self.bottom is None:
return None
return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
@property
def public(self):
return self.name in settings.PUBLIC_PACKAGES
@classmethod
def fromfile(cls, data):
kwargs = {}
@@ -53,19 +67,9 @@
return kwargs
@property
def package(self):
return self
@property
def public(self):
return self.name in settings.PUBLIC_PACKAGES
@property
def bounds(self):
if self.bottom is None:
return None
return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
# noinspection PyMethodMayBeStatic
def tofilename(self):
return 'package.json'
def tofile(self):
data = OrderedDict()

c3nav/mapdata/models/source.py

@@ -28,6 +28,9 @@ class Source(models.Model):
def bounds(self):
return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
def tofilename(self):
return 'sources/%s.json' % self.name
@classmethod
def fromfile(cls, data):
kwargs = {}

c3nav/mapdata/packageio/__init__.py

@@ -1,2 +1,2 @@
from .read import MapdataReader # noqa
from .write import write_packages, write_package # noqa
from .write import MapdataWriter # noqa

c3nav/mapdata/packageio/const.py

@@ -0,0 +1,4 @@
from ..models import Feature, Level, Package, Source
ordered_models = (Package, Level, Source, Feature)
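
The tuple order is load-bearing: it is the foreign-key dependency order, so imports iterate it forwards (a Level's package must exist before the Level is saved) and stale-row deletion iterates it in reverse, as read.py below does with reversed(ordered_models). For illustration:

from c3nav.mapdata.models import Package
from c3nav.mapdata.packageio.const import ordered_models

# Package comes first: every other model's rows reference a package.
assert ordered_models[0] is Package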

c3nav/mapdata/packageio/read.py

@@ -6,16 +6,15 @@ import subprocess
from django.conf import settings
from django.core.management import CommandError
from ..models import Feature, Level, Package, Source
from ..models import Level, Package
from .const import ordered_models
class MapdataReader:
ordered_models = (Package, Level, Source, Feature)
def __init__(self):
self.content = {}
self.package_names_by_dir = {}
self.saved_items = {model: {} for model in self.ordered_models}
self.saved_items = {model: {} for model in ordered_models}
def read_packages(self):
print('Detecting Map Packages…')
@@ -38,14 +37,14 @@ class MapdataReader:
def _add_item(self, item):
if item.package_dir not in self.content:
self.content[item.package_dir] = {model: [] for model in self.ordered_models}
self.content[item.package_dir] = {model: [] for model in ordered_models}
self.content[item.package_dir][item.model].append(item)
def add_file(self, package_dir, path, filename):
file_path = os.path.join(package_dir, path, filename)
relative_file_path = os.path.join(path, filename)
print(file_path)
for model in self.ordered_models:
for model in ordered_models:
if re.search(model.path_regex, relative_file_path):
self._add_item(ReaderItem(self, package_dir, path, filename, model))
break
@@ -86,13 +85,13 @@
print('')
package_dir = package_dirs_by_name[package_name]
items_by_model = self.content[package_dir]
for model in self.ordered_models:
for model in ordered_models:
items = items_by_model[model]
for item in items:
item.save()
# Delete old entries
for model in reversed(self.ordered_models):
for model in reversed(ordered_models):
queryset = model.objects.exclude(name__in=self.saved_items[model].keys())
for name in queryset.values_list('name', flat=True):
print('- Deleted %s: %s' % (model.__name__, name))
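
add_file routes each file to the first model whose path_regex matches its package-relative path. A minimal illustration using the patterns visible in the model files above (the level filename is made up; Source's pattern is not shown in this diff but presumably follows the same sources/ convention):

import re

# path_regex values from package.py and level.py above:
assert re.search(r'^package.json$', 'package.json')
assert re.search(r'^levels/', 'levels/level0.json')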

c3nav/mapdata/packageio/write.py

@@ -10,106 +10,108 @@ from django.utils import timezone
from c3nav.mapdata.utils import json_encoder_reindent
from ..models import Package
from .const import ordered_models
def write_packages(prettify=False, check_only=False):
if not check_only:
class MapdataWriter:
def __init__(self):
self.keep = set()
self.write = []
self.delete = []
def prepare_write_packages(self, prettify=False, diff=False):
print('Writing Map Packages…')
count = 0
for package in Package.objects.all():
if not check_only:
print('\n'+package.name)
count += write_package(package, prettify, check_only)
return count
count = 0
for model in ordered_models:
for obj in model.objects.all().order_by('name').prefetch_related():
file_path = os.path.join(obj.package.directory, obj.tofilename())
full_file_path = os.path.join(settings.MAP_ROOT, file_path)
self.keep.add(file_path)
new_data = obj.tofile()
new_data_encoded = json_encode(new_data)
old_data = None
old_data_encoded = None
def write_package(package, prettify=False, check_only=False):
count = 0
count += _write_object(package, package.directory, 'pkg.json', prettify, check_only)
count += _write_folder(package.levels.all(), os.path.join(package.directory, 'levels'), prettify, check_only)
count += _write_folder(package.sources.all(), os.path.join(package.directory, 'sources'), prettify, check_only,
check_sister_file=True)
return count
if os.path.isfile(full_file_path):
with open(full_file_path) as f:
old_data_encoded = f.read()
old_data = json.loads(old_data_encoded, parse_int=float)
if old_data != json.loads(new_data_encoded, parse_int=float):
if not diff:
print('- Updated: ' + file_path)
elif old_data_encoded != new_data_encoded:
if not prettify:
continue
if not diff:
print('- Prettified: ' + file_path)
else:
continue
else:
if not diff:
print('- Created: ' + file_path)
def _write_folder(objects, path, prettify=False, check_only=False, check_sister_file=False):
count = 0
filenames = set()
full_path = os.path.join(settings.MAP_ROOT, path)
if objects:
if not os.path.isdir(full_path):
os.mkdir(full_path)
for obj in objects:
filename = '%s.json' % obj.name
filenames.add(filename)
count += _write_object(obj, path, filename, prettify, check_only)
if diff:
sys.stdout.writelines(difflib.unified_diff(
[] if old_data is None else [(line + '\n') for line in old_data_encoded.split('\n')],
[(line + '\n') for line in new_data_encoded.split('\n')],
fromfiledate=timezone.make_aware(
datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_file_path))
).isoformat(),
tofiledate=timezone.now().isoformat(),
fromfile=file_path,
tofile=file_path
))
print()
if os.path.isdir(full_path):
for filename in sorted(os.listdir(full_path)):
full_filename = os.path.join(full_path, filename)
if filename in filenames or not filename.endswith('.json') or not os.path.isfile(full_filename):
continue
self.write.append((file_path, new_data_encoded))
count += 1
count += 1
if check_only:
sys.stdout.writelines(difflib.unified_diff(
list(open(full_filename)),
[],
fromfiledate=timezone.make_aware(
datetime.fromtimestamp(os.path.getmtime(full_filename))
).isoformat(),
tofiledate=timezone.make_aware(datetime.fromtimestamp(0)).isoformat(),
fromfile=os.path.join(path, filename),
tofile=os.path.join(path, filename)
))
else:
os.remove(full_filename)
if check_sister_file and os.path.isfile(full_filename[:-5]):
os.remove(full_filename[:-5])
return count
# Delete old files
for package_dir in Package.objects.all().values_list('directory', flat=True):
for path, sub_dirs, filenames in os.walk(os.path.join(settings.MAP_ROOT, package_dir)):
sub_dirs[:] = sorted([directory for directory in sub_dirs if not directory.startswith('.')])
for filename in sorted(filenames):
if not filename.endswith('.json'):
continue
file_path = os.path.join(path[len(settings.MAP_ROOT) + 1:], filename)
if file_path not in self.keep:
if not diff:
print('- Deleted: ' + file_path)
else:
full_file_path = os.path.join(path, filename)
lines = list(open(full_file_path).readlines())
if not lines:
lines = ['\n']
sys.stdout.writelines(difflib.unified_diff(
lines,
[],
fromfiledate=timezone.make_aware(
datetime.fromtimestamp(os.path.getmtime(full_file_path))
).isoformat(),
tofiledate=timezone.make_aware(
datetime.fromtimestamp(0)
).isoformat(),
fromfile=file_path,
tofile=file_path
))
print()
self.delete.append(file_path)
return count
def _write_object(obj, path, filename, prettify=False, check_only=False):
full_path = os.path.join(settings.MAP_ROOT, path)
full_filename = os.path.join(full_path, filename)
new_data = obj.tofile()
new_data_encoded = json_encode(new_data)
old_data = None
old_data_encoded = None
if os.path.isfile(full_filename):
with open(full_filename) as f:
old_data_encoded = f.read()
old_data = json.loads(old_data_encoded, parse_int=float)
if old_data != json.loads(new_data_encoded, parse_int=float):
if not check_only:
print('- Updated: '+os.path.join(path, filename))
elif old_data_encoded != new_data_encoded:
if not prettify:
return 0
if not check_only:
print('- Beautified: '+os.path.join(path, filename))
else:
return 0
else:
if not check_only:
print('- Created: '+os.path.join(path, filename))
def do_write_packages(self):
for file_path, content in self.write:
full_file_path = os.path.join(settings.MAP_ROOT, file_path)
if content is not None:
with open(full_file_path, 'w') as f:
f.write(content)
if check_only:
sys.stdout.writelines(difflib.unified_diff(
[] if old_data is None else [(line+'\n') for line in old_data_encoded.split('\n')],
[(line+'\n') for line in new_data_encoded.split('\n')],
fromfiledate=timezone.make_aware(
datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_filename))
).isoformat(),
tofiledate=timezone.now().isoformat(),
fromfile=os.path.join(path, filename),
tofile=os.path.join(path, filename)
))
else:
with open(full_filename, 'w') as f:
f.write(new_data_encoded)
return 1
for file_path in self.delete:
full_file_path = os.path.join(settings.MAP_ROOT, file_path)
os.remove(full_file_path)
def json_encode(data):
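
prepare_write_packages classifies each file with two comparisons: JSON parsed with parse_int=float decides whether the content changed (so 5 and 5.0 compare equal), while the raw encoded strings decide whether a file merely needs reformatting. A small illustration of the distinction (the altitude value is just an example):

import json

on_disk = '{"altitude": 5}'
regenerated = '{"altitude": 5.0}'

# Semantically unchanged once ints are parsed as floats...
assert json.loads(on_disk, parse_int=float) == json.loads(regenerated, parse_int=float)
# ...but the encodings differ, so the file is rewritten only when
# prettify=True and is reported as prettified rather than updated.
assert on_disk != regenerated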