reimplement dumpmap and checkmap
parent bca976af11
commit 6e65f8b8bd
9 changed files with 158 additions and 120 deletions

src/c3nav/mapdata/management/commands/checkmap.py
@@ -2,9 +2,12 @@ import os
 import tempfile
 
 from django.core.management import call_command
-from django.core.management.base import BaseCommand
+from django.core.management.base import BaseCommand, CommandError
 from django.db import connections, router
 
+from c3nav.mapdata.packageio.read import MapdataReader
+from c3nav.mapdata.packageio.write import MapdataWriter
+
 
 class Command(BaseCommand):
     help = 'Check if there are errors in the map package files'

@@ -30,7 +33,17 @@ class Command(BaseCommand):
 
         try:
             call_command('migrate', database='tmpdb')
-            call_command('loadmap', yes=True)
-            call_command('dumpmap', prettify=options['prettify'], check_only=True)
+            reader = MapdataReader()
+            reader.read_packages()
+            reader.apply_to_db()
+
+            writer = MapdataWriter()
+            count = writer.prepare_write_packages(prettify=options['prettify'], diff=True)
+
+            if count:
+                raise CommandError('%s files affected.' % count)
+            else:
+                print('Everything ok!')
         finally:
             os.remove(tmp)
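
Note: checkmap now performs the load/dump round trip in-process instead of shelling out through the loadmap and dumpmap commands. A minimal sketch of the same check on its own (the temporary 'tmpdb' database set up by the surrounding handle() is assumed):

    from c3nav.mapdata.packageio.read import MapdataReader
    from c3nav.mapdata.packageio.write import MapdataWriter

    # Load all package files into the temporary database ...
    reader = MapdataReader()
    reader.read_packages()
    reader.apply_to_db()

    # ... then dry-run a dump: any file that would change means the
    # package files disagree with their canonical serialization.
    writer = MapdataWriter()
    count = writer.prepare_write_packages(prettify=True, diff=True)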

src/c3nav/mapdata/management/commands/dumpmap.py
@@ -1,23 +1,33 @@
 from django.core.management.base import BaseCommand, CommandError
 
-from ...packageio import write_packages
+from ...packageio import MapdataWriter
 
 
 class Command(BaseCommand):
     help = 'Dump the map database into the map package files'
 
     def add_arguments(self, parser):
+        parser.add_argument('--yes', '-y', action='store_const', const=True, default=False,
+                            help='don\'t ask for confirmation')
         parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
-                            help='dont\'t prettify existing files')
+                            help='don\'t prettify existing files')
+        parser.add_argument('--diff', action='store_const', const=True, default=False,
+                            help='show changes as diff')
         parser.add_argument('--check-only', action='store_const', const=True, default=False,
                             help='check if there are files to update')
 
     def handle(self, *args, **options):
-        count = write_packages(prettify=options['prettify'], check_only=options['check_only'])
+        writer = MapdataWriter()
+        count = writer.prepare_write_packages(prettify=options['prettify'], diff=options['diff'])
+
         if options['check_only']:
-            if count == 0:
-                print('No errors found!')
-            else:
-                raise CommandError('Found errors in %s file(s)' % count)
+            if count:
+                raise CommandError('Check resulted in files to update.')
+            print('Nothing to do.')
         else:
-            print('%s file(s) affected' % count)
+            if not count:
+                print('Nothing to do.')
+            else:
+                if not options['yes'] and input('Confirm (y/N): ') != 'y':
+                    raise CommandError('Aborted.')
+                writer.do_write_packages()
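
Typical invocations after this change, via Django's call_command (flag names taken from add_arguments above):

    from django.core.management import call_command

    # Preview pending changes as a unified diff; raises CommandError if
    # any package file would be created, updated or deleted.
    call_command('dumpmap', diff=True, check_only=True)

    # Write all changed package files, skipping the confirmation prompt.
    call_command('dumpmap', yes=True)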

src/c3nav/mapdata/models/level.py
@@ -14,6 +14,9 @@ class Level(models.Model):
 
     path_regex = r'^levels/'
 
+    def tofilename(self):
+        return 'levels/%s.json' % self.name
+
     @classmethod
     def fromfile(cls, data):
         if 'altitude' not in data:

src/c3nav/mapdata/models/package.py
@@ -24,6 +24,20 @@ class Package(models.Model):
 
     path_regex = r'^package.json$'
 
+    @property
+    def package(self):
+        return self
+
+    @property
+    def bounds(self):
+        if self.bottom is None:
+            return None
+        return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
+
+    @property
+    def public(self):
+        return self.name in settings.PUBLIC_PACKAGES
+
     @classmethod
     def fromfile(cls, data):
         kwargs = {}

@@ -53,19 +67,9 @@ class Package(models.Model):
 
         return kwargs
 
-    @property
-    def package(self):
-        return self
+    # noinspection PyMethodMayBeStatic
+    def tofilename(self):
+        return 'package.json'
 
-    @property
-    def public(self):
-        return self.name in settings.PUBLIC_PACKAGES
-
-    @property
-    def bounds(self):
-        if self.bottom is None:
-            return None
-        return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
-
     def tofile(self):
         data = OrderedDict()
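
The package property that now sits with bounds and public (they only moved above fromfile) is what lets the writer resolve a target path the same way for every model. A sketch of the shared contract (file_path_for is a hypothetical helper; Level.package and Source.package are the foreign keys to the owning Package):

    import os

    def file_path_for(obj):
        # Works for Package itself (its .package is the object itself)
        # as well as for Level and Source.
        return os.path.join(obj.package.directory, obj.tofilename())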

src/c3nav/mapdata/models/source.py
@@ -28,6 +28,9 @@ class Source(models.Model):
     def bounds(self):
         return (float(self.bottom), float(self.left)), (float(self.top), float(self.right))
 
+    def tofilename(self):
+        return 'sources/%s.json' % self.name
+
     @classmethod
     def fromfile(cls, data):
         kwargs = {}
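
Each model now both recognizes its files (path_regex, used by the reader to route files to models) and names them (tofilename(), used by the writer). A quick sanity check of that round trip (a sketch; the re.search call mirrors MapdataReader.add_file):

    import re

    # What the writer would emit for a level named 'level0' ...
    filename = 'levels/%s.json' % 'level0'

    # ... is routed back to Level by the regex the reader matches on.
    assert re.search(r'^levels/', filename)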

src/c3nav/mapdata/packageio/__init__.py
@@ -1,2 +1,2 @@
 from .read import MapdataReader  # noqa
-from .write import write_packages, write_package  # noqa
+from .write import MapdataWriter  # noqa

src/c3nav/mapdata/packageio/const.py (new file, 4 additions)
@@ -0,0 +1,4 @@
+from ..models import Feature, Level, Package, Source
+
+
+ordered_models = (Package, Level, Source, Feature)
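
ordered_models lifts the model dependency order out of MapdataReader so the new writer can share it: packages have to exist before the levels, sources and features that belong to them, and deletion has to happen in the opposite order. A minimal sketch of the pattern (sync and prune are hypothetical helpers):

    from c3nav.mapdata.packageio.const import ordered_models

    # Create or update parents before children ...
    for model in ordered_models:
        sync(model)

    # ... and delete children before parents, as apply_to_db does below.
    for model in reversed(ordered_models):
        prune(model)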

src/c3nav/mapdata/packageio/read.py
@@ -6,16 +6,15 @@ import subprocess
 from django.conf import settings
 from django.core.management import CommandError
 
-from ..models import Feature, Level, Package, Source
+from ..models import Level, Package
+from .const import ordered_models
 
 
 class MapdataReader:
-    ordered_models = (Package, Level, Source, Feature)
-
     def __init__(self):
         self.content = {}
         self.package_names_by_dir = {}
-        self.saved_items = {model: {} for model in self.ordered_models}
+        self.saved_items = {model: {} for model in ordered_models}
 
     def read_packages(self):
         print('Detecting Map Packages…')

@@ -38,14 +37,14 @@ class MapdataReader:
 
     def _add_item(self, item):
         if item.package_dir not in self.content:
-            self.content[item.package_dir] = {model: [] for model in self.ordered_models}
+            self.content[item.package_dir] = {model: [] for model in ordered_models}
         self.content[item.package_dir][item.model].append(item)
 
     def add_file(self, package_dir, path, filename):
         file_path = os.path.join(package_dir, path, filename)
         relative_file_path = os.path.join(path, filename)
         print(file_path)
-        for model in self.ordered_models:
+        for model in ordered_models:
             if re.search(model.path_regex, relative_file_path):
                 self._add_item(ReaderItem(self, package_dir, path, filename, model))
                 break

@@ -86,13 +85,13 @@ class MapdataReader:
             print('')
             package_dir = package_dirs_by_name[package_name]
             items_by_model = self.content[package_dir]
-            for model in self.ordered_models:
+            for model in ordered_models:
                 items = items_by_model[model]
                 for item in items:
                     item.save()
 
         # Delete old entries
-        for model in reversed(self.ordered_models):
+        for model in reversed(ordered_models):
             queryset = model.objects.exclude(name__in=self.saved_items[model].keys())
             for name in queryset.values_list('name', flat=True):
                 print('- Deleted %s: %s' % (model.__name__, name))
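
All three read.py hunks are the same mechanical change: the reader now consumes the shared ordered_models instead of its own class attribute. Its public interface is unchanged, as the new checkmap uses it:

    from c3nav.mapdata.packageio import MapdataReader

    reader = MapdataReader()
    reader.read_packages()   # scan the package directories for files
    reader.apply_to_db()     # save rows in order, delete orphans in reverse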

src/c3nav/mapdata/packageio/write.py
@@ -10,106 +10,108 @@ from django.utils import timezone
 from c3nav.mapdata.utils import json_encoder_reindent
 
 from ..models import Package
+from .const import ordered_models
 
 
-def write_packages(prettify=False, check_only=False):
-    if not check_only:
+class MapdataWriter:
+    def __init__(self):
+        self.keep = set()
+        self.write = []
+        self.delete = []
+
+    def prepare_write_packages(self, prettify=False, diff=False):
         print('Writing Map Packages…')
 
-    count = 0
-    for package in Package.objects.all():
-        if not check_only:
-            print('\n'+package.name)
-        count += write_package(package, prettify, check_only)
-    return count
+        count = 0
+        for model in ordered_models:
+            for obj in model.objects.all().order_by('name').prefetch_related():
+                file_path = os.path.join(obj.package.directory, obj.tofilename())
+                full_file_path = os.path.join(settings.MAP_ROOT, file_path)
+                self.keep.add(file_path)
 
+                new_data = obj.tofile()
+                new_data_encoded = json_encode(new_data)
+                old_data = None
+                old_data_encoded = None
 
-def write_package(package, prettify=False, check_only=False):
-    count = 0
-    count += _write_object(package, package.directory, 'pkg.json', prettify, check_only)
-    count += _write_folder(package.levels.all(), os.path.join(package.directory, 'levels'), prettify, check_only)
-    count += _write_folder(package.sources.all(), os.path.join(package.directory, 'sources'), prettify, check_only,
-                           check_sister_file=True)
-    return count
+                if os.path.isfile(full_file_path):
+                    with open(full_file_path) as f:
+                        old_data_encoded = f.read()
+                    old_data = json.loads(old_data_encoded, parse_int=float)
 
+                    if old_data != json.loads(new_data_encoded, parse_int=float):
+                        if not diff:
+                            print('- Updated: ' + file_path)
+                    elif old_data_encoded != new_data_encoded:
+                        if not prettify:
+                            continue
+                        if not diff:
+                            print('- Prettified: ' + file_path)
+                    else:
+                        continue
+                else:
+                    if not diff:
+                        print('- Created: ' + file_path)
 
-def _write_folder(objects, path, prettify=False, check_only=False, check_sister_file=False):
-    count = 0
-    filenames = set()
-    full_path = os.path.join(settings.MAP_ROOT, path)
-    if objects:
-        if not os.path.isdir(full_path):
-            os.mkdir(full_path)
-        for obj in objects:
-            filename = '%s.json' % obj.name
-            filenames.add(filename)
-            count += _write_object(obj, path, filename, prettify, check_only)
+                if diff:
+                    sys.stdout.writelines(difflib.unified_diff(
+                        [] if old_data is None else [(line + '\n') for line in old_data_encoded.split('\n')],
+                        [(line + '\n') for line in new_data_encoded.split('\n')],
+                        fromfiledate=timezone.make_aware(
+                            datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_file_path))
+                        ).isoformat(),
+                        tofiledate=timezone.now().isoformat(),
+                        fromfile=file_path,
+                        tofile=file_path
+                    ))
+                    print()
 
-    if os.path.isdir(full_path):
-        for filename in sorted(os.listdir(full_path)):
-            full_filename = os.path.join(full_path, filename)
-            if filename in filenames or not filename.endswith('.json') or not os.path.isfile(full_filename):
-                continue
+                self.write.append((file_path, new_data_encoded))
+                count += 1
 
-            count += 1
-            if check_only:
-                sys.stdout.writelines(difflib.unified_diff(
-                    list(open(full_filename)),
-                    [],
-                    fromfiledate=timezone.make_aware(
-                        datetime.fromtimestamp(os.path.getmtime(full_filename))
-                    ).isoformat(),
-                    tofiledate=timezone.make_aware(datetime.fromtimestamp(0)).isoformat(),
-                    fromfile=os.path.join(path, filename),
-                    tofile=os.path.join(path, filename)
-                ))
-            else:
-                os.remove(full_filename)
-                if check_sister_file and os.path.isfile(full_filename[:-5]):
-                    os.remove(full_filename[:-5])
-    return count
+        # Delete old files
+        for package_dir in Package.objects.all().values_list('directory', flat=True):
+            for path, sub_dirs, filenames in os.walk(os.path.join(settings.MAP_ROOT, package_dir)):
+                sub_dirs[:] = sorted([directory for directory in sub_dirs if not directory.startswith('.')])
+                for filename in sorted(filenames):
+                    if not filename.endswith('.json'):
+                        continue
+                    file_path = os.path.join(path[len(settings.MAP_ROOT) + 1:], filename)
+                    if file_path not in self.keep:
+                        if not diff:
+                            print('- Deleted: ' + file_path)
+                        else:
+                            full_file_path = os.path.join(path, filename)
+                            lines = list(open(full_file_path).readlines())
+                            if not lines:
+                                lines = ['\n']
+                            sys.stdout.writelines(difflib.unified_diff(
+                                lines,
+                                [],
+                                fromfiledate=timezone.make_aware(
+                                    datetime.fromtimestamp(os.path.getmtime(full_file_path))
+                                ).isoformat(),
+                                tofiledate=timezone.make_aware(
+                                    datetime.fromtimestamp(0)
+                                ).isoformat(),
+                                fromfile=file_path,
+                                tofile=file_path
+                            ))
+                            print()
+                        self.delete.append(file_path)
 
+        return count
 
-def _write_object(obj, path, filename, prettify=False, check_only=False):
-    full_path = os.path.join(settings.MAP_ROOT, path)
-    full_filename = os.path.join(full_path, filename)
-    new_data = obj.tofile()
-    new_data_encoded = json_encode(new_data)
-    old_data = None
-    old_data_encoded = None
-    if os.path.isfile(full_filename):
-        with open(full_filename) as f:
-            old_data_encoded = f.read()
-        old_data = json.loads(old_data_encoded, parse_int=float)
-        if old_data != json.loads(new_data_encoded, parse_int=float):
-            if not check_only:
-                print('- Updated: '+os.path.join(path, filename))
-        elif old_data_encoded != new_data_encoded:
-            if not prettify:
-                return 0
-            if not check_only:
-                print('- Beautified: '+os.path.join(path, filename))
-        else:
-            return 0
-    else:
-        if not check_only:
-            print('- Created: '+os.path.join(path, filename))
+    def do_write_packages(self):
+        for file_path, content in self.write:
+            full_file_path = os.path.join(settings.MAP_ROOT, file_path)
+            if content is not None:
+                with open(full_file_path, 'w') as f:
+                    f.write(content)
 
-    if check_only:
-        sys.stdout.writelines(difflib.unified_diff(
-            [] if old_data is None else [(line+'\n') for line in old_data_encoded.split('\n')],
-            [(line+'\n') for line in new_data_encoded.split('\n')],
-            fromfiledate=timezone.make_aware(
-                datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_filename))
-            ).isoformat(),
-            tofiledate=timezone.now().isoformat(),
-            fromfile=os.path.join(path, filename),
-            tofile=os.path.join(path, filename)
-        ))
-    else:
-        with open(full_filename, 'w') as f:
-            f.write(new_data_encoded)
-    return 1
+        for file_path in self.delete:
+            full_file_path = os.path.join(settings.MAP_ROOT, file_path)
+            os.remove(full_file_path)
 
 
 def json_encode(data):
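
The writer is now two-phase: prepare_write_packages() compares the database against the files and only records what would change (self.write, self.delete), while do_write_packages() applies the recorded changes. That split is what lets dumpmap show a preview and ask for confirmation first. A sketch of the protocol:

    from c3nav.mapdata.packageio import MapdataWriter

    writer = MapdataWriter()

    # Phase 1: nothing is written; pending writes and deletes are only
    # recorded on the writer, optionally printed as a unified diff.
    count = writer.prepare_write_packages(prettify=True, diff=False)

    # Phase 2: apply the recorded changes, e.g. after user confirmation.
    if count:
        writer.do_write_packages()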