Introducing the `manage.py checkmap` command
This commit is contained in:
parent
ef3969a4f5
commit
b314bcae5d
4 changed files with 129 additions and 45 deletions
30
src/c3nav/mapdata/management/commands/checkmap.py
Normal file
30
src/c3nav/mapdata/management/commands/checkmap.py
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from django.core.management import call_command
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import connections
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    help = 'Check if there are errors in the map package files'

    def add_arguments(self, parser):
        # --no-prettify disables formatting checks (prettify is on by default)
        parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
                            help='ignore formatting errors')

    def handle(self, *args, **options):
        """Load the map packages into a throwaway SQLite database, then dump
        them again in check-only mode so differences are reported as errors."""
        print('Creating temporary database for checking…\n')

        # Bugfix: mkstemp() returns an OPEN file descriptor as its first value;
        # discarding it leaked the fd. Only the path is needed here (sqlite
        # reopens the file itself), so close the descriptor right away.
        fd, tmp = tempfile.mkstemp(suffix='.sqlite3', prefix='c3nav-checkmap-')
        os.close(fd)
        connections.databases['tmpdb'] = {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': tmp,
        }

        try:
            call_command('migrate', database='tmpdb')
            call_command('loadmap', yes=True)
            call_command('dumpmap', prettify=options['prettify'], check_only=True)
        finally:
            # always clean up the temporary database file, even on failure
            os.remove(tmp)
|
|
@ -1,17 +1,23 @@
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
from django.db import transaction
|
|
||||||
|
|
||||||
from ...packageio import write_packages
|
from ...packageio import write_packages
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
    help = 'Dump the map database into the map package files'

    def add_arguments(self, parser):
        # Bugfix: help text had a typo ("dont't" -> "don't").
        parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
                            help='don\'t prettify existing files')
        parser.add_argument('--check-only', action='store_const', const=True, default=False,
                            help='check if there are files to update')

    def handle(self, *args, **options):
        """Write (or, with --check-only, merely diff) all map packages.

        write_packages() returns the number of files that were — or would
        be — created, updated, beautified or removed.
        """
        count = write_packages(prettify=options['prettify'], check_only=options['check_only'])
        if options['check_only']:
            if count == 0:
                print('No errors found!')
            else:
                # non-zero exit status so CI can fail on outdated files
                raise CommandError('Found errors in %s file(s)' % count)
        else:
            print('%s file(s) affected' % count)
|
@ -5,15 +5,15 @@ from ...packageio import read_packages
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
    # Management command that imports the on-disk map packages.
    help = 'Load the map package files into the database'

    def add_arguments(self, parser):
        parser.add_argument('--yes', '-y', action='store_const', const=True, default=False,
                            help='don\'t ask for confirmation')

    def handle(self, *args, **options):
        """Read all packages inside one transaction; aborting rolls back."""
        with transaction.atomic():
            read_packages()
            print()
            # --yes skips the prompt entirely (input() is short-circuited away)
            confirmed = options['yes'] or input('Confirm (y/N): ') == 'y'
            if not confirmed:
                raise CommandError('Aborted.')
|
@ -1,63 +1,111 @@
|
||||||
|
import difflib
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
from ..models import Package
|
from ..models import Package
|
||||||
from .utils import json_encode
|
from .utils import json_encode
|
||||||
|
|
||||||
|
|
||||||
def write_packages(prettify=False, check_only=False):
    """Write every map package to disk (or only report changes).

    :param prettify: also rewrite files whose content is equal but
                     formatted differently
    :param check_only: don't touch the filesystem, only count/diff changes
    :return: total number of affected files across all packages
    """
    if not check_only:
        # Bugfix: there is no such attribute as sys.out — progress output
        # must go to sys.stdout (as the rest of this module already does).
        sys.stdout.write('Writing Map Packages…')

    count = 0
    for package in Package.objects.all():
        if not check_only:
            sys.stdout.write('\n'+package.name)
        count += write_package(package, prettify, check_only)
    return count
||||||
def write_package(package, prettify=False, check_only=False):
    """Write one package (its pkg.json, levels/ and sources/) to disk.

    Returns the number of affected files.
    """
    levels_path = os.path.join(package.directory, 'levels')
    sources_path = os.path.join(package.directory, 'sources')
    # sources carry a binary sister file next to the .json, so stale-file
    # cleanup has to remove both (check_sister_file=True)
    return (_write_object(package, package.directory, 'pkg.json', prettify, check_only)
            + _write_folder(package.levels.all(), levels_path, prettify, check_only)
            + _write_folder(package.sources.all(), sources_path, prettify, check_only,
                            check_sister_file=True))
||||||
def _write_folder(objects, path, prettify=False, check_only=False, check_sister_file=False):
    """Write all objects as JSON files into a folder and delete stale files.

    :param objects: iterable of objects providing .name and .tofile()
    :param path: folder path relative to settings.MAP_ROOT
    :param prettify: rewrite files that only differ in formatting
    :param check_only: don't modify anything, print unified diffs instead
    :param check_sister_file: also remove the file the .json suffix is
                              stripped from (e.g. a source image next to
                              its metadata file)
    :return: number of affected files
    """
    count = 0
    filenames = set()
    full_path = os.path.join(settings.MAP_ROOT, path)
    if objects:
        if not os.path.isdir(full_path):
            os.mkdir(full_path)
        for obj in objects:
            filename = '%s.json' % obj.name
            filenames.add(filename)
            count += _write_object(obj, path, filename, prettify, check_only)

    # remove (or, in check mode, show as fully deleted) stale .json files
    # that no longer correspond to an object
    if os.path.isdir(full_path):
        for filename in os.listdir(full_path):
            full_filename = os.path.join(full_path, filename)
            if filename in filenames or not filename.endswith('.json') or not os.path.isfile(full_filename):
                continue
            count += 1
            if check_only:
                # Bugfix: read the file via a context manager instead of
                # list(open(...)) so the file handle is not leaked.
                with open(full_filename) as f:
                    old_lines = f.readlines()
                sys.stdout.writelines(difflib.unified_diff(
                    old_lines,
                    [],
                    fromfiledate=timezone.make_aware(
                        datetime.fromtimestamp(os.path.getmtime(full_filename))
                    ).isoformat(),
                    tofiledate=timezone.make_aware(datetime.fromtimestamp(0)).isoformat(),
                    fromfile=os.path.join(path, filename),
                    tofile=os.path.join(path, filename)
                ))
            else:
                os.remove(full_filename)
                if check_sister_file and os.path.isfile(full_filename[:-5]):
                    os.remove(full_filename[:-5])
    return count
|
def _write_object(obj, path, filename, prettify=False, check_only=False):
    """Serialize one object to <MAP_ROOT>/<path>/<filename>.

    Returns 1 if the file was (or, with check_only, would be) created,
    updated or beautified, otherwise 0.
    """
    full_filename = os.path.join(settings.MAP_ROOT, path, filename)
    rel_name = os.path.join(path, filename)

    new_data = obj.tofile()
    new_data_encoded = json_encode(new_data)

    old_data = None
    old_data_encoded = None
    if os.path.isfile(full_filename):
        with open(full_filename) as f:
            old_data_encoded = f.read()
        # parse_int=float so 1 and 1.0 compare equal between old and new
        old_data = json.loads(old_data_encoded, parse_int=float)
        new_parsed = json.loads(new_data_encoded, parse_int=float)
        if old_data != new_parsed:
            if not check_only:
                sys.stdout.write('- Updated: '+rel_name)
        elif old_data_encoded != new_data_encoded:
            # same data, different formatting — only touch it when prettifying
            if not prettify:
                return 0
            if not check_only:
                sys.stdout.write('- Beautified: '+rel_name)
        else:
            return 0
    else:
        if not check_only:
            sys.stdout.write('- Created: '+rel_name)

    if check_only:
        # print a unified diff instead of writing anything
        old_lines = [] if old_data is None else [(line+'\n') for line in old_data_encoded.split('\n')]
        new_lines = [(line+'\n') for line in new_data_encoded.split('\n')]
        old_mtime = 0 if old_data is None else os.path.getmtime(full_filename)
        sys.stdout.writelines(difflib.unified_diff(
            old_lines,
            new_lines,
            fromfiledate=timezone.make_aware(datetime.fromtimestamp(old_mtime)).isoformat(),
            tofiledate=timezone.now().isoformat(),
            fromfile=rel_name,
            tofile=rel_name
        ))
    else:
        with open(full_filename, 'w') as f:
            f.write(new_data_encoded)
    return 1
Loading…
Add table
Add a link
Reference in a new issue