introducing the manage.py checkmap command
parent ef3969a4f5
commit b314bcae5d
4 changed files with 129 additions and 45 deletions

src/c3nav/mapdata/management/commands/checkmap.py (new file)

@@ -0,0 +1,30 @@
+import os
+import tempfile
+
+from django.core.management import call_command
+from django.core.management.base import BaseCommand
+from django.db import connections
+
+
+class Command(BaseCommand):
+    help = 'Check if there are errors in the map package files'
+
+    def add_arguments(self, parser):
+        parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
+                            help='ignore formatting errors')
+
+    def handle(self, *args, **options):
+        print('Creating temporary database for checking…\n')
+
+        _, tmp = tempfile.mkstemp(suffix='.sqlite3', prefix='c3nav-checkmap-')
+        connections.databases['tmpdb'] = {
+            'ENGINE': 'django.db.backends.sqlite3',
+            'NAME': tmp,
+        }
+
+        try:
+            call_command('migrate', database='tmpdb')
+            call_command('loadmap', yes=True)
+            call_command('dumpmap', prettify=options['prettify'], check_only=True)
+        finally:
+            os.remove(tmp)
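
Usage sketch (an illustration, not part of the commit): once checkmap exists, a CI step can run the whole round-trip check from Python. The wrapper script below is hypothetical; only the command name and its --no-prettify option come from the diff above.

    import django
    from django.core.management import call_command
    from django.core.management.base import CommandError

    django.setup()  # assumes DJANGO_SETTINGS_MODULE points at the c3nav settings

    try:
        call_command('checkmap')  # migrate a throwaway sqlite db, loadmap, then dumpmap --check-only
    except CommandError as err:
        raise SystemExit(str(err))  # non-zero exit for CI when the package files have errors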

src/c3nav/mapdata/management/commands/dumpmap.py

@@ -1,17 +1,23 @@
-from django.core.management.base import BaseCommand
 from django.db import transaction
+from django.core.management.base import BaseCommand, CommandError

 from ...packageio import write_packages


 class Command(BaseCommand):
-    help = 'Dump the map database'
+    help = 'Dump the map database into the map package files'

     def add_arguments(self, parser):
         parser.add_argument('--no-prettify', dest='prettify', action='store_const', const=False, default=True,
                             help='dont\'t prettify existing files')
+        parser.add_argument('--check-only', action='store_const', const=True, default=False,
+                            help='check if there are files to update')

     def handle(self, *args, **options):
         with transaction.atomic():
-            write_packages(prettify=options['prettify'])
-            print()
+            count = write_packages(prettify=options['prettify'], check_only=options['check_only'])
+            if options['check_only']:
+                if count == 0:
+                    print('No errors found!')
+                else:
+                    raise CommandError('Found errors in %s file(s)' % count)
+            else:
+                print('%s file(s) affected' % count)
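
For orientation (an illustration, not part of the commit): with --check-only the command no longer writes anything; it prints unified diffs for stale files and signals them by raising CommandError, so a caller can treat it as a boolean check. The helper below is hypothetical; only the dumpmap options come from the diff above.

    from django.core.management import call_command
    from django.core.management.base import CommandError

    def map_files_up_to_date(prettify=True):
        # check_only=True raises CommandError('Found errors in N file(s)') if any file would change
        try:
            call_command('dumpmap', prettify=prettify, check_only=True)
        except CommandError:
            return False
        return True

When invoked through manage.py instead, an uncaught CommandError is printed to stderr and turned into a non-zero exit status, which is what makes the check usable from shell scripts.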

src/c3nav/mapdata/management/commands/loadmap.py

@@ -5,15 +5,15 @@ from ...packageio import read_packages


 class Command(BaseCommand):
-    help = 'Update the map database'
+    help = 'Load the map package files into the database'

     def add_arguments(self, parser):
-        parser.add_argument('-y', action='store_const', const=True, default=False,
+        parser.add_argument('--yes', '-y', action='store_const', const=True, default=False,
                             help='don\'t ask for confirmation')

     def handle(self, *args, **options):
         with transaction.atomic():
             read_packages()
             print()
-            if input('Confirm (y/N): ') != 'y':
+            if not options['yes'] and input('Confirm (y/N): ') != 'y':
                 raise CommandError('Aborted.')
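
The new --yes/-y switch is what lets checkmap call loadmap unattended. A minimal sketch (assumes a configured Django environment):

    from django.core.management import call_command

    call_command('loadmap', yes=True)  # same as: python manage.py loadmap --yes
    # without yes=True the command still blocks on input('Confirm (y/N): ')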

src/c3nav/mapdata/packageio.py

@@ -1,63 +1,111 @@
+import difflib
 import json
 import os
+import sys
+from datetime import datetime

 from django.conf import settings
+from django.utils import timezone

 from ..models import Package
 from .utils import json_encode


-def write_packages(prettify=False):
-    print('Writing Map Packages…')
+def write_packages(prettify=False, check_only=False):
+    if not check_only:
+        sys.stdout.write('Writing Map Packages…')
+
+    count = 0
     for package in Package.objects.all():
-        print('\n'+package.name)
-        write_package(package, prettify)
+        if not check_only:
+            sys.stdout.write('\n'+package.name)
+        count += write_package(package, prettify, check_only)
+    return count


-def write_package(package, prettify=False):
-    pkg_path = os.path.join(settings.MAP_ROOT, package.directory)
-
-    with open(os.path.join(pkg_path, 'pkg.json'), 'w') as f:
-        f.write(json_encode(package.tofile()))
-
-    _write_folder(package, package.levels.all(), 'levels', prettify)
-    _write_folder(package, package.sources.all(), 'sources', prettify, check_sister_file=True)
+def write_package(package, prettify=False, check_only=False):
+    count = 0
+    count += _write_object(package, package.directory, 'pkg.json', prettify, check_only)
+    count += _write_folder(package.levels.all(), os.path.join(package.directory, 'levels'), prettify, check_only)
+    count += _write_folder(package.sources.all(), os.path.join(package.directory, 'sources'), prettify, check_only,
+                           check_sister_file=True)
+    return count


-def _write_folder(package, objects, path, prettify=False, check_sister_file=False):
+def _write_folder(objects, path, prettify=False, check_only=False, check_sister_file=False):
+    count = 0
     filenames = set()
-    full_path = os.path.join(settings.MAP_ROOT, package.directory, path)
+    full_path = os.path.join(settings.MAP_ROOT, path)
     if objects:
         if not os.path.isdir(full_path):
             os.mkdir(full_path)
     for obj in objects:
         filename = '%s.json' % obj.name
         filenames.add(filename)
-        full_filename = os.path.join(full_path, filename)
-        new_data = obj.tofile()
-        new_data_encoded = json_encode(new_data)
-        if os.path.isfile(full_filename):
-            with open(full_filename) as f:
-                old_data_encoded = f.read()
-            old_data = json.loads(old_data_encoded, parse_int=float)
-            if old_data != json.loads(new_data_encoded, parse_int=float):
-                print('- Updated: '+os.path.join(path, filename))
-            elif old_data_encoded != new_data_encoded:
-                if not prettify:
-                    continue
-                print('- Beautified: '+os.path.join(path, filename))
-            else:
-                continue
-        else:
-            print('- Created: '+os.path.join(path, filename))
-        with open(full_filename, 'w') as f:
-            f.write(new_data_encoded)
+        count += _write_object(obj, path, filename, prettify, check_only)

-    if os.path.isdir(full_path):
-        for filename in os.listdir(full_path):
-            full_filename = os.path.join(full_path, filename)
-            if filename in filenames or not filename.endswith('.json') or not os.path.isfile(full_filename):
-                continue
-            os.remove(full_filename)
-            if check_sister_file and os.path.isfile(full_filename[:-5]):
-                os.remove(full_filename[:-5])
+    if os.path.isdir(path):
+        for filename in os.listdir(path):
+            full_filename = os.path.join(path, filename)
+            if filename not in filenames and filename.endswith('.json') and os.path.isfile(full_filename):
+                count += 1
+                if check_only:
+                    sys.stdout.writelines(difflib.unified_diff(
+                        list(open(full_filename)),
+                        [],
+                        fromfiledate=timezone.make_aware(
+                            datetime.fromtimestamp(os.path.getmtime(full_filename))
+                        ).isoformat(),
+                        tofiledate=timezone.make_aware(datetime.fromtimestamp(0)).isoformat(),
+                        fromfile=os.path.join(path, filename),
+                        tofile=os.path.join(path, filename)
+                    ))
+                else:
+                    os.remove(full_filename)
+                    if check_sister_file and os.path.isfile(full_filename[:-5]):
+                        os.remove(full_filename[:-5])
+    return count
+
+
+def _write_object(obj, path, filename, prettify=False, check_only=False):
+    full_path = os.path.join(settings.MAP_ROOT, path)
+    full_filename = os.path.join(full_path, filename)
+    new_data = obj.tofile()
+    new_data_encoded = json_encode(new_data)
+    old_data = None
+    old_data_encoded = None
+    if os.path.isfile(full_filename):
+        with open(full_filename) as f:
+            old_data_encoded = f.read()
+        old_data = json.loads(old_data_encoded, parse_int=float)
+        if old_data != json.loads(new_data_encoded, parse_int=float):
+            if not check_only:
+                sys.stdout.write('- Updated: '+os.path.join(path, filename))
+        elif old_data_encoded != new_data_encoded:
+            if not prettify:
+                return 0
+            if not check_only:
+                sys.stdout.write('- Beautified: '+os.path.join(path, filename))
+        else:
+            return 0
+    else:
+        if not check_only:
+            sys.stdout.write('- Created: '+os.path.join(path, filename))
+
+    if check_only:
+        sys.stdout.writelines(difflib.unified_diff(
+            [] if old_data is None else [(line+'\n') for line in old_data_encoded.split('\n')],
+            [(line+'\n') for line in new_data_encoded.split('\n')],
+            fromfiledate=timezone.make_aware(
+                datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_filename))
+            ).isoformat(),
+            tofiledate=timezone.now().isoformat(),
+            fromfile=os.path.join(path, filename),
+            tofile=os.path.join(path, filename)
+        ))
+    else:
+        with open(full_filename, 'w') as f:
+            f.write(new_data_encoded)
+    return 1
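
A note on the comparison in _write_object above: both the file on disk and the freshly encoded data are parsed with parse_int=float before being compared, so a value whose textual representation changed (1 vs 1.0) still counts as equal; only then does a byte-level difference fall into the 'Beautified' branch instead of 'Updated'. A standalone illustration with made-up data:

    import json

    on_disk = '{"width": 1, "height": 2.0}'        # hypothetical package file content
    regenerated = '{"width": 1.0, "height": 2.0}'  # what a fresh encode might produce

    same_data = json.loads(on_disk, parse_int=float) == json.loads(regenerated, parse_int=float)
    same_bytes = on_disk == regenerated

    assert same_data and not same_bytes  # -> 'Beautified': rewritten only when prettify is on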