remove packageio
parent de32f8e012
commit 5e4e11b51d
4 changed files with 0 additions and 332 deletions

c3nav/mapdata/packageio/__init__.py
@@ -1,2 +0,0 @@
from .read import MapdataReader # noqa
from .write import MapdataWriter # noqa

c3nav/mapdata/packageio/const.py
@@ -1,9 +0,0 @@
from c3nav.mapdata.models import AreaLocation, Level, LocationGroup, Package, Source
from c3nav.mapdata.models.collections import Elevator
from c3nav.mapdata.models.geometry import (Building, Door, ElevatorLevel, Escalator, EscalatorSlope, Hole,
                                           LevelConnector, LineObstacle, Obstacle, OneWay, Outside, Room, Stair,
                                           StuffedArea)

ordered_models = (Package, Level, LevelConnector, Source, Building, Room, Outside, Door, Obstacle, Hole)
ordered_models += (Elevator, ElevatorLevel, LineObstacle, Stair, Escalator, EscalatorSlope, OneWay)
ordered_models += (LocationGroup, AreaLocation, StuffedArea)
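# Note: models are ordered so that each model appears after everything it may
# reference; the reader creates/updates in this order and deletes in reverse.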

c3nav/mapdata/packageio/read.py
@@ -1,199 +0,0 @@
import json
import os
import re
import subprocess
from collections import OrderedDict

from django.conf import settings
from django.core.management import CommandError

from c3nav.mapdata.models import AreaLocation, Elevator, Level, LocationGroup, Package
from c3nav.mapdata.models.geometry import LevelConnector
from c3nav.mapdata.packageio.const import ordered_models


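# MapdataReader walks the package directories below MAP_ROOT, matches every
# JSON file to a mapdata model by its path, and mirrors the result into the
# database: create/update in dependency order, then delete what disappeared.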
class MapdataReader:
    def __init__(self):
        self.content = {}
        self.package_names_by_dir = {}
        self.saved_items = {model: {} for model in ordered_models}
        self.path_regexes = OrderedDict((model, model.get_path_regex()) for model in ordered_models)

    def read_packages(self):
        print('Detecting Map Packages…')

        for directory in os.listdir(settings.MAP_ROOT):
            print('\n' + directory)
            if not os.path.isdir(os.path.join(settings.MAP_ROOT, directory)):
                continue
            self.read_package(directory)

    def read_package(self, package_dir):
        full_package_dir = os.path.join(settings.MAP_ROOT, package_dir)

        for path, sub_dirs, filenames in os.walk(full_package_dir):
            sub_dirs[:] = sorted([directory for directory in sub_dirs if not directory.startswith('.')])
            for filename in sorted(filenames):
                if not filename.endswith('.json'):
                    continue
                self.add_file(package_dir, path[len(full_package_dir) + 1:], filename)

    def _add_item(self, item):
        if item.package_dir not in self.content:
            self.content[item.package_dir] = {model: [] for model in ordered_models}
        self.content[item.package_dir][item.model].append(item)

    def add_file(self, package_dir, path, filename):
        file_path = os.path.join(package_dir, path, filename)
        relative_file_path = os.path.join(path, filename)
        for model, path_regex in self.path_regexes.items():
            if re.search(path_regex, relative_file_path):
                self._add_item(ReaderItem(self, package_dir, path, filename, model))
                break
        else:
            raise CommandError('Unexpected JSON file: %s' % file_path)

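    # Three passes: register all package files, order packages by their
    # declared dependencies, then save items model by model and finally
    # delete database objects whose files no longer exist on disk.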
    def apply_to_db(self):
        # Collect all Packages
        package_items_by_name = {}
        package_dirs_by_name = {}
        for package_dir, items_by_model in self.content.items():
            if not items_by_model[Package]:
                raise CommandError('Missing package file: %s' % package_dir)

            if len(items_by_model[Package]) > 1:
                raise CommandError('Multiple package files: %s' % package_dir)

            package_item = items_by_model[Package][0]
            package_items_by_name[package_item.data['name']] = package_item
            package_dirs_by_name[package_item.data['name']] = package_dir
            self.package_names_by_dir[package_dir] = package_item.data['name']

        # Resolve Package Dependencies
        unresolved_packages = set(package_items_by_name.keys())
        resolved_packages = set()
        package_order = []
        while unresolved_packages:
            resolvable = set(name for name in unresolved_packages
                             if not set(package_items_by_name[name].data['depends']) - resolved_packages)
            if not resolvable:
                raise CommandError('Could not resolve package dependencies: %s' % unresolved_packages)
            package_order.extend(resolvable)
            unresolved_packages -= resolvable
            resolved_packages |= resolvable

        # Create new and update existing entries
        for package_name in package_order:
            print('')
            package_dir = package_dirs_by_name[package_name]
            items_by_model = self.content[package_dir]
            for model in ordered_models:
                for item in items_by_model[model]:
                    item.save()

        # Delete old entries
        for model in reversed(ordered_models):
            saved = set(self.saved_items[model].keys())
            for obj in model.objects.all():
                if obj.name not in saved:
                    print('- Deleted %s: %s' % (model.__name__, obj.name))
                    obj.delete()


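# ReaderItem wraps a single JSON file: __init__ loads and validates the raw
# data, save() turns it into a database object.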
class ReaderItem:
    def __init__(self, reader, package_dir, path, filename, model):
        self.reader = reader
        self.package_dir = package_dir
        self.path = path
        self.filename = filename
        self.model = model
        self.obj = None
        self.path_in_package = os.path.join(self.path, self.filename)

        try:
            with open(os.path.join(settings.MAP_ROOT, package_dir, path, filename)) as f:
                self.content = f.read()
        except Exception as e:
            raise CommandError('Could not read file: %s' % e)

        try:
            self.json_data = json.loads(self.content)
        except json.JSONDecodeError as e:
            raise CommandError('Could not decode JSON: %s' % e)

        self.data = {'name': filename[:-5]}

        if self.model == Package:
            self.data['directory'] = package_dir
            self.data['commit_id'] = None
            try:
                full_package_dir = os.path.join(settings.MAP_ROOT, package_dir)
                result = subprocess.Popen(['git', '-C', full_package_dir, 'rev-parse', '--verify', 'HEAD'],
                                          stdout=subprocess.PIPE)
                returncode = result.wait()
            except FileNotFoundError:
                pass
            else:
                if returncode == 0:
                    self.data['commit_id'] = result.stdout.read().strip().decode()

        try:
            add_data = self.model.fromfile(self.json_data, self.path_in_package)
        except Exception as e:
            raise CommandError('Could not load data: %s' % e)
        self.data.update(add_data)

    relations = {
        'level': Level,
        'crop_to_level': Level,
        'elevator': Elevator,
    }

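    # save() relies on ordered_models: anything this item references by name
    # was saved earlier, so the lookups in reader.saved_items succeed.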
    def save(self):
        depends = []
        if self.model != Package:
            package_name = self.reader.package_names_by_dir[self.package_dir]
            self.data['package'] = self.reader.saved_items[Package][package_name].obj
        else:
            depends = [self.reader.saved_items[Package][name].obj.pk for name in self.data['depends']]
            self.data.pop('depends')

        levels = []
        if self.model == LevelConnector:
            levels = [self.reader.saved_items[Level][name].obj.pk for name in self.data['levels']]
            self.data.pop('levels')

        groups = []
        if self.model == AreaLocation:
            groups = [self.reader.saved_items[LocationGroup][name].obj.pk for name in self.data['groups']]
            self.data.pop('groups')

        # Change name references to the referenced object
        for name, model in self.relations.items():
            if name in self.data:
                self.data[name] = self.reader.saved_items[model][self.data[name]].obj

        obj, created = self.model.objects.update_or_create(name=self.data['name'], defaults=self.data)
        if created:
            print('- Created %s: %s' % (self.model.__name__, obj.name))

        self.obj = obj
        self.reader.saved_items[self.model][obj.name] = self

        if depends:
            self.obj.depends.clear()
            for dependency in depends:
                self.obj.depends.add(dependency)

        if levels:
            self.obj.levels.clear()
            for level in levels:
                self.obj.levels.add(level)

        if groups:
            self.obj.groups.clear()
            for group in groups:
                self.obj.groups.add(group)


c3nav/mapdata/packageio/write.py
@@ -1,122 +0,0 @@
import difflib
import json
import os
import sys
from datetime import datetime

from django.conf import settings
from django.utils import timezone

from c3nav.mapdata.models import Package
from c3nav.mapdata.packageio.const import ordered_models
from c3nav.mapdata.utils.json import json_encoder_reindent


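# MapdataWriter is the inverse of MapdataReader: it serializes database
# objects back into per-package JSON files, previewing changes (optionally
# as a unified diff) before anything is written or deleted.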
class MapdataWriter:
    def __init__(self):
        self.keep = set()
        self.write = []
        self.delete = []

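    # First pass: work out what would change. Returns the number of files to
    # be written or deleted; the disk is only touched by do_write_packages().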
    def prepare_write_packages(self, prettify=False, diff=False):
        print('Writing Map Packages…')

        count = 0
        for model in ordered_models:
            for obj in model.objects.all().order_by('name').prefetch_related():
                file_path = os.path.join(obj.package.directory, obj.get_filename())
                full_file_path = os.path.join(settings.MAP_ROOT, file_path)
                self.keep.add(file_path)

                new_data = obj.tofile()
                new_data_encoded = json_encode(new_data)
                old_data = None
                old_data_encoded = None

                if os.path.isfile(full_file_path):
                    with open(full_file_path) as f:
                        old_data_encoded = f.read()
                    old_data = json.loads(old_data_encoded, parse_int=float)

                    if old_data != json.loads(new_data_encoded, parse_int=float):
                        if not diff:
                            print('- Updated: ' + file_path)
                    elif old_data_encoded != new_data_encoded:
                        if not prettify:
                            continue
                        if not diff:
                            print('- Prettified: ' + file_path)
                    else:
                        continue
                else:
                    if not diff:
                        print('- Created: ' + file_path)

                if diff:
                    sys.stdout.writelines(difflib.unified_diff(
                        [] if old_data is None else [(line + '\n') for line in old_data_encoded.split('\n')],
                        [(line + '\n') for line in new_data_encoded.split('\n')],
                        fromfiledate=timezone.make_aware(
                            datetime.fromtimestamp(0 if old_data is None else os.path.getmtime(full_file_path))
                        ).isoformat(),
                        tofiledate=timezone.now().isoformat(),
                        fromfile=file_path,
                        tofile=file_path
                    ))
                    print()

                self.write.append((file_path, new_data_encoded))
                count += 1

        # Delete old files
        for package_dir in Package.objects.all().values_list('directory', flat=True):
            for path, sub_dirs, filenames in os.walk(os.path.join(settings.MAP_ROOT, package_dir)):
                sub_dirs[:] = sorted([directory for directory in sub_dirs if not directory.startswith('.')])
                for filename in sorted(filenames):
                    if not filename.endswith('.json'):
                        continue
                    file_path = os.path.join(path[len(settings.MAP_ROOT) + 1:], filename)
                    if file_path not in self.keep:
                        if not diff:
                            print('- Deleted: ' + file_path)
                        else:
                            full_file_path = os.path.join(path, filename)
                            with open(full_file_path) as f:
                                lines = f.readlines()
                            if not lines:
                                lines = ['\n']
                            sys.stdout.writelines(difflib.unified_diff(
                                lines,
                                [],
                                fromfiledate=timezone.make_aware(
                                    datetime.fromtimestamp(os.path.getmtime(full_file_path))
                                ).isoformat(),
                                tofiledate=timezone.make_aware(
                                    datetime.fromtimestamp(0)
                                ).isoformat(),
                                fromfile=file_path,
                                tofile=file_path
                            ))
                            print()
                        self.delete.append(file_path)
                        count += 1

        return count

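    # Second pass: write the prepared files and remove the obsolete ones.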
    def do_write_packages(self):
        for file_path, content in self.write:
            full_file_path = os.path.join(settings.MAP_ROOT, file_path)
            try:
                os.makedirs(os.path.split(full_file_path)[0])
            except os.error:
                pass
            if content is not None:
                with open(full_file_path, 'w') as f:
                    f.write(content)

        for file_path in self.delete:
            full_file_path = os.path.join(settings.MAP_ROOT, file_path)
            os.remove(full_file_path)


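# Encode data as indented JSON with a trailing newline; json_encoder_reindent
# presumably keeps the output formatting stable so diffs stay minimal.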
def json_encode(data):
    return json_encoder_reindent(json.dumps, data, indent=4) + '\n'
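
For context, a minimal sketch of how the removed classes fit together. The driver code below is hypothetical and not part of this commit; it only uses calls visible in the deleted files:

# Hypothetical usage (illustration only, not part of this commit).
from c3nav.mapdata.packageio import MapdataReader, MapdataWriter

reader = MapdataReader()
reader.read_packages()   # scan MAP_ROOT for map packages and their JSON files
reader.apply_to_db()     # create/update/delete the corresponding database objects

writer = MapdataWriter()
if writer.prepare_write_packages(prettify=True):   # returns the change count
    writer.do_write_packages()                     # write/delete the files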