2017-11-20 23:05:27 +01:00
|
|
|
import base64
|
|
|
|
import logging
|
|
|
|
import os
|
2017-12-23 20:03:25 +01:00
|
|
|
import pickle
|
2017-11-20 23:05:27 +01:00
|
|
|
import re
|
2017-11-21 02:47:38 +01:00
|
|
|
import threading
|
2017-11-20 23:05:27 +01:00
|
|
|
import time
|
2017-12-23 20:03:25 +01:00
|
|
|
from datetime import datetime
|
2017-11-21 05:42:19 +01:00
|
|
|
from email.utils import formatdate
|
2017-11-20 23:05:27 +01:00
|
|
|
from io import BytesIO
|
|
|
|
|
2019-11-26 16:16:13 +01:00
|
|
|
import pylibmc
|
2017-11-20 23:05:27 +01:00
|
|
|
import requests
|
2023-12-07 06:48:42 +01:00
|
|
|
from pyzstd import decompress as zstd_decompress
|
2019-12-18 23:23:24 +01:00
|
|
|
from requests.auth import HTTPBasicAuth
|
2017-11-20 23:05:27 +01:00
|
|
|
|
|
|
|
from c3nav.mapdata.utils.cache import CachePackage
|
2017-11-21 00:47:55 +01:00
|
|
|
from c3nav.mapdata.utils.tiles import (build_access_cache_key, build_base_cache_key, build_tile_etag, get_tile_bounds,
|
|
|
|
parse_tile_access_cookie)
|
2017-11-20 23:05:27 +01:00
|
|
|
|
2023-07-14 05:30:07 +02:00
|
|
|
# Log level: C3NAV_DEBUG (any non-empty value) forces DEBUG, otherwise
# C3NAV_LOGLEVEL (default INFO) is used, upper-cased for logging's level names.
loglevel = logging.DEBUG if os.environ.get('C3NAV_DEBUG', False) else os.environ.get('C3NAV_LOGLEVEL', 'INFO').upper()

# Configure logging exactly once. logging.basicConfig() is a no-op when the
# root logger already has handlers, so the previous *second* call with only
# filename=... never had any effect and output stayed on stderr even when
# C3NAV_LOGFILE was set. Passing filename into this single call fixes that.
logging.basicConfig(level=loglevel,
                    filename=os.environ.get('C3NAV_LOGFILE') or None,
                    format='[%(asctime)s] [%(process)s] [%(levelname)s] %(name)s: %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S %z')

logger = logging.getLogger('c3nav')
|
2017-11-20 23:05:27 +01:00
|
|
|
|
2018-02-06 15:39:49 +01:00
|
|
|
|
2017-11-20 23:05:27 +01:00
|
|
|
class TileServer:
    """WSGI application serving c3nav map tiles.

    Tiles are fetched from an upstream c3nav instance (authenticated via a
    shared tile secret) and cached in memcached; tile visibility is decided
    locally from a periodically refreshed "cache package".
    """

    def __init__(self):
        # tile URL shape: /<level>/<zoom>/<x>/<y>.png
        self.path_regex = re.compile(r'^/(\d+)/(-?\d+)/(-?\d+)/(-?\d+).png$')

        # extracts the (optionally quoted) c3nav_tile_access cookie value
        self.cookie_regex = re.compile(r'(^| )c3nav_tile_access="?([^;" ]+)"?')

        try:
            self.upstream_base = os.environ['C3NAV_UPSTREAM_BASE'].strip('/')
        except KeyError:
            raise Exception('C3NAV_UPSTREAM_BASE needs to be set.')

        # os.environ.get() never raises KeyError, so the former try/except
        # around this line was dead code — the default 'data' always applied.
        self.data_dir = os.environ.get('C3NAV_DATA_DIR', 'data')

        if not os.path.exists(self.data_dir):
            os.mkdir(self.data_dir)

        # tile secret: taken from the environment directly, or read from a file
        self.tile_secret = os.environ.get('C3NAV_TILE_SECRET', None)
        if not self.tile_secret:
            tile_secret_file = None
            try:
                tile_secret_file = os.environ['C3NAV_TILE_SECRET_FILE']
                # context manager so the file handle is closed deterministically
                # (the old open(...).read() leaked it until GC)
                with open(tile_secret_file) as f:
                    self.tile_secret = f.read().strip()
            except KeyError:
                raise Exception('C3NAV_TILE_SECRET or C3NAV_TILE_SECRET_FILE need to be set.')
            except FileNotFoundError:
                raise Exception('The C3NAV_TILE_SECRET_FILE (%s) does not exist.' % tile_secret_file)

        # seconds between cache package refreshes in the background thread
        self.reload_interval = int(os.environ.get('C3NAV_RELOAD_INTERVAL', 60))

        # optional "user:password" HTTP basic auth towards upstream
        self.http_auth = os.environ.get('C3NAV_HTTP_AUTH', None)
        if self.http_auth:
            self.http_auth = HTTPBasicAuth(*self.http_auth.split(':', 1))

        self.auth_headers = {'X-Tile-Secret': base64.b64encode(self.tile_secret.encode()).decode()}

        self.cache_package = None
        self.cache_package_etag = None
        self.cache_package_filename = None

        cache = self.get_cache_client()

        # block startup until the cache package was fetched once,
        # retrying with capped exponential backoff
        wait = 1
        while True:
            success = self.load_cache_package(cache=cache)
            if success:
                logger.info('Cache package successfully loaded.')
                break
            logger.info('Retrying after %s seconds...' % wait)
            time.sleep(wait)
            wait = min(10, wait*2)

        # daemon thread keeps the cache package fresh for the process lifetime
        threading.Thread(target=self.update_cache_package_thread, daemon=True).start()
|
|
|
|
|
2017-12-23 20:42:36 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_cache_client():
|
2023-07-14 05:30:07 +02:00
|
|
|
servers = os.environ.get('C3NAV_MEMCACHED_SERVER', '127.0.0.1').split(',')
|
|
|
|
return pylibmc.Client(servers, binary=True, behaviors={"tcp_nodelay": True, "ketama": True})
|
2017-12-23 20:42:36 +01:00
|
|
|
|
2017-11-21 02:47:38 +01:00
|
|
|
def update_cache_package_thread(self):
|
2017-12-23 20:42:36 +01:00
|
|
|
cache = self.get_cache_client() # different thread → different client!
|
2017-11-21 02:47:38 +01:00
|
|
|
while True:
|
2017-12-23 03:29:00 +01:00
|
|
|
time.sleep(self.reload_interval)
|
2017-12-23 20:42:36 +01:00
|
|
|
self.load_cache_package(cache=cache)
|
2017-11-21 02:47:38 +01:00
|
|
|
|
2017-12-23 20:12:38 +01:00
|
|
|
def get_date_header(self):
|
|
|
|
return 'Date', formatdate(timeval=time.time(), localtime=False, usegmt=True)
|
2017-11-21 15:07:34 +01:00
|
|
|
|
2017-12-23 20:42:36 +01:00
|
|
|
    def load_cache_package(self, cache):
        """Download the cache package from upstream and publish it to other workers.

        Sends a conditional GET (If-None-Match with the last seen ETag),
        decompresses and parses the package, pickles it into data_dir and
        records the pickle filename plus a success timestamp in memcached so
        sibling worker processes can pick it up (see get_cache_package()).

        Returns True on success (including "304 Not Modified"), False on any
        failure; failures are logged, never raised.
        """
        logger.debug('Downloading cache package from upstream...')
        try:
            headers = self.auth_headers.copy()
            if self.cache_package_etag is not None:
                # conditional request: upstream answers 304 if unchanged
                headers['If-None-Match'] = self.cache_package_etag
            r = requests.get(self.upstream_base+'/map/cache/package.tar.zst', headers=headers, auth=self.http_auth)

            if r.status_code == 403:
                logger.error('Rejected cache package download with Error 403. Tile secret is probably incorrect.')
                return False

            if r.status_code == 401:
                logger.error('Rejected cache package download with Error 401. You have HTTP Auth active.')
                return False

            if r.status_code == 304:
                if self.cache_package is not None:
                    logger.debug('Not modified.')
                    # refresh the published filename and liveness timestamp even
                    # though nothing changed (mapping-style assignment is
                    # equivalent to .set() here — NOTE(review): intentional?)
                    cache['cache_package_filename'] = self.cache_package_filename
                    cache.set('cache_package_last_successful_check', time.time())
                    return True
                # 304 without ever having loaded a package can't be right
                logger.error('Unexpected not modified.')
                return False

            r.raise_for_status()
        except Exception as e:
            logger.error('Cache package download failed: %s' % e)
            return False

        logger.debug('Receiving and loading new cache package...')

        try:
            # package is zstd-compressed; parse the decompressed tar in memory
            with BytesIO(zstd_decompress(r.content)) as f:
                self.cache_package = CachePackage.read(f)
            # remember the ETag only after a successful parse
            self.cache_package_etag = r.headers.get('ETag', None)
        except Exception as e:
            logger.error('Cache package parsing failed: %s' % e)
            return False

        try:
            # pickle under a timestamped name so other workers can detect and
            # load the new package by filename change
            self.cache_package_filename = os.path.join(
                self.data_dir,
                datetime.now().strftime('%Y-%m-%d_%H-%M-%S-%f')+'.pickle'
            )
            with open(self.cache_package_filename, 'wb') as f:
                pickle.dump(self.cache_package, f)
            cache.set('cache_package_filename', self.cache_package_filename)
            cache.set('cache_package_last_successful_check', time.time())
        except Exception as e:
            # drop the ETag so the next attempt does a full re-download
            self.cache_package_etag = None
            logger.error('Saving pickled package failed: %s' % e)
            return False
        return True
|
|
|
|
|
|
|
|
def not_found(self, start_response, text):
|
2017-12-23 20:12:38 +01:00
|
|
|
start_response('404 Not Found', [self.get_date_header(),
|
2017-11-21 05:42:19 +01:00
|
|
|
('Content-Type', 'text/plain'),
|
2017-11-21 13:55:59 +01:00
|
|
|
('Content-Length', str(len(text)))])
|
2017-11-20 23:05:27 +01:00
|
|
|
return [text]
|
|
|
|
|
2017-12-24 00:38:46 +01:00
|
|
|
def internal_server_error(self, start_response, text=b'internal server error'):
|
|
|
|
start_response('500 Internal Server Error', [self.get_date_header(),
|
|
|
|
('Content-Type', 'text/plain'),
|
|
|
|
('Content-Length', str(len(text)))])
|
|
|
|
return [text]
|
|
|
|
|
2017-11-21 03:27:11 +01:00
|
|
|
def deliver_tile(self, start_response, etag, data):
|
2017-12-23 20:12:38 +01:00
|
|
|
start_response('200 OK', [self.get_date_header(),
|
2017-11-21 05:42:19 +01:00
|
|
|
('Content-Type', 'image/png'),
|
2017-11-21 13:55:59 +01:00
|
|
|
('Content-Length', str(len(data))),
|
2017-11-21 03:27:11 +01:00
|
|
|
('Cache-Control', 'no-cache'),
|
|
|
|
('ETag', etag)])
|
|
|
|
return [data]
|
|
|
|
|
2023-12-23 23:31:38 +01:00
|
|
|
def liveness_check_response(self, start_response):
|
2023-07-14 05:30:54 +02:00
|
|
|
self.get_cache_package()
|
|
|
|
text = b'OK'
|
|
|
|
start_response('200 OK', [self.get_date_header(),
|
|
|
|
('Content-Type', 'text/plain'),
|
|
|
|
('Content-Length', str(len(text)))])
|
|
|
|
return [text]
|
|
|
|
|
2023-12-23 23:31:38 +01:00
|
|
|
def readiness_check_response(self, start_response):
|
|
|
|
text = b'OK'
|
|
|
|
error = False
|
|
|
|
try:
|
|
|
|
last_check = self.cache.get('cache_package_last_successful_check')
|
|
|
|
except pylibmc.Error as e:
|
|
|
|
error = True
|
|
|
|
text = b'memcached error'
|
|
|
|
else:
|
|
|
|
if last_check is None or last_check <= (time.time() - self.reload_interval * 3):
|
|
|
|
error = True
|
|
|
|
if last_check:
|
|
|
|
text = f'last successful cache package check was {time.time() - last_check}s ago.'.encode('utf-8')
|
|
|
|
else:
|
|
|
|
text = b'last successful cache package check is unknown'
|
|
|
|
start_response(('500' if error else '200') + ' OK', [self.get_date_header(),
|
|
|
|
('Content-Type', 'text/plain'),
|
|
|
|
('Content-Length', str(len(text)))])
|
|
|
|
return [text]
|
|
|
|
|
2017-12-23 20:03:25 +01:00
|
|
|
    def get_cache_package(self):
        """Return the current cache package, reloading if another worker published a newer one.

        The background refresh runs in one process; other workers learn about a
        new package via the 'cache_package_filename' key in memcached and
        unpickle it from disk. Falls back to the package already in memory when
        memcached is unavailable or the key is missing.
        """
        try:
            cache_package_filename = self.cache.get('cache_package_filename')
        except pylibmc.Error as e:
            # memcached hiccup: log and fall back to the in-memory package
            logger.warning('pylibmc error in get_cache_package(): %s' % e)
            cache_package_filename = None

        if cache_package_filename is None:
            logger.warning('cache_package_filename went missing.')
            return self.cache_package
        if self.cache_package_filename != cache_package_filename:
            # filename changed → a sibling process saved a newer pickle; load it
            logger.debug('Loading new cache package in worker.')
            self.cache_package_filename = cache_package_filename
            with open(self.cache_package_filename, 'rb') as f:
                # pickle written by load_cache_package() in this same codebase,
                # not untrusted input
                self.cache_package = pickle.load(f)
        return self.cache_package
|
|
|
|
|
2017-12-24 00:38:46 +01:00
|
|
|
@property
|
|
|
|
def cache(self):
|
|
|
|
cache = self.get_cache_client()
|
|
|
|
self.__dict__['cache'] = cache
|
|
|
|
return cache
|
|
|
|
|
2017-11-20 23:05:27 +01:00
|
|
|
    def __call__(self, env, start_response):
        """WSGI entry point: health endpoints plus tile delivery.

        Tile flow: parse /<level>/<zoom>/<x>/<y>.png, validate zoom/bounds/level
        against the cache package, derive an ETag from the tile's last update
        and the requester's access permissions, then serve from browser cache
        (304), memcached, or the upstream renderer — in that order.
        """
        path_info = env['PATH_INFO']

        # health endpoints bypass tile handling entirely
        if path_info == '/health' or path_info == '/health/live':
            return self.liveness_check_response(start_response)

        if path_info == '/health/ready':
            return self.readiness_check_response(start_response)

        match = self.path_regex.match(path_info)
        if match is None:
            return self.not_found(start_response, b'invalid tile path.')

        level, zoom, x, y = match.groups()

        zoom = int(zoom)
        if not (-2 <= zoom <= 5):
            return self.not_found(start_response, b'zoom out of bounds.')

        # grab a local reference so the package can't be swapped out by the
        # refresh thread mid-request — do this to be thread safe
        try:
            cache_package = self.get_cache_package()
        except Exception as e:
            logger.error('get_cache_package() failed: %s' % e)
            return self.internal_server_error(start_response)

        # check if bounds are valid
        x = int(x)
        y = int(y)
        minx, miny, maxx, maxy = get_tile_bounds(zoom, x, y)
        if not cache_package.bounds_valid(minx, miny, maxx, maxy):
            return self.not_found(start_response, b'coordinates out of bounds.')

        # get level
        level = int(level)
        level_data = cache_package.levels.get(level)
        if level_data is None:
            return self.not_found(start_response, b'invalid level.')

        # build cache keys: base key depends on when this tile area last changed
        last_update = level_data.history.last_update(minx, miny, maxx, maxy)
        base_cache_key = build_base_cache_key(last_update)

        # decode access permissions (default: no permissions, key '0')
        access_permissions = set()
        access_cache_key = '0'

        cookie = env.get('HTTP_COOKIE', None)
        if cookie:
            cookie = self.cookie_regex.search(cookie)
            if cookie:
                cookie = cookie.group(2)
                # only permissions actually restricting this tile area matter,
                # so tiles can be shared between users with equivalent access
                access_permissions = (parse_tile_access_cookie(cookie, self.tile_secret) &
                                      set(level_data.restrictions[minx:maxx, miny:maxy]))
                access_cache_key = build_access_cache_key(access_permissions)

        # check browser cache
        if_none_match = env.get('HTTP_IF_NONE_MATCH')
        tile_etag = build_tile_etag(level, zoom, x, y, base_cache_key, access_cache_key, self.tile_secret)
        if if_none_match == tile_etag:
            start_response('304 Not Modified', [self.get_date_header(),
                                                ('Content-Length', '0'),
                                                ('ETag', tile_etag)])
            return [b'']

        # check memcached for an already rendered tile
        cache_key = path_info+'_'+tile_etag
        cached_result = self.cache.get(cache_key)
        if cached_result is not None:
            return self.deliver_tile(start_response, tile_etag, cached_result)

        # cache miss: render via upstream, authenticated with the tile secret
        r = requests.get('%s/map/%d/%d/%d/%d/%s.png' % (self.upstream_base, level, zoom, x, y, access_cache_key),
                         headers=self.auth_headers, auth=self.http_auth)

        if r.status_code == 200 and r.headers['Content-Type'] == 'image/png':
            self.cache.set(cache_key, r.content)
            return self.deliver_tile(start_response, tile_etag, r.content)

        # pass any upstream error response through to the client
        start_response('%d %s' % (r.status_code, r.reason), [
            self.get_date_header(),
            ('Content-Length', str(len(r.content))),
            ('Content-Type', r.headers.get('Content-Type', 'text/plain'))
        ])
        return [r.content]
|
2017-11-20 23:05:27 +01:00
|
|
|
|
|
|
|
|
|
|
|
# module-level WSGI entry point (e.g. for gunicorn/uwsgi); note that
# instantiation blocks until the first cache package download succeeds
application = TileServer()
|