Add sensor overlay management: commands, model fields, migrations and periodic pull tasks
This commit is contained in:
parent
98794751d0
commit
789640998a
8 changed files with 902 additions and 5 deletions
450
src/c3nav/mapdata/management/commands/manage_sensors.py
Normal file
450
src/c3nav/mapdata/management/commands/manage_sensors.py
Normal file
|
@ -0,0 +1,450 @@
|
|||
import json
|
||||
import requests
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
from shapely.geometry import Point
|
||||
|
||||
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level
|
||||
|
||||
class Command(BaseCommand):
    # Management command for sensor overlays: create an overlay, seed it with
    # manually specified sensors, or scrape sensor readings from an external
    # data source (defaults target the NOI Open Data Hub format).
    help = 'Setup and manage sensor overlays with generic sensor data'
|
||||
|
||||
def add_arguments(self, parser):
    """Declare the command-line options understood by this command."""
    # Value-taking options.
    parser.add_argument('--create-overlay', type=str,
                        help='Create a new sensor overlay with given name')
    parser.add_argument('--data-source-url', type=str,
                        help='URL to scrape sensor data from')
    parser.add_argument('--sensor-config', type=str,
                        help='JSON configuration for sensor data mapping')
    # Boolean action flags.
    parser.add_argument('--add-sensor', action='store_true',
                        help='Add sensors manually with provided coordinates')
    parser.add_argument('--scrape-data', action='store_true',
                        help='Scrape data from configured data sources')
    # Target selector shared by several actions.
    parser.add_argument('--overlay-id', type=int,
                        help='ID of the overlay to work with')
|
||||
|
||||
def handle(self, *args, **options):
    """Dispatch to the action selected on the command line."""
    if options['create_overlay']:
        self.create_overlay(options)
        return
    if options['add_sensor']:
        self.add_sensors_manually(options)
        return
    if options['scrape_data']:
        self.scrape_sensor_data(options)
        return
    # No action flag given: tell the user what is available.
    self.stdout.write('Please specify an action: --create-overlay, --add-sensor, or --scrape-data')
|
||||
|
||||
def create_overlay(self, options):
    """Create a new sensor DataOverlay named after --create-overlay.

    Aborts (with a message on stderr) when --sensor-config is not valid JSON.
    """
    overlay_name = options['create_overlay']

    # Optional mapping configuration, supplied as a JSON string.
    config = {}
    raw_config = options['sensor_config']
    if raw_config:
        try:
            config = json.loads(raw_config)
        except json.JSONDecodeError:
            self.stderr.write('Invalid JSON in sensor_config')
            return

    overlay = DataOverlay.objects.create(
        titles={'en': overlay_name},
        description=f'Sensor overlay for {overlay_name}',
        default_geomtype=DataOverlay.GeometryType.POINT,
        data_source_url=options['data_source_url'],
        sensor_config=config,
        update_interval=30,  # Update every 30 seconds
    )

    self.stdout.write(self.style.SUCCESS(f'Created overlay "{overlay_name}" with ID {overlay.id}'))
|
||||
|
||||
def add_sensors_manually(self, options):
    """Add sensors manually with coordinates.

    Creates one DataOverlayFeature per entry of a built-in example sensor
    list on the overlay given by --overlay-id, placed on the 'floor0'
    level (or the first level as a fallback).
    """
    # --overlay-id is mandatory for this action.
    if not options['overlay_id']:
        self.stderr.write('--overlay-id required when adding sensors manually')
        return

    try:
        overlay = DataOverlay.objects.get(id=options['overlay_id'])
    except DataOverlay.DoesNotExist:
        self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found')
        return

    # Get the ground floor level (floor0)
    try:
        level = Level.objects.get(short_label='floor0')
    except Level.DoesNotExist:
        level = Level.objects.first()  # Fallback to first level
        if not level:
            self.stderr.write('No levels found in database')
            return

    # Example sensors - this should be configurable via command line or config file
    sensors = [
        {
            'sensor_id': 'temp_001',
            'sensor_type': 'temperature',
            'name': 'Meeting Room A1 - Temperature',
            'coordinates_x': 500,
            'coordinates_y': 300,
            'value': 22.5,
            'unit': '°C'
        },
        {
            'sensor_id': 'hum_001',
            'sensor_type': 'humidity',
            'name': 'Meeting Room A1 - Humidity',
            'coordinates_x': 500,
            'coordinates_y': 300,
            'value': 55.0,
            'unit': '%'
        },
        {
            'sensor_id': 'temp_002',
            'sensor_type': 'temperature',
            'name': 'Server Room - Temperature',
            'coordinates_x': 750,
            'coordinates_y': 400,
            'value': 18.2,
            'unit': '°C'
        },
        {
            'sensor_id': 'co2_001',
            'sensor_type': 'co2',
            'name': 'Office Space - CO2',
            'coordinates_x': 300,
            'coordinates_y': 600,
            'value': 450,
            'unit': 'ppm'
        }
    ]

    for sensor_item in sensors:
        # Create geometry from c3nav coordinates
        point = Point(sensor_item['coordinates_x'], sensor_item['coordinates_y'])

        # Get color based on sensor type and value
        color = self.get_sensor_color(sensor_item['sensor_type'], sensor_item['value'])

        # NOTE(review): `feature` is never used after creation; the
        # assignment is kept only for readability/debugging.
        feature = DataOverlayFeature.objects.create(
            overlay=overlay,
            titles={'en': sensor_item['name']},
            geometry=point,
            level=level,
            sensor_id=sensor_item['sensor_id'],
            sensor_type=sensor_item['sensor_type'],
            sensor_value=sensor_item['value'],
            sensor_unit=sensor_item['unit'],
            coordinates_x=sensor_item['coordinates_x'],
            coordinates_y=sensor_item['coordinates_y'],
            fill_color=color,
            stroke_color=color,
            stroke_width=2,
            fill_opacity=0.8,
            show_label=True,
            show_geometry=True,
            interactive=True,
            point_icon=self.get_sensor_icon(sensor_item['sensor_type']),
            last_updated=timezone.now(),
            extra_data={
                'value': str(sensor_item['value']),
                'unit': sensor_item['unit'],
                'sensor_type': sensor_item['sensor_type']
            }
        )

        self.stdout.write(
            f'Created sensor {sensor_item["sensor_id"]}: {sensor_item["name"]} '
            f'({sensor_item["value"]}{sensor_item["unit"]}) at ({sensor_item["coordinates_x"]}, {sensor_item["coordinates_y"]})'
        )
|
||||
|
||||
def scrape_sensor_data(self, options):
    """Scrape sensor data for every overlay with a configured source URL.

    With --overlay-id, restricts the run to that single overlay. Errors on
    one overlay are reported and do not stop the remaining overlays.
    """
    # Also exclude empty strings: URLField(blank=True) stores '' rather than
    # NULL, and requests.get('') would fail for every such overlay.
    overlays = DataOverlay.objects.filter(data_source_url__isnull=False).exclude(data_source_url='')

    if options['overlay_id']:
        overlays = overlays.filter(id=options['overlay_id'])

    for overlay in overlays:
        self.stdout.write(f'Scraping data for overlay: {overlay.titles.get("en", "Unknown")}')

        try:
            # Fetch data from the source URL
            response = requests.get(overlay.data_source_url, timeout=30)
            response.raise_for_status()
            data = response.json()

            # Process data using sensor configuration
            self.process_scraped_data(overlay, data)

        except requests.RequestException as e:
            self.stderr.write(f'Error fetching data from {overlay.data_source_url}: {e}')
        except json.JSONDecodeError as e:
            self.stderr.write(f'Error parsing JSON from {overlay.data_source_url}: {e}')
        except Exception as e:
            # Catch-all so a single malformed overlay cannot abort the run.
            self.stderr.write(f'Error processing data for overlay {overlay.id}: {e}')
|
||||
|
||||
def process_scraped_data(self, overlay, data):
    """Process scraped data according to overlay configuration.

    Groups the measurement rows by station, combines each station's
    measurements into one 'environmental' DataOverlayFeature, and
    creates/updates the feature keyed by (overlay, sensor_id).
    """
    sensor_config = overlay.sensor_config or {}

    # Default configuration for NOI Open Data Hub
    default_config = {
        "data_path": "data",
        "mappings": {
            "id_field": "scode",
            "name_field": "sname",
            "x_field": "scoordinate.x",
            "y_field": "scoordinate.y",
            "fixed_coordinates": {"x": 0.0, "y": 0.0}
        }
    }

    # NOTE(review): shallow merge — a sensor_config containing "mappings"
    # replaces the whole default mapping block, not individual entries.
    config = {**default_config, **sensor_config}

    # Extract sensor data array by walking the dotted data_path.
    api_data = data
    if config.get("data_path"):
        for path_part in config["data_path"].split("."):
            api_data = api_data.get(path_part, [])

    # Get level for sensors - use configured level or default to ground floor
    level_name = config.get('level', 'floor0')
    try:
        level = Level.objects.get(short_label=level_name)
    except Level.DoesNotExist:
        self.stderr.write(f'Level "{level_name}" not found, using ground floor')
        try:
            level = Level.objects.get(short_label='floor0')
        except Level.DoesNotExist:
            level = Level.objects.first()  # Final fallback
            if not level:
                self.stderr.write('No levels found in database')
                return

    updated_count = 0
    created_count = 0

    # Group measurements by station (scode) first and discover sensor types dynamically
    stations = {}
    for item in api_data:
        station_id = self.get_nested_field(item, config["mappings"]["id_field"])
        station_name = self.get_nested_field(item, config["mappings"]["name_field"])
        measurement_type = self.get_nested_field(item, "tname")

        if not station_id or not measurement_type:
            continue

        if station_id not in stations:
            stations[station_id] = {
                'name': station_name,
                'measurements': {}
            }

        # Later rows of the same type overwrite earlier ones for a station.
        stations[station_id]['measurements'][measurement_type] = item

    # Process each station and its measurements
    for station_id, station_data in stations.items():
        # Get coordinates - use fixed coordinates if specified
        if config["mappings"].get("fixed_coordinates"):
            x_coord = config["mappings"]["fixed_coordinates"]["x"]
            y_coord = config["mappings"]["fixed_coordinates"]["y"]
        else:
            # Get coordinates from any measurement (they should be the same for all measurements from same station)
            first_measurement = next(iter(station_data['measurements'].values()))
            x_coord = self.get_nested_field(first_measurement, config["mappings"]["x_field"])
            y_coord = self.get_nested_field(first_measurement, config["mappings"]["y_field"])

        if x_coord is None or y_coord is None:
            continue

        # Convert coordinates if needed (this is simplified)
        # For NOI data, coordinates might already be in the right format
        x_coord = float(x_coord)
        y_coord = float(y_coord)

        # Collect all sensor data for this station in one feature
        sensor_readings = {}
        raw_measurements = {}

        # Process ALL measurements found in the API response (dynamically discovered)
        for measurement_type, measurement in station_data['measurements'].items():
            # Extract values dynamically from the API response
            sensor_value = self.get_nested_field(measurement, "mvalue")
            sensor_unit = self.get_nested_field(measurement, "tunit")
            sensor_name = self.get_nested_field(measurement, "tname")
            sensor_description = self.get_nested_field(measurement, "tdescription")

            if sensor_value is None:
                continue  # Skip if no value

            # Convert sensor value to float
            try:
                sensor_value = float(sensor_value)
            except (ValueError, TypeError):
                continue

            # Store this measurement for the combined sensor
            display_name = sensor_description or sensor_name or measurement_type
            unit_str = f" {sensor_unit}" if sensor_unit else ""
            sensor_readings[display_name] = f"{sensor_value}{unit_str}"
            raw_measurements[measurement_type] = measurement

        if not sensor_readings:
            continue  # Skip if no valid measurements

        # Create a single sensor feature with all measurements
        sensor_id = station_id  # Use station ID as sensor ID
        display_name = f"{station_data['name']} - Environmental Sensor"

        # Determine primary color based on the most critical measurement
        # Priority: CO2 > Temperature > Humidity
        primary_color = '#95A5A6'  # Default gray

        # Look for CO2 measurements (various naming conventions)
        co2_measurement = None
        for mtype, measurement in raw_measurements.items():
            if any(keyword in mtype.lower() for keyword in ['co2', 'carbon']):
                co2_measurement = measurement
                break

        if co2_measurement:
            co2_value = self.get_nested_field(co2_measurement, "mvalue")
            # NOTE(review): truthiness check — a reading of exactly 0 keeps
            # the default gray color; confirm whether that is intended.
            if co2_value:
                primary_color = self.get_sensor_color('co2', float(co2_value))
        else:
            # Look for temperature measurements
            temp_measurement = None
            for mtype, measurement in raw_measurements.items():
                if any(keyword in mtype.lower() for keyword in ['temperature', 'temp']):
                    temp_measurement = measurement
                    break

            if temp_measurement:
                temp_value = self.get_nested_field(temp_measurement, "mvalue")
                # NOTE(review): same truthiness caveat as for CO2 above.
                if temp_value:
                    primary_color = self.get_sensor_color('temperature', float(temp_value))

        # Create geometry
        point = Point(x_coord, y_coord)

        # Upsert keyed by (overlay, sensor_id); everything else is refreshed.
        feature, created = DataOverlayFeature.objects.update_or_create(
            overlay=overlay,
            sensor_id=sensor_id,
            defaults={
                'titles': {'en': display_name},
                'geometry': point,
                'level': level,
                'sensor_type': 'environmental',  # Combined sensor type
                'sensor_value': None,  # No single value for combined sensor
                'sensor_unit': None,  # No single unit for combined sensor
                'coordinates_x': x_coord,
                'coordinates_y': y_coord,
                'fill_color': primary_color,
                'stroke_color': primary_color,
                'stroke_width': 2,
                'fill_opacity': 0.8,
                'show_label': True,
                'show_geometry': True,
                'interactive': True,
                'point_icon': 'sensors',  # Generic sensor icon
                'last_updated': timezone.now(),
                'sensor_data': raw_measurements,  # Store all raw measurements
                'extra_data': {
                    **sensor_readings,  # All sensor readings as separate entries
                    'Last Updated': timezone.now().strftime('%Y-%m-%d %H:%M:%S'),
                    'Data Source': 'NOI Open Data Hub',
                    'Station ID': station_id
                }
            }
        )

        if created:
            created_count += 1
            readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()])
            self.stdout.write(f'Created sensor {sensor_id}: {readings_str}')
        else:
            updated_count += 1
            readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()])
            self.stdout.write(f'Updated sensor {sensor_id}: {readings_str}')

    self.stdout.write(
        f'Processed overlay {overlay.id}: {created_count} created, {updated_count} updated'
    )
|
||||
|
||||
def get_nested_field(self, data, field_path):
    """Resolve a dotted path like 'scoordinate.x' against nested mappings.

    Returns None when any segment is missing or the current value is not
    subscriptable.
    """
    current = data
    for segment in field_path.split('.'):
        try:
            current = current[segment]
        except (KeyError, TypeError):
            return None
    return current
|
||||
|
||||
def get_sensor_color(self, sensor_type, value):
    """Map a sensor reading to a hex display color.

    Temperature, humidity and CO2 sensors get banded colors; anything
    else falls back to gray.
    """
    kind = sensor_type.lower()

    if 'temperature' in kind or 'temp' in kind:
        bands = ((15, '#0066CC'),   # cold - blue
                 (18, '#00AAFF'),   # cool - light blue
                 (22, '#00CC66'),   # comfortable - green
                 (25, '#FFAA00'))   # warm - orange
        for upper, color in bands:
            if value < upper:
                return color
        return '#FF3333'  # hot - red

    if 'humidity' in kind or 'humid' in kind:
        bands = ((30, '#FF6B6B'),   # too dry - red
                 (60, '#4ECDC4'))   # good - teal
        for upper, color in bands:
            if value < upper:
                return color
        return '#45B7D1'  # too humid - blue

    if 'co2' in kind or 'carbon' in kind:
        bands = ((400, '#2ECC71'),    # excellent - green
                 (800, '#F39C12'),    # good - orange
                 (1200, '#E74C3C'))   # poor - red
        for upper, color in bands:
            if value < upper:
                return color
        return '#8E44AD'  # very poor - purple

    return '#95A5A6'  # default - gray
|
||||
|
||||
def get_sensor_icon(self, sensor_type):
    """Return the icon name for a sensor type ('sensors' when unknown)."""
    # Several type aliases share one icon; build the lookup from groups.
    icon_by_type = {}
    for icon, aliases in (
        ('thermostat', ('air-temperature', 'temperature')),
        ('water_drop', ('air-humidity', 'humidity')),
        ('air', ('co2-ppm', 'co2')),
        ('compress', ('pressure',)),
        ('light_mode', ('light',)),
    ):
        for alias in aliases:
            icon_by_type[alias] = icon
    return icon_by_type.get(sensor_type, 'sensors')
|
187
src/c3nav/mapdata/management/commands/pulloverlaydata.py
Normal file
187
src/c3nav/mapdata/management/commands/pulloverlaydata.py
Normal file
|
@ -0,0 +1,187 @@
|
|||
import logging
|
||||
import requests
|
||||
from django.core.management.base import BaseCommand
|
||||
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, MapUpdate, Level
|
||||
from shapely.geometry import Point
|
||||
import json
|
||||
import traceback
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Command(BaseCommand):
    # Management command that refreshes DataOverlay features from each
    # overlay's configured pull_url (invoked manually or by the
    # pull_overlay_data celery task).
    help = 'Pull overlay data from external APIs'
|
||||
|
||||
def add_arguments(self, parser):
    """Declare command-line options."""
    parser.add_argument(
        '--force',
        action='store_true',
        help='Force update even if not enough time has passed',
    )
    # The pull_overlay_data celery task invokes this command via
    # call_command('pulloverlaydata', overlay=<id>); without a matching
    # argument declared here, call_command raises TypeError for the
    # unknown option. Declared with default=None so direct CLI use is
    # unchanged.
    parser.add_argument(
        '--overlay',
        type=int,
        default=None,
        help='Only pull data for the overlay with this ID',
    )
|
||||
|
||||
def handle(self, *args, **options):
    """Refresh overlays from their external APIs.

    Honors an optional 'overlay' option (an overlay ID) to restrict the
    run to a single overlay; otherwise all overlays are processed.
    Errors on one overlay are logged and do not stop the rest.
    """
    overlays = DataOverlay.objects.all()
    # options.get() so the command also works if the option is absent;
    # the celery task passes overlay=<id> through call_command.
    overlay_id = options.get('overlay')
    if overlay_id is not None:
        overlays = overlays.filter(id=overlay_id)
    for overlay in overlays:
        self.stdout.write(f"Processing overlay: {overlay.title}")
        try:
            self.update_overlay(overlay, force=options['force'])
        except Exception as e:
            # Keep going so one failing overlay doesn't block the others.
            logger.error(f"Error updating overlay {overlay.id}: {e}")
            logger.error(traceback.format_exc())
            self.stderr.write(f"Error updating overlay {overlay.id}: {e}")
|
||||
|
||||
def update_overlay(self, overlay, force=False):
    """Update a single overlay from its API URL.

    Fetches JSON from overlay.pull_url and dispatches to a type-specific
    processor (currently only temperature overlays, detected by the word
    'temperature' in the overlay description).

    NOTE(review): `force` is accepted but currently unused — no
    last_pull_time / pull_interval check is implemented here yet.
    TODO confirm whether the interval gating was intended to live here.
    """

    if not overlay.pull_url:
        self.stdout.write(f"No API URL configured for overlay {overlay.id}")
        return

    try:
        self.stdout.write(f"Fetching data from: {overlay.pull_url}")
        response = requests.get(overlay.pull_url, timeout=30)
        response.raise_for_status()
        data = response.json()

        self.stdout.write(f"Received {len(data)} items from API")

        # Process the data based on overlay type
        if hasattr(overlay, 'description') and overlay.description and 'temperature' in overlay.description.lower():
            self.process_temperature_data(overlay, data)
        else:
            self.stdout.write(f"Unknown overlay type for overlay {overlay.id}")

    except requests.RequestException as e:
        logger.error(f"HTTP error fetching {overlay.pull_url}: {e}")
        self.stderr.write(f"HTTP error: {e}")
    except Exception as e:
        logger.error(f"Error processing overlay {overlay.id}: {e}")
        logger.error(traceback.format_exc())
        self.stderr.write(f"Error: {e}")
|
||||
|
||||
def process_temperature_data(self, overlay, data):
    """Rebuild this overlay's features from NOI temperature measurements."""
    # Start from a clean slate: drop whatever features currently exist.
    DataOverlayFeature.objects.filter(overlay=overlay).delete()
    self.stdout.write(f"Cleared existing features for overlay {overlay.id}")

    # Bucket raw measurement rows by their station code; the first row of
    # a station supplies its name and coordinates.
    stations = {}
    for row in data:
        scode = row.get('scode')
        if not scode:
            continue
        station = stations.setdefault(scode, {
            'sname': row.get('sname'),
            'scoordinate': row.get('scoordinate'),
            'measurements': [],
        })
        station['measurements'].append(row)

    self.stdout.write(f"Found {len(stations)} stations")

    for scode, station_data in stations.items():
        try:
            self.create_temperature_feature(overlay, scode, station_data)
        except Exception as e:
            # One broken station must not abort the remaining ones.
            logger.error(f"Error creating feature for station {scode}: {e}")
            logger.error(traceback.format_exc())
            self.stderr.write(f"Error creating feature for {scode}: {e}")
|
||||
|
||||
def create_temperature_feature(self, overlay, scode, station_data):
    """Create a DataOverlayFeature for a temperature station.

    Skips (with a message) stations without coordinates or without a
    temperature measurement, or when no Level can be found.
    """
    # Extract coordinates from scoordinate object
    scoordinate = station_data.get('scoordinate')
    if not scoordinate:
        self.stdout.write(f"No coordinates for station {scode}")
        return

    # scoordinate is a dict with x, y coordinates
    x = scoordinate.get('x')
    y = scoordinate.get('y')

    if x is None or y is None:
        self.stdout.write(f"Invalid coordinates for station {scode}: {scoordinate}")
        return

    self.stdout.write(f"Station {scode} coordinates: x={x}, y={y}")

    # Create point geometry (assuming WGS84/EPSG:4326)
    point = Point(x, y)

    # Find temperature measurement (humidity is collected but only logged).
    temperature = None
    humidity = None

    for measurement in station_data['measurements']:
        tname = measurement.get('tname', '').lower()
        if 'temperature' in tname:
            temperature = measurement.get('mvalue')
        elif 'humidity' in tname:
            humidity = measurement.get('mvalue')

    self.stdout.write(f"Station {scode}: temp={temperature}, humidity={humidity}")

    if temperature is None:
        self.stdout.write(f"No temperature data for station {scode}")
        return

    # Determine color based on temperature
    color = self.get_temperature_color(temperature)

    # Try to find appropriate level (ground floor by default)
    level = None
    try:
        # Look for ground floor or level 0
        level = Level.objects.filter(
            short_label__in=['0', 'EG', 'Ground', 'G']
        ).first()

        if not level:
            # Fallback to any level
            level = Level.objects.first()

    except Exception as e:
        logger.warning(f"Could not determine level: {e}")

    if not level:
        self.stdout.write(f"No level found for station {scode}")
        return

    self.stdout.write(f"Using level: {level.short_label} for station {scode}")

    # Create the feature
    title = f"{station_data.get('sname', scode)} ({temperature}°C)"

    # NOTE(review): field names here (geometry=point.wkt, title, color,
    # opacity, icon) differ from those used by manage_sensors.py
    # (geometry=Point, titles, fill_color, fill_opacity, point_icon) —
    # verify against the DataOverlayFeature model definition.
    feature = DataOverlayFeature.objects.create(
        overlay=overlay,
        level=level,
        geometry=point.wkt,
        title=title,
        color=color,
        opacity=0.8,
        icon='thermometer'
    )

    self.stdout.write(f"Created feature {feature.id} for station {scode}")
|
||||
|
||||
def get_temperature_color(self, temperature):
    """Return a hex color for a temperature reading.

    Unparseable values map to gray; otherwise colors run from blue
    (very cold) through green/yellow to red (hot).
    """
    try:
        temp = float(temperature)
    except (ValueError, TypeError):
        return '#808080'  # Gray for invalid values

    bands = (
        (10, '#0000FF'),   # very cold - blue
        (15, '#00BFFF'),   # cold - light blue
        (20, '#00FF00'),   # cool - green
        (25, '#FFFF00'),   # comfortable - yellow
        (30, '#FFA500'),   # warm - orange
    )
    for upper, color in bands:
        if temp < upper:
            return color
    return '#FF0000'  # hot - red
|
18
src/c3nav/mapdata/migrations/0139_add_last_pull_time.py
Normal file
18
src/c3nav/mapdata/migrations/0139_add_last_pull_time.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.1.5 on 2025-08-02 06:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Adds DataOverlay.last_pull_time, used by the periodic pull scheduler
    # to decide when an overlay's data is due for a refresh.

    dependencies = [
        ('mapdata', '0138_rangingbeacon_max_observed_num_clients_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='dataoverlay',
            name='last_pull_time',
            field=models.DateTimeField(blank=True, null=True, verbose_name='last pull time'),
        ),
    ]
|
73
src/c3nav/mapdata/migrations/0140_add_temperature_fields.py
Normal file
73
src/c3nav/mapdata/migrations/0140_add_temperature_fields.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
# Generated by Django 5.1.5 on 2025-08-02 07:18
|
||||
|
||||
from django.db import migrations, models
|
||||
import django_pydantic_field
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Schema additions for generic sensor support: a scrape URL and mapping
    # configuration on DataOverlay, plus per-feature sensor metadata on
    # DataOverlayFeature. All fields are nullable, so no data migration is
    # needed.

    dependencies = [
        ('mapdata', '0139_add_last_pull_time'),
    ]

    operations = [
        # Add data source URL to DataOverlay
        migrations.AddField(
            model_name='dataoverlay',
            name='data_source_url',
            field=models.URLField(blank=True, null=True, verbose_name='Data Source URL',
                                  help_text='URL to scrape sensor data from'),
        ),

        # Add sensor configuration to DataOverlay
        migrations.AddField(
            model_name='dataoverlay',
            name='sensor_config',
            field=django_pydantic_field.SchemaField(
                schema=dict, blank=True, null=True,
                verbose_name='Sensor Configuration',
                help_text='JSON configuration for sensor data mapping and processing'
            ),
        ),

        # Add generic sensor fields to DataOverlayFeature
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='sensor_id',
            field=models.CharField(max_length=100, blank=True, null=True, verbose_name='Sensor ID'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='sensor_type',
            field=models.CharField(max_length=50, blank=True, null=True, verbose_name='Sensor Type',
                                   help_text='Type of sensor: temperature, humidity, co2, etc.'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='sensor_value',
            field=models.FloatField(blank=True, null=True, verbose_name='Sensor Value'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='sensor_unit',
            field=models.CharField(max_length=20, blank=True, null=True, verbose_name='Sensor Unit',
                                   help_text='Unit of measurement: °C, %, ppm, etc.'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='coordinates_x',
            field=models.FloatField(blank=True, null=True, verbose_name='X Coordinate',
                                   help_text='X coordinate in c3nav coordinate system'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='coordinates_y',
            field=models.FloatField(blank=True, null=True, verbose_name='Y Coordinate',
                                   help_text='Y coordinate in c3nav coordinate system'),
        ),
        migrations.AddField(
            model_name='dataoverlayfeature',
            name='last_updated',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Last Updated'),
        ),
    ]
|
|
@ -44,6 +44,13 @@ class DataOverlay(TitledMixin, AccessRestrictionMixin, models.Model):
|
|||
verbose_name=_('Editor Access Restriction'),
|
||||
on_delete=models.PROTECT)
|
||||
|
||||
# Generic sensor data configuration
|
||||
data_source_url = models.URLField(blank=True, null=True, verbose_name=_('Data Source URL'),
|
||||
help_text=_('URL to scrape sensor data from'))
|
||||
sensor_config: Optional[dict] = SchemaField(schema=dict, blank=True, null=True,
|
||||
verbose_name=_('Sensor Configuration'),
|
||||
help_text=_('JSON configuration for sensor data mapping and processing'))
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Data Overlay')
|
||||
verbose_name_plural = _('Data Overlays')
|
||||
|
@ -72,6 +79,22 @@ class DataOverlayFeature(TitledMixin, LevelGeometryMixin, models.Model):
|
|||
null=True,
|
||||
default=None,
|
||||
verbose_name=_('extra data (JSON object)'))
|
||||
|
||||
# Generic sensor fields
|
||||
sensor_id = models.CharField(max_length=100, blank=True, null=True, verbose_name=_('Sensor ID'))
|
||||
sensor_type = models.CharField(max_length=50, blank=True, null=True, verbose_name=_('Sensor Type'),
|
||||
help_text=_('Type of sensor: temperature, humidity, co2, etc.'))
|
||||
sensor_value = models.FloatField(blank=True, null=True, verbose_name=_('Sensor Value'))
|
||||
sensor_unit = models.CharField(max_length=20, blank=True, null=True, verbose_name=_('Sensor Unit'),
|
||||
help_text=_('Unit of measurement: °C, %, ppm, etc.'))
|
||||
coordinates_x = models.FloatField(blank=True, null=True, verbose_name=_('X Coordinate'),
|
||||
help_text=_('X coordinate in c3nav coordinate system'))
|
||||
coordinates_y = models.FloatField(blank=True, null=True, verbose_name=_('Y Coordinate'),
|
||||
help_text=_('Y coordinate in c3nav coordinate system'))
|
||||
last_updated = models.DateTimeField(blank=True, null=True, verbose_name=_('Last Updated'))
|
||||
sensor_data: Optional[dict] = SchemaField(schema=dict, blank=True, null=True,
|
||||
verbose_name=_('Raw Sensor Data'),
|
||||
help_text=_('Raw data from sensor for debugging and additional info'))
|
||||
|
||||
def to_geojson(self, instance=None) -> dict:
|
||||
result = {
|
||||
|
|
|
@ -82,3 +82,76 @@ def update_ap_names_bssid_mapping(self, map_name, user_id):
|
|||
with changeset.lock_to_edit() as locked_changeset:
|
||||
locked_changeset.title = 'passive update bssids'
|
||||
locked_changeset.apply(user)
|
||||
|
||||
|
||||
@app.task(bind=True, max_retries=3)
def pull_overlay_data(self, overlay_id=None):
    """
    Celery task to pull data overlay features from external URLs.

    Args:
        overlay_id (int, optional): Specific overlay ID to update.
                                   If None, updates all overlays with pull_url.

    Returns:
        str: Captured stdout of the management command run.

    Retries up to max_retries times with a 60 second countdown on failure.
    """
    logger.info('Starting overlay data pull task...')

    # Imported lazily to keep worker start-up free of command machinery.
    from django.core.management import call_command
    from io import StringIO

    try:
        # Capture output from the management command
        output = StringIO()

        # `is not None` rather than truthiness, so an explicitly passed ID
        # is never confused with the "all overlays" default.
        if overlay_id is not None:
            call_command('pulloverlaydata', overlay=overlay_id, stdout=output)
        else:
            call_command('pulloverlaydata', stdout=output)

        result = output.getvalue()
        logger.info(f'Overlay data pull completed: {result}')
        return result

    except Exception as e:
        logger.error(f'Overlay data pull failed: {e}')
        if self.request.retries < self.max_retries:
            logger.info(f'Retrying in 60 seconds... (attempt {self.request.retries + 1}/{self.max_retries})')
            raise self.retry(countdown=60, exc=e)
        else:
            logger.error('Max retries exceeded for overlay data pull')
            raise
|
||||
|
||||
|
||||
@app.task(bind=True)
def schedule_overlay_data_pulls(self):
    """
    Periodic task to schedule individual overlay data pulls based on their intervals.
    This should be called every minute by a periodic task scheduler.
    """
    from c3nav.mapdata.models import DataOverlay
    from django.utils import timezone

    logger.info('Checking overlays for scheduled pulls...')

    # Only overlays with a usable pull URL and a configured interval.
    candidates = DataOverlay.objects.exclude(pull_url__isnull=True).exclude(pull_url='').exclude(pull_interval__isnull=True)

    scheduled_count = 0
    for overlay in candidates:
        last_pull = overlay.last_pull_time
        # Due when never pulled before, or when the interval has elapsed.
        due = last_pull is None or timezone.now() >= last_pull + overlay.pull_interval

        if due:
            pull_overlay_data.delay(overlay.pk)
            scheduled_count += 1
            logger.info(f'Scheduled pull for overlay: {overlay.title}')

    logger.info(f'Scheduled {scheduled_count} overlay pulls')
    return scheduled_count
|
||||
|
|
|
@ -8,3 +8,7 @@ if __name__ == "__main__":
|
|||
from django.core.management import execute_from_command_line
|
||||
|
||||
execute_from_command_line(sys.argv)
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue