diff --git a/install.sh b/install.sh index c6cd9d5f..7ea469df 100755 --- a/install.sh +++ b/install.sh @@ -3,7 +3,7 @@ set -e python3 -m venv .env source ./.env/bin/activate pip install -r src/requirements/production.txt -r src/requirements/dev.txt -pip install psycopg2 pylibmc +pip install psycopg2 sudo pacman -Sy librsvg ./start_db.sh db diff --git a/local_run/SENSOR_README.md b/local_run/SENSOR_README.md new file mode 100644 index 00000000..4b5db906 --- /dev/null +++ b/local_run/SENSOR_README.md @@ -0,0 +1,152 @@ +# NOI Sensor Management System + +This system allows you to manage environmental sensors from the NOI Open Data Hub in c3nav, displaying them as overlay features on different levels/floors. + +## Overview + +The system supports: +- Multiple sensors on the same overlay but on different levels +- Dynamic addition of new sensors through Django management commands +- Automatic data scraping from NOI Open Data Hub APIs +- Real-time display of CO2, temperature, humidity and other environmental data + +## Architecture + +- **Single Overlay**: All NOI environmental sensors are managed under one `DataOverlay` +- **Multiple Levels**: Sensors can be placed on different floors (floor0, floor1, etc.) +- **Flexible Configuration**: Sensor locations and properties are configurable via the overlay's `sensor_config` field +- **Dynamic Discovery**: The system can automatically discover and display any sensor data from the NOI API + +## Setup + +The main setup is handled by the `up.sh` script, which: + +1. Creates a single "NOI Environmental Sensors" overlay +2. Configures initial sensors with their coordinates and levels +3. Scrapes initial data from the NOI Open Data Hub +4. Applies necessary database migrations + +## Managing Sensors + +### 1. List All Sensors +```bash +# Using the helper script +./manage_noi_sensors.sh list + +# Or directly +docker compose exec -T c3nav-core python manage.py list_sensors --overlay-id 1 +``` + +### 2. Add a New Sensor +```bash +# Using the helper script +./manage_noi_sensors.sh add 'NOI:YourSensorID' 'Sensor Display Name' 300.0 250.0 floor1 + +# Or directly +docker compose exec -T c3nav-core python manage.py add_sensor \ + --overlay-id 1 \ + --sensor-id 'NOI:YourSensorID' \ + --name 'Sensor Display Name' \ + --x 300.0 \ + --y 250.0 \ + --level floor1 +``` + +### 3. 
Scrape Data for All Sensors +```bash +# Using the helper script +./manage_noi_sensors.sh scrape + +# Or directly +docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1 +``` + +## Configuration Structure + +The overlay's `sensor_config` field contains: + +```json +{ + "data_path": "data", + "mappings": { + "id_field": "scode", + "name_field": "sname", + "x_field": "scoordinate.x", + "y_field": "scoordinate.y" + }, + "sensors": [ + { + "id": "NOI:FreeSoftwareLab-Temperature", + "coordinates": {"x": 291.0, "y": 241.0}, + "level": "floor1" + }, + { + "id": "NOI:NOI-A1-Floor1-CO2", + "coordinates": {"x": 270.0, "y": 241.0}, + "level": "floor1" + } + ] +} +``` + +## Database Schema + +### DataOverlay fields: +- `data_source_url`: URL to scrape sensor data from +- `sensor_config`: JSON configuration for sensor mapping and processing + +### DataOverlayFeature fields: +- `sensor_id`: Unique identifier for the sensor +- `sensor_type`: Type of sensor (e.g., 'environmental') +- `sensor_value`: Single sensor value (nullable for multi-measurement sensors) +- `sensor_unit`: Unit of measurement (nullable for multi-measurement sensors) +- `coordinates_x`, `coordinates_y`: Position in c3nav coordinate system +- `last_updated`: Timestamp of last data update +- `sensor_data`: Raw sensor data for debugging +- `extra_data`: Processed sensor readings for display + +## Data Flow + +1. **Configuration**: Sensors are configured in the overlay's `sensor_config` +2. **Scraping**: The `manage_sensors` command fetches data from NOI Open Data Hub +3. **Processing**: Data is processed according to sensor configuration +4. **Storage**: Sensor features are created/updated in the database +5. **Display**: Sensors appear as interactive points on the map + +## Adding New Sensor Types + +To add a new sensor from the NOI Open Data Hub: + +1. Find the sensor ID in the NOI API (usually starts with "NOI:") +2. Determine the coordinates where it should appear on the map +3. Choose the appropriate level/floor +4. Add it using the `add_sensor` command +5. 
Run the scrape command to fetch initial data + +## Troubleshooting + +### Sensor not appearing on map +- Check if the level exists: `docker compose exec -T c3nav-core python manage.py shell -c "from c3nav.mapdata.models import Level; print([l.short_label for l in Level.objects.all()])"` +- Verify coordinates are within the map bounds +- Check if the overlay is enabled and visible + +### No data being scraped +- Verify the sensor ID exists in the NOI Open Data Hub API +- Check the API URL is accessible: https://mobility.api.opendatahub.com/v2/flat/IndoorStation/*/latest +- Review logs during scraping for errors + +### Data not updating +- Check the `last_updated` field in the sensor feature +- Verify the scraping command completed successfully +- Consider running the scrape command more frequently + +## Files + +- `up.sh`: Main setup script +- `manage_noi_sensors.sh`: Helper script for sensor management +- `src/c3nav/mapdata/management/commands/manage_sensors.py`: Core sensor management command +- `src/c3nav/mapdata/management/commands/add_sensor.py`: Command to add new sensors +- `src/c3nav/mapdata/management/commands/list_sensors.py`: Command to list sensors +- `src/c3nav/mapdata/models/overlay.py`: Database models +- `src/c3nav/mapdata/migrations/0140_add_temperature_fields.py`: Migration for sensor fields +- `src/c3nav/mapdata/migrations/0141_add_sensor_data_field.py`: Migration for sensor_data field diff --git a/local_run/compose.yml b/local_run/compose.yml index 6d2d9cd1..6af7d37d 100644 --- a/local_run/compose.yml +++ b/local_run/compose.yml @@ -15,7 +15,7 @@ x-healthcheck_defaults: &healthcheck_defaults interval: 10s timeout: 2s retries: 5 - start_period: 5s + start_period: 10s x-c3nav-defaults: &c3nav-defaults build: context: .. @@ -81,7 +81,7 @@ services: image: postgres:16 healthcheck: <<: *healthcheck_defaults - test: pg_isready -U postgres + test: pg_isready -U postgres && psql -U postgres -d ${C3NAV_DATABASE_NAME:-c3nav} -c "SELECT 1;" environment: POSTGRES_DB: ${C3NAV_DATABASE_NAME:-c3nav} POSTGRES_HOST_AUTH_METHOD: "trust" diff --git a/local_run/data-copy/c3nav.cfg b/local_run/data-copy/c3nav.cfg new file mode 100644 index 00000000..db3e8c76 --- /dev/null +++ b/local_run/data-copy/c3nav.cfg @@ -0,0 +1,44 @@ +# SPDX-FileCopyrightText: 2024 NOI Techpark +# +# SPDX-License-Identifier: CC0-1.0 + +[c3nav] +svg_renderer=rsvg +editor=False +user_registration=false +initial_level=2 +header_logo=/data/logo/sfscon.svg +imprint_link=https://www.sfscon.it/impressum/ +branding=SFSCON map +app_enabled=False + +[locale] +languages=en + +[theme] +#is_dark=False +#randomize_primary_color=False +map_background=#f7f8f8 +map_wall_fill=#e5e4e5 +map_wall_border=#c1bfbe +#map_door_fill=#ffffff +map_ground_fill=#f7f8f8 +#map_obstacles_default_fill=#b7b7b7 +#map_obstacles_default_border=#888888 +#css_primary=#9b4dca +#css_initial=#ffffff +#css_primary=#9b4dca +#css_logo=None +#css_secondary=#525862 +#css_tertiary=#f0f0f0 +#css_quaternary=#767676 +#css_quinary=#cccccc +css_header_text=#000000 +#css_header_text_hover=#eeeeee +css_header_background=#ffffff +#css_shadow=#000000 +#css_overlay_background=#ffffff +#css_grid=#000000 +#css_modal_backdrop=#000000 +#css_route_dots_shadow=#ffffff +#map_background=#dcdcdc diff --git a/local_run/data/logo/sfscon.svg b/local_run/data/logo/sfscon.svg deleted file mode 100644 index 8480f30c..00000000 --- a/local_run/data/logo/sfscon.svg +++ /dev/null @@ -1,6 +0,0 @@ - -logo diff --git a/local_run/manage_noi_sensors.sh b/local_run/manage_noi_sensors.sh new file mode 
100755 index 00000000..d6c23b0a --- /dev/null +++ b/local_run/manage_noi_sensors.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Helper script to manage NOI sensors +# Usage: ./manage_noi_sensors.sh [add|list|scrape] [args...] + +COMPOSE_EXEC="docker compose exec -T c3nav-core python manage.py" + +case "$1" in + "add") + if [ $# -lt 6 ]; then + echo "Usage: $0 add " + echo "Example: $0 add 'NOI:MyNewSensor' 'My New Sensor' 300.0 250.0 floor1" + exit 1 + fi + SENSOR_ID="$2" + NAME="$3" + X="$4" + Y="$5" + LEVEL="$6" + echo "Adding sensor: $NAME ($SENSOR_ID) at ($X, $Y) on $LEVEL" + $COMPOSE_EXEC add_sensor --overlay-id 1 --sensor-id "$SENSOR_ID" --name "$NAME" --x "$X" --y "$Y" --level "$LEVEL" + ;; + "list") + echo "Listing all sensors in overlay 1:" + $COMPOSE_EXEC list_sensors --overlay-id 1 + ;; + "scrape") + echo "Scraping data for all sensors in overlay 1:" + $COMPOSE_EXEC manage_sensors --scrape-data --overlay-id 1 + ;; + *) + echo "NOI Sensor Management Helper" + echo "Usage: $0 [add|list|scrape] [args...]" + echo "" + echo "Commands:" + echo " add - Add a new sensor" + echo " list - List all sensors" + echo " scrape - Scrape data for all sensors" + echo "" + echo "Examples:" + echo " $0 add 'NOI:NewSensor' 'My Sensor' 300.0 250.0 floor1" + echo " $0 list" + echo " $0 scrape" + exit 1 + ;; +esac diff --git a/local_run/up.sh b/local_run/up.sh index 124a289e..a1c3e0e6 100755 --- a/local_run/up.sh +++ b/local_run/up.sh @@ -1,7 +1,4 @@ -#cd ../docker -#docker buildx rm c3nav-local 2>/dev/null || true -#docker buildx create --name c3nav-local --driver=docker-container --bootstrap --use -#cd ../local_run + docker compose down --remove-orphans rm -rf data || true cp -r data-copy data @@ -12,17 +9,13 @@ docker volume create c3nav-redis # Start only postgres and redis first (no build since we pre-built) docker compose up -d postgres redis + sleep 10 - - cat ./db/auth_user.sql | docker exec -i local_run-postgres-1 su - postgres -c 'psql c3nav' -# Create Django superuser - -sleep 1 # Load database dump before starting the main app -cat ./db/dump.sql | docker exec -i local_run-postgres-1 su - postgres -c 'psql c3nav' +cat ./db/dump.sql | docker exec -i local_run-postgres-1 su - postgres -c 'psql c3nav' > /dev/null # Fix geometry access permissions for anonymous users docker exec -i local_run-postgres-1 psql -U postgres -d c3nav -c "UPDATE mapdata_space SET base_mapdata_accessible = true;" @@ -38,11 +31,15 @@ user.set_password('admin') user.save() print('Password set successfully for user:', user.username) \" | /app/env/bin/python manage.py shell" -sleep 30 -# Fake apply all migrations since we loaded from dump +# Apply migrations after loading dump +echo "Applying migrations..." docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate --fake' +echo "Creating new migrations for sensor_data field..." + docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py makemigrations mapdata' +echo "Applying new migrations..." + docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate mapdata' docker compose ps -a docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py clearmapcache --include-history --include-geometries && /app/env/bin/python manage.py collectstatic -l --no-input' @@ -55,3 +52,66 @@ echo "Applying NumPy compatibility fix..." echo "Processing map updates to rebuild cache..." 
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py processupdates' +# Setup sensor overlays +echo "Setting up sensor overlays..." +docker compose exec -T c3nav-core python manage.py shell << 'EOF' +from c3nav.mapdata.models import DataOverlay, DataOverlayFeature + +# Clear old overlays to avoid conflicts +DataOverlay.objects.filter(titles__en__icontains='Environmental').delete() +DataOverlay.objects.filter(titles__en__icontains='Temperature').delete() + +# Create single NOI environmental sensor overlay with multiple sensors configuration +overlay = DataOverlay.objects.create( + titles={'en': 'NOI Environmental Sensors'}, + description='Real-time CO2 and temperature sensors from NOI Open Data Hub - displays current readings with values and units', + default_geomtype='point', + data_source_url='https://mobility.api.opendatahub.com/v2/flat/IndoorStation/*/latest', + sensor_config={ + 'data_path': 'data', + 'mappings': { + 'id_field': 'scode', + 'name_field': 'sname', + 'x_field': 'scoordinate.x', + 'y_field': 'scoordinate.y' + }, + 'sensors': [ + { + 'id': 'NOI:FreeSoftwareLab-Temperature', + 'coordinates': {'x': 291.0, 'y': 241.0}, + 'level': 'floor1' + }, + { + 'id': 'NOI:NOI-A1-Floor1-CO2', + 'coordinates': {'x': 270.0, 'y': 241.0}, + 'level': 'floor1' + } + ] + }, + update_interval=120 +) +print(f"NOI sensor overlay created with ID {overlay.id}") + +EOF + +# Scrape real NOI sensor data +echo "Scraping NOI sensor data..." +# Give the database a moment to settle after overlay creation +sleep 2 + +# Scrape the overlay data (should automatically discover all configured sensors) +echo "Scraping overlay data (ID: 1)..." +docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1 + +# List all sensors to verify setup +echo "Listing all sensors in the overlay..." +docker compose exec -T c3nav-core python manage.py list_sensors --overlay-id 1 + +echo "Sensor setup completed!" +echo "" +echo "To add a new sensor to the overlay, use:" +echo "docker compose exec -T c3nav-core python manage.py add_sensor --overlay-id 1 --sensor-id 'NOI:YourSensorID' --name 'Your Sensor Name' --x 300.0 --y 250.0 --level floor1" +echo "" +echo "To scrape data for all sensors:" +echo "docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1" + diff --git a/src/c3nav/editor/static/editor/js/editor.js b/src/c3nav/editor/static/editor/js/editor.js index 35ec67c0..4533f057 100644 --- a/src/c3nav/editor/static/editor/js/editor.js +++ b/src/c3nav/editor/static/editor/js/editor.js @@ -186,6 +186,8 @@ editor = { // Clear snap indicators when unloading editor._clear_snap_indicators(); + + editor._destroy_staircase_editing(); }, _fill_level_control: function (level_control, level_list, geometryURLs) { var levels = level_list.find('a'); @@ -500,6 +502,10 @@ editor = { // listener for form submits in the sidebar. 
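// (when a staircase preview layer is active, the handler below bypasses the normal form submit and calls editor._staircase_submit instead)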
e.preventDefault(); if (editor._loading_geometry || $(this).is('.creation-lock') || $(this).is('.scan-lock')) return; + if (editor._staircase_layer) { + editor._staircase_submit($(this)) + return + } var data = $(this).serialize(); var btn = $(this).data('btn'); if (btn !== undefined && btn !== null) { @@ -1372,7 +1378,7 @@ editor = { } else if (mapitem_type) { // creating a new geometry, already drawn but form was rejected options = editor._get_mapitem_type_style(mapitem_type); - if (mapitem_type === 'area') { + if (mapitem_type === 'area' || mapitem_type === 'staircase') { options.fillOpacity = 0.5; } } @@ -1392,7 +1398,7 @@ editor = { } else if (form.is('[data-new]')) { // create new geometry options = editor._get_mapitem_type_style(mapitem_type); - if (mapitem_type === 'area') { + if (mapitem_type === 'area' || mapitem_type === 'staircase') { options.fillOpacity = 0.5; } form.addClass('creation-lock'); @@ -1441,6 +1447,10 @@ editor = { } startGeomEditing(selected_geomtype); } + + if (mapitem_type === 'staircase') { + editor._setup_staircase_editing() + } } }, _cancel_editing: function () { @@ -1730,7 +1740,7 @@ editor = { var snapTo90Control = L.control({position: 'topleft'}); snapTo90Control.onAdd = function() { var container = L.DomUtil.create('div', 'leaflet-bar leaflet-control leaflet-control-snap'); - container.innerHTML = ''; L.DomEvent.on(container.querySelector('.snap-to-90-toggle'), 'click', function(e) { @@ -1860,20 +1870,16 @@ editor = { // check if layer is within the area limit for infinite extension var allowInfiniteExtension = editor._is_layer_in_extension_area(layer, latlng, mapPoint); - var snapPoint = editor._find_closest_point_on_geometry(layer, latlng, mapPoint, allowInfiniteExtension); - if (snapPoint && snapPoint.distance < editor._snap_distance) { - candidates.push(snapPoint); - } + var snapPoints = editor._find_closest_point_on_geometry(layer, latlng, mapPoint, allowInfiniteExtension); + candidates.push(...snapPoints); }); // check current editing shape with infinite extension enabled if (editor._current_editing_shape) { - var currentShapeSnap = editor._find_closest_point_on_geometry( + var currentShapeSnapPoints = editor._find_closest_point_on_geometry( editor._current_editing_shape, latlng, mapPoint, true // Always enable infinite extension for current shape ); - if (currentShapeSnap && currentShapeSnap.distance < editor._snap_distance) { - candidates.push(currentShapeSnap); - } + candidates.push(...currentShapeSnapPoints); } // find closest candidate @@ -1881,6 +1887,30 @@ editor = { candidates.sort(function(a, b) { return a.distance - b.distance; }); var best = candidates[0]; + // see if we can snap to a corner, i.e. 
an edge intersection + if (candidates.length >= 2 && candidates[0].isLine && candidates[1].isLine) { + console.log(candidates.slice(0,2)) + var inters = editor._intersect_infinite( + [candidates[0].edgeStart, candidates[0].edgeEnd], + [candidates[1].edgeStart, candidates[1].edgeEnd] + ) + if (inters) { + intersMap = editor.map.latLngToContainerPoint(inters) + var distance = Math.sqrt( + Math.pow(intersMap.x - mapPoint.x, 2) + + Math.pow(intersMap.y - mapPoint.y, 2) + ) + if (distance < editor._snap_distance) { + best = { + latlng: inters, + distance: distance, + referenceVertex: inters, + } + } + } + } + + // show snap indicator with edge highlighting editor._show_snap_indicator(best.latlng, best); @@ -1891,6 +1921,24 @@ editor = { } }, + _intersect_infinite: function(line1, line2) { + const [p1, p2] = line1; + const [p3, p4] = line2; + + const x1 = p1.lng, y1 = p1.lat; + const x2 = p2.lng, y2 = p2.lat; + const x3 = p3.lng, y3 = p3.lat; + const x4 = p4.lng, y4 = p4.lat; + + const denom = (x1-x2)*(y3-y4) - (y1-y2)*(x3-x4); + if (denom === 0) return null; // parallel + + const px = ((x1*y2 - y1*x2)*(x3-x4) - (x1-x2)*(x3*y4 - y3*x4)) / denom; + const py = ((x1*y2 - y1*x2)*(y3-y4) - (y1-y2)*(x3*y4 - y3*x4)) / denom; + + return {lng: px, lat: py}; + }, + _is_layer_in_extension_area: function(layer, targetLatLng, targetMapPoint) { if (!layer.getLatLngs) return false; @@ -2054,10 +2102,9 @@ editor = { }, _find_closest_point_on_geometry: function(layer, targetLatLng, targetMapPoint, allowInfiniteExtension) { - if (!layer.getLatLngs) return null; + if (!layer.getLatLngs) return []; - var closestPoint = null; - var closestDistance = Infinity; + var closestPoints = []; try { var coordinates = []; @@ -2073,16 +2120,16 @@ editor = { var centerMapPoint = editor.map.latLngToContainerPoint(center); var distance = centerMapPoint.distanceTo(targetMapPoint); if (distance < editor._snap_distance) { - return { + return [{ latlng: center, distance: distance, edgeStart: center, edgeEnd: center, isInfiniteExtension: false, isRightAngle: false - }; + }]; } - return null; + return []; } // check each edge of the geometry @@ -2092,17 +2139,16 @@ editor = { var p2 = coordinates[(i + 1) % coordinates.length]; var snapPoint = editor._find_closest_point_on_edge(p1, p2, targetLatLng, targetMapPoint, allowInfiniteExtension); - if (snapPoint && snapPoint.distance < closestDistance) { - closestDistance = snapPoint.distance; - closestPoint = snapPoint; + if (snapPoint && snapPoint.distance < editor._snap_distance) { + closestPoints.push(snapPoint); } } } catch (error) { - return null; + return []; } - return closestPoint; + return closestPoints; }, _find_closest_point_on_edge: function(p1, p2, targetLatLng, targetMapPoint, allowInfiniteExtension) { @@ -2122,6 +2168,7 @@ editor = { distance: distance, edgeStart: p1, edgeEnd: p2, + isLine: true, isInfiniteExtension: false, isRightAngle: false }; @@ -2157,6 +2204,7 @@ editor = { distance: distance, edgeStart: p1, edgeEnd: p2, + isLine: true, isInfiniteExtension: isInfiniteExtension, isRightAngle: false, t: originalT @@ -2185,9 +2233,26 @@ editor = { editor._show_90_degree_highlight(snapInfo); } else if (snapInfo && snapInfo.edgeStart && snapInfo.edgeEnd) { editor._show_edge_highlight(snapInfo); + } else if (snapInfo && snapInfo.referenceVertex) { + editor._show_intersect_highlight(snapInfo); } }, + _show_intersect_highlight: function(snapInfo) { + var referenceVertex = snapInfo.referenceVertex; + var snapPoint = snapInfo.latlng; + + // Draw line from reference vertex to snap 
point + var guideLine = L.polyline([referenceVertex, snapPoint], { + color: '#00aaff', + weight: 2, + opacity: 0.8, + dashArray: '4, 4', + className: '90-degree-guide' + }); + editor._snap_indicator.addLayer(guideLine); + }, + _show_90_degree_highlight: function(snapInfo) { var referenceVertex = snapInfo.referenceVertex; var snapPoint = snapInfo.latlng; @@ -2301,6 +2366,150 @@ editor = { if (editor._snap_indicator) { editor._snap_indicator.clearLayers(); } + }, + + _setup_staircase_editing: function() { + editor._staircase_steps_count = 10 + editor._staircase_layer = L.layerGroup().addTo(editor.map); + $('#stairway-steps').on('input', function() { + editor._staircase_steps_count = parseInt($(this).val()) || 10; + editor._update_staircase_preview(); + }); + + editor.map.on('editable:editing', editor._update_staircase_preview); + }, + + _destroy_staircase_editing: function() { + if (editor._staircase_layer) { + editor.map.removeLayer(editor._staircase_layer) + editor._staircase_layer = null + } + editor.map.off('editable:editing', editor._update_staircase_preview) + if (editor._current_editing_shape && editor._current_editing_shape.editor) { + editor._current_editing_shape.editor.cancelDrawing() + editor._current_editing_shape.remove() + editor._current_editing_shape = null + } + }, + + _transform_point_for_staircase: function(p, p0, cos_a, sin_a) { + return { + x: + (p.x - p0.x) * cos_a + (p.y - p0.y) * sin_a + p0.x, + y: - (p.x - p0.x) * sin_a + (p.y - p0.y) * cos_a + p0.y, + } + }, + + _transform_for_staircase: function(xs, ys, num_stairs) { + let base_length = Math.sqrt((xs[1]-xs[0])**2 + (ys[1]-ys[0])**2) + let cos_a = (xs[1] - xs[0]) / base_length + let sin_a = (ys[1] - ys[0]) / base_length + let p0 = { x: xs[0], y: ys[0] } + + xs = points.map(p => editor._transform_point_for_staircase(p, p0, cos_a, sin_a).x) + ys = points.map(p => editor._transform_point_for_staircase(p, p0, cos_a, sin_a).y) + n = xs.length + + if (Math.abs(Math.max(...ys) - ys[0]) > Math.abs(Math.min(...ys) - ys[0])) { + height = Math.max(...ys) - ys[0] + } else { + height = Math.min(...ys) - ys[0] + } + + lines = [{p1: { x: xs[0], y: ys[0] }, p2: { x: xs[1], y: ys[1] }}] + for (i = 1; i < num_stairs; ++i) { + // intersect line y=y0+height/num_stairs*i with all transformed (xs,ys) + y = ys[0] + height/num_stairs*i + inters_xs = [] + for (j = 0; j < n; ++j) { + y1 = ys[j] + y2 = ys[(j+1)%n] + x1 = xs[j] + x2 = xs[(j+1)%n] + if ((y1 > y && y2 > y) || (y1 < y && y2 < y)) { + continue + } + + if (Math.abs(x2 - x1) < 0.0001) { + // vertical line, m would be infinity + inters_xs.push(x1) + continue + } + + m = (y2 - y1) / (x2 - x1) + q = y2 - m * x2 + inters_xs.push((y - q) / m) + } + + if (inters_xs.length < 2) { + continue + } + + min_xs = Math.min(...inters_xs) + max_xs = Math.max(...inters_xs) + lines.push({p1: {x: min_xs-2, y: y}, p2: {x: max_xs+2, y: y}}) + } + + lines = lines.map(l => ({ + p1: editor._transform_point_for_staircase(l.p1, p0, cos_a, -sin_a), + p2: editor._transform_point_for_staircase(l.p2, p0, cos_a, -sin_a), + })) + + return lines + }, + + _get_staircase_lines: function() { + if (!editor._current_editing_shape || !editor._current_editing_shape._parts) { + return [] + } + points = editor._current_editing_shape._parts[0] || [] + if (points.length < 3) { + return [] + } + + xs = points.map(p => p.x) + ys = points.map(p => p.y) + lines = editor._transform_for_staircase(xs, ys, editor._staircase_steps_count) + lines = lines.map(l => [ + editor.map.layerPointToLatLng([l.p1.x, l.p1.y]), + 
editor.map.layerPointToLatLng([l.p2.x, l.p2.y]), + ]) + return lines + }, + + _update_staircase_preview: function(e = null) { + if (editor._staircase_layer) { + editor._staircase_layer.clearLayers() + } + lines = editor._get_staircase_lines() + lines.forEach(l => { + L.polyline(l, {color: "red"}).addTo(editor._staircase_layer); + }) + }, + + _staircase_submit: function(form) { + csrfmiddlewaretoken = form.find('input[name=csrfmiddlewaretoken]').attr('value') + import_tag = form.find('input[name=import_tag]').val() + space = form.attr('space') + lines = editor._get_staircase_lines() + + Promise.all(lines.map(l => + fetch("/editor/spaces/" + space + "/stairs/create", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", + "X-Requested-With": "XMLHttpRequest", + }, + body: "csrfmiddlewaretoken=" + encodeURIComponent(csrfmiddlewaretoken) + + "&geometry=" + encodeURIComponent(JSON.stringify({ + type: "LineString", + coordinates: [[l[0]["lng"], l[0]["lat"]], [l[1]["lng"], l[1]["lat"]]] + })) + + "&import_tag=" + encodeURIComponent(import_tag) + }) + )).then(() => { + form.remove() + window.location.href = "/editor/spaces/" + space + "/stairs" + }) } }; diff --git a/src/c3nav/editor/templates/editor/create_staircase.html b/src/c3nav/editor/templates/editor/create_staircase.html new file mode 100644 index 00000000..0c3937ea --- /dev/null +++ b/src/c3nav/editor/templates/editor/create_staircase.html @@ -0,0 +1,40 @@ +{% load bootstrap3 %} +{% load i18n %} + +{% include 'editor/fragment_levels.html' %} + +

+ {% blocktrans %}Add staircase{% endblocktrans %} +

+{% bootstrap_messages %} + +
+ {% csrf_token %} + {% bootstrap_form form %} +
+ + +
+ {% buttons %} + + + {% if can_edit %} + {% if not nosave %} + + {% endif %} + {% endif %} + + {% if can_edit %} + {% trans 'Cancel' %} + {% else %} + {% trans 'Back' %} + {% endif %} + + {% endbuttons %} +
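The corner-snapping addition above (`editor._intersect_infinite`) treats the two candidate edges as infinite lines and intersects them with the standard determinant formula; the snap is only used if the resulting point lies within the snap distance. A minimal Python sketch of the same computation, using generic (x, y) tuples instead of Leaflet lat/lng objects (names are illustrative):

```python
def intersect_infinite(p1, p2, p3, p4):
    """Intersection of the infinite lines through (p1, p2) and (p3, p4).

    Points are (x, y) tuples; returns None for parallel (or coincident) lines,
    matching the early return in the JS helper."""
    (x1, y1), (x2, y2), (x3, y3), (x4, y4) = p1, p2, p3, p4
    denom = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
    if denom == 0:
        return None
    px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / denom
    py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / denom
    return px, py


# Two edges meeting at a right angle intersect at the shared corner:
assert intersect_infinite((0, 0), (4, 0), (2, -1), (2, 3)) == (2.0, 0.0)
```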
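The staircase tool itself (`_transform_for_staircase` / `_get_staircase_lines`) builds the individual stair edges by rotating the drawn outline so its first edge lies on the x-axis, slicing the shape at evenly spaced heights, and rotating the resulting segments back. A minimal Python sketch of that slicing logic, assuming plain (x, y) tuples; the names are illustrative, and a guard for edges lying exactly on a slice line is added:

```python
import math


def rotate(p, p0, cos_a, sin_a):
    # rotate p around p0 so the direction (cos_a, sin_a) maps onto the positive x-axis
    dx, dy = p[0] - p0[0], p[1] - p0[1]
    return (dx * cos_a + dy * sin_a + p0[0], -dx * sin_a + dy * cos_a + p0[1])


def staircase_lines(points, num_stairs):
    """points: outline vertices [(x, y), ...] where the first edge is the lowest step."""
    (x0, y0), (x1, y1) = points[0], points[1]
    base_len = math.hypot(x1 - x0, y1 - y0)
    cos_a, sin_a = (x1 - x0) / base_len, (y1 - y0) / base_len
    p0 = (x0, y0)

    # rotate the outline so the base edge is horizontal
    rotated = [rotate(p, p0, cos_a, sin_a) for p in points]
    xs, ys = [p[0] for p in rotated], [p[1] for p in rotated]

    # the staircase extends to whichever side of the base edge reaches farther
    up, down = max(ys) - ys[0], min(ys) - ys[0]
    height = up if abs(up) > abs(down) else down

    lines = [((xs[0], ys[0]), (xs[1], ys[1]))]
    n = len(xs)
    for i in range(1, num_stairs):
        y = ys[0] + height / num_stairs * i
        hits = []
        for j in range(n):
            (xa, ya), (xb, yb) = (xs[j], ys[j]), (xs[(j + 1) % n], ys[(j + 1) % n])
            if (ya > y and yb > y) or (ya < y and yb < y):
                continue  # edge entirely above or below this slice
            if abs(yb - ya) < 1e-9:
                hits.extend((xa, xb))  # edge lies on the slice line
            elif abs(xb - xa) < 1e-4:
                hits.append(xa)  # vertical edge
            else:
                m = (yb - ya) / (xb - xa)
                q = yb - m * xb
                hits.append((y - q) / m)
        if len(hits) < 2:
            continue
        # extend slightly past the outline so each stair reaches the side walls
        lines.append(((min(hits) - 2, y), (max(hits) + 2, y)))

    # rotate everything back into the original coordinate system
    return [(rotate(a, p0, cos_a, -sin_a), rotate(b, p0, cos_a, -sin_a)) for a, b in lines]
```

For example, `staircase_lines([(0, 0), (4, 0), (4, 6), (0, 6)], 3)` returns the base edge plus two evenly spaced horizontal stair lines across the rectangle.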
diff --git a/src/c3nav/editor/templates/editor/list.html b/src/c3nav/editor/templates/editor/list.html index fc923038..1bd75d0a 100644 --- a/src/c3nav/editor/templates/editor/list.html +++ b/src/c3nav/editor/templates/editor/list.html @@ -20,6 +20,11 @@ {% blocktrans %}New {{ model_title }}{% endblocktrans %} + {% if model_title == "Stair" %} + + {% blocktrans %}New staircase{% endblocktrans %} + + {% endif %} {% endif %} {% if explicit_edit %} diff --git a/src/c3nav/editor/urls.py b/src/c3nav/editor/urls.py index fe4f50ef..4c987ea1 100644 --- a/src/c3nav/editor/urls.py +++ b/src/c3nav/editor/urls.py @@ -4,7 +4,7 @@ from django.views.generic import TemplateView from c3nav.editor.views.account import change_password_view, login_view, logout_view, register_view from c3nav.editor.views.changes import changeset_detail, changeset_edit, changeset_redirect -from c3nav.editor.views.edit import edit, graph_edit, level_detail, list_objects, main_index, sourceimage, space_detail +from c3nav.editor.views.edit import edit, graph_edit, level_detail, list_objects, main_index, staircase_edit, sourceimage, space_detail from c3nav.editor.views.overlays import overlays_list, overlay_features, overlay_feature_edit from c3nav.editor.views.quest import QuestFormView from c3nav.editor.views.users import user_detail, user_redirect @@ -33,7 +33,6 @@ def add_editor_urls(model_name, parent_model_name=None, with_list=True, explicit ]) return result - # todo: custom path converters urlpatterns = [ path('levels//', level_detail, name='editor.levels.detail'), @@ -91,3 +90,4 @@ urlpatterns.extend(add_editor_urls('LeaveDescription', 'Space')) urlpatterns.extend(add_editor_urls('CrossDescription', 'Space')) urlpatterns.extend(add_editor_urls('BeaconMeasurement', 'Space')) urlpatterns.extend(add_editor_urls('RangingBeacon', 'Space')) +urlpatterns.append(path('spaces//staircase', edit, name='editor.stairs.staircase', kwargs={'model': apps.get_model('mapdata', 'Stair')})) diff --git a/src/c3nav/editor/views/edit.py b/src/c3nav/editor/views/edit.py index fc3060c0..3b7aa8c7 100644 --- a/src/c3nav/editor/views/edit.py +++ b/src/c3nav/editor/views/edit.py @@ -70,6 +70,12 @@ def main_index(request): }) +@etag(editor_etag_func) +@accesses_mapdata +@sidebar_view +def staircase_edit(request, space): + return render(request, "editor/create_staircase.html") + @etag(editor_etag_func) @accesses_mapdata @sidebar_view @@ -405,7 +411,11 @@ def edit(request, pk=None, model=None, level=None, space=None, on_top_of=None, e "access_restriction_select": True, }) - return render(request, 'editor/edit.html', ctx) + if request.path.endswith("staircase"): + ctx["space"] = space_id + return render(request, 'editor/create_staircase.html', ctx) + else: + return render(request, 'editor/edit.html', ctx) def get_visible_spaces(request): diff --git a/src/c3nav/mapdata/management/commands/add_sensor.py b/src/c3nav/mapdata/management/commands/add_sensor.py new file mode 100644 index 00000000..829f8d68 --- /dev/null +++ b/src/c3nav/mapdata/management/commands/add_sensor.py @@ -0,0 +1,141 @@ +import json +from django.core.management.base import BaseCommand +from django.utils import timezone +from shapely.geometry import Point + +from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level + + +class Command(BaseCommand): + help = 'Add a new sensor to an existing overlay' + + def add_arguments(self, parser): + parser.add_argument( + '--overlay-id', + type=int, + required=True, + help='ID of the overlay to add the sensor to', + ) + parser.add_argument( 
+ '--sensor-id', + type=str, + required=True, + help='Unique ID for the sensor (e.g., NOI:Sensor-ID)', + ) + parser.add_argument( + '--name', + type=str, + required=True, + help='Display name for the sensor', + ) + parser.add_argument( + '--x', + type=float, + required=True, + help='X coordinate in c3nav coordinate system', + ) + parser.add_argument( + '--y', + type=float, + required=True, + help='Y coordinate in c3nav coordinate system', + ) + parser.add_argument( + '--level', + type=str, + default='floor0', + help='Level/floor where the sensor is located (default: floor0)', + ) + parser.add_argument( + '--sensor-type', + type=str, + default='environmental', + help='Type of sensor (default: environmental)', + ) + + def handle(self, *args, **options): + try: + overlay = DataOverlay.objects.get(id=options['overlay_id']) + except DataOverlay.DoesNotExist: + self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found') + return + + try: + level = Level.objects.get(short_label=options['level']) + except Level.DoesNotExist: + self.stderr.write(f'Level "{options["level"]}" not found') + return + + # Update overlay configuration to include the new sensor + sensor_config = overlay.sensor_config or {} + if 'sensors' not in sensor_config: + sensor_config['sensors'] = [] + + # Check if sensor already exists in config + existing_sensor = None + for i, sensor in enumerate(sensor_config['sensors']): + if sensor['id'] == options['sensor_id']: + existing_sensor = i + break + + new_sensor_config = { + 'id': options['sensor_id'], + 'coordinates': {'x': options['x'], 'y': options['y']}, + 'level': options['level'] + } + + if existing_sensor is not None: + sensor_config['sensors'][existing_sensor] = new_sensor_config + self.stdout.write(f'Updated sensor configuration for {options["sensor_id"]}') + else: + sensor_config['sensors'].append(new_sensor_config) + self.stdout.write(f'Added sensor configuration for {options["sensor_id"]}') + + overlay.sensor_config = sensor_config + overlay.save() + + # Create the sensor feature (or update if it exists) + point = Point(options['x'], options['y']) + + feature, created = DataOverlayFeature.objects.update_or_create( + overlay=overlay, + sensor_id=options['sensor_id'], + defaults={ + 'titles': {'en': options['name']}, + 'geometry': point, + 'level': level, + 'sensor_type': options['sensor_type'], + 'coordinates_x': options['x'], + 'coordinates_y': options['y'], + 'fill_color': '#95A5A6', # Default gray + 'stroke_color': '#95A5A6', + 'stroke_width': 2, + 'fill_opacity': 0.8, + 'show_label': True, + 'show_geometry': True, + 'interactive': True, + 'point_icon': 'sensors', + 'last_updated': timezone.now(), + 'extra_data': { + 'Status': 'No data yet', + 'Last Updated': timezone.now().strftime('%Y-%m-%d %H:%M:%S'), + 'Data Source': 'Manual configuration', + 'Station ID': options['sensor_id'] + } + } + ) + + action = 'Created' if created else 'Updated' + self.stdout.write( + self.style.SUCCESS( + f'{action} sensor "{options["name"]}" (ID: {options["sensor_id"]}) ' + f'at coordinates ({options["x"]}, {options["y"]}) on level {options["level"]}' + ) + ) + + self.stdout.write( + 'You can now run the scrape command to fetch data for this sensor:' + ) + self.stdout.write( + f'python manage.py manage_sensors --scrape-data --overlay-id {options["overlay_id"]}' + ) diff --git a/src/c3nav/mapdata/management/commands/list_sensors.py b/src/c3nav/mapdata/management/commands/list_sensors.py new file mode 100644 index 00000000..75d1e6b8 --- /dev/null +++ 
b/src/c3nav/mapdata/management/commands/list_sensors.py @@ -0,0 +1,60 @@ +from django.core.management.base import BaseCommand +from c3nav.mapdata.models import DataOverlay, DataOverlayFeature + + +class Command(BaseCommand): + help = 'List all sensors in overlays' + + def add_arguments(self, parser): + parser.add_argument( + '--overlay-id', + type=int, + help='ID of a specific overlay to list sensors for', + ) + + def handle(self, *args, **options): + if options['overlay_id']: + try: + overlay = DataOverlay.objects.get(id=options['overlay_id']) + overlays = [overlay] + except DataOverlay.DoesNotExist: + self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found') + return + else: + overlays = DataOverlay.objects.all() + + for overlay in overlays: + self.stdout.write(f'\n=== Overlay {overlay.id}: {overlay.titles.get("en", "Unknown")} ===') + + # Show overlay configuration + sensor_config = overlay.sensor_config or {} + configured_sensors = sensor_config.get('sensors', []) + if configured_sensors: + self.stdout.write('Configured sensors:') + for sensor in configured_sensors: + self.stdout.write(f' - {sensor["id"]} at ({sensor["coordinates"]["x"]}, {sensor["coordinates"]["y"]}) on level {sensor.get("level", "default")}') + + # Show actual sensor features in database + features = DataOverlayFeature.objects.filter(overlay=overlay) + if features: + self.stdout.write(f'\nSensor features in database ({features.count()}):') + for feature in features: + title = feature.titles.get('en', 'Unknown') if feature.titles else 'Unknown' + level_name = feature.level.short_label if feature.level else 'No level' + coords = f'({feature.coordinates_x}, {feature.coordinates_y})' if feature.coordinates_x is not None else 'No coords' + last_updated = feature.last_updated.strftime('%Y-%m-%d %H:%M:%S') if feature.last_updated else 'Never' + + self.stdout.write(f' - {feature.sensor_id}: {title}') + self.stdout.write(f' Level: {level_name}, Coords: {coords}') + self.stdout.write(f' Type: {feature.sensor_type or "Unknown"}, Last updated: {last_updated}') + + if feature.extra_data: + readings = [f'{k}: {v}' for k, v in feature.extra_data.items() + if k not in ['Last Updated', 'Data Source', 'Station ID']] + if readings: + self.stdout.write(f' Readings: {", ".join(readings)}') + else: + self.stdout.write('No sensor features found in database') + + if not overlays: + self.stdout.write('No overlays found') diff --git a/src/c3nav/mapdata/management/commands/manage_sensors.py b/src/c3nav/mapdata/management/commands/manage_sensors.py new file mode 100644 index 00000000..8996d06c --- /dev/null +++ b/src/c3nav/mapdata/management/commands/manage_sensors.py @@ -0,0 +1,461 @@ +import json +import requests +from django.core.management.base import BaseCommand +from django.utils import timezone +from shapely.geometry import Point + +from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level + +class Command(BaseCommand): + help = 'Setup and manage sensor overlays with generic sensor data' + + def add_arguments(self, parser): + parser.add_argument( + '--create-overlay', + type=str, + help='Create a new sensor overlay with given name', + ) + parser.add_argument( + '--data-source-url', + type=str, + help='URL to scrape sensor data from', + ) + parser.add_argument( + '--sensor-config', + type=str, + help='JSON configuration for sensor data mapping', + ) + parser.add_argument( + '--add-sensor', + action='store_true', + help='Add sensors manually with provided coordinates', + ) + parser.add_argument( + 
'--scrape-data', + action='store_true', + help='Scrape data from configured data sources', + ) + parser.add_argument( + '--overlay-id', + type=int, + help='ID of the overlay to work with', + ) + + def handle(self, *args, **options): + if options['create_overlay']: + self.create_overlay(options) + elif options['add_sensor']: + self.add_sensors_manually(options) + elif options['scrape_data']: + self.scrape_sensor_data(options) + else: + self.stdout.write('Please specify an action: --create-overlay, --add-sensor, or --scrape-data') + + def create_overlay(self, options): + """Create a new sensor overlay""" + name = options['create_overlay'] + + # Parse sensor configuration + sensor_config = {} + if options['sensor_config']: + try: + sensor_config = json.loads(options['sensor_config']) + except json.JSONDecodeError: + self.stderr.write('Invalid JSON in sensor_config') + return + + overlay = DataOverlay.objects.create( + titles={'en': name}, + description=f'Sensor overlay for {name}', + default_geomtype=DataOverlay.GeometryType.POINT, + data_source_url=options['data_source_url'], + sensor_config=sensor_config, + update_interval=30, # Update every 30 seconds + ) + + self.stdout.write( + self.style.SUCCESS(f'Created overlay "{name}" with ID {overlay.id}') + ) + + def add_sensors_manually(self, options): + """Add sensors manually with coordinates""" + if not options['overlay_id']: + self.stderr.write('--overlay-id required when adding sensors manually') + return + + try: + overlay = DataOverlay.objects.get(id=options['overlay_id']) + except DataOverlay.DoesNotExist: + self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found') + return + + # Get the ground floor level (floor0) + try: + level = Level.objects.get(short_label='floor0') + except Level.DoesNotExist: + level = Level.objects.first() # Fallback to first level + if not level: + self.stderr.write('No levels found in database') + return + + # Example sensors - this should be configurable via command line or config file + sensors = [ + { + 'sensor_id': 'temp_001', + 'sensor_type': 'temperature', + 'name': 'Meeting Room A1 - Temperature', + 'coordinates_x': 500, + 'coordinates_y': 300, + 'value': 22.5, + 'unit': '°C' + }, + { + 'sensor_id': 'hum_001', + 'sensor_type': 'humidity', + 'name': 'Meeting Room A1 - Humidity', + 'coordinates_x': 500, + 'coordinates_y': 300, + 'value': 55.0, + 'unit': '%' + }, + { + 'sensor_id': 'temp_002', + 'sensor_type': 'temperature', + 'name': 'Server Room - Temperature', + 'coordinates_x': 750, + 'coordinates_y': 400, + 'value': 18.2, + 'unit': '°C' + }, + { + 'sensor_id': 'co2_001', + 'sensor_type': 'co2', + 'name': 'Office Space - CO2', + 'coordinates_x': 300, + 'coordinates_y': 600, + 'value': 450, + 'unit': 'ppm' + } + ] + + for sensor_item in sensors: + # Create geometry from c3nav coordinates + point = Point(sensor_item['coordinates_x'], sensor_item['coordinates_y']) + + # Get color based on sensor type and value + color = self.get_sensor_color(sensor_item['sensor_type'], sensor_item['value']) + + feature = DataOverlayFeature.objects.create( + overlay=overlay, + titles={'en': sensor_item['name']}, + geometry=point, + level=level, + sensor_id=sensor_item['sensor_id'], + sensor_type=sensor_item['sensor_type'], + sensor_value=sensor_item['value'], + sensor_unit=sensor_item['unit'], + coordinates_x=sensor_item['coordinates_x'], + coordinates_y=sensor_item['coordinates_y'], + fill_color=color, + stroke_color=color, + stroke_width=2, + fill_opacity=0.8, + show_label=True, + show_geometry=True, + 
interactive=True, + point_icon=self.get_sensor_icon(sensor_item['sensor_type']), + last_updated=timezone.now(), + extra_data={ + 'value': str(sensor_item['value']), + 'unit': sensor_item['unit'], + 'sensor_type': sensor_item['sensor_type'] + } + ) + + self.stdout.write( + f'Created sensor {sensor_item["sensor_id"]}: {sensor_item["name"]} ' + f'({sensor_item["value"]}{sensor_item["unit"]}) at ({sensor_item["coordinates_x"]}, {sensor_item["coordinates_y"]})' + ) + + def scrape_sensor_data(self, options): + """Scrape sensor data from configured data sources""" + overlays = DataOverlay.objects.filter(data_source_url__isnull=False) + + if options['overlay_id']: + overlays = overlays.filter(id=options['overlay_id']) + + for overlay in overlays: + self.stdout.write(f'Scraping data for overlay: {overlay.titles.get("en", "Unknown")}') + + try: + # Fetch data from the source URL + response = requests.get(overlay.data_source_url, timeout=30) + response.raise_for_status() + data = response.json() + + # Process data using sensor configuration + self.process_scraped_data(overlay, data) + + except requests.RequestException as e: + self.stderr.write(f'Error fetching data from {overlay.data_source_url}: {e}') + except json.JSONDecodeError as e: + self.stderr.write(f'Error parsing JSON from {overlay.data_source_url}: {e}') + except Exception as e: + self.stderr.write(f'Error processing data for overlay {overlay.id}: {e}') + + def process_scraped_data(self, overlay, data): + """Process scraped data according to overlay configuration""" + sensor_config = overlay.sensor_config or {} + + # Default configuration for NOI Open Data Hub + default_config = { + "data_path": "data", + "mappings": { + "id_field": "scode", + "name_field": "sname", + "x_field": "scoordinate.x", + "y_field": "scoordinate.y" + }, + "sensors": [] # List of specific sensors to process + } + + config = {**default_config, **sensor_config} + + # Extract sensor data array + api_data = data + if config.get("data_path"): + for path_part in config["data_path"].split("."): + api_data = api_data.get(path_part, []) + + updated_count = 0 + created_count = 0 + + # Group measurements by station (scode) first and discover sensor types dynamically + stations = {} + for item in api_data: + station_id = self.get_nested_field(item, config["mappings"]["id_field"]) + station_name = self.get_nested_field(item, config["mappings"]["name_field"]) + measurement_type = self.get_nested_field(item, "tname") + + if not station_id or not measurement_type: + continue + + if station_id not in stations: + stations[station_id] = { + 'name': station_name, + 'measurements': {} + } + + stations[station_id]['measurements'][measurement_type] = item + + # If specific sensors are configured, only process those + configured_sensors = config.get('sensors', []) + sensor_configs = {s['id']: s for s in configured_sensors} if configured_sensors else {} + + # Process each station and its measurements + for station_id, station_data in stations.items(): + # Skip if we have specific sensors configured and this isn't one of them + if sensor_configs and station_id not in sensor_configs: + continue + + # Get sensor-specific configuration + sensor_specific_config = sensor_configs.get(station_id, {}) + + # Determine coordinates and level for this sensor + if sensor_specific_config.get('coordinates'): + # Use sensor-specific coordinates + x_coord = sensor_specific_config['coordinates']['x'] + y_coord = sensor_specific_config['coordinates']['y'] + else: + # Get coordinates from measurement data + 
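# (falls back to the station's own scoordinate, as addressed by the x_field/y_field mappings, when sensor_config does not pin coordinates for it)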
first_measurement = next(iter(station_data['measurements'].values())) + x_coord = self.get_nested_field(first_measurement, config["mappings"]["x_field"]) + y_coord = self.get_nested_field(first_measurement, config["mappings"]["y_field"]) + + if x_coord is None or y_coord is None: + continue + + # Convert coordinates if needed + x_coord = float(x_coord) + y_coord = float(y_coord) + + # Determine level for this sensor + level_name = sensor_specific_config.get('level', config.get('level', 'floor0')) + try: + level = Level.objects.get(short_label=level_name) + except Level.DoesNotExist: + self.stderr.write(f'Level "{level_name}" not found for sensor {station_id}, using ground floor') + try: + level = Level.objects.get(short_label='floor0') + except Level.DoesNotExist: + level = Level.objects.first() # Final fallback + if not level: + self.stderr.write(f'No levels found in database for sensor {station_id}') + continue + + # Collect all sensor data for this station in one feature + sensor_readings = {} + raw_measurements = {} + + # Process ALL measurements found in the API response (dynamically discovered) + for measurement_type, measurement in station_data['measurements'].items(): + # Extract values dynamically from the API response + sensor_value = self.get_nested_field(measurement, "mvalue") + sensor_unit = self.get_nested_field(measurement, "tunit") + sensor_name = self.get_nested_field(measurement, "tname") + sensor_description = self.get_nested_field(measurement, "tdescription") + + if sensor_value is None: + continue # Skip if no value + + # Convert sensor value to float + try: + sensor_value = float(sensor_value) + except (ValueError, TypeError): + continue + + # Store this measurement for the combined sensor + display_name = sensor_description or sensor_name or measurement_type + unit_str = f" {sensor_unit}" if sensor_unit else "" + sensor_readings[display_name] = f"{sensor_value}{unit_str}" + raw_measurements[measurement_type] = measurement + + if not sensor_readings: + continue # Skip if no valid measurements + + # Create a single sensor feature with all measurements + sensor_id = station_id # Use station ID as sensor ID + display_name = f"{station_data['name']} - Environmental Sensor" + + # Determine primary color based on the most critical measurement + # Priority: CO2 > Temperature > Humidity + primary_color = '#95A5A6' # Default gray + + # Look for CO2 measurements (various naming conventions) + co2_measurement = None + for mtype, measurement in raw_measurements.items(): + if any(keyword in mtype.lower() for keyword in ['co2', 'carbon']): + co2_measurement = measurement + break + + if co2_measurement: + co2_value = self.get_nested_field(co2_measurement, "mvalue") + if co2_value: + primary_color = self.get_sensor_color('co2', float(co2_value)) + else: + # Look for temperature measurements + temp_measurement = None + for mtype, measurement in raw_measurements.items(): + if any(keyword in mtype.lower() for keyword in ['temperature', 'temp']): + temp_measurement = measurement + break + + if temp_measurement: + temp_value = self.get_nested_field(temp_measurement, "mvalue") + if temp_value: + primary_color = self.get_sensor_color('temperature', float(temp_value)) + + # Create geometry + point = Point(x_coord, y_coord) + + feature, created = DataOverlayFeature.objects.update_or_create( + overlay=overlay, + sensor_id=sensor_id, + defaults={ + 'titles': {'en': display_name}, + 'geometry': point, + 'level': level, + 'sensor_type': 'environmental', # Combined sensor type + 'sensor_value': 
None, # No single value for combined sensor + 'sensor_unit': None, # No single unit for combined sensor + 'coordinates_x': x_coord, + 'coordinates_y': y_coord, + 'fill_color': primary_color, + 'stroke_color': primary_color, + 'stroke_width': 2, + 'fill_opacity': 0.8, + 'show_label': True, + 'show_geometry': True, + 'interactive': True, + 'point_icon': 'sensors', # Generic sensor icon + 'last_updated': timezone.now(), + 'sensor_data': raw_measurements, # Store all raw measurements + 'extra_data': { + **sensor_readings, # All sensor readings as separate entries + 'Last Updated': timezone.now().strftime('%Y-%m-%d %H:%M:%S'), + 'Data Source': 'NOI Open Data Hub', + 'Station ID': station_id + } + } + ) + + if created: + created_count += 1 + readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()]) + self.stdout.write(f'Created sensor {sensor_id} on level {level.short_label}: {readings_str}') + else: + updated_count += 1 + readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()]) + self.stdout.write(f'Updated sensor {sensor_id} on level {level.short_label}: {readings_str}') + + self.stdout.write( + f'Processed overlay {overlay.id}: {created_count} created, {updated_count} updated' + ) + + def get_nested_field(self, data, field_path): + """Get value from nested field path like 'scoordinate.x'""" + try: + value = data + for part in field_path.split('.'): + value = value[part] + return value + except (KeyError, TypeError): + return None + + def get_sensor_color(self, sensor_type, value): + """Get color based on sensor type and value""" + sensor_type_lower = sensor_type.lower() + + if any(keyword in sensor_type_lower for keyword in ['temperature', 'temp']): + if value < 15: + return '#0066CC' # Cold - blue + elif value < 18: + return '#00AAFF' # Cool - light blue + elif value < 22: + return '#00CC66' # Comfortable - green + elif value < 25: + return '#FFAA00' # Warm - orange + else: + return '#FF3333' # Hot - red + elif any(keyword in sensor_type_lower for keyword in ['humidity', 'humid']): + if value < 30: + return '#FF6B6B' # Too dry - red + elif value < 60: + return '#4ECDC4' # Good - teal + else: + return '#45B7D1' # Too humid - blue + elif any(keyword in sensor_type_lower for keyword in ['co2', 'carbon']): + if value < 400: + return '#2ECC71' # Excellent - green + elif value < 800: + return '#F39C12' # Good - orange + elif value < 1200: + return '#E74C3C' # Poor - red + else: + return '#8E44AD' # Very poor - purple + else: + return '#95A5A6' # Default - gray + + def get_sensor_icon(self, sensor_type): + """Get icon based on sensor type""" + icons = { + 'air-temperature': 'thermostat', + 'air-humidity': 'water_drop', + 'co2-ppm': 'air', + 'temperature': 'thermostat', + 'humidity': 'water_drop', + 'co2': 'air', + 'pressure': 'compress', + 'light': 'light_mode' + } + return icons.get(sensor_type, 'sensors') diff --git a/src/c3nav/mapdata/management/commands/pulloverlaydata.py b/src/c3nav/mapdata/management/commands/pulloverlaydata.py new file mode 100644 index 00000000..80c3d810 --- /dev/null +++ b/src/c3nav/mapdata/management/commands/pulloverlaydata.py @@ -0,0 +1,187 @@ +import logging +import requests +from django.core.management.base import BaseCommand +from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, MapUpdate, Level +from shapely.geometry import Point +import json +import traceback + +logger = logging.getLogger(__name__) + +class Command(BaseCommand): + help = 'Pull overlay data from external APIs' + + def add_arguments(self, parser): + 
parser.add_argument( + '--force', + action='store_true', + help='Force update even if not enough time has passed', + ) + + def handle(self, *args, **options): + overlays = DataOverlay.objects.all() + for overlay in overlays: + self.stdout.write(f"Processing overlay: {overlay.title}") + try: + self.update_overlay(overlay, force=options['force']) + except Exception as e: + logger.error(f"Error updating overlay {overlay.id}: {e}") + logger.error(traceback.format_exc()) + self.stderr.write(f"Error updating overlay {overlay.id}: {e}") + + def update_overlay(self, overlay, force=False): + """Update a single overlay from its API URL""" + + if not overlay.pull_url: + self.stdout.write(f"No API URL configured for overlay {overlay.id}") + return + + try: + self.stdout.write(f"Fetching data from: {overlay.pull_url}") + response = requests.get(overlay.pull_url, timeout=30) + response.raise_for_status() + data = response.json() + + self.stdout.write(f"Received {len(data)} items from API") + + # Process the data based on overlay type + if hasattr(overlay, 'description') and overlay.description and 'temperature' in overlay.description.lower(): + self.process_temperature_data(overlay, data) + else: + self.stdout.write(f"Unknown overlay type for overlay {overlay.id}") + + except requests.RequestException as e: + logger.error(f"HTTP error fetching {overlay.pull_url}: {e}") + self.stderr.write(f"HTTP error: {e}") + except Exception as e: + logger.error(f"Error processing overlay {overlay.id}: {e}") + logger.error(traceback.format_exc()) + self.stderr.write(f"Error: {e}") + + def process_temperature_data(self, overlay, data): + """Process temperature sensor data from NOI Open Data Hub""" + + # Clear existing features for this overlay + DataOverlayFeature.objects.filter(overlay=overlay).delete() + self.stdout.write(f"Cleared existing features for overlay {overlay.id}") + + # Group measurements by station + stations = {} + for item in data: + scode = item.get('scode') + if scode: + if scode not in stations: + stations[scode] = { + 'sname': item.get('sname'), + 'scoordinate': item.get('scoordinate'), + 'measurements': [] + } + stations[scode]['measurements'].append(item) + + self.stdout.write(f"Found {len(stations)} stations") + + for scode, station_data in stations.items(): + try: + self.create_temperature_feature(overlay, scode, station_data) + except Exception as e: + logger.error(f"Error creating feature for station {scode}: {e}") + logger.error(traceback.format_exc()) + self.stderr.write(f"Error creating feature for {scode}: {e}") + + def create_temperature_feature(self, overlay, scode, station_data): + """Create a DataOverlayFeature for a temperature station""" + + # Extract coordinates from scoordinate object + scoordinate = station_data.get('scoordinate') + if not scoordinate: + self.stdout.write(f"No coordinates for station {scode}") + return + + # scoordinate is a dict with x, y coordinates + x = scoordinate.get('x') + y = scoordinate.get('y') + + if x is None or y is None: + self.stdout.write(f"Invalid coordinates for station {scode}: {scoordinate}") + return + + self.stdout.write(f"Station {scode} coordinates: x={x}, y={y}") + + # Create point geometry (assuming WGS84/EPSG:4326) + point = Point(x, y) + + # Find temperature measurement + temperature = None + humidity = None + + for measurement in station_data['measurements']: + tname = measurement.get('tname', '').lower() + if 'temperature' in tname: + temperature = measurement.get('mvalue') + elif 'humidity' in tname: + humidity = 
measurement.get('mvalue') + + self.stdout.write(f"Station {scode}: temp={temperature}, humidity={humidity}") + + if temperature is None: + self.stdout.write(f"No temperature data for station {scode}") + return + + # Determine color based on temperature + color = self.get_temperature_color(temperature) + + # Try to find appropriate level (ground floor by default) + level = None + try: + # Look for ground floor or level 0 + level = Level.objects.filter( + short_label__in=['0', 'EG', 'Ground', 'G'] + ).first() + + if not level: + # Fallback to any level + level = Level.objects.first() + + except Exception as e: + logger.warning(f"Could not determine level: {e}") + + if not level: + self.stdout.write(f"No level found for station {scode}") + return + + self.stdout.write(f"Using level: {level.short_label} for station {scode}") + + # Create the feature + title = f"{station_data.get('sname', scode)} ({temperature}°C)" + + feature = DataOverlayFeature.objects.create( + overlay=overlay, + level=level, + geometry=point.wkt, + title=title, + color=color, + opacity=0.8, + icon='thermometer' + ) + + self.stdout.write(f"Created feature {feature.id} for station {scode}") + + def get_temperature_color(self, temperature): + """Get color based on temperature value""" + try: + temp = float(temperature) + except (ValueError, TypeError): + return '#808080' # Gray for invalid values + + if temp < 10: + return '#0000FF' # Blue for very cold + elif temp < 15: + return '#00BFFF' # Light blue for cold + elif temp < 20: + return '#00FF00' # Green for cool + elif temp < 25: + return '#FFFF00' # Yellow for comfortable + elif temp < 30: + return '#FFA500' # Orange for warm + else: + return '#FF0000' # Red for hot \ No newline at end of file diff --git a/src/c3nav/mapdata/migrations/0139_add_last_pull_time.py b/src/c3nav/mapdata/migrations/0139_add_last_pull_time.py new file mode 100644 index 00000000..ef731e36 --- /dev/null +++ b/src/c3nav/mapdata/migrations/0139_add_last_pull_time.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-08-02 06:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mapdata', '0138_rangingbeacon_max_observed_num_clients_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='dataoverlay', + name='last_pull_time', + field=models.DateTimeField(blank=True, null=True, verbose_name='last pull time'), + ), + ] diff --git a/src/c3nav/mapdata/migrations/0140_add_temperature_fields.py b/src/c3nav/mapdata/migrations/0140_add_temperature_fields.py new file mode 100644 index 00000000..0ad4b9a2 --- /dev/null +++ b/src/c3nav/mapdata/migrations/0140_add_temperature_fields.py @@ -0,0 +1,73 @@ +# Generated by Django 5.1.5 on 2025-08-02 07:18 + +from django.db import migrations, models +import django_pydantic_field + + +class Migration(migrations.Migration): + + dependencies = [ + ('mapdata', '0139_add_last_pull_time'), + ] + + operations = [ + # Add data source URL to DataOverlay + migrations.AddField( + model_name='dataoverlay', + name='data_source_url', + field=models.URLField(blank=True, null=True, verbose_name='Data Source URL', + help_text='URL to scrape sensor data from'), + ), + + # Add sensor configuration to DataOverlay + migrations.AddField( + model_name='dataoverlay', + name='sensor_config', + field=django_pydantic_field.SchemaField( + schema=dict, blank=True, null=True, + verbose_name='Sensor Configuration', + help_text='JSON configuration for sensor data mapping and processing' + ), + ), + + # Add generic sensor 
fields to DataOverlayFeature + migrations.AddField( + model_name='dataoverlayfeature', + name='sensor_id', + field=models.CharField(max_length=100, blank=True, null=True, verbose_name='Sensor ID'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='sensor_type', + field=models.CharField(max_length=50, blank=True, null=True, verbose_name='Sensor Type', + help_text='Type of sensor: temperature, humidity, co2, etc.'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='sensor_value', + field=models.FloatField(blank=True, null=True, verbose_name='Sensor Value'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='sensor_unit', + field=models.CharField(max_length=20, blank=True, null=True, verbose_name='Sensor Unit', + help_text='Unit of measurement: °C, %, ppm, etc.'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='coordinates_x', + field=models.FloatField(blank=True, null=True, verbose_name='X Coordinate', + help_text='X coordinate in c3nav coordinate system'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='coordinates_y', + field=models.FloatField(blank=True, null=True, verbose_name='Y Coordinate', + help_text='Y coordinate in c3nav coordinate system'), + ), + migrations.AddField( + model_name='dataoverlayfeature', + name='last_updated', + field=models.DateTimeField(blank=True, null=True, verbose_name='Last Updated'), + ), + ] diff --git a/src/c3nav/mapdata/migrations/0141_add_sensor_data_field.py b/src/c3nav/mapdata/migrations/0141_add_sensor_data_field.py new file mode 100644 index 00000000..392b5921 --- /dev/null +++ b/src/c3nav/mapdata/migrations/0141_add_sensor_data_field.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-08-02 12:00 + +from django.db import migrations +import django_pydantic_field + + +class Migration(migrations.Migration): + + dependencies = [ + ('mapdata', '0140_add_temperature_fields'), + ] + + operations = [ + migrations.AddField( + model_name='dataoverlayfeature', + name='sensor_data', + field=django_pydantic_field.SchemaField( + schema=dict, blank=True, null=True, + verbose_name='Raw Sensor Data', + help_text='Raw data from sensor for debugging and additional info' + ), + ), + ] diff --git a/src/c3nav/mapdata/migrations/0139_add_cloned_item_sync.py b/src/c3nav/mapdata/migrations/0142_add_cloned_item_sync.py similarity index 97% rename from src/c3nav/mapdata/migrations/0139_add_cloned_item_sync.py rename to src/c3nav/mapdata/migrations/0142_add_cloned_item_sync.py index 68a07f23..215b3ae1 100644 --- a/src/c3nav/mapdata/migrations/0139_add_cloned_item_sync.py +++ b/src/c3nav/mapdata/migrations/0142_add_cloned_item_sync.py @@ -9,6 +9,7 @@ class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), ('mapdata', '0138_rangingbeacon_max_observed_num_clients_and_more'), + ('mapdata', '0141_add_sensor_data_field') ] operations = [ diff --git a/src/c3nav/mapdata/models/overlay.py b/src/c3nav/mapdata/models/overlay.py index a66da733..3920bcd1 100644 --- a/src/c3nav/mapdata/models/overlay.py +++ b/src/c3nav/mapdata/models/overlay.py @@ -44,6 +44,13 @@ class DataOverlay(TitledMixin, AccessRestrictionMixin, models.Model): verbose_name=_('Editor Access Restriction'), on_delete=models.PROTECT) + # Generic sensor data configuration + data_source_url = models.URLField(blank=True, null=True, verbose_name=_('Data Source URL'), + help_text=_('URL to scrape sensor data from')) + sensor_config: Optional[dict] = 
SchemaField(schema=dict, blank=True, null=True, + verbose_name=_('Sensor Configuration'), + help_text=_('JSON configuration for sensor data mapping and processing')) + class Meta: verbose_name = _('Data Overlay') verbose_name_plural = _('Data Overlays') @@ -72,6 +79,22 @@ class DataOverlayFeature(TitledMixin, LevelGeometryMixin, models.Model): null=True, default=None, verbose_name=_('extra data (JSON object)')) + + # Generic sensor fields + sensor_id = models.CharField(max_length=100, blank=True, null=True, verbose_name=_('Sensor ID')) + sensor_type = models.CharField(max_length=50, blank=True, null=True, verbose_name=_('Sensor Type'), + help_text=_('Type of sensor: temperature, humidity, co2, etc.')) + sensor_value = models.FloatField(blank=True, null=True, verbose_name=_('Sensor Value')) + sensor_unit = models.CharField(max_length=20, blank=True, null=True, verbose_name=_('Sensor Unit'), + help_text=_('Unit of measurement: °C, %, ppm, etc.')) + coordinates_x = models.FloatField(blank=True, null=True, verbose_name=_('X Coordinate'), + help_text=_('X coordinate in c3nav coordinate system')) + coordinates_y = models.FloatField(blank=True, null=True, verbose_name=_('Y Coordinate'), + help_text=_('Y coordinate in c3nav coordinate system')) + last_updated = models.DateTimeField(blank=True, null=True, verbose_name=_('Last Updated')) + sensor_data: Optional[dict] = SchemaField(schema=dict, blank=True, null=True, + verbose_name=_('Raw Sensor Data'), + help_text=_('Raw data from sensor for debugging and additional info')) def to_geojson(self, instance=None) -> dict: result = { diff --git a/src/c3nav/mapdata/tasks.py b/src/c3nav/mapdata/tasks.py index 2897c29a..5d486bbe 100644 --- a/src/c3nav/mapdata/tasks.py +++ b/src/c3nav/mapdata/tasks.py @@ -82,3 +82,76 @@ def update_ap_names_bssid_mapping(self, map_name, user_id): with changeset.lock_to_edit() as locked_changeset: locked_changeset.title = 'passive update bssids' locked_changeset.apply(user) + + +@app.task(bind=True, max_retries=3) +def pull_overlay_data(self, overlay_id=None): + """ + Celery task to pull data overlay features from external URLs. + + Args: + overlay_id (int, optional): Specific overlay ID to update. + If None, updates all overlays with pull_url. + """ + logger.info('Starting overlay data pull task...') + + from django.core.management import call_command + from io import StringIO + + try: + # Capture output from the management command + output = StringIO() + + if overlay_id: + call_command('pulloverlaydata', overlay=overlay_id, stdout=output) + else: + call_command('pulloverlaydata', stdout=output) + + result = output.getvalue() + logger.info(f'Overlay data pull completed: {result}') + return result + + except Exception as e: + logger.error(f'Overlay data pull failed: {e}') + if self.request.retries < self.max_retries: + logger.info(f'Retrying in 60 seconds... (attempt {self.request.retries + 1}/{self.max_retries})') + raise self.retry(countdown=60, exc=e) + else: + logger.error('Max retries exceeded for overlay data pull') + raise + + +@app.task(bind=True) +def schedule_overlay_data_pulls(self): + """ + Periodic task to schedule individual overlay data pulls based on their intervals. + This should be called every minute by a periodic task scheduler. 
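+
+    For example, a celery beat entry (assumed here for illustration, not part of this change)
+    could invoke it every minute:
+
+        CELERY_BEAT_SCHEDULE = {
+            'schedule-overlay-data-pulls': {
+                'task': 'c3nav.mapdata.tasks.schedule_overlay_data_pulls',
+                'schedule': 60.0,
+            },
+        }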
+ """ + from c3nav.mapdata.models import DataOverlay + from django.utils import timezone + + logger.info('Checking overlays for scheduled pulls...') + + overlays = DataOverlay.objects.exclude(pull_url__isnull=True).exclude(pull_url='').exclude(pull_interval__isnull=True) + + scheduled_count = 0 + for overlay in overlays: + # Check if it's time to update this overlay + should_pull = False + + if overlay.last_pull_time is None: + # Never pulled before + should_pull = True + else: + # Check if enough time has passed since last pull + next_pull_time = overlay.last_pull_time + overlay.pull_interval + should_pull = timezone.now() >= next_pull_time + + if should_pull: + # Schedule the pull + pull_overlay_data.delay(overlay.pk) + scheduled_count += 1 + logger.info(f'Scheduled pull for overlay: {overlay.title}') + + logger.info(f'Scheduled {scheduled_count} overlay pulls') + return scheduled_count diff --git a/src/manage.py b/src/manage.py index 005d7dbf..e7892355 100755 --- a/src/manage.py +++ b/src/manage.py @@ -8,3 +8,7 @@ if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) + + + + diff --git a/start_db.sh b/start_db.sh index 5a480395..a509b94c 100755 --- a/start_db.sh +++ b/start_db.sh @@ -12,6 +12,8 @@ if [[ $# == 1 ]] && [[ $1 == "stop" ]]; then echo "Stopped the postgres container" elif [[ $# == 1 ]] && [[ $1 == "db" ]]; then echo "Setting up database" + sudo docker stop postgres + sudo docker container rm -f postgres sudo docker run -d --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=test -e POSTGRES_USER=mindshub postgres until psql "postgres://mindshub:test@localhost:5432" <<< "CREATE DATABASE insignorocketdb;"; do sleep 0.5; @@ -32,8 +34,8 @@ elif [[ $# == 1 ]] && [[ $1 == "run" ]]; then elif [[ $# == 1 ]] && [[ $1 == "run_without_output" ]]; then echo "Processing updates and running server without output" pushd src 2>&1 > /dev/null - python manage.py processupdates 2>&1 | (grep -e "^ERROR" -e "^WARNING" -e "^HTTP" || true) - python manage.py runserver 2>&1 | (grep -e "^ERROR" -e "^WARNING" -e "^HTTP" || true) + python manage.py processupdates 2>&1 | (grep -vE '^(INFO|DEBUG)|__debug__' || true) + python manage.py runserver 2>&1 | (grep -vE '^(INFO|DEBUG)|__debug__' || true) popd 2>&1 > /dev/null elif [[ $# > 0 ]] && [[ $1 == "manage" ]]; then pushd src