Compare commits

45 commits

Author   SHA1        Date                        Message
Alessio  7a7b1df1fa  2025-08-02 13:41:33 +02:00  removed data garbadge
Alessio  bfcbd54147  2025-08-02 13:40:31 +02:00  final commit
Degra02  5dd7bce67b  2025-08-02 13:27:48 +02:00  remove migration
Degra02  fb881d649b  2025-08-02 13:25:31 +02:00  boh
-        fb1ae0f3bc  2025-08-02 13:20:51 +02:00  Fix start_db.sh again
-        6576da2660  2025-08-02 13:09:19 +02:00  Implement snap to corner
Alessio  bfd08978a1  2025-08-02 12:04:01 +02:00  Merge branch 'ours' of https://repos.hackathon.bz.it/2025-summer/team-3 into ours
Alessio  197264f27a  2025-08-02 12:03:57 +02:00  funge
Alessio  789640998a  2025-08-02 11:53:58 +02:00  mother fucking sensors
-        8938745251  2025-08-02 11:35:14 +02:00  mixing icon
Degra02  5c89cf116f  2025-08-02 11:32:01 +02:00  Merge branch 'snap-to-grid' into ours
-        f94c30c608  2025-08-02 11:07:38 +02:00  Remove console.log
-        2c8b75b2ab  2025-08-02 11:00:26 +02:00  Merge branch 'ours' of https://repos.hackathon.bz.it/2025-summer/team-3 into ours
Degra02  3f528f78d9  2025-08-02 11:00:24 +02:00  Merge branch 'snap-to-grid' of https://repos.hackathon.bz.it/2025-summer/team-3 into snap-to-grid
Degra02  c4cfb4a4f5  2025-08-02 11:00:19 +02:00  functional clone feature
-        e1fe06a1b1  2025-08-02 11:00:11 +02:00  Make requests to create stairs
-        2e681dffb4  2025-08-02 10:58:33 +02:00  toggle to 90°
-        e893e53151  2025-08-02 09:46:43 +02:00  Implement create_staircase page
-        f99fcb8916  2025-08-02 09:35:30 +02:00  fix styling
-        593d4179e3  2025-08-02 09:25:04 +02:00  snap to original
-        51c0117b9d  2025-08-02 09:13:51 +02:00  Merge branch 'snap-to-grid' of https://repos.hackathon.bz.it/2025-summer/team-3 into snap-to-grid
-        4158885c11  2025-08-02 09:13:39 +02:00  custom icons
Degra02  5dac49edb5  2025-08-02 08:41:56 +02:00  Merge branch 'snap-to-grid' of https://repos.hackathon.bz.it/2025-summer/team-3 into snap-to-grid
-        d5b9d8e97d  2025-08-02 08:33:14 +02:00  add snap-to-original control, fix styling
Degra02  ca30fa1b4a  2025-08-02 08:12:08 +02:00  Merge branch 'snap-to-grid' of https://repos.hackathon.bz.it/2025-summer/team-3 into snap-to-grid
Degra02  1e6aba1e7f  2025-08-02 08:12:06 +02:00  level clone wip
-        1aa724e7fc  2025-08-02 08:08:39 +02:00  fix indicator
-        9215e39131  2025-08-02 07:59:16 +02:00  PURGE ANIMATIONS SEEEEEEEEE
Alessio  98794751d0  2025-08-02 07:58:53 +02:00  Merge branch 'ours' of https://repos.hackathon.bz.it/2025-summer/team-3 into ours
Alessio  acf169edaf  2025-08-02 07:58:27 +02:00  faster docker
Degra02  2f12b901ac  2025-08-02 07:54:26 +02:00  test another snap-to-edge and level clone
-        cb567d3b8e  2025-08-02 06:22:24 +02:00  merge conflict
-        e43f82b6ec  2025-08-02 06:13:30 +02:00  merge conflict
Degra02  f301c424ba  2025-08-02 02:25:10 +02:00  improve snap-to-edge
Degra02  f3acc31a1c  2025-08-02 02:23:04 +02:00  improve snap-to-edge
-        2804fd4104  2025-08-02 01:20:52 +02:00  Improve start_db.sh again
-        e646dd9d83  2025-08-02 01:09:45 +02:00  Fix start_db.sh again
-        90d3c9b7f5  2025-08-02 00:47:02 +02:00  Merge branch 'ours' of https://repos.hackathon.bz.it/2025-summer/team-3 into ours
Alessio  9a3b6c1e75  2025-08-02 00:45:48 +02:00  gitignore
Alessio  921825b303  2025-08-02 00:45:04 +02:00  docker mtf
-        e24a99eb9d  2025-08-02 00:38:51 +02:00  Add run_without_output to start_db.sh
-        404117d170  2025-08-02 00:37:20 +02:00  fix styling for indicator
-        63f2ccac3e  2025-08-02 00:29:23 +02:00  Fix bug in updates/fetch (convert to dict())
-        354949527b  2025-08-01 23:59:21 +02:00  fix pallina che si muove
-        890d41b9d0  2025-08-01 23:33:35 +02:00  Changes to start_db.sh and install.sh
45 changed files with 14216 additions and 155 deletions

.gitignore (vendored, +1)

@@ -73,3 +73,4 @@ target/
 #Ipython Notebook
 .ipynb_checkpoints
+local_run/data

docker/Dockerfile

@@ -15,32 +15,32 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN --mount=type=cache,target=/var/cache/apt,id=apt_$TARGETARCH --mount=type=tmpfs,target=/var/lib/apt/lists \
     rm /etc/apt/apt.conf.d/docker-clean && \
     apt-get update && apt-get install -y --no-install-recommends \
-    python3.12=3.12.3-1ubuntu0.5 \
+    python3.12 \
     # renovate: srcname=python3.12
-    libpython3.12=3.12.3-1ubuntu0.5 \
+    libpython3.12 \
     # renovate: srcname=python3.12
-    python3.12-venv=3.12.3-1ubuntu0.5 \
+    python3.12-venv \
     # renovate: srcname=python-pip
-    python3-pip=24.0+dfsg-1ubuntu1.1 \
+    python3-pip \
     # renovate: srcname=postgresql-16
-    libpq5=16.8-0ubuntu0.24.04.1 \
+    libpq5 \
     # renovate: srcname=postgresql-16
-    postgresql-client-16=16.8-0ubuntu0.24.04.1 \
-    curl=8.5.0-2ubuntu10.6 \
+    postgresql-client-16 \
+    curl \
     # renovate: srcname=pcre3
-    libpcre3=2:8.39-15build1 \
+    libpcre3 \
     # renovate: srcname=librsvg
-    librsvg2-2=2.58.0+dfsg-1build1 \
+    librsvg2-2 \
     # renovate: srcname=librsvg
-    gir1.2-rsvg-2.0=2.58.0+dfsg-1build1 \
+    gir1.2-rsvg-2.0 \
     # renovate: srcname=cairo
-    libcairo2=1.18.0-3build1 \
-    libgirepository-1.0-1=1.80.1-1 \
+    libcairo2 \
+    libgirepository-1.0-1 \
     # renovate: srcname=libmemcached
-    libmemcached11t64=1.1.4-1.1build3 \
+    libmemcached11t64 \
     gettext \
-    tzdata=2025b-0ubuntu0.24.04 \
-    ca-certificates=20240203 \
+    tzdata-legacy \
+    ca-certificates \
     # renovate: srcname=libzstd
     zstd=1.5.5+dfsg2-2build1.1
 ENV PATH="/usr/lib/postgresql/16/bin/:$PATH"

@@ -49,17 +49,17 @@ ENV PATH="/usr/lib/postgresql/16/bin/:$PATH"
 FROM base as builder
 RUN --mount=type=cache,target=/var/cache/apt,id=apt_$TARGETARCH --mount=type=tmpfs,target=/var/lib/apt/lists \
     apt-get update && apt-get install -y --no-install-recommends \
-    build-essential=12.10ubuntu1 \
+    build-essential \
     # renovate: srcname=python3.12
-    python3.12-dev=3.12.3-1ubuntu0.5 \
-    libpcre3-dev=2:8.39-15build1 \
+    python3.12-dev \
+    libpcre3-dev \
     # renovate: srcname=postgresql-16
-    libpq-dev=16.8-0ubuntu0.24.04.1 \
+    libpq-dev \
     # renovate: srcname=cairo
-    libcairo2-dev=1.18.0-3build1 \
-    libgirepository1.0-dev=1.80.1-1 \
+    libcairo2-dev \
+    libgirepository1.0-dev \
     # renovate: srcname=libmemcached
-    libmemcached-dev=1.1.4-1.1build3
+    libmemcached-dev
 RUN mkdir /app

local_run/.env (new file, +6)

@@ -0,0 +1,6 @@
C3NAV_TAG=latest
C3NAV_DOMAIN=c3nav.docker.localhost
# to change the default database name
#C3NAV_DATABASE_NAME=c3nav-37c3
# if you want to change the default path to the data dir, which by default is a folder called data in this directory
C3NAV_DOCKER_DATA_DIR=./data

local_run/SENSOR_README.md (new file, +152)

@@ -0,0 +1,152 @@
# NOI Sensor Management System
This system allows you to manage environmental sensors from the NOI Open Data Hub in c3nav, displaying them as overlay features on different levels/floors.
## Overview
The system supports:
- Multiple sensors on the same overlay but on different levels
- Dynamic addition of new sensors through Django management commands
- Automatic data scraping from NOI Open Data Hub APIs
- Real-time display of CO2, temperature, humidity and other environmental data
## Architecture
- **Single Overlay**: All NOI environmental sensors are managed under one `DataOverlay`
- **Multiple Levels**: Sensors can be placed on different floors (floor0, floor1, etc.)
- **Flexible Configuration**: Sensor locations and properties are configurable via the overlay's `sensor_config` field
- **Dynamic Discovery**: The system can automatically discover and display any sensor data from the NOI API
## Setup
The main setup is handled by the `up.sh` script, which:
1. Creates a single "NOI Environmental Sensors" overlay
2. Configures initial sensors with their coordinates and levels
3. Scrapes initial data from the NOI Open Data Hub
4. Applies necessary database migrations
## Managing Sensors
### 1. List All Sensors
```bash
# Using the helper script
./manage_noi_sensors.sh list
# Or directly
docker compose exec -T c3nav-core python manage.py list_sensors --overlay-id 1
```
### 2. Add a New Sensor
```bash
# Using the helper script
./manage_noi_sensors.sh add 'NOI:YourSensorID' 'Sensor Display Name' 300.0 250.0 floor1
# Or directly
docker compose exec -T c3nav-core python manage.py add_sensor \
--overlay-id 1 \
--sensor-id 'NOI:YourSensorID' \
--name 'Sensor Display Name' \
--x 300.0 \
--y 250.0 \
--level floor1
```
### 3. Scrape Data for All Sensors
```bash
# Using the helper script
./manage_noi_sensors.sh scrape
# Or directly
docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1
```
## Configuration Structure
The overlay's `sensor_config` field contains:
```json
{
    "data_path": "data",
    "mappings": {
        "id_field": "scode",
        "name_field": "sname",
        "x_field": "scoordinate.x",
        "y_field": "scoordinate.y"
    },
    "sensors": [
        {
            "id": "NOI:FreeSoftwareLab-Temperature",
            "coordinates": {"x": 291.0, "y": 241.0},
            "level": "floor1"
        },
        {
            "id": "NOI:NOI-A1-Floor1-CO2",
            "coordinates": {"x": 270.0, "y": 241.0},
            "level": "floor1"
        }
    ]
}
```
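The dotted paths in `mappings` (such as `scoordinate.x`) suggest a nested lookup into each record of the API response. As a rough illustration, resolving such a path could look like the sketch below; `resolve_path` is a hypothetical helper written for this README, not a function shipped with c3nav:

```python
# Hypothetical sketch: walking a dotted mapping path like 'scoordinate.x'
# through one record of the NOI Open Data Hub response.
def resolve_path(record: dict, dotted_path: str):
    value = record
    for key in dotted_path.split("."):
        value = value[key]  # descend one level per path segment
    return value

record = {
    "scode": "NOI:FreeSoftwareLab-Temperature",
    "sname": "Free Software Lab",
    "scoordinate": {"x": 291.0, "y": 241.0},
}
mappings = {"id_field": "scode", "x_field": "scoordinate.x"}

print(resolve_path(record, mappings["id_field"]))  # NOI:FreeSoftwareLab-Temperature
print(resolve_path(record, mappings["x_field"]))   # 291.0
```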
## Database Schema
### DataOverlay fields:
- `data_source_url`: URL to scrape sensor data from
- `sensor_config`: JSON configuration for sensor mapping and processing
### DataOverlayFeature fields:
- `sensor_id`: Unique identifier for the sensor
- `sensor_type`: Type of sensor (e.g., 'environmental')
- `sensor_value`: Single sensor value (nullable for multi-measurement sensors)
- `sensor_unit`: Unit of measurement (nullable for multi-measurement sensors)
- `coordinates_x`, `coordinates_y`: Position in c3nav coordinate system
- `last_updated`: Timestamp of last data update
- `sensor_data`: Raw sensor data for debugging
- `extra_data`: Processed sensor readings for display
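As a hedged sketch (the authoritative definitions live in `src/c3nav/mapdata/models/overlay.py` and may differ in types and options), the fields listed above correspond roughly to Django model fields like these:

```python
# Illustrative only: approximate shape of the sensor fields described above,
# not the actual c3nav model definition.
from django.db import models


class DataOverlayFeature(models.Model):
    sensor_id = models.CharField(max_length=255, null=True, blank=True)
    sensor_type = models.CharField(max_length=64, null=True, blank=True)  # e.g. 'environmental'
    sensor_value = models.FloatField(null=True)               # null for multi-measurement sensors
    sensor_unit = models.CharField(max_length=32, null=True)  # null for multi-measurement sensors
    coordinates_x = models.FloatField(null=True)              # c3nav coordinate system
    coordinates_y = models.FloatField(null=True)
    last_updated = models.DateTimeField(null=True)
    sensor_data = models.JSONField(null=True)                 # raw payload, kept for debugging
    extra_data = models.JSONField(null=True)                  # processed readings for display

    class Meta:
        app_label = 'mapdata'  # assumption for the sketch
```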
## Data Flow
1. **Configuration**: Sensors are configured in the overlay's `sensor_config`
2. **Scraping**: The `manage_sensors` command fetches data from NOI Open Data Hub
3. **Processing**: Data is processed according to sensor configuration
4. **Storage**: Sensor features are created/updated in the database
5. **Display**: Sensors appear as interactive points on the map
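Condensed into code, one scrape cycle (steps 2-4) could look roughly like the sketch below; this is a simplification, and the real `manage_sensors` command also maps readings into `extra_data`, colors, and icons:

```python
# Simplified sketch of one scrape cycle; details differ in manage_sensors.py.
import requests
from django.utils import timezone

from c3nav.mapdata.models import DataOverlay, DataOverlayFeature


def scrape_overlay(overlay: DataOverlay) -> None:
    config = overlay.sensor_config or {}
    payload = requests.get(overlay.data_source_url, timeout=30).json()
    records = payload.get(config.get('data_path', 'data'), [])
    configured_ids = {s['id'] for s in config.get('sensors', [])}
    id_field = config['mappings']['id_field']
    for record in records:
        sensor_id = record.get(id_field)
        if sensor_id not in configured_ids:
            continue  # only refresh sensors that are configured on this overlay
        DataOverlayFeature.objects.filter(
            overlay=overlay, sensor_id=sensor_id,
        ).update(sensor_data=record, last_updated=timezone.now())
```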
## Adding New Sensor Types
To add a new sensor from the NOI Open Data Hub:
1. Find the sensor ID in the NOI API (usually starts with "NOI:")
2. Determine the coordinates where it should appear on the map
3. Choose the appropriate level/floor
4. Add it using the `add_sensor` command
5. Run the scrape command to fetch initial data
## Troubleshooting
### Sensor not appearing on map
- Check if the level exists: `docker compose exec -T c3nav-core python manage.py shell -c "from c3nav.mapdata.models import Level; print([l.short_label for l in Level.objects.all()])"`
- Verify coordinates are within the map bounds
- Check if the overlay is enabled and visible
### No data being scraped
- Verify the sensor ID exists in the NOI Open Data Hub API
- Check the API URL is accessible: https://mobility.api.opendatahub.com/v2/flat/IndoorStation/*/latest
- Review logs during scraping for errors
### Data not updating
- Check the `last_updated` field in the sensor feature
- Verify the scraping command completed successfully
- Consider running the scrape command more frequently
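One way to keep readings fresh is to schedule the scrape command; for example, a crontab entry like the one below would refresh every five minutes (the checkout path and the interval are placeholders, not part of this setup):

```bash
# Hypothetical crontab entry; adjust the path to your local_run checkout.
*/5 * * * * cd /path/to/local_run && docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1
```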
## Files
- `up.sh`: Main setup script
- `manage_noi_sensors.sh`: Helper script for sensor management
- `src/c3nav/mapdata/management/commands/manage_sensors.py`: Core sensor management command
- `src/c3nav/mapdata/management/commands/add_sensor.py`: Command to add new sensors
- `src/c3nav/mapdata/management/commands/list_sensors.py`: Command to list sensors
- `src/c3nav/mapdata/models/overlay.py`: Database models
- `src/c3nav/mapdata/migrations/0140_add_temperature_fields.py`: Migration for sensor fields
- `src/c3nav/mapdata/migrations/0141_add_sensor_data_field.py`: Migration for sensor_data field

local_run/compose.yml (new file, +109)

@@ -0,0 +1,109 @@
# SPDX-FileCopyrightText: 2024 NOI Techpark
# SPDX-FileCopyrightText: 2024 https://github.com/c3nav/c3nav
#
# SPDX-License-Identifier: Apache-2.0
# Initially Copied from https://github.com/c3nav/c3nav/blob/034f8fe4633be3a982e94fd9e2f1fbc6d3dfafe1/deployment/docker/compose.yaml
x-restart-policy: &restart-policy
  restart: unless-stopped

x-depends_on-default: &depends_on
  condition: service_started

x-depends_on-healthy: &depends_on-healthy
  condition: service_healthy

x-healthcheck_defaults: &healthcheck_defaults
  interval: 10s
  timeout: 2s
  retries: 5
  start_period: 10s

x-c3nav-defaults: &c3nav-defaults
  build:
    context: ..
    dockerfile: docker/Dockerfile
    args:
      - COMMIT=${COMMIT}
  depends_on:
    redis:
      <<: *depends_on-healthy
    postgres:
      <<: *depends_on-healthy
  volumes:
    - type: bind
      source: ${C3NAV_DOCKER_DATA_DIR:-./data}
      target: /data
      bind:
        create_host_path: true

x-c3nav-environment: &c3nav-environment
  C3NAV_DEBUG: false
  C3NAV_LOGLEVEL: info
  C3NAV_CONFIG: /data/c3nav.cfg
  C3NAV_DATA_DIR: /data
  C3NAV_DJANGO_ALLOWED_HOSTS: localhost,127.0.0.1,c3nav-core,c3nav-tiles,${C3NAV_DJANGO_ALLOWED_HOSTS:-""},${C3NAV_DOMAIN}
  C3NAV_DJANGO_REVERSE_PROXY: "true"
  UWSGI_WORKERS: ${C3NAV_CORE_WORKERS:-2}
  C3NAV_DATABASE_BACKEND: postgresql
  C3NAV_DATABASE_NAME: ${C3NAV_DATABASE_NAME:-c3nav}
  C3NAV_DATABASE_USER: ${C3NAV_DATABASE_USER:-postgres}
  C3NAV_DATABASE_HOST: postgres
  C3NAV_REDIS: "redis://redis:6379/0"
  C3NAV_CELERY_BROKER: "redis://redis:6379/1"
  C3NAV_CELERY_BACKEND: "redis://redis:6379/2"

services:
  c3nav-core:
    <<: [*restart-policy, *c3nav-defaults]
    command: webstatic
    environment:
      <<: *c3nav-environment
      C3NAV_AUTOMIGRATE: yes
    healthcheck:
      <<: *healthcheck_defaults
      test: curl -f http://localhost:8000/
    ports:
      - "${SERVER_PORT:-8000}:8000"

  c3nav-workers:
    <<: [*restart-policy, *c3nav-defaults]
    command: worker
    environment:
      <<: *c3nav-environment
      C3NAV_AUTOMIGRATE: no
    healthcheck:
      interval: 30s
      timeout: 15s
      retries: 2
      start_period: 5s
      test: entrypoint worker_healthcheck

  postgres:
    <<: *restart-policy
    image: postgres:16
    healthcheck:
      <<: *healthcheck_defaults
      test: pg_isready -U postgres && psql -U postgres -d ${C3NAV_DATABASE_NAME:-c3nav} -c "SELECT 1;"
    environment:
      POSTGRES_DB: ${C3NAV_DATABASE_NAME:-c3nav}
      POSTGRES_HOST_AUTH_METHOD: "trust"
    volumes:
      - "c3nav-postgres:/var/lib/postgresql/data"

  redis:
    <<: *restart-policy
    image: redis:7.2
    command: redis-server --save 60 1 --loglevel warning
    healthcheck:
      <<: *healthcheck_defaults
      test: redis-cli ping
    volumes:
      - "c3nav-redis:/data"
    ulimits:
      nofile:
        soft: 10032
        hard: 10032

volumes:
  c3nav-postgres:
    external: true
  c3nav-redis:
    external: true

c3nav.cfg (new file, +44)

@@ -0,0 +1,44 @@
# SPDX-FileCopyrightText: 2024 NOI Techpark <digital@noi.bz.it>
#
# SPDX-License-Identifier: CC0-1.0
[c3nav]
svg_renderer=rsvg
editor=False
user_registration=false
initial_level=2
header_logo=/data/logo/sfscon.svg
imprint_link=https://www.sfscon.it/impressum/
branding=SFSCON map
app_enabled=False
[locale]
languages=en
[theme]
#is_dark=False
#randomize_primary_color=False
map_background=#f7f8f8
map_wall_fill=#e5e4e5
map_wall_border=#c1bfbe
#map_door_fill=#ffffff
map_ground_fill=#f7f8f8
#map_obstacles_default_fill=#b7b7b7
#map_obstacles_default_border=#888888
#css_primary=#9b4dca
#css_initial=#ffffff
#css_primary=#9b4dca
#css_logo=None
#css_secondary=#525862
#css_tertiary=#f0f0f0
#css_quaternary=#767676
#css_quinary=#cccccc
css_header_text=#000000
#css_header_text_hover=#eeeeee
css_header_background=#ffffff
#css_shadow=#000000
#css_overlay_background=#ffffff
#css_grid=#000000
#css_modal_backdrop=#000000
#css_route_dots_shadow=#ffffff
#map_background=#dcdcdc

sfscon.svg (new file, 3.7 KiB)

@@ -0,0 +1,6 @@
<!--
SPDX-FileCopyrightText: 2024 NOI Techpark <digital@noi.bz.it>
SPDX-License-Identifier: CC0-1.0
-->
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 186.5 37.18"><defs><style>.a{isolation:isolate;}.b{fill:#1c1c1c;}</style></defs><title>logo</title><g class="a"><g class="a"><path class="b" d="M15.85,54.66a20.05,20.05,0,0,1-7-1.29,18.42,18.42,0,0,1-6.25-3.88l4.62-5.56a17.43,17.43,0,0,0,4.27,2.7,11.07,11.07,0,0,0,4.59,1,6.29,6.29,0,0,0,3.77-.93,3,3,0,0,0,1.23-2.53,2.83,2.83,0,0,0-.35-1.43,3.44,3.44,0,0,0-1-1.07A9.71,9.71,0,0,0,18,40.83l-2.12-.91-4.67-2a13.8,13.8,0,0,1-2.67-1.43,11.92,11.92,0,0,1-2.31-2.06A9.29,9.29,0,0,1,4.63,31.7,9.68,9.68,0,0,1,4,28.15,9.38,9.38,0,0,1,5,24,10.22,10.22,0,0,1,7.6,20.59a12.74,12.74,0,0,1,4-2.29,15.35,15.35,0,0,1,5.14-.82A16.59,16.59,0,0,1,23,18.69a15.66,15.66,0,0,1,5.36,3.52l-4.12,5.12a15.54,15.54,0,0,0-3.52-2.12,9.91,9.91,0,0,0-3.91-.75,5.73,5.73,0,0,0-3.32.86,2.73,2.73,0,0,0-1.24,2.39,2.36,2.36,0,0,0,.41,1.4,4,4,0,0,0,1.16,1.05,12.08,12.08,0,0,0,1.73.88c.66.27,1.37.58,2.14.91l4.62,1.87a12.6,12.6,0,0,1,5.2,3.63,9.27,9.27,0,0,1,1.9,6.1,10.19,10.19,0,0,1-3.55,7.78,13,13,0,0,1-4.26,2.42A16.79,16.79,0,0,1,15.85,54.66Z" transform="translate(-2.59 -17.48)"/><path class="b" d="M35.48,54V18.14H58.09V25H43.57v8.3H56v6.82H43.57V54Z" transform="translate(-2.59 -17.48)"/><path class="b" d="M73.76,54.66a20.13,20.13,0,0,1-7-1.29,18.29,18.29,0,0,1-6.24-3.88l4.62-5.56a17.18,17.18,0,0,0,4.26,2.7,11.14,11.14,0,0,0,4.59,1,6.27,6.27,0,0,0,3.77-.93A3,3,0,0,0,79,44.21a2.74,2.74,0,0,0-.36-1.43,3.44,3.44,0,0,0-1-1.07,9.22,9.22,0,0,0-1.64-.88l-2.12-.91-4.68-2a14,14,0,0,1-2.66-1.43,11.36,11.36,0,0,1-2.31-2.06,9.53,9.53,0,0,1-1.63-2.75,9.9,9.9,0,0,1-.6-3.55A9.51,9.51,0,0,1,62.87,24a10.49,10.49,0,0,1,2.64-3.41,12.9,12.9,0,0,1,4-2.29,15.31,15.31,0,0,1,5.14-.82,16.63,16.63,0,0,1,6.19,1.21,15.76,15.76,0,0,1,5.36,3.52l-4.13,5.12a15.12,15.12,0,0,0-3.52-2.12,9.83,9.83,0,0,0-3.9-.75,5.74,5.74,0,0,0-3.33.86,2.73,2.73,0,0,0-1.24,2.39,2.36,2.36,0,0,0,.41,1.4,4.1,4.1,0,0,0,1.16,1.05,12.08,12.08,0,0,0,1.73.88l2.15.91,4.62,1.87a12.66,12.66,0,0,1,5.2,3.63,9.33,9.33,0,0,1,1.89,6.1,10.14,10.14,0,0,1-3.54,7.78,13.09,13.09,0,0,1-4.27,2.42A16.71,16.71,0,0,1,73.76,54.66Z" transform="translate(-2.59 -17.48)"/><path class="b" d="M108.36,54.66a17.49,17.49,0,0,1-6.44-1.18A15.15,15.15,0,0,1,96.62,50,16.35,16.35,0,0,1,93,44.24a22.08,22.08,0,0,1-1.32-8,21.51,21.51,0,0,1,1.38-7.95,17.81,17.81,0,0,1,3.68-5.91,15.55,15.55,0,0,1,5.39-3.68,16.83,16.83,0,0,1,6.49-1.27,13.59,13.59,0,0,1,6.22,1.4,17.22,17.22,0,0,1,4.56,3.28l-4.29,5.17a13.28,13.28,0,0,0-2.89-2.07,7.23,7.23,0,0,0-3.43-.8,7.72,7.72,0,0,0-3.44.8,8.18,8.18,0,0,0-2.81,2.28,11.5,11.5,0,0,0-1.86,3.63A15.85,15.85,0,0,0,100,36q0,5.57,2.36,8.61a7.59,7.59,0,0,0,6.33,3.05,8,8,0,0,0,3.93-1,12.09,12.09,0,0,0,3-2.39L120,49.38A14.55,14.55,0,0,1,108.36,54.66Z" transform="translate(-2.59 -17.48)"/><path class="b" d="M138,54.66a16.61,16.61,0,0,1-6.63-1.29,14.4,14.4,0,0,1-5.14-3.71,17.17,17.17,0,0,1-3.33-5.89,24.22,24.22,0,0,1-1.18-7.86,24,24,0,0,1,1.18-7.84,16.29,16.29,0,0,1,3.33-5.78,14.17,14.17,0,0,1,5.14-3.57,18.34,18.34,0,0,1,13.26,0,14.09,14.09,0,0,1,5.14,3.6,16.39,16.39,0,0,1,3.32,5.8,23.82,23.82,0,0,1,1.19,7.79A24.22,24.22,0,0,1,153,43.77a17,17,0,0,1-3.32,5.89,14.49,14.49,0,0,1-5.14,3.71A16.61,16.61,0,0,1,138,54.66Zm0-7a6.7,6.7,0,0,0,5.83-3.16c1.43-2.11,2.14-5,2.14-8.6s-.71-6.4-2.14-8.42a7.12,7.12,0,0,0-11.66,0c-1.43,2-2.15,4.82-2.15,8.42s.72,6.49,2.15,8.6A6.69,6.69,0,0,0,138,47.67Z" transform="translate(-2.59 -17.48)"/><path class="b" d="M161,54V18.14h8.31L178.65,36l3.52,7.92h.22c-.19-1.91-.4-4-.64-6.38s-.35-4.58-.35-6.71V18.14h7.7V54h-8.31l-9.35-17.87-3.52-7.81h-.22c.18,2,.4,4.1.63,6.38a62.75,62.75,0,0,1,.36,6.59V54Z" transform="translate(-2.59 -17.48)"/></g></g></svg>


local_run/db/auth_user.sql (new file, +85)

@@ -0,0 +1,85 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 17.5 (Debian 17.5-1.pgdg120+1)
-- Dumped by pg_dump version 17.5
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: auth_user; Type: TABLE; Schema: public; Owner: mindshub
--
CREATE TABLE public.auth_user (
id integer NOT NULL,
password character varying(128) NOT NULL,
last_login timestamp with time zone,
is_superuser boolean NOT NULL,
username character varying(150) NOT NULL,
first_name character varying(150) NOT NULL,
last_name character varying(150) NOT NULL,
email character varying(254) NOT NULL,
is_staff boolean NOT NULL,
is_active boolean NOT NULL,
date_joined timestamp with time zone NOT NULL
);
ALTER TABLE public.auth_user OWNER TO mindshub;
--
-- Name: auth_user_id_seq; Type: SEQUENCE; Schema: public; Owner: mindshub
--
ALTER TABLE public.auth_user ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
SEQUENCE NAME public.auth_user_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1
);
--
-- Name: auth_user auth_user_pkey; Type: CONSTRAINT; Schema: public; Owner: mindshub
--
ALTER TABLE ONLY public.auth_user
ADD CONSTRAINT auth_user_pkey PRIMARY KEY (id);
--
-- Name: auth_user auth_user_username_key; Type: CONSTRAINT; Schema: public; Owner: mindshub
--
ALTER TABLE ONLY public.auth_user
ADD CONSTRAINT auth_user_username_key UNIQUE (username);
--
-- Name: auth_user_username_6821ab7c_like; Type: INDEX; Schema: public; Owner: mindshub
--
CREATE INDEX auth_user_username_6821ab7c_like ON public.auth_user USING btree (username varchar_pattern_ops);
--
-- PostgreSQL database dump complete
--

local_run/db/dump.sh (new file, +6)

@@ -0,0 +1,6 @@
# SPDX-FileCopyrightText: 2024 NOI Techpark <digital@noi.bz.it>
#
# SPDX-License-Identifier: CC0-1.0
#!/bin/bash
docker exec -i c3nav-postgres-1 su - postgres -c 'pg_dump --clean --if-exists --no-owner --exclude-table=public.auth_user c3nav' > dump.sql

local_run/db/dump.sql (new file, +10780; diff suppressed because one or more lines are too long)

local_run/db/restore.sh (new file, +6)

@@ -0,0 +1,6 @@
# SPDX-FileCopyrightText: 2024 NOI Techpark <digital@noi.bz.it>
#
# SPDX-License-Identifier: CC0-1.0
#!/bin/bash
cat dump.sql | docker exec -i c3nav-postgres-1 su - postgres -c 'psql c3nav'

local_run/manage_noi_sensors.sh (new executable file, +46)

@@ -0,0 +1,46 @@
#!/bin/bash
# Helper script to manage NOI sensors
# Usage: ./manage_noi_sensors.sh [add|list|scrape] [args...]
COMPOSE_EXEC="docker compose exec -T c3nav-core python manage.py"
case "$1" in
"add")
if [ $# -lt 6 ]; then
echo "Usage: $0 add <sensor-id> <name> <x> <y> <level>"
echo "Example: $0 add 'NOI:MyNewSensor' 'My New Sensor' 300.0 250.0 floor1"
exit 1
fi
SENSOR_ID="$2"
NAME="$3"
X="$4"
Y="$5"
LEVEL="$6"
echo "Adding sensor: $NAME ($SENSOR_ID) at ($X, $Y) on $LEVEL"
$COMPOSE_EXEC add_sensor --overlay-id 1 --sensor-id "$SENSOR_ID" --name "$NAME" --x "$X" --y "$Y" --level "$LEVEL"
;;
"list")
echo "Listing all sensors in overlay 1:"
$COMPOSE_EXEC list_sensors --overlay-id 1
;;
"scrape")
echo "Scraping data for all sensors in overlay 1:"
$COMPOSE_EXEC manage_sensors --scrape-data --overlay-id 1
;;
*)
echo "NOI Sensor Management Helper"
echo "Usage: $0 [add|list|scrape] [args...]"
echo ""
echo "Commands:"
echo " add <sensor-id> <name> <x> <y> <level> - Add a new sensor"
echo " list - List all sensors"
echo " scrape - Scrape data for all sensors"
echo ""
echo "Examples:"
echo " $0 add 'NOI:NewSensor' 'My Sensor' 300.0 250.0 floor1"
echo " $0 list"
echo " $0 scrape"
exit 1
;;
esac

local_run/up.sh (new executable file, +117)

@@ -0,0 +1,117 @@
docker compose down --remove-orphans
rm -rf data || true
cp -r data-copy data
chmod 777 data
docker volume rm c3nav-postgres c3nav-redis || true
docker volume create c3nav-postgres
docker volume create c3nav-redis
# Start only postgres and redis first (no build since we pre-built)
docker compose up -d postgres redis
sleep 10
cat ./db/auth_user.sql | docker exec -i local_run-postgres-1 su - postgres -c 'psql c3nav'
# Load database dump before starting the main app
cat ./db/dump.sql | docker exec -i local_run-postgres-1 su - postgres -c 'psql c3nav' > /dev/null
# Fix geometry access permissions for anonymous users
docker exec -i local_run-postgres-1 psql -U postgres -d c3nav -c "UPDATE mapdata_space SET base_mapdata_accessible = true;"
# Now start the main services (no build since we pre-built)
docker compose up --build -d
echo "Creating Django superuser"
# docker exec -i local_run-c3nav-core-1 sh -c "echo \"from django.contrib.auth.models import User; [0].set_password('admin'); User.objects.get(username='admin').save()\" | /app/env/bin/python manage.py shell"
docker exec -i local_run-c3nav-core-1 sh -c "echo \"
from django.contrib.auth.models import User
user = User.objects.get_or_create(username='admin', defaults={'email': 'admin@example.com', 'is_superuser': True, 'is_staff': True})[0]
user.set_password('admin')
user.save()
print('Password set successfully for user:', user.username)
\" | /app/env/bin/python manage.py shell"
# Apply migrations after loading dump
echo "Applying migrations..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate --fake'
echo "Creating new migrations for sensor_data field..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py makemigrations mapdata'
echo "Applying new migrations..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate mapdata'
docker compose ps -a
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py clearmapcache --include-history --include-geometries && /app/env/bin/python manage.py collectstatic -l --no-input'
# Fix NumPy compatibility issue
echo "Applying NumPy compatibility fix..."
docker exec -i local_run-c3nav-core-1 sed -i 's/np\.fromstring(/np.frombuffer(/g' /app/c3nav/mapdata/utils/cache/indexed.py
# Process map updates to rebuild cache with NumPy fix
echo "Processing map updates to rebuild cache..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py processupdates'
# Setup sensor overlays
echo "Setting up sensor overlays..."
docker compose exec -T c3nav-core python manage.py shell << 'EOF'
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature
# Clear old overlays to avoid conflicts
DataOverlay.objects.filter(titles__en__icontains='Environmental').delete()
DataOverlay.objects.filter(titles__en__icontains='Temperature').delete()
# Create single NOI environmental sensor overlay with multiple sensors configuration
overlay = DataOverlay.objects.create(
titles={'en': 'NOI Environmental Sensors'},
description='Real-time CO2 and temperature sensors from NOI Open Data Hub - displays current readings with values and units',
default_geomtype='point',
data_source_url='https://mobility.api.opendatahub.com/v2/flat/IndoorStation/*/latest',
sensor_config={
'data_path': 'data',
'mappings': {
'id_field': 'scode',
'name_field': 'sname',
'x_field': 'scoordinate.x',
'y_field': 'scoordinate.y'
},
'sensors': [
{
'id': 'NOI:FreeSoftwareLab-Temperature',
'coordinates': {'x': 291.0, 'y': 241.0},
'level': 'floor1'
},
{
'id': 'NOI:NOI-A1-Floor1-CO2',
'coordinates': {'x': 270.0, 'y': 241.0},
'level': 'floor1'
}
]
},
update_interval=120
)
print(f"NOI sensor overlay created with ID {overlay.id}")
EOF
# Scrape real NOI sensor data
echo "Scraping NOI sensor data..."
# Give the database a moment to settle after overlay creation
sleep 2
# Scrape the overlay data (should automatically discover all configured sensors)
echo "Scraping overlay data (ID: 1)..."
docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1
# List all sensors to verify setup
echo "Listing all sensors in the overlay..."
docker compose exec -T c3nav-core python manage.py list_sensors --overlay-id 1
echo "Sensor setup completed!"
echo ""
echo "To add a new sensor to the overlay, use:"
echo "docker compose exec -T c3nav-core python manage.py add_sensor --overlay-id 1 --sensor-id 'NOI:YourSensorID' --name 'Your Sensor Name' --x 300.0 --y 250.0 --level floor1"
echo ""
echo "To scrape data for all sensors:"
echo "docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id 1"

@@ -8,7 +8,7 @@ from c3nav.api.exceptions import API404
 from c3nav.editor.api.base import api_etag_with_update_cache_key
 from c3nav.editor.api.geometries import get_level_geometries_result, get_space_geometries_result
 from c3nav.editor.api.schemas import EditorGeometriesElemSchema, EditorID, GeometryStylesSchema, UpdateCacheKey, \
-    EditorBeaconsLookup
+    EditorBeaconsLookup, CloneFloorRequestSchema, CloneFloorResponseSchema
 from c3nav.editor.views.base import editor_etag_func, accesses_mapdata
 from c3nav.mapdata.api.base import api_etag
 from c3nav.mapdata.models import Source

@@ -146,3 +146,28 @@ def beacons_lookup(request):
        wifi_beacons=wifi_beacons,
        ibeacons=ibeacons,
    ).model_dump(mode="json")

@editor_api_router.post('/clone-floor/', summary="clone floor items",
                        description="clone selected map items from one floor to another",
                        response={200: CloneFloorResponseSchema, **API404.dict(),
                                  **auth_permission_responses},
                        openapi_extra={"security": [{"APIKeyAuth": ["editor_access", "write"]}]})
def clone_floor(request, data: CloneFloorRequestSchema):
    from c3nav.editor.utils import clone_level_items
    try:
        result = clone_level_items(
            request=request,
            source_level_id=data.source_level_id,
            target_level_id=data.target_level_id,
            items=data.items,
            keep_sync=data.keep_sync
        )
        return result
    except Exception as e:
        return CloneFloorResponseSchema(
            success=False,
            cloned_items=[],
            message=f"Error cloning items: {str(e)}"
        ).model_dump(mode="json")

src/c3nav/editor/api/schemas.py

@@ -125,3 +125,27 @@ class EditorBeaconsLookup(BaseSchema):
            ]
        ]
    ]

class CloneItemSchema(BaseSchema):
    item_type: Annotated[str, APIField(title="geometry type (e.g., 'area', 'obstacle', 'space')")]
    item_id: EditorID


class CloneFloorRequestSchema(BaseSchema):
    source_level_id: EditorID
    target_level_id: EditorID
    items: list[CloneItemSchema]
    keep_sync: Annotated[bool, APIField(default=False, title="keep cloned items synchronized across levels")]


class ClonedItemResult(BaseSchema):
    item_type: str
    original_id: EditorID
    cloned_id: EditorID


class CloneFloorResponseSchema(BaseSchema):
    success: bool
    cloned_items: list[ClonedItemResult]
    message: str
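For reference, a request that satisfies these schemas might look like the sketch below; the `/api/v2/editor/` prefix is inferred from the geometry URL used elsewhere in this diff, the host and IDs are placeholders, and authentication (editor access with write permission) is omitted:

```bash
# Hypothetical invocation of the new clone-floor endpoint.
curl -X POST 'http://localhost:8000/api/v2/editor/clone-floor/' \
  -H 'Content-Type: application/json' \
  -d '{
        "source_level_id": 1,
        "target_level_id": 2,
        "items": [{"item_type": "stair", "item_id": 42}],
        "keep_sync": false
      }'
```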

@@ -567,56 +567,83 @@ label.theme-color-label {
 .leaflet-control-snap {
     background-color: white;
     border-radius: 4px;
+    border: 2px solid rgba(0,0,0,0.2);
     background-clip: padding-box;

-    .snap-toggle {
+    /* watchout for leaflet.css trying to override a:hover with a different height/width */
+    a.snap-toggle, a.snap-to-original-toggle, a.snap-to-90-toggle {
+        background-size: 30px 30px;
         display: block;
         width: 30px;
         height: 30px;
-        line-height: 26px;
-        text-align: center;
-        text-decoration: none;
-        font-size: 18px;
         background-color: white;
         color: #666;
-        border-radius: 2px;
+        border-radius: 4px;
+        transition: all 0.2s ease;

         &:hover {
-            background-color: #f4f4f4;
-            color: #333;
+            background-color: #a7a7a7;
         }

         &.active {
-            background-color: #4CAF50;
+            background-color: #b0ecb2;
+            border: 2px solid green;
             color: white;

             &:hover {
-                background-color: #45a049;
+                background-color: #7ac27d;
             }
         }
     }

+    a.snap-to-90-toggle {
+        background-color: yellow !important;
+    }
+
+    /* icons */
+    a.snap-toggle {
+        background-image: url("/static/img/snap-to-edges-icon.svg");
+    }
+    a.snap-to-original-toggle {
+        background-image: url("/static/img/snap-to-original-icon.svg");
+    }
+    a.snap-to-90-toggle {
+        background-image: url("/static/img/snap-to-90-icon.svg");
+    }
 }

 /* Snap indicator styles */
 .snap-indicator {
+    transform: scale(11.6);
+    transform-box: fill-box;
+    transform-origin: center;
     z-index: 1000;
     pointer-events: none;
-    animation: snap-pulse 1s infinite;
 }

-@keyframes snap-pulse {
-    0% {
-        opacity: 0.8;
-        transform: scale(1);
-    }
-    50% {
-        opacity: 1;
-        transform: scale(1.2);
-    }
-    100% {
-        opacity: 0.8;
-        transform: scale(1);
-    }
-}
+/* Edge highlight styles for snap-to-edges */
+.edge-highlight {
+    z-index: 999;
+    pointer-events: none;
+}
+
+.original-edge-highlight {
+    z-index: 1000;
+    pointer-events: none;
+}
+
+/* Right-angle snap indicators */
+.right-angle-reference {
+    z-index: 998;
+    pointer-events: none;
+}
+
+.right-angle-line {
+    z-index: 1001;
+    pointer-events: none;
+}
+
+.right-angle-square {
+    z-index: 1002;
+    pointer-events: none;
+}

File diff suppressed because it is too large.

editor/create_staircase.html (new template, +40)

@@ -0,0 +1,40 @@
{% load bootstrap3 %}
{% load i18n %}
{% include 'editor/fragment_levels.html' %}
<h3>
{% blocktrans %}Add staircase{% endblocktrans %}
</h3>
{% bootstrap_messages %}
<form space="{{ space }}" {% if nozoom %}data-nozoom {% endif %}data-onbeforeunload data-new="staircase" data-geomtype="polygon" {% if access_restriction_select %} data-access-restriction-select{% endif %}>
{% csrf_token %}
{% bootstrap_form form %}
<div class="form-group">
<label for="stairway-steps">Number of Steps:</label>
<input type="number" id="stairway-steps" class="form-control" value="10">
</div>
{% buttons %}
<button class="invisiblesubmit" type="submit"></button>
<!-- <div class="btn-group">
<button type="button" id="generate-staircase" accesskey="g" class="btn btn-primary pull-right">
{% trans 'Generate stairs' %}
</button>
</div> -->
{% if can_edit %}
{% if not nosave %}
<button type="submit" accesskey="m" class="btn btn-primary pull-right">
{% trans 'Save' %}
</button>
{% endif %}
{% endif %}
<a class="btn {% if new %}btn-danger{% else %}btn-default {% if can_edit %}pull-right{% endif %}{% endif %} cancel-btn" href="{{ back_url }}">
{% if can_edit %}
{% trans 'Cancel' %}
{% else %}
{% trans 'Back' %}
{% endif %}
</a>
{% endbuttons %}
</form>

@@ -16,6 +16,9 @@
     {% trans 'Level' as model_title %}
     <i class="glyphicon glyphicon-pencil"></i> {% blocktrans %}Edit {{ model_title }}{% endblocktrans %}
 </a>
+<button id="clone-floor-btn" class="btn btn-info btn-xs" style="margin-left: 5px;" title="Clone selected items to another floor">
+    <i class="glyphicon glyphicon-copy"></i> Clone to Floor
+</button>
 </p>
 <p>
     {% if level.on_top_of == None %}

@@ -25,6 +28,45 @@
     {% endif %}
 </p>

+<!-- Clone Floor Interface -->
+<div id="clone-floor-selector" style="display: none; margin: 10px 0; padding: 10px; border: 1px solid #ddd; border-radius: 4px; background-color: #f9f9f9;">
+    <h4><i class="glyphicon glyphicon-copy"></i> Clone Selected Items</h4>
+    <div class="form-group">
+        <label for="target-level-select">Target Level:</label>
+        <select id="target-level-select" class="form-control">
+            <option value="">Select target level...</option>
+            {% for l in levels %}
+                {% if l.pk != level.pk %}
+                    <option value="{{ l.pk }}">{{ l.title }}</option>
+                {% endif %}
+            {% endfor %}
+        </select>
+    </div>
+    <div class="checkbox">
+        <label>
+            <input type="checkbox" id="keep-sync-checkbox"> Keep items synchronized across levels
+        </label>
+    </div>
+    <div class="form-group">
+        <button id="execute-clone-btn" class="btn btn-primary btn-sm">
+            <i class="glyphicon glyphicon-ok"></i> Clone Items
+        </button>
+        <button id="select-all-btn" class="btn btn-success btn-sm" style="margin-left: 5px;">
+            <i class="glyphicon glyphicon-check"></i> Select All
+        </button>
+        <button id="clear-selection-btn" class="btn btn-warning btn-sm" style="margin-left: 5px;">
+            <i class="glyphicon glyphicon-unchecked"></i> Clear Selection
+        </button>
+        <button id="cancel-clone-btn" class="btn btn-default btn-sm" style="margin-left: 5px;">
+            <i class="glyphicon glyphicon-remove"></i> Cancel
+        </button>
+    </div>
+    <div id="selected-items-info" class="alert alert-info" style="margin-bottom: 0;">
+        <strong>Selected Items: <span id="selected-count">0</span></strong>
+        <p style="margin: 5px 0 0 0; font-size: 12px;">Click on map items to select them for cloning. Selected items will be highlighted in red.</p>
+    </div>
+</div>
+
 {% url 'editor.levels.graph' level=level.pk as graph_url %}
 {% url 'editor.levels.overlays' level=level.pk as overlays_url %}
 {% include 'editor/fragment_child_models.html' with graph_url=graph_url overlays_url=overlays_url %}

@@ -20,6 +20,11 @@
     <a class="btn btn-default btn-xs" accesskey="n" href="{{ create_url }}">
         <i class="glyphicon glyphicon-plus"></i> {% blocktrans %}New {{ model_title }}{% endblocktrans %}
     </a>
+    {% if model_title == "Stair" %}
+        <a class="btn btn-default btn-xs" accesskey="n" href="/editor/spaces/{{ space.id }}/staircase">
+            <i class="glyphicon glyphicon-plus"></i> {% blocktrans %}New staircase{% endblocktrans %}
+        </a>
+    {% endif %}
 {% endif %}
 {% if explicit_edit %}

@@ -4,7 +4,7 @@ from django.views.generic import TemplateView
 from c3nav.editor.views.account import change_password_view, login_view, logout_view, register_view
 from c3nav.editor.views.changes import changeset_detail, changeset_edit, changeset_redirect
-from c3nav.editor.views.edit import edit, graph_edit, level_detail, list_objects, main_index, sourceimage, space_detail
+from c3nav.editor.views.edit import edit, graph_edit, level_detail, list_objects, main_index, staircase_edit, sourceimage, space_detail
 from c3nav.editor.views.overlays import overlays_list, overlay_features, overlay_feature_edit
 from c3nav.editor.views.quest import QuestFormView
 from c3nav.editor.views.users import user_detail, user_redirect

@@ -33,7 +33,6 @@ def add_editor_urls(model_name, parent_model_name=None, with_list=True, explicit
     ])
     return result

-# todo: custom path converters
 urlpatterns = [
     path('levels/<int:pk>/', level_detail, name='editor.levels.detail'),

@@ -91,3 +90,4 @@ urlpatterns.extend(add_editor_urls('LeaveDescription', 'Space'))
 urlpatterns.extend(add_editor_urls('CrossDescription', 'Space'))
 urlpatterns.extend(add_editor_urls('BeaconMeasurement', 'Space'))
 urlpatterns.extend(add_editor_urls('RangingBeacon', 'Space'))
+urlpatterns.append(path('spaces/<int:space>/staircase', edit, name='editor.stairs.staircase', kwargs={'model': apps.get_model('mapdata', 'Stair')}))

src/c3nav/editor/utils.py

@@ -55,3 +55,274 @@ class SpaceChildEditUtils(DefaultEditUtils):
    @property
    def _geometry_url(self):
        return '/api/v2/editor/geometries/space/'+str(self.space.pk) # todo: resolve correctly

def clone_level_items(request, source_level_id, target_level_id, items, keep_sync=False):
    """
    Clone selected map items from one level to another.

    Args:
        request: Django request object
        source_level_id: ID of the source level
        target_level_id: ID of the target level
        items: List of items to clone (each with item_type and item_id)
        keep_sync: Whether to keep cloned items synchronized

    Returns:
        Dictionary with success status, cloned items list, and message
    """
    from django.apps import apps
    from django.contrib.contenttypes.models import ContentType
    from c3nav.mapdata.models import Level
    from c3nav.mapdata.models.sync import ClonedItemSync
    from c3nav.editor.api.schemas import CloneFloorResponseSchema

    # Get the source and target levels
    try:
        source_level = Level.objects.get(pk=source_level_id)
        target_level = Level.objects.get(pk=target_level_id)
    except Level.DoesNotExist:
        return CloneFloorResponseSchema(
            success=False,
            cloned_items=[],
            message="Source or target level not found"
        ).model_dump(mode="json")

    # Check if user has editor permissions (simplified check for API)
    if not hasattr(request, 'user') or not request.user.is_authenticated:
        return CloneFloorResponseSchema(
            success=False,
            cloned_items=[],
            message="Authentication required"
        ).model_dump(mode="json")

    cloned_items = []

    # Define supported item types and their model mappings
    SUPPORTED_TYPES = {
        'area': 'Area',
        'obstacle': 'Obstacle',
        'lineobstacle': 'LineObstacle',
        'stair': 'Stair',
        'ramp': 'Ramp',
        'hole': 'Hole',
        'column': 'Column',
        'poi': 'POI',
        'altitudemarker': 'AltitudeMarker',
        'space': 'Space',
        'building': 'Building',
        'door': 'Door'
    }

    try:
        print(f"Starting to process {len(items)} items")
        for i, item in enumerate(items):
            item_type = item.item_type.lower()
            item_id = item.item_id
            print(f"Processing item {i+1}/{len(items)}: {item_type} with ID {item_id}")
            if item_type not in SUPPORTED_TYPES:
                print(f"Item type '{item_type}' not supported. Supported types: {list(SUPPORTED_TYPES.keys())}")
                continue
            model_name = SUPPORTED_TYPES[item_type]
            Model = apps.get_model('mapdata', model_name)
            print(f"Looking for {model_name} with ID {item_id}")
            # Get the original item
            try:
                original_item = Model.objects.get(pk=item_id)
                print(f"Found original item: {original_item}")
            except Model.DoesNotExist:
                print(f"{model_name} with ID {item_id} not found")
                continue
            # Prepare the clone data
            clone_data = {}
            print(f"Model fields: {[f.name for f in Model._meta.fields]}")
            # Handle different item types differently
            if item_type == 'space':
                # For spaces, we need level but no space reference
                print(f"Processing space item with fields: {[f.name for f in Model._meta.fields]}")
                for field in Model._meta.fields:
                    if field.name in ['id', 'pk']:
                        continue
                    # Skip auto fields and read-only fields
                    if (hasattr(field, 'auto_created') and field.auto_created) or \
                            (hasattr(field, 'editable') and not field.editable):
                        print(f"Skipping field {field.name}: auto_created={getattr(field, 'auto_created', False)}, editable={getattr(field, 'editable', True)}")
                        continue
                    try:
                        field_value = getattr(original_item, field.name)
                        print(f"Field {field.name}: {field_value} (type: {type(field_value)})")
                    except (AttributeError, ValueError) as e:
                        print(f"Could not get field {field.name}: {e}")
                        continue
                    # Handle level reference
                    if field.name == 'level':
                        clone_data[field.name] = target_level
                        print(f"Set level to target_level: {target_level}")
                    else:
                        # Copy other fields - but check for special fields that shouldn't be copied
                        if field.name in ['slug']:
                            # Don't copy slug directly as it needs to be unique
                            # Instead, create a new unique slug based on the original
                            if field_value:
                                import re
                                base_slug = re.sub(r'-\d+$', '', field_value)  # Remove trailing numbers
                                new_slug = f"{base_slug}-clone"
                                # Make sure the new slug is unique
                                counter = 1
                                test_slug = new_slug
                                while Model.objects.filter(slug=test_slug).exists():
                                    test_slug = f"{new_slug}-{counter}"
                                    counter += 1
                                clone_data[field.name] = test_slug
                                print(f"Generated unique slug: {test_slug}")
                            continue
                        if field_value is not None:
                            clone_data[field.name] = field_value
                            print(f"Copied field {field.name}: {field_value}")
                print(f"Final space clone data: {clone_data}")
            else:
                # For space-related items (areas, obstacles, etc.)
                space_found = False
                for field in Model._meta.fields:
                    if field.name in ['id', 'pk']:
                        continue
                    # Skip auto fields and read-only fields
                    if (hasattr(field, 'auto_created') and field.auto_created) or \
                            (hasattr(field, 'editable') and not field.editable):
                        continue
                    try:
                        field_value = getattr(original_item, field.name)
                    except (AttributeError, ValueError):
                        continue
                    # Handle level reference
                    if field.name == 'level':
                        clone_data[field.name] = target_level
                    # Handle space reference - need to find equivalent space on target level
                    elif field.name == 'space':
                        if hasattr(original_item, 'space') and original_item.space:
                            original_space = original_item.space
                            # Try to find a space with the same slug/title on target level
                            try:
                                target_space = target_level.spaces.filter(
                                    title=original_space.title
                                ).first()
                                if target_space:
                                    clone_data[field.name] = target_space
                                    space_found = True
                                    print(f"Found target space: {target_space}")
                                else:
                                    print(f"No equivalent space found for '{original_space.title}' on target level")
                            except Exception as e:
                                print(f"Error finding target space: {e}")
                    else:
                        # Copy other fields
                        if field_value is not None:
                            clone_data[field.name] = field_value
                # Skip space-related items if no equivalent space found
                if 'space' in [f.name for f in Model._meta.fields] and not space_found:
                    print(f"Skipping {item_type} {item_id} because no equivalent space found")
                    continue
            print(f"Clone data for {item_type} {item_id}: {clone_data}")
            # Create the cloned item
            try:
                print(f"Attempting to clone {model_name} with data: {clone_data}")
                print(f"Creating {model_name} object...")
                cloned_item = Model(**clone_data)
                print(f"Created object, now saving...")
                cloned_item.save()
                print(f"Successfully created cloned item with ID: {cloned_item.pk}")
            except Exception as create_error:
                print(f"Error creating {model_name}: {create_error}")
                print(f"Error type: {type(create_error)}")
                print(f"Clone data was: {clone_data}")
                # Try a different approach - create empty object and set fields one by one
                try:
                    print("Trying field-by-field approach...")
                    cloned_item = Model()
                    for field_name, field_value in clone_data.items():
                        try:
                            setattr(cloned_item, field_name, field_value)
                            print(f"Set {field_name} = {field_value}")
                        except Exception as field_error:
                            print(f"Could not set {field_name}={field_value}: {field_error}")
                    cloned_item.save()
                    print(f"Successfully created item using setattr approach with ID: {cloned_item.pk}")
                except Exception as setattr_error:
                    print(f"Setattr approach also failed: {setattr_error}")
                    continue  # Skip this item
            # Create sync relationship if requested
            if keep_sync:
                try:
                    original_ct = ContentType.objects.get_for_model(Model)
                    cloned_ct = ContentType.objects.get_for_model(Model)
                    # Define fields that should be synchronized for each model type
                    sync_field_map = {
                        'Area': ['title', 'access_restriction', 'slow_down_factor'],
                        'Obstacle': ['height', 'altitude'],
                        'LineObstacle': ['width', 'height', 'altitude'],
                        'Stair': [],  # Geometry-only, no additional fields to sync
                        'Ramp': [],
                        'Hole': [],
                        'Column': ['access_restriction'],
                        'POI': ['title', 'access_restriction'],
                        'AltitudeMarker': ['groundaltitude'],
                        'Space': ['title', 'access_restriction', 'outside'],
                        'Building': ['title'],
                        'Door': ['access_restriction']
                    }
                    sync_fields = sync_field_map.get(model_name, [])
                    ClonedItemSync.objects.create(
                        original_content_type=original_ct,
                        original_object_id=original_item.pk,
                        cloned_content_type=cloned_ct,
                        cloned_object_id=cloned_item.pk,
                        sync_fields=sync_fields
                    )
                except Exception as sync_error:
                    # Don't fail the entire operation if sync setup fails
                    print(f"Warning: Could not create sync relationship: {sync_error}")
            cloned_items.append({
                'item_type': item_type,
                'original_id': item_id,
                'cloned_id': cloned_item.pk
            })
            print(f"Successfully added item {i+1} to cloned_items list")
        print(f"Finished processing. Total cloned items: {len(cloned_items)}")
        return CloneFloorResponseSchema(
            success=True,
            cloned_items=cloned_items,
            message=f"Successfully cloned {len(cloned_items)} items"
        ).model_dump(mode="json")
    except Exception as e:
        return CloneFloorResponseSchema(
            success=False,
            cloned_items=cloned_items,
            message=f"Error during cloning: {str(e)}"
        ).model_dump(mode="json")

src/c3nav/editor/views/edit.py

@@ -70,6 +70,12 @@ def main_index(request):
    })


@etag(editor_etag_func)
@accesses_mapdata
@sidebar_view
def staircase_edit(request, space):
    return render(request, "editor/create_staircase.html")


@etag(editor_etag_func)
@accesses_mapdata
@sidebar_view

@@ -405,6 +411,10 @@ def edit(request, pk=None, model=None, level=None, space=None, on_top_of=None, e
        "access_restriction_select": True,
    })

    if request.path.endswith("staircase"):
        ctx["space"] = space_id
        return render(request, 'editor/create_staircase.html', ctx)
    else:
        return render(request, 'editor/edit.html', ctx)

@@ -125,7 +125,7 @@ def fetch_updates(request, response: HttpResponse):
     }
     if cross_origin is None:
         result.update({
-            'user_data': request.user_data,
+            'user_data': dict(request.user_data),
         })
     if cross_origin is not None:

@@ -8,3 +8,4 @@ class MapdataConfig(AppConfig):
         from c3nav.mapdata.utils.cache.changes import register_signals
         register_signals()
         import c3nav.mapdata.metrics  # noqa
+        import c3nav.mapdata.signals.sync  # noqa

src/c3nav/mapdata/management/commands/add_sensor.py (new file, +141)

@@ -0,0 +1,141 @@
import json
from django.core.management.base import BaseCommand
from django.utils import timezone
from shapely.geometry import Point
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level


class Command(BaseCommand):
    help = 'Add a new sensor to an existing overlay'

    def add_arguments(self, parser):
        parser.add_argument(
            '--overlay-id',
            type=int,
            required=True,
            help='ID of the overlay to add the sensor to',
        )
        parser.add_argument(
            '--sensor-id',
            type=str,
            required=True,
            help='Unique ID for the sensor (e.g., NOI:Sensor-ID)',
        )
        parser.add_argument(
            '--name',
            type=str,
            required=True,
            help='Display name for the sensor',
        )
        parser.add_argument(
            '--x',
            type=float,
            required=True,
            help='X coordinate in c3nav coordinate system',
        )
        parser.add_argument(
            '--y',
            type=float,
            required=True,
            help='Y coordinate in c3nav coordinate system',
        )
        parser.add_argument(
            '--level',
            type=str,
            default='floor0',
            help='Level/floor where the sensor is located (default: floor0)',
        )
        parser.add_argument(
            '--sensor-type',
            type=str,
            default='environmental',
            help='Type of sensor (default: environmental)',
        )

    def handle(self, *args, **options):
        try:
            overlay = DataOverlay.objects.get(id=options['overlay_id'])
        except DataOverlay.DoesNotExist:
            self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found')
            return
        try:
            level = Level.objects.get(short_label=options['level'])
        except Level.DoesNotExist:
            self.stderr.write(f'Level "{options["level"]}" not found')
            return

        # Update overlay configuration to include the new sensor
        sensor_config = overlay.sensor_config or {}
        if 'sensors' not in sensor_config:
            sensor_config['sensors'] = []

        # Check if sensor already exists in config
        existing_sensor = None
        for i, sensor in enumerate(sensor_config['sensors']):
            if sensor['id'] == options['sensor_id']:
                existing_sensor = i
                break

        new_sensor_config = {
            'id': options['sensor_id'],
            'coordinates': {'x': options['x'], 'y': options['y']},
            'level': options['level']
        }
        if existing_sensor is not None:
            sensor_config['sensors'][existing_sensor] = new_sensor_config
            self.stdout.write(f'Updated sensor configuration for {options["sensor_id"]}')
        else:
            sensor_config['sensors'].append(new_sensor_config)
            self.stdout.write(f'Added sensor configuration for {options["sensor_id"]}')
        overlay.sensor_config = sensor_config
        overlay.save()

        # Create the sensor feature (or update if it exists)
        point = Point(options['x'], options['y'])
        feature, created = DataOverlayFeature.objects.update_or_create(
            overlay=overlay,
            sensor_id=options['sensor_id'],
            defaults={
                'titles': {'en': options['name']},
                'geometry': point,
                'level': level,
                'sensor_type': options['sensor_type'],
                'coordinates_x': options['x'],
                'coordinates_y': options['y'],
                'fill_color': '#95A5A6',  # Default gray
                'stroke_color': '#95A5A6',
                'stroke_width': 2,
                'fill_opacity': 0.8,
                'show_label': True,
                'show_geometry': True,
                'interactive': True,
                'point_icon': 'sensors',
                'last_updated': timezone.now(),
                'extra_data': {
                    'Status': 'No data yet',
                    'Last Updated': timezone.now().strftime('%Y-%m-%d %H:%M:%S'),
                    'Data Source': 'Manual configuration',
                    'Station ID': options['sensor_id']
                }
            }
        )
        action = 'Created' if created else 'Updated'
        self.stdout.write(
            self.style.SUCCESS(
                f'{action} sensor "{options["name"]}" (ID: {options["sensor_id"]}) '
                f'at coordinates ({options["x"]}, {options["y"]}) on level {options["level"]}'
            )
        )
        self.stdout.write(
            'You can now run the scrape command to fetch data for this sensor:'
        )
        self.stdout.write(
            f'python manage.py manage_sensors --scrape-data --overlay-id {options["overlay_id"]}'
        )

src/c3nav/mapdata/management/commands/list_sensors.py (new file, +60)

@@ -0,0 +1,60 @@
from django.core.management.base import BaseCommand
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature


class Command(BaseCommand):
    help = 'List all sensors in overlays'

    def add_arguments(self, parser):
        parser.add_argument(
            '--overlay-id',
            type=int,
            help='ID of a specific overlay to list sensors for',
        )

    def handle(self, *args, **options):
        if options['overlay_id']:
            try:
                overlay = DataOverlay.objects.get(id=options['overlay_id'])
                overlays = [overlay]
            except DataOverlay.DoesNotExist:
                self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found')
                return
        else:
            overlays = DataOverlay.objects.all()

        for overlay in overlays:
            self.stdout.write(f'\n=== Overlay {overlay.id}: {overlay.titles.get("en", "Unknown")} ===')
            # Show overlay configuration
            sensor_config = overlay.sensor_config or {}
            configured_sensors = sensor_config.get('sensors', [])
            if configured_sensors:
                self.stdout.write('Configured sensors:')
                for sensor in configured_sensors:
                    self.stdout.write(f'  - {sensor["id"]} at ({sensor["coordinates"]["x"]}, {sensor["coordinates"]["y"]}) on level {sensor.get("level", "default")}')
            # Show actual sensor features in database
            features = DataOverlayFeature.objects.filter(overlay=overlay)
            if features:
                self.stdout.write(f'\nSensor features in database ({features.count()}):')
                for feature in features:
                    title = feature.titles.get('en', 'Unknown') if feature.titles else 'Unknown'
                    level_name = feature.level.short_label if feature.level else 'No level'
                    coords = f'({feature.coordinates_x}, {feature.coordinates_y})' if feature.coordinates_x is not None else 'No coords'
                    last_updated = feature.last_updated.strftime('%Y-%m-%d %H:%M:%S') if feature.last_updated else 'Never'
                    self.stdout.write(f'  - {feature.sensor_id}: {title}')
                    self.stdout.write(f'    Level: {level_name}, Coords: {coords}')
                    self.stdout.write(f'    Type: {feature.sensor_type or "Unknown"}, Last updated: {last_updated}')
                    if feature.extra_data:
                        readings = [f'{k}: {v}' for k, v in feature.extra_data.items()
                                    if k not in ['Last Updated', 'Data Source', 'Station ID']]
                        if readings:
                            self.stdout.write(f'    Readings: {", ".join(readings)}')
            else:
                self.stdout.write('No sensor features found in database')

        if not overlays:
            self.stdout.write('No overlays found')

src/c3nav/mapdata/management/commands/manage_sensors.py (new file, +461)

@@ -0,0 +1,461 @@
import json
import requests
from django.core.management.base import BaseCommand
from django.utils import timezone
from shapely.geometry import Point
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level


class Command(BaseCommand):
    help = 'Setup and manage sensor overlays with generic sensor data'

    def add_arguments(self, parser):
        parser.add_argument(
            '--create-overlay',
            type=str,
            help='Create a new sensor overlay with given name',
        )
        parser.add_argument(
            '--data-source-url',
            type=str,
            help='URL to scrape sensor data from',
        )
        parser.add_argument(
            '--sensor-config',
            type=str,
            help='JSON configuration for sensor data mapping',
        )
        parser.add_argument(
            '--add-sensor',
            action='store_true',
            help='Add sensors manually with provided coordinates',
        )
        parser.add_argument(
            '--scrape-data',
            action='store_true',
            help='Scrape data from configured data sources',
        )
        parser.add_argument(
            '--overlay-id',
            type=int,
            help='ID of the overlay to work with',
        )

    def handle(self, *args, **options):
        if options['create_overlay']:
            self.create_overlay(options)
        elif options['add_sensor']:
            self.add_sensors_manually(options)
        elif options['scrape_data']:
            self.scrape_sensor_data(options)
        else:
            self.stdout.write('Please specify an action: --create-overlay, --add-sensor, or --scrape-data')

    def create_overlay(self, options):
        """Create a new sensor overlay"""
        name = options['create_overlay']
        # Parse sensor configuration
        sensor_config = {}
        if options['sensor_config']:
            try:
                sensor_config = json.loads(options['sensor_config'])
            except json.JSONDecodeError:
                self.stderr.write('Invalid JSON in sensor_config')
                return
        overlay = DataOverlay.objects.create(
            titles={'en': name},
            description=f'Sensor overlay for {name}',
            default_geomtype=DataOverlay.GeometryType.POINT,
            data_source_url=options['data_source_url'],
            sensor_config=sensor_config,
            update_interval=30,  # Update every 30 seconds
        )
        self.stdout.write(
            self.style.SUCCESS(f'Created overlay "{name}" with ID {overlay.id}')
        )

    def add_sensors_manually(self, options):
        """Add sensors manually with coordinates"""
        if not options['overlay_id']:
            self.stderr.write('--overlay-id required when adding sensors manually')
            return
        try:
            overlay = DataOverlay.objects.get(id=options['overlay_id'])
        except DataOverlay.DoesNotExist:
            self.stderr.write(f'Overlay with ID {options["overlay_id"]} not found')
            return
        # Get the ground floor level (floor0)
        try:
            level = Level.objects.get(short_label='floor0')
        except Level.DoesNotExist:
            level = Level.objects.first()  # Fallback to first level
        if not level:
            self.stderr.write('No levels found in database')
            return
        # Example sensors - this should be configurable via command line or config file
        sensors = [
            {
                'sensor_id': 'temp_001',
                'sensor_type': 'temperature',
                'name': 'Meeting Room A1 - Temperature',
                'coordinates_x': 500,
                'coordinates_y': 300,
                'value': 22.5,
                'unit': '°C'
            },
            {
                'sensor_id': 'hum_001',
                'sensor_type': 'humidity',
                'name': 'Meeting Room A1 - Humidity',
                'coordinates_x': 500,
                'coordinates_y': 300,
                'value': 55.0,
                'unit': '%'
            },
            {
                'sensor_id': 'temp_002',
                'sensor_type': 'temperature',
                'name': 'Server Room - Temperature',
                'coordinates_x': 750,
                'coordinates_y': 400,
                'value': 18.2,
                'unit': '°C'
            },
            {
                'sensor_id': 'co2_001',
                'sensor_type': 'co2',
                'name': 'Office Space - CO2',
                'coordinates_x': 300,
                'coordinates_y': 600,
                'value': 450,
                'unit': 'ppm'
            }
        ]
        for sensor_item in sensors:
            # Create geometry from c3nav coordinates
            point = Point(sensor_item['coordinates_x'], sensor_item['coordinates_y'])
            # Get color based on sensor type and value
            color = self.get_sensor_color(sensor_item['sensor_type'], sensor_item['value'])
            feature = DataOverlayFeature.objects.create(
                overlay=overlay,
                titles={'en': sensor_item['name']},
                geometry=point,
                level=level,
                sensor_id=sensor_item['sensor_id'],
                sensor_type=sensor_item['sensor_type'],
                sensor_value=sensor_item['value'],
                sensor_unit=sensor_item['unit'],
                coordinates_x=sensor_item['coordinates_x'],
                coordinates_y=sensor_item['coordinates_y'],
                fill_color=color,
                stroke_color=color,
                stroke_width=2,
                fill_opacity=0.8,
                show_label=True,
                show_geometry=True,
                interactive=True,
                point_icon=self.get_sensor_icon(sensor_item['sensor_type']),
                last_updated=timezone.now(),
                extra_data={
                    'value': str(sensor_item['value']),
                    'unit': sensor_item['unit'],
                    'sensor_type': sensor_item['sensor_type']
                }
            )
            self.stdout.write(
f'Created sensor {sensor_item["sensor_id"]}: {sensor_item["name"]} '
f'({sensor_item["value"]}{sensor_item["unit"]}) at ({sensor_item["coordinates_x"]}, {sensor_item["coordinates_y"]})'
)
def scrape_sensor_data(self, options):
"""Scrape sensor data from configured data sources"""
overlays = DataOverlay.objects.filter(data_source_url__isnull=False)
if options['overlay_id']:
overlays = overlays.filter(id=options['overlay_id'])
for overlay in overlays:
self.stdout.write(f'Scraping data for overlay: {overlay.titles.get("en", "Unknown")}')
try:
# Fetch data from the source URL
response = requests.get(overlay.data_source_url, timeout=30)
response.raise_for_status()
data = response.json()
# Process data using sensor configuration
self.process_scraped_data(overlay, data)
except requests.RequestException as e:
self.stderr.write(f'Error fetching data from {overlay.data_source_url}: {e}')
except json.JSONDecodeError as e:
self.stderr.write(f'Error parsing JSON from {overlay.data_source_url}: {e}')
except Exception as e:
self.stderr.write(f'Error processing data for overlay {overlay.id}: {e}')
def process_scraped_data(self, overlay, data):
"""Process scraped data according to overlay configuration"""
sensor_config = overlay.sensor_config or {}
# Default configuration for NOI Open Data Hub
default_config = {
"data_path": "data",
"mappings": {
"id_field": "scode",
"name_field": "sname",
"x_field": "scoordinate.x",
"y_field": "scoordinate.y"
},
"sensors": [] # List of specific sensors to process
}
config = {**default_config, **sensor_config}
# Extract sensor data array
api_data = data
if config.get("data_path"):
for path_part in config["data_path"].split("."):
api_data = api_data.get(path_part, [])
updated_count = 0
created_count = 0
# Group measurements by station (scode) first and discover sensor types dynamically
stations = {}
for item in api_data:
station_id = self.get_nested_field(item, config["mappings"]["id_field"])
station_name = self.get_nested_field(item, config["mappings"]["name_field"])
measurement_type = self.get_nested_field(item, "tname")
if not station_id or not measurement_type:
continue
if station_id not in stations:
stations[station_id] = {
'name': station_name,
'measurements': {}
}
stations[station_id]['measurements'][measurement_type] = item
# If specific sensors are configured, only process those
configured_sensors = config.get('sensors', [])
sensor_configs = {s['id']: s for s in configured_sensors} if configured_sensors else {}
# Process each station and its measurements
for station_id, station_data in stations.items():
# Skip if we have specific sensors configured and this isn't one of them
if sensor_configs and station_id not in sensor_configs:
continue
# Get sensor-specific configuration
sensor_specific_config = sensor_configs.get(station_id, {})
# Determine coordinates and level for this sensor
if sensor_specific_config.get('coordinates'):
# Use sensor-specific coordinates
x_coord = sensor_specific_config['coordinates']['x']
y_coord = sensor_specific_config['coordinates']['y']
else:
# Get coordinates from measurement data
first_measurement = next(iter(station_data['measurements'].values()))
x_coord = self.get_nested_field(first_measurement, config["mappings"]["x_field"])
y_coord = self.get_nested_field(first_measurement, config["mappings"]["y_field"])
if x_coord is None or y_coord is None:
continue
# Convert coordinates if needed
x_coord = float(x_coord)
y_coord = float(y_coord)
# Determine level for this sensor
level_name = sensor_specific_config.get('level', config.get('level', 'floor0'))
try:
level = Level.objects.get(short_label=level_name)
except Level.DoesNotExist:
self.stderr.write(f'Level "{level_name}" not found for sensor {station_id}, using ground floor')
try:
level = Level.objects.get(short_label='floor0')
except Level.DoesNotExist:
level = Level.objects.first() # Final fallback
if not level:
self.stderr.write(f'No levels found in database for sensor {station_id}')
continue
# Collect all sensor data for this station in one feature
sensor_readings = {}
raw_measurements = {}
# Process ALL measurements found in the API response (dynamically discovered)
for measurement_type, measurement in station_data['measurements'].items():
# Extract values dynamically from the API response
sensor_value = self.get_nested_field(measurement, "mvalue")
sensor_unit = self.get_nested_field(measurement, "tunit")
sensor_name = self.get_nested_field(measurement, "tname")
sensor_description = self.get_nested_field(measurement, "tdescription")
if sensor_value is None:
continue # Skip if no value
# Convert sensor value to float
try:
sensor_value = float(sensor_value)
except (ValueError, TypeError):
continue
# Store this measurement for the combined sensor
display_name = sensor_description or sensor_name or measurement_type
unit_str = f" {sensor_unit}" if sensor_unit else ""
sensor_readings[display_name] = f"{sensor_value}{unit_str}"
raw_measurements[measurement_type] = measurement
if not sensor_readings:
continue # Skip if no valid measurements
# Create a single sensor feature with all measurements
sensor_id = station_id # Use station ID as sensor ID
display_name = f"{station_data['name']} - Environmental Sensor"
# Determine primary color based on the most critical measurement
# Priority: CO2 > Temperature > Humidity
primary_color = '#95A5A6' # Default gray
# Look for CO2 measurements (various naming conventions)
co2_measurement = None
for mtype, measurement in raw_measurements.items():
if any(keyword in mtype.lower() for keyword in ['co2', 'carbon']):
co2_measurement = measurement
break
if co2_measurement:
co2_value = self.get_nested_field(co2_measurement, "mvalue")
if co2_value:
primary_color = self.get_sensor_color('co2', float(co2_value))
else:
# Look for temperature measurements
temp_measurement = None
for mtype, measurement in raw_measurements.items():
if any(keyword in mtype.lower() for keyword in ['temperature', 'temp']):
temp_measurement = measurement
break
if temp_measurement:
temp_value = self.get_nested_field(temp_measurement, "mvalue")
if temp_value:
primary_color = self.get_sensor_color('temperature', float(temp_value))
# Create geometry
point = Point(x_coord, y_coord)
feature, created = DataOverlayFeature.objects.update_or_create(
overlay=overlay,
sensor_id=sensor_id,
defaults={
'titles': {'en': display_name},
'geometry': point,
'level': level,
'sensor_type': 'environmental', # Combined sensor type
'sensor_value': None, # No single value for combined sensor
'sensor_unit': None, # No single unit for combined sensor
'coordinates_x': x_coord,
'coordinates_y': y_coord,
'fill_color': primary_color,
'stroke_color': primary_color,
'stroke_width': 2,
'fill_opacity': 0.8,
'show_label': True,
'show_geometry': True,
'interactive': True,
'point_icon': 'sensors', # Generic sensor icon
'last_updated': timezone.now(),
'sensor_data': raw_measurements, # Store all raw measurements
'extra_data': {
**sensor_readings, # All sensor readings as separate entries
'Last Updated': timezone.now().strftime('%Y-%m-%d %H:%M:%S'),
'Data Source': 'NOI Open Data Hub',
'Station ID': station_id
}
}
)
if created:
created_count += 1
readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()])
self.stdout.write(f'Created sensor {sensor_id} on level {level.short_label}: {readings_str}')
else:
updated_count += 1
readings_str = ', '.join([f"{k}: {v}" for k, v in sensor_readings.items()])
self.stdout.write(f'Updated sensor {sensor_id} on level {level.short_label}: {readings_str}')
self.stdout.write(
f'Processed overlay {overlay.id}: {created_count} created, {updated_count} updated'
)
def get_nested_field(self, data, field_path):
"""Get value from nested field path like 'scoordinate.x'"""
try:
value = data
for part in field_path.split('.'):
value = value[part]
return value
except (KeyError, TypeError):
return None
def get_sensor_color(self, sensor_type, value):
"""Get color based on sensor type and value"""
sensor_type_lower = sensor_type.lower()
if any(keyword in sensor_type_lower for keyword in ['temperature', 'temp']):
if value < 15:
return '#0066CC' # Cold - blue
elif value < 18:
return '#00AAFF' # Cool - light blue
elif value < 22:
return '#00CC66' # Comfortable - green
elif value < 25:
return '#FFAA00' # Warm - orange
else:
return '#FF3333' # Hot - red
elif any(keyword in sensor_type_lower for keyword in ['humidity', 'humid']):
if value < 30:
return '#FF6B6B' # Too dry - red
elif value < 60:
return '#4ECDC4' # Good - teal
else:
return '#45B7D1' # Too humid - blue
elif any(keyword in sensor_type_lower for keyword in ['co2', 'carbon']):
if value < 400:
return '#2ECC71' # Excellent - green
elif value < 800:
return '#F39C12' # Good - orange
elif value < 1200:
return '#E74C3C' # Poor - red
else:
return '#8E44AD' # Very poor - purple
else:
return '#95A5A6' # Default - gray
def get_sensor_icon(self, sensor_type):
"""Get icon based on sensor type"""
icons = {
'air-temperature': 'thermostat',
'air-humidity': 'water_drop',
'co2-ppm': 'air',
'temperature': 'thermostat',
'humidity': 'water_drop',
'co2': 'air',
'pressure': 'compress',
'light': 'light_mode'
}
return icons.get(sensor_type, 'sensors')
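To make the expected input concrete, here is a sketch of a response in the NOI Open Data Hub shape that process_scraped_data above can consume; the station code, coordinates, and readings are invented:

# Two measurements for one station; process_scraped_data groups them by "scode"
# and resolves dotted paths such as "scoordinate.x" via get_nested_field.
payload = {
    "data": [
        {
            "scode": "EX-STATION-01", "sname": "Meeting Room A1",
            "scoordinate": {"x": 500.0, "y": 300.0},
            "tname": "air-temperature", "tdescription": "Air temperature",
            "tunit": "°C", "mvalue": 22.5,
        },
        {
            "scode": "EX-STATION-01", "sname": "Meeting Room A1",
            "scoordinate": {"x": 500.0, "y": 300.0},
            "tname": "co2-ppm", "tdescription": "CO2 concentration",
            "tunit": "ppm", "mvalue": 450,
        },
    ]
}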


@ -0,0 +1,187 @@
import json
import logging
import traceback
import requests
from django.core.management.base import BaseCommand
from django.utils import timezone
from shapely.geometry import Point
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature, Level
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Pull overlay data from external APIs'
def add_arguments(self, parser):
parser.add_argument(
'--force',
action='store_true',
help='Force update even if not enough time has passed',
)
# pull_overlay_data in tasks.py invokes this command with overlay=<id>
parser.add_argument(
'--overlay',
type=int,
help='ID of a single overlay to update (all overlays by default)',
)
def handle(self, *args, **options):
overlays = DataOverlay.objects.all()
if options.get('overlay'):
overlays = overlays.filter(id=options['overlay'])
for overlay in overlays:
self.stdout.write(f"Processing overlay: {overlay.title}")
try:
self.update_overlay(overlay, force=options['force'])
except Exception as e:
logger.error(f"Error updating overlay {overlay.id}: {e}")
logger.error(traceback.format_exc())
self.stderr.write(f"Error updating overlay {overlay.id}: {e}")
def update_overlay(self, overlay, force=False):
"""Update a single overlay from its API URL"""
if not overlay.pull_url:
self.stdout.write(f"No API URL configured for overlay {overlay.id}")
return
# Honour the configured pull interval unless --force was given
if not force and overlay.pull_interval and overlay.last_pull_time:
if timezone.now() < overlay.last_pull_time + overlay.pull_interval:
self.stdout.write(f"Skipping overlay {overlay.id}: pull interval has not elapsed yet")
return
try:
self.stdout.write(f"Fetching data from: {overlay.pull_url}")
response = requests.get(overlay.pull_url, timeout=30)
response.raise_for_status()
data = response.json()
self.stdout.write(f"Received {len(data)} items from API")
# Process the data based on overlay type
if hasattr(overlay, 'description') and overlay.description and 'temperature' in overlay.description.lower():
self.process_temperature_data(overlay, data)
else:
self.stdout.write(f"Unknown overlay type for overlay {overlay.id}")
# Record the pull time so schedule_overlay_data_pulls knows when to pull next
overlay.last_pull_time = timezone.now()
overlay.save(update_fields=['last_pull_time'])
except requests.RequestException as e:
logger.error(f"HTTP error fetching {overlay.pull_url}: {e}")
self.stderr.write(f"HTTP error: {e}")
except Exception as e:
logger.error(f"Error processing overlay {overlay.id}: {e}")
logger.error(traceback.format_exc())
self.stderr.write(f"Error: {e}")
def process_temperature_data(self, overlay, data):
"""Process temperature sensor data from NOI Open Data Hub"""
# Clear existing features for this overlay
DataOverlayFeature.objects.filter(overlay=overlay).delete()
self.stdout.write(f"Cleared existing features for overlay {overlay.id}")
# Group measurements by station
stations = {}
for item in data:
scode = item.get('scode')
if scode:
if scode not in stations:
stations[scode] = {
'sname': item.get('sname'),
'scoordinate': item.get('scoordinate'),
'measurements': []
}
stations[scode]['measurements'].append(item)
self.stdout.write(f"Found {len(stations)} stations")
for scode, station_data in stations.items():
try:
self.create_temperature_feature(overlay, scode, station_data)
except Exception as e:
logger.error(f"Error creating feature for station {scode}: {e}")
logger.error(traceback.format_exc())
self.stderr.write(f"Error creating feature for {scode}: {e}")
def create_temperature_feature(self, overlay, scode, station_data):
"""Create a DataOverlayFeature for a temperature station"""
# Extract coordinates from scoordinate object
scoordinate = station_data.get('scoordinate')
if not scoordinate:
self.stdout.write(f"No coordinates for station {scode}")
return
# scoordinate is a dict with x, y coordinates
x = scoordinate.get('x')
y = scoordinate.get('y')
if x is None or y is None:
self.stdout.write(f"Invalid coordinates for station {scode}: {scoordinate}")
return
self.stdout.write(f"Station {scode} coordinates: x={x}, y={y}")
# Create point geometry (assuming WGS84/EPSG:4326)
point = Point(x, y)
# Find temperature measurement
temperature = None
humidity = None
for measurement in station_data['measurements']:
tname = measurement.get('tname', '').lower()
if 'temperature' in tname:
temperature = measurement.get('mvalue')
elif 'humidity' in tname:
humidity = measurement.get('mvalue')
self.stdout.write(f"Station {scode}: temp={temperature}, humidity={humidity}")
if temperature is None:
self.stdout.write(f"No temperature data for station {scode}")
return
# Determine color based on temperature
color = self.get_temperature_color(temperature)
# Try to find appropriate level (ground floor by default)
level = None
try:
# Look for ground floor or level 0
level = Level.objects.filter(
short_label__in=['0', 'EG', 'Ground', 'G']
).first()
if not level:
# Fallback to any level
level = Level.objects.first()
except Exception as e:
logger.warning(f"Could not determine level: {e}")
if not level:
self.stdout.write(f"No level found for station {scode}")
return
self.stdout.write(f"Using level: {level.short_label} for station {scode}")
# Create the feature (DataOverlayFeature uses titles/fill_color/point_icon, not title/color/icon)
title = f"{station_data.get('sname', scode)} ({temperature}°C)"
feature = DataOverlayFeature.objects.create(
overlay=overlay,
level=level,
geometry=point,
titles={'en': title},
fill_color=color,
stroke_color=color,
fill_opacity=0.8,
point_icon='thermostat'
)
self.stdout.write(f"Created feature {feature.id} for station {scode}")
def get_temperature_color(self, temperature):
"""Get color based on temperature value"""
try:
temp = float(temperature)
except (ValueError, TypeError):
return '#808080' # Gray for invalid values
if temp < 10:
return '#0000FF' # Blue for very cold
elif temp < 15:
return '#00BFFF' # Light blue for cold
elif temp < 20:
return '#00FF00' # Green for cool
elif temp < 25:
return '#FFFF00' # Yellow for comfortable
elif temp < 30:
return '#FFA500' # Orange for warm
else:
return '#FF0000' # Red for hot
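With the --overlay argument wired up above, the pull command can be run for a single overlay or for all of them; the overlay ID is a placeholder:

from django.core.management import call_command

call_command('pulloverlaydata', overlay=3, force=True)  # one overlay, skip the interval check
call_command('pulloverlaydata')                         # every overlay with a pull_url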


@ -0,0 +1,18 @@
# Generated by Django 5.1.5 on 2025-08-02 06:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0138_rangingbeacon_max_observed_num_clients_and_more'),
]
operations = [
migrations.AddField(
model_name='dataoverlay',
name='last_pull_time',
field=models.DateTimeField(blank=True, null=True, verbose_name='last pull time'),
),
]


@ -0,0 +1,73 @@
# Generated by Django 5.1.5 on 2025-08-02 07:18
from django.db import migrations, models
import django_pydantic_field
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0139_add_last_pull_time'),
]
operations = [
# Add data source URL to DataOverlay
migrations.AddField(
model_name='dataoverlay',
name='data_source_url',
field=models.URLField(blank=True, null=True, verbose_name='Data Source URL',
help_text='URL to scrape sensor data from'),
),
# Add sensor configuration to DataOverlay
migrations.AddField(
model_name='dataoverlay',
name='sensor_config',
field=django_pydantic_field.SchemaField(
schema=dict, blank=True, null=True,
verbose_name='Sensor Configuration',
help_text='JSON configuration for sensor data mapping and processing'
),
),
# Add generic sensor fields to DataOverlayFeature
migrations.AddField(
model_name='dataoverlayfeature',
name='sensor_id',
field=models.CharField(max_length=100, blank=True, null=True, verbose_name='Sensor ID'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='sensor_type',
field=models.CharField(max_length=50, blank=True, null=True, verbose_name='Sensor Type',
help_text='Type of sensor: temperature, humidity, co2, etc.'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='sensor_value',
field=models.FloatField(blank=True, null=True, verbose_name='Sensor Value'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='sensor_unit',
field=models.CharField(max_length=20, blank=True, null=True, verbose_name='Sensor Unit',
help_text='Unit of measurement: °C, %, ppm, etc.'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='coordinates_x',
field=models.FloatField(blank=True, null=True, verbose_name='X Coordinate',
help_text='X coordinate in c3nav coordinate system'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='coordinates_y',
field=models.FloatField(blank=True, null=True, verbose_name='Y Coordinate',
help_text='Y coordinate in c3nav coordinate system'),
),
migrations.AddField(
model_name='dataoverlayfeature',
name='last_updated',
field=models.DateTimeField(blank=True, null=True, verbose_name='Last Updated'),
),
]


@ -0,0 +1,23 @@
# Generated by Django 5.1.5 on 2025-08-02 12:00
from django.db import migrations
import django_pydantic_field
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0140_add_temperature_fields'),
]
operations = [
migrations.AddField(
model_name='dataoverlayfeature',
name='sensor_data',
field=django_pydantic_field.SchemaField(
schema=dict, blank=True, null=True,
verbose_name='Raw Sensor Data',
help_text='Raw data from sensor for debugging and additional info'
),
),
]


@ -0,0 +1,35 @@
# Generated by Django 5.1.5 on 2025-08-02 00:38
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('mapdata', '0138_rangingbeacon_max_observed_num_clients_and_more'),
('mapdata', '0141_add_sensor_data_field')
]
operations = [
migrations.CreateModel(
name='ClonedItemSync',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('original_object_id', models.PositiveIntegerField()),
('cloned_object_id', models.PositiveIntegerField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('is_active', models.BooleanField(default=True)),
('sync_fields', models.JSONField(default=list, help_text='List of field names to keep synchronized')),
('cloned_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cloned_synced_items', to='contenttypes.contenttype')),
('original_content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='original_synced_items', to='contenttypes.contenttype')),
],
options={
'verbose_name': 'Cloned Item Sync',
'verbose_name_plural': 'Cloned Item Syncs',
'indexes': [models.Index(fields=['original_content_type', 'original_object_id'], name='mapdata_clo_origina_62f4ee_idx'), models.Index(fields=['cloned_content_type', 'cloned_object_id'], name='mapdata_clo_cloned__027e07_idx')],
'unique_together': {('original_content_type', 'original_object_id', 'cloned_content_type', 'cloned_object_id')},
},
),
]


@ -9,3 +9,4 @@ from c3nav.mapdata.models.source import Source # noqa
from c3nav.mapdata.models.graph import GraphNode, WayType, GraphEdge  # noqa
from c3nav.mapdata.models.theme import Theme  # noqa
from c3nav.mapdata.models.overlay import DataOverlay, DataOverlayFeature  # noqa
from c3nav.mapdata.models.sync import ClonedItemSync # noqa


@ -44,6 +44,13 @@ class DataOverlay(TitledMixin, AccessRestrictionMixin, models.Model):
verbose_name=_('Editor Access Restriction'),
on_delete=models.PROTECT)
# Generic sensor data configuration
data_source_url = models.URLField(blank=True, null=True, verbose_name=_('Data Source URL'),
help_text=_('URL to scrape sensor data from'))
sensor_config: Optional[dict] = SchemaField(schema=dict, blank=True, null=True,
verbose_name=_('Sensor Configuration'),
help_text=_('JSON configuration for sensor data mapping and processing'))
class Meta:
verbose_name = _('Data Overlay')
verbose_name_plural = _('Data Overlays')
@ -73,6 +80,22 @@ class DataOverlayFeature(TitledMixin, LevelGeometryMixin, models.Model):
default=None,
verbose_name=_('extra data (JSON object)'))
# Generic sensor fields
sensor_id = models.CharField(max_length=100, blank=True, null=True, verbose_name=_('Sensor ID'))
sensor_type = models.CharField(max_length=50, blank=True, null=True, verbose_name=_('Sensor Type'),
help_text=_('Type of sensor: temperature, humidity, co2, etc.'))
sensor_value = models.FloatField(blank=True, null=True, verbose_name=_('Sensor Value'))
sensor_unit = models.CharField(max_length=20, blank=True, null=True, verbose_name=_('Sensor Unit'),
help_text=_('Unit of measurement: °C, %, ppm, etc.'))
coordinates_x = models.FloatField(blank=True, null=True, verbose_name=_('X Coordinate'),
help_text=_('X coordinate in c3nav coordinate system'))
coordinates_y = models.FloatField(blank=True, null=True, verbose_name=_('Y Coordinate'),
help_text=_('Y coordinate in c3nav coordinate system'))
last_updated = models.DateTimeField(blank=True, null=True, verbose_name=_('Last Updated'))
sensor_data: Optional[dict] = SchemaField(schema=dict, blank=True, null=True,
verbose_name=_('Raw Sensor Data'),
help_text=_('Raw data from sensor for debugging and additional info'))
def to_geojson(self, instance=None) -> dict:
result = {
'type': 'Feature',


@ -0,0 +1,47 @@
from django.db import models
from django.utils.translation import gettext_lazy as _
class ClonedItemSync(models.Model):
"""
Tracks relationships between cloned items across different levels
to keep them synchronized when one is modified.
"""
# The original item
original_content_type = models.ForeignKey(
'contenttypes.ContentType',
on_delete=models.CASCADE,
related_name='original_synced_items'
)
original_object_id = models.PositiveIntegerField()
# The cloned item
cloned_content_type = models.ForeignKey(
'contenttypes.ContentType',
on_delete=models.CASCADE,
related_name='cloned_synced_items'
)
cloned_object_id = models.PositiveIntegerField()
# Metadata
created_at = models.DateTimeField(auto_now_add=True)
is_active = models.BooleanField(default=True)
# Fields to sync (JSON field storing field names to keep in sync)
sync_fields = models.JSONField(
default=list,
help_text=_('List of field names to keep synchronized')
)
class Meta:
verbose_name = _('Cloned Item Sync')
verbose_name_plural = _('Cloned Item Syncs')
unique_together = ('original_content_type', 'original_object_id',
'cloned_content_type', 'cloned_object_id')
indexes = [
models.Index(fields=['original_content_type', 'original_object_id']),
models.Index(fields=['cloned_content_type', 'cloned_object_id']),
]
def __str__(self):
return f"Sync: {self.original_content_type.model}#{self.original_object_id} -> {self.cloned_content_type.model}#{self.cloned_object_id}"


@ -0,0 +1,95 @@
import logging
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.contrib.contenttypes.models import ContentType
from c3nav.mapdata.models.sync import ClonedItemSync
logger = logging.getLogger(__name__)
@receiver(post_save)
def sync_cloned_items_on_save(sender, instance, created, **kwargs):
"""
When a model instance is saved, update any cloned items that should be synchronized.
"""
if created:
return # Only sync on updates, not creation
# Check if ClonedItemSync table exists (avoid errors during migrations)
try:
from django.db import connection
with connection.cursor() as cursor:
cursor.execute("SELECT 1 FROM information_schema.tables WHERE table_name = 'mapdata_cloneditemsync' LIMIT 1")
if not cursor.fetchone():
return # Table doesn't exist yet, skip sync
except Exception:
return # Any database error, skip sync
content_type = ContentType.objects.get_for_model(sender)
# Find all sync relationships where this item is the original
try:
sync_relationships = ClonedItemSync.objects.filter(
original_content_type=content_type,
original_object_id=instance.pk,
is_active=True
)
except Exception:
return # ClonedItemSync model not available, skip sync
for sync_rel in sync_relationships:
try:
# Get the cloned item
cloned_model = sync_rel.cloned_content_type.model_class()
cloned_item = cloned_model.objects.get(pk=sync_rel.cloned_object_id)
# Update synchronized fields
updated = False
for field_name in sync_rel.sync_fields:
if hasattr(instance, field_name) and hasattr(cloned_item, field_name):
original_value = getattr(instance, field_name)
current_value = getattr(cloned_item, field_name)
if original_value != current_value:
setattr(cloned_item, field_name, original_value)
updated = True
if updated:
cloned_item.save()
except Exception as e:
# Log the error but don't break the original save operation
logger.warning(f"Error syncing cloned item: {e}")
# Deactivate the broken sync relationship so it is not retried on every save
sync_rel.is_active = False
sync_rel.save()
@receiver(post_delete)
def cleanup_sync_on_delete(sender, instance, **kwargs):
"""
When a model instance is deleted, clean up any sync relationships.
"""
# Check if ClonedItemSync table exists (avoid errors during migrations)
try:
from django.db import connection
with connection.cursor() as cursor:
cursor.execute("SELECT 1 FROM information_schema.tables WHERE table_name = 'mapdata_cloneditemsync' LIMIT 1")
if not cursor.fetchone():
return # Table doesn't exist yet, skip cleanup
except Exception:
return # Any database error, skip cleanup
try:
content_type = ContentType.objects.get_for_model(sender)
# Clean up sync relationships where this item is either original or cloned
ClonedItemSync.objects.filter(
original_content_type=content_type,
original_object_id=instance.pk
).delete()
ClonedItemSync.objects.filter(
cloned_content_type=content_type,
cloned_object_id=instance.pk
).delete()
except Exception:
pass # ClonedItemSync model not available, skip cleanup


@ -82,3 +82,76 @@ def update_ap_names_bssid_mapping(self, map_name, user_id):
with changeset.lock_to_edit() as locked_changeset:
locked_changeset.title = 'passive update bssids'
locked_changeset.apply(user)
@app.task(bind=True, max_retries=3)
def pull_overlay_data(self, overlay_id=None):
"""
Celery task to pull data overlay features from external URLs.
Args:
overlay_id (int, optional): Specific overlay ID to update.
If None, updates all overlays with pull_url.
"""
logger.info('Starting overlay data pull task...')
from django.core.management import call_command
from io import StringIO
try:
# Capture output from the management command
output = StringIO()
if overlay_id:
call_command('pulloverlaydata', overlay=overlay_id, stdout=output)
else:
call_command('pulloverlaydata', stdout=output)
result = output.getvalue()
logger.info(f'Overlay data pull completed: {result}')
return result
except Exception as e:
logger.error(f'Overlay data pull failed: {e}')
if self.request.retries < self.max_retries:
logger.info(f'Retrying in 60 seconds... (attempt {self.request.retries + 1}/{self.max_retries})')
raise self.retry(countdown=60, exc=e)
else:
logger.error('Max retries exceeded for overlay data pull')
raise
@app.task(bind=True)
def schedule_overlay_data_pulls(self):
"""
Periodic task to schedule individual overlay data pulls based on their intervals.
This should be called every minute by a periodic task scheduler.
"""
from c3nav.mapdata.models import DataOverlay
from django.utils import timezone
logger.info('Checking overlays for scheduled pulls...')
overlays = DataOverlay.objects.exclude(pull_url__isnull=True).exclude(pull_url='').exclude(pull_interval__isnull=True)
scheduled_count = 0
for overlay in overlays:
# Check if it's time to update this overlay
should_pull = False
if overlay.last_pull_time is None:
# Never pulled before
should_pull = True
else:
# Check if enough time has passed since last pull
next_pull_time = overlay.last_pull_time + overlay.pull_interval
should_pull = timezone.now() >= next_pull_time
if should_pull:
# Schedule the pull
pull_overlay_data.delay(overlay.pk)
scheduled_count += 1
logger.info(f'Scheduled pull for overlay: {overlay.title}')
logger.info(f'Scheduled {scheduled_count} overlay pulls')
return scheduled_count
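For schedule_overlay_data_pulls to actually run every minute, a Celery beat entry along these lines is needed; the task path assumes this file is c3nav/mapdata/tasks.py, which this diff does not show:

from celery.schedules import crontab

# Hypothetical beat configuration on the project's Celery app.
app.conf.beat_schedule = {
    'schedule-overlay-data-pulls': {
        'task': 'c3nav.mapdata.tasks.schedule_overlay_data_pulls',
        'schedule': crontab(),  # fires every minute
    },
}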


@ -32,7 +32,6 @@ class LocalCacheProxy:
# not in our cache
result = cache.get(key, default=NoneFromCache)
if result is not NoneFromCache:
print("result", result, result is NoneFromCache)
if self._items.get(None) is None:
self._items.set(OrderedDict())
self._items.get()[key] = result


@ -0,0 +1,18 @@
# Generated by Django 5.1.5 on 2025-08-02 00:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mesh', '0014_remove_meshnode_name'),
]
operations = [
migrations.AlterField(
model_name='nodemessage',
name='message_type',
field=models.CharField(choices=[('NOOP', 'noop'), ('ECHO_REQUEST', 'echo request'), ('ECHO_RESPONSE', 'echo response'), ('MESH_SIGNIN', 'mesh signin'), ('MESH_LAYER_ANNOUNCE', 'mesh layer announce'), ('MESH_ADD_DESTINATION', 'mesh add destination'), ('MESH_REMOVE_DESTINATIONS', 'mesh remove destinations'), ('MESH_ROUTE_REQUEST', 'mesh route request'), ('MESH_ROUTE_RESPONSE', 'mesh route response'), ('MESH_ROUTE_TRACE', 'mesh route trace'), ('MESH_ROUTING_FAILED', 'mesh routing failed'), ('MESH_SIGNIN_CONFIRM', 'mesh signin confirm'), ('MESH_RESET', 'mesh reset'), ('CONFIG_DUMP', 'dump config'), ('CONFIG_HARDWARE', 'hardware config'), ('CONFIG_BOARD', 'board config'), ('CONFIG_FIRMWARE', 'firmware config'), ('CONFIG_UPLINK', 'uplink config'), ('CONFIG_POSITION', 'position config'), ('CONFIG_NODE', 'node config'), ('CONFIG_IBEACON', 'ibeacon config'), ('OTA_STATUS', 'ota status'), ('OTA_REQUEST_STATUS', 'ota request status'), ('OTA_START', 'ota start'), ('OTA_URL', 'ota url'), ('OTA_FRAGMENT', 'ota fragment'), ('OTA_REQUEST_FRAGMENTS', 'ota request fragments'), ('OTA_SETTING', 'ota setting'), ('OTA_APPLY', 'ota apply'), ('OTA_ABORT', 'ota abort'), ('LOCATE_REQUEST_RANGE', 'locate request range'), ('LOCATE_RANGE_RESULTS', 'locate range results'), ('LOCATE_RAW_FTM_RESULTS', 'locate raw ftm results'), ('REBOOT', 'reboot'), ('REPORT_ERROR', 'report error')], db_index=True, max_length=24, verbose_name='message type'),
),
]


@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" style="background: transparent; background-color: transparent; color-scheme: light;" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="132px" height="154px" viewBox="-0.5 -0.5 132 154"><defs/><g><g data-cell-id="0"><g data-cell-id="1"><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-14"><g><rect x="110" y="0" width="20" height="20" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" pointer-events="all"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-16"><g><rect x="0" y="50" width="120" height="60" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" pointer-events="all"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-15"><g><path d="M 120 150 L 120 20" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="4" stroke-miterlimit="10" stroke-dasharray="12 12" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-20"><g><rect x="70" y="110" width="60" height="30" fill="none" stroke="none" pointer-events="all"/></g><g><g><switch><foreignObject style="overflow: visible; text-align: left;" pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 58px; height: 1px; padding-top: 125px; margin-left: 71px;"><div style="box-sizing: border-box; font-size: 0; text-align: center; color: #000000; "><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: #000000; line-height: 1.2; pointer-events: all; white-space: normal; word-wrap: normal; "><div><font style="font-size: 16px;">90°</font></div></div></div></div></foreignObject><text x="100" y="129" fill="light-dark(#000000, #ffffff)" font-family="Helvetica" font-size="12px" text-anchor="middle">90°</text></switch></g></g></g></g></g></g></svg>


@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" style="background: transparent; background-color: transparent; color-scheme: light dark;" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="101px" height="126px" viewBox="-0.5 -0.5 101 126"><defs/><g><g data-cell-id="0"><g data-cell-id="1"><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-4"><g><path d="M 60 22 L 100 52 L 60 82 Z" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-miterlimit="10" pointer-events="all"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-6"><g><rect x="0" y="32" width="60" height="60" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" pointer-events="all"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-3"><g><path d="M 60 2 L 60 122" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="5" stroke-miterlimit="10" stroke-dasharray="15 15" pointer-events="stroke"/></g></g></g></g></g></svg>


@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" style="background: transparent; background-color: transparent; color-scheme: light;" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="114px" height="124px" viewBox="-0.5 -0.5 114 124"><defs/><g><g data-cell-id="0"><g data-cell-id="1"><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-7"><g><path d="M 11 71 L 71 1" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="2" stroke-miterlimit="10" stroke-dasharray="6 6" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-8"><g><path d="M 81 101 L 11 71" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="2" stroke-miterlimit="10" stroke-dasharray="6 6" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-9"><g><path d="M 81 101 L 71 1" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="2" stroke-miterlimit="10" stroke-dasharray="6 6" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-10"><g><path d="M 1 71 L 31 1" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="3" stroke-miterlimit="10" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-11"><g><path d="M 111 121 L 1 71" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="3" stroke-miterlimit="10" pointer-events="stroke"/></g></g><g data-cell-id="pwFeD8oKGOq7wNB7DZdw-12"><g><path d="M 111 121 L 31 1" fill="none" stroke="#000000" style="stroke: rgb(0, 0, 0);" stroke-width="3" stroke-miterlimit="10" pointer-events="stroke"/></g></g></g></g></g></svg>


@ -8,3 +8,7 @@ if __name__ == "__main__":
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)


@ -12,6 +12,8 @@ if [[ $# == 1 ]] && [[ $1 == "stop" ]]; then
echo "Stopped the postgres container" echo "Stopped the postgres container"
elif [[ $# == 1 ]] && [[ $1 == "db" ]]; then elif [[ $# == 1 ]] && [[ $1 == "db" ]]; then
echo "Setting up database" echo "Setting up database"
sudo docker stop postgres
sudo docker container rm -f postgres
sudo docker run -d --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=test -e POSTGRES_USER=mindshub postgres
until psql "postgres://mindshub:test@localhost:5432" <<< "CREATE DATABASE insignorocketdb;"; do
sleep 0.5;
@ -29,6 +31,16 @@ elif [[ $# == 1 ]] && [[ $1 == "run" ]]; then
python manage.py processupdates
python manage.py runserver
popd
elif [[ $# == 1 ]] && [[ $1 == "run_without_output" ]]; then
echo "Processing updates and running server without output"
pushd src 2>&1 > /dev/null
python manage.py processupdates 2>&1 | (grep -vE '^(INFO|DEBUG)|__debug__' || true)
python manage.py runserver 2>&1 | (grep -vE '^(INFO|DEBUG)|__debug__' || true)
popd 2>&1 > /dev/null
elif [[ $# > 0 ]] && [[ $1 == "manage" ]]; then
pushd src
python manage.py "${@:2}"
popd
else
echo "Usage: $0 [stop|db|run|run_without_output|manage]"
fi