Add NOI environmental sensor overlays and sensor data scraping to local setup

This commit is contained in:
Alessio 2025-08-02 11:53:58 +02:00
parent 98794751d0
commit 789640998a
8 changed files with 902 additions and 5 deletions

View file

@@ -1,7 +1,4 @@
# Local buildx setup — kept for reference, not needed for a plain compose run.
#cd ../docker
#docker buildx rm c3nav-local 2>/dev/null || true
#docker buildx create --name c3nav-local --driver=docker-container --bootstrap --use
#cd ../local_run
# Stop any previous stack (including leftover containers from renamed services),
# then restore a pristine data directory from the pre-made copy.
docker compose down --remove-orphans
rm -rf data || true
cp -r data-copy data
@@ -36,8 +33,13 @@ print('Password set successfully for user:', user.username)
\" | /app/env/bin/python manage.py shell"
# The DB was loaded from a dump that already contains the schema, so mark all
# existing migrations as applied without running them.
echo "Applying migrations..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate --fake'
# Generate a migration for the newly added sensor_data field, then apply only
# the mapdata app's migrations for real (the fake above covered everything else).
echo "Creating new migrations for sensor_data field..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py makemigrations mapdata'
echo "Applying new migrations..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py migrate mapdata'
# Show container status, then flush the map cache and (re)link static assets.
docker compose ps -a
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py clearmapcache --include-history --include-geometries && /app/env/bin/python manage.py collectstatic -l --no-input'
@@ -50,3 +52,70 @@ echo "Applying NumPy compatibility fix..."
# Rebuild the render/routing cache from the freshly migrated map data.
echo "Processing map updates to rebuild cache..."
docker exec -i local_run-c3nav-core-1 sh -c '/app/env/bin/python manage.py processupdates'
# Setup sensor overlays
echo "Setting up sensor overlays..."
# Create the two NOI overlays and print their primary keys on a marked line so
# the shell can pick them up afterwards.  The IDs must NOT be assumed to be
# 1 and 2: the deletes below do not reset the auto-increment sequence, so any
# re-run of this script creates overlays with higher IDs and the hard-coded
# scrape calls would target the wrong (or missing) rows.
OVERLAY_OUTPUT=$(docker compose exec -T c3nav-core python manage.py shell << 'EOF'
from c3nav.mapdata.models import DataOverlay, DataOverlayFeature
# Clear old overlays to avoid conflicts
DataOverlay.objects.filter(titles__en__icontains='Environmental').delete()
DataOverlay.objects.filter(titles__en__icontains='Temperature').delete()

def make_overlay(title, scode, x):
    # Create one point overlay for a single NOI sensor, pinned to fixed map
    # coordinates on floor1.  Only the title, sensor code and x coordinate
    # differ between the two overlays.
    return DataOverlay.objects.create(
        titles={'en': title},
        description='Real-time CO2 and temperature sensors from NOI Open Data Hub - displays current readings with values and units',
        default_geomtype='point',
        data_source_url=(
            'https://mobility.api.opendatahub.com/v2/flat/IndoorStation/*/latest'
            f'?where=and(scode.eq.%22{scode}%22)'
        ),
        sensor_config={
            'data_path': 'data',
            'level': 'floor1',  # Specify which floor/level to place sensors on
            'mappings': {
                'id_field': 'scode',
                'name_field': 'sname',
                'fixed_coordinates': {
                    'x': x,
                    'y': 241.0
                }
            }
        },
        update_interval=120
    )

overlay = make_overlay('NOI Environmental Sensors', 'NOI:FreeSoftwareLab-Temperature', 291.0)
overlay2 = make_overlay('NOI Environmental Sensors 2', 'NOI:NOI-A1-Floor1-CO2', 270.0)
print(f"NOI sensor overlay created with ID {overlay.id}")
print(f"NOI sensor overlay 2 created with ID {overlay2.id}")
# Machine-readable marker line consumed by the shell below.
print(f"OVERLAY_IDS={overlay.id},{overlay2.id}")
EOF
)
echo "$OVERLAY_OUTPUT"
# Scrape real NOI sensor data for both overlays
echo "Scraping NOI sensor data..."
# Give the database a moment to settle after overlay creation
sleep 2
# Extract the actual IDs from the marker line (strip any CRs that
# `docker compose exec -T` may leave in the captured output).
OVERLAY1_ID=$(printf '%s\n' "$OVERLAY_OUTPUT" | tr -d '\r' | sed -n 's/^OVERLAY_IDS=\([0-9]*\),[0-9]*$/\1/p')
OVERLAY2_ID=$(printf '%s\n' "$OVERLAY_OUTPUT" | tr -d '\r' | sed -n 's/^OVERLAY_IDS=[0-9]*,\([0-9]*\)$/\1/p')
echo "Scraping first overlay (ID: ${OVERLAY1_ID})..."
docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id "$OVERLAY1_ID"
echo "Scraping second overlay (ID: ${OVERLAY2_ID})..."
docker compose exec -T c3nav-core python manage.py manage_sensors --scrape-data --overlay-id "$OVERLAY2_ID"
echo "Sensor setup completed!"