import asyncio
import traceback
from asyncio import get_event_loop
from functools import cached_property

from asgiref.sync import async_to_sync
from channels.db import database_sync_to_async
from channels.exceptions import DenyConnection
from channels.generic.websocket import AsyncJsonWebsocketConsumer, AsyncWebsocketConsumer
from django.db import transaction
from django.utils import timezone

from c3nav.mesh import messages
from c3nav.mesh.messages import (MESH_BROADCAST_ADDRESS, MESH_NONE_ADDRESS, MESH_ROOT_ADDRESS, MeshMessage,
                                 MeshMessageType)
from c3nav.mesh.models import MeshNode, MeshUplink, NodeMessage
from c3nav.mesh.utils import MESH_ALL_UPLINKS_GROUP, UPLINK_PING, get_mesh_uplink_group


class MeshConsumer(AsyncWebsocketConsumer):
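    """
    Consumer for a single mesh uplink websocket connection.

    Handles the uplink node's sign-in, forwards or processes incoming mesh
    messages, and keeps track of which destination nodes are reachable
    through this uplink.
    """
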
    def __init__(self):
        super().__init__()
        self.uplink = None
        self.dst_nodes = set()
        self.open_requests = set()
        self.ping_task = None

    async def connect(self):
        # todo: auth
        # await self.log_text(None, "new mesh websocket connection")
        await self.accept()
        self.ping_task = get_event_loop().create_task(self.ping_regularly())

    async def disconnect(self, close_code):
        self.ping_task.cancel()
        if self.uplink is not None:
            await self.log_text(self.uplink.node, "mesh websocket disconnected")

            # leave broadcast group
            await self.channel_layer.group_discard("mesh_comm_broadcast", self.channel_name)

            # remove all other destinations
            await self.remove_dst_nodes(self.dst_nodes)

            # set end reason (unless we set it to replaced already)
            # todo: make this better? idk
            await MeshUplink.objects.filter(
                pk=self.uplink.pk,
            ).exclude(
                end_reason=MeshUplink.EndReason.REPLACED
            ).aupdate(
                end_reason=MeshUplink.EndReason.CLOSED
            )

    async def send_msg(self, msg, sender=None, exclude_uplink_address=None):
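        """
        encode the message, send it over this websocket and announce it on the mesh_msg_sent group
        """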
        # print("sending", msg, MeshMessage.encode(msg).hex(' ', 1))
        # self.log_text(msg.dst, "sending %s" % msg)
        await self.send(bytes_data=MeshMessage.encode(msg))
        await self.channel_layer.group_send("mesh_msg_sent", {
            "type": "mesh.msg_sent",
            "timestamp": timezone.now().strftime("%d.%m.%y %H:%M:%S.%f"),
            "channel": self.channel_name,
            "sender": sender,
            "uplink": self.uplink.node.address if self.uplink else None,
            "recipient": msg.dst,
            # "msg": msg.tojson(),  # not doing this part for privacy reasons
        })

    @cached_property
    def same_uplinks_group(self):
        return 'mesh_uplink_%s' % self.uplink.node.address.replace(':', '-')

    async def receive(self, text_data=None, bytes_data=None):
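        """
        handle a binary message from the uplink node: decode it, forward messages that are not
        addressed to us, and process sign-in, destination and route request/trace messages
        """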
        if bytes_data is None:
            return
        try:
            msg, data = messages.MeshMessage.decode(bytes_data)
        except Exception:
            traceback.print_exc()
            return

        print(msg)

        if msg.dst != messages.MESH_ROOT_ADDRESS and msg.dst != messages.MESH_PARENT_ADDRESS:
            # message not addressed to us, forward it
            print('Received message for forwarding:', msg)

            if not self.uplink:
                await self.log_text(None, "received message not for us before sign in message, ignoring...")
                print('no sign in yet, ignoring')
                return

            # trace messages collect node addresses before forwarding
            if isinstance(msg, messages.MeshRouteTraceMessage):
                print('adding ourselves to trace message before forwarding')
                await self.log_text(MESH_ROOT_ADDRESS, "adding ourselves to trace message before forwarding")
                msg.trace.append(MESH_ROOT_ADDRESS)

            result = await msg.send(exclude_uplink_address=self.uplink.node.address)

            if not result:
                print('message had no route')

            # don't handle this message unless it's a broadcast message
            if msg.dst != messages.MESH_BROADCAST_ADDRESS:
                await self.log_text(MESH_ROOT_ADDRESS, "received non-broadcast message not for us, forwarding...")
                return

            print('it\'s a broadcast so it\'s also for us')
            await self.log_text(MESH_ROOT_ADDRESS, "received broadcast message, forwarding and handling...")

        # print('Received message:', msg)

        src_node, created = await MeshNode.objects.aget_or_create(address=msg.src)

        if isinstance(msg, messages.MeshSigninMessage):
            await self.create_uplink_in_database(msg.src)

            # inform other uplinks to shut down
            await self.channel_layer.group_send(get_mesh_uplink_group(msg.src), {
                "type": "mesh.uplink_consumer",
                "name": self.channel_name,
            })

            # log message, since we will not log it further down
            await self.log_received_message(src_node, msg)

            # inform signed in uplink node about its layer
            await self.send_msg(messages.MeshLayerAnnounceMessage(
                src=messages.MESH_ROOT_ADDRESS,
                dst=msg.src,
                layer=messages.NO_LAYER
            ))

            # add signed in uplink node to broadcast group
            await self.channel_layer.group_add(MESH_ALL_UPLINKS_GROUP, self.channel_name)

            # add this node as a destination that this uplink handles (duh)
            await self.add_dst_nodes(nodes=(src_node, ))

            return

        if self.uplink is None:
            print('Expected sign-in message, but got a different one!')
            await self.close()
            return

        await self.log_received_message(src_node, msg)

        if isinstance(msg, messages.MeshAddDestinationsMessage):
            await self.add_dst_nodes(addresses=msg.addresses)

        if isinstance(msg, messages.MeshRemoveDestinationsMessage):
            await self.remove_dst_nodes(addresses=msg.addresses)

        if isinstance(msg, messages.MeshRouteRequestMessage):
            if msg.address == MESH_ROOT_ADDRESS:
                await self.log_text(MESH_ROOT_ADDRESS, "route request about us, start a trace")
                await self.send_msg(messages.MeshRouteTraceMessage(
                    src=MESH_ROOT_ADDRESS,
                    dst=msg.src,
                    request_id=msg.request_id,
                    trace=[MESH_ROOT_ADDRESS],
                ))
            else:
                await self.log_text(MESH_ROOT_ADDRESS, "route request about someone else, sending response")
                self.open_requests.add(msg.request_id)
                uplink = await database_sync_to_async(MeshNode.get_node_and_uplink)(msg.address)
                await self.send_msg(messages.MeshRouteResponseMessage(
                    src=MESH_ROOT_ADDRESS,
                    dst=msg.src,
                    request_id=msg.request_id,
                    route=uplink.node_id if uplink else MESH_NONE_ADDRESS,
                ))

    @database_sync_to_async
    def create_uplink_in_database(self, address):
        with transaction.atomic():
            # database fumbling, lock the mesh node database row
            locked_node = MeshNode.objects.select_for_update().get(address=address)

            # close other uplinks in the database (they might add their own close reason in a bit)
            locked_node.uplink_sessions.filter(end_reason__isnull=True).update(
                end_reason=MeshUplink.EndReason.NEW_TIMEOUT
            )

            # create our own uplink in the database
            self.uplink = MeshUplink.objects.create(
                node=locked_node,
                last_ping=timezone.now(),
                name=self.channel_name,
            )

    async def ping_regularly(self):
        while True:
            await asyncio.sleep(UPLINK_PING)
            await MeshUplink.objects.filter(pk=self.uplink.pk).aupdate(last_ping=timezone.now())

    async def delayed_group_send(self, delay: int, group: str, msg: dict):
        await asyncio.sleep(delay)
        await self.channel_layer.group_send(group, msg)

    """
    internal event handlers
    """

    async def mesh_uplink_consumer(self, data):
        """
        message handler: if a new consumer has taken over this uplink, mark ourselves replaced and shut down
        """
        if data["name"] != self.channel_name:
            await self.log_text(self.uplink.node, "shutting down, uplink now served by new consumer")
            await MeshUplink.objects.filter(pk=self.uplink.pk).aupdate(
                end_reason=MeshUplink.EndReason.REPLACED
            )
            await self.close()

    async def mesh_dst_node_uplink(self, data):
        """
        message handler: if another consumer is now the uplink for this node, stop serving it here
        """
        if data["uplink"] != self.channel_name:
            await self.log_text(data["node"], "node now served by new consumer")
            # going the short way because the other consumer will already have done the database work
            self.dst_nodes.discard(data["node"])

    async def mesh_send(self, data):
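        """
        message handler: send a mesh message via this uplink, unless it originally arrived through it
        """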
        if self.uplink.node.address == data["exclude_uplink_address"]:
            if data["msg"]["dst"] == MESH_BROADCAST_ADDRESS:
                await self.log_text(
                    self.uplink.node.address, "not forwarding this broadcast message via us since it came from here"
                )
            else:
                await self.log_text(
                    self.uplink.node.address, "we're the route for this message but it came from here so... no"
                )
            return
        await self.send_msg(MeshMessage.fromjson(data["msg"]), data["sender"])

    """
    helper functions
    """

    async def log_received_message(self, src_node: MeshNode, msg: messages.MeshMessage):
        as_json = MeshMessage.tojson(msg)
        await self.channel_layer.group_send("mesh_msg_received", {
            "type": "mesh.msg_received",
            "timestamp": timezone.now().strftime("%d.%m.%y %H:%M:%S.%f"),
            "channel": self.channel_name,
            "uplink": self.uplink.node.address if self.uplink else None,
            "msg": as_json,
        })
        await NodeMessage.objects.acreate(
            uplink=self.uplink,
            src_node=src_node,
            message_type=msg.msg_type.name,
            data=as_json,
        )

    async def log_text(self, address, text):
        address = getattr(address, 'address', address)
        await self.channel_layer.group_send("mesh_log", {
            "type": "mesh.log_entry",
            "timestamp": timezone.now().strftime("%d.%m.%y %H:%M:%S.%f"),
            "channel": self.channel_name,
            "uplink": self.uplink.node.address if self.uplink else None,
            "node": address,
            "text": text,
        })
        print("MESH %s: [%s] %s" % (self.uplink.node if self.uplink else None, address, text))

    async def add_dst_nodes(self, nodes=None, addresses=None):
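        """
        register the given nodes/addresses as destinations reachable via this uplink,
        creating unknown nodes and asking each destination to dump its current config
        """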
        nodes = list(nodes) if nodes else []
        addresses = set(addresses) if addresses else set()

        node_addresses = set(node.address for node in nodes)
        missing_addresses = addresses - node_addresses

        if missing_addresses:
            await MeshNode.objects.abulk_create(
                [MeshNode(address=address) for address in missing_addresses],
                ignore_conflicts=True
            )

        addresses |= node_addresses
        addresses |= missing_addresses

        for address in addresses:
            await self.log_text(address, "destination added")

            # add ourselves as uplink
            await self._add_destination(address)

            # tell the node to dump its current information
            await self.send_msg(
                messages.ConfigDumpMessage(
                    src=messages.MESH_ROOT_ADDRESS,
                    dst=address,
                )
            )

    @database_sync_to_async
    def _add_destination(self, address):
        with transaction.atomic():
            node = MeshNode.objects.select_for_update().get(address=address)

            # update database
            node.uplink = self.uplink
            node.last_signin = timezone.now()
            node.save()

            # tell other consumers that it's us now
            async_to_sync(self.channel_layer.group_send)(MESH_ALL_UPLINKS_GROUP, {
                "type": "mesh.dst_node_uplink",
                "node": address,
                "uplink": self.channel_name
            })

            # if we aren't handling this address yet, write it down
            if address not in self.dst_nodes:
                self.dst_nodes.add(address)

    async def remove_dst_nodes(self, addresses):
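        """
        unregister the given addresses as destinations of this uplink
        """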
        for address in tuple(addresses):
            await self.log_text(address, "destination removed")

            await self._remove_destination(address)

    @database_sync_to_async
    def _remove_destination(self, address):
        with transaction.atomic():
            try:
                node = MeshNode.objects.select_for_update().get(address=address, uplink=self.uplink)
            except MeshNode.DoesNotExist:
                pass
            else:
                node.uplink = None
                node.save()

            # no longer serving this node
            if address in self.dst_nodes:
                self.dst_nodes.discard(address)


class MeshUIConsumer(AsyncJsonWebsocketConsumer):
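    """
    Consumer for the mesh control UI: relays mesh log entries and sent/received
    message events (with per-subscription filters) and lets the UI send messages.
    """
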
    def __init__(self):
        super().__init__()
        self.msg_sent_filter = {}
        self.msg_received_filter = {}

    async def connect(self):
        if not self.scope["user_permisions"].mesh_control:
            raise DenyConnection
        await self.accept()

    async def receive_json(self, content, **kwargs):
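        """
        handle UI commands: subscribe to log/msg_sent/msg_received events or send a message prepared in the session
        """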
        if content.get("subscribe", None) == "log":
            await self.channel_layer.group_add("mesh_log", self.channel_name)
        if content.get("subscribe", None) == "msg_sent":
            await self.channel_layer.group_add("mesh_msg_sent", self.channel_name)
            self.msg_sent_filter = dict(content.get("filter", {}))
        if content.get("subscribe", None) == "msg_received":
            await self.channel_layer.group_add("mesh_msg_received", self.channel_name)
            self.msg_received_filter = dict(content.get("filter", {}))
        if "send_msg" in content:
            msg_to_send = self.scope["session"].pop("mesh_msg_%s" % content["send_msg"], None)
            if not msg_to_send:
                return
            self.scope["session"].save()

            await self.channel_layer.group_add("mesh_msg_sent", self.channel_name)
            self.msg_sent_filter = {"sender": self.channel_name}

            if msg_to_send["msg_data"]["msg_type"] == MeshMessageType.MESH_ROUTE_REQUEST.name:
                await self.channel_layer.group_add("mesh_msg_received", self.channel_name)
                self.msg_received_filter = {"request_id": msg_to_send["msg_data"]["request_id"]}

            for recipient in msg_to_send["recipients"]:
                await MeshMessage.fromjson({
                    'dst': recipient,
                    **msg_to_send["msg_data"],
                }).send(sender=self.channel_name)

    async def mesh_log_entry(self, data):
        await self.send_json(data)

    async def mesh_msg_sent(self, data):
        for key, value in self.msg_sent_filter.items():
            if isinstance(value, list):
                if data.get(key, None) not in value:
                    return
            else:
                if data.get(key, None) != value:
                    return
        await self.send_json(data)

    async def mesh_msg_received(self, data):
        for key, filter_value in self.msg_received_filter.items():
            value = data.get(key, data["msg"].get(key, None))
            if isinstance(filter_value, list):
                if value not in filter_value:
                    return
            else:
                if value != filter_value:
                    return
        await self.send_json(data)

    async def disconnect(self, code):
        await self.channel_layer.group_discard("mesh_log", self.channel_name)
        await self.channel_layer.group_discard("mesh_msg_sent", self.channel_name)
        await self.channel_layer.group_discard("mesh_msg_received", self.channel_name)