mirror of https://github.com/daniviga/bite.git synced 2024-11-24 22:06:13 +01:00

Compare commits


No commits in common. "b4b6294aa7b9a8dd30f98f95507331ee167d0152" and "cc93c5ae7596f1553f78d4e35bcff7f643844396" have entirely different histories.

35 changed files with 130 additions and 314 deletions

View File

@ -4,7 +4,7 @@ Playing with IoT
[![Build Status](https://travis-ci.com/daniviga/bite.svg?branch=master)](https://travis-ci.com/daniviga/bite)
![AGPLv3](./docs/.badges/agpl3.svg)
![Python 3.11](./docs/.badges/python.svg)
![Python 3.9](./docs/.badges/python.svg)
![MQTT](./docs/.badges/mqtt.svg)
![Moby](./docs/.badges/moby.svg)
![docker-compose 3.7+](./docs/.badges/docker-compose.svg)
@ -15,6 +15,11 @@ production.
![Application Schema](./docs/application_chart.png)
### Future implementations
- Broker HA via [VerneMQ clustering](https://docs.vernemq.com/clustering/introduction)
- Stream analytics via [Apache Spark](https://spark.apache.org/)
## Installation
### Requirements
@ -57,10 +62,8 @@ The application stack is composed of the following components:
- [Django](https://www.djangoproject.com/) with
[Django REST framework](https://www.django-rest-framework.org/)
web application (running via `gunicorn` in production mode)
- `dispatcher` custom daemon to dump telemetry into the Kafka queue
- `handler` custom daemon to dump telemetry into the timeseries database from the Kafka queue
- `mqtt-to-db` custom daemon to dump telemetry into the timeseries database
- telemetry payload is stored as json object (via PostgreSQL JSON data type)
- [Kafka](https://kafka.apache.org/) broker
- [Timescale](https://www.timescale.com/) DB,
a [PostgreSQL](https://www.postgresql.org/) database with a timeseries extension
- [Mosquitto](https://mosquitto.org/) MQTT broker (see alternatives below)
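For illustration, a minimal sketch of the device-facing flow these components expose, using `urllib3` as the bundled device simulator does. It assumes a stack running locally on port 8000 with whitelisting disabled (`SKIP_WHITELIST`) and a made-up serial; the endpoint paths are the ones appearing later in this diff.

import json
import urllib3

http = urllib3.PoolManager()
ENDPOINT = "http://127.0.0.1:8000"   # hypothetical local instance
SERIAL = "test1234"                  # hypothetical device serial

def post(url, data):
    # POST a JSON body to the Django REST endpoint and return the response
    return http.request(
        "POST",
        ENDPOINT + url,
        body=json.dumps(data).encode("utf8"),
        headers={"content-type": "application/json"},
    )

# 1. register the device
post("/api/device/subscribe/", {"serial": SERIAL})

# 2. push one telemetry sample; the payload is stored as a JSON object
post("/telemetry/", {
    "device": SERIAL,
    "clock": 1700000000,  # unix timestamp, as the simulator sends it
    "payload": {"temperature": {"celsius": 23.5}},
})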

View File

@ -55,7 +55,7 @@ struct netConfig {
} config;
char serial[9];
const String dpsURL = "/dps/device/subscribe/";
const String apiURL = "/api/device/subscribe/";
const String telemetryURL = "/telemetry/";
void setup(void) {
@ -63,7 +63,7 @@ void setup(void) {
analogReference(EXTERNAL);
StaticJsonDocument<20> dps;
StaticJsonDocument<20> api;
byte mac[6];
int eeAddress = 0;
@ -110,8 +110,8 @@ void setup(void) {
Serial.println("DEBUG: clock updated via NTP.");
#endif
dps["serial"] = serial;
postData(config, dpsURL, dps);
api["serial"] = serial;
postData(config, apiURL, api);
telemetry["device"] = serial;
// payload["id"] = serverName;

View File

@ -18,7 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from dps.models import Device, WhiteList
from api.models import Device, WhiteList
@admin.register(Device)

View File

@ -20,5 +20,5 @@
from django.apps import AppConfig
class DPSConfig(AppConfig):
name = 'dps'
class ApiConfig(AppConfig):
name = 'api'

View File

@ -1,6 +1,6 @@
# Generated by Django 3.1.3 on 2021-03-19 08:08
import dps.models
import api.models
from django.db import migrations, models
import uuid
@ -16,7 +16,7 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Device',
fields=[
('serial', models.CharField(max_length=128, unique=True, validators=[dps.models.device_validation])),
('serial', models.CharField(max_length=128, unique=True, validators=[api.models.device_validation])),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_time', models.DateTimeField(auto_now_add=True)),
('updated_time', models.DateTimeField(auto_now=True)),

View File

@ -18,7 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from rest_framework import serializers
from dps.models import Device, device_validation
from api.models import Device, device_validation
class DeviceSerializer(serializers.ModelSerializer):

View File

@ -18,10 +18,10 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.test import TestCase, Client
from dps.models import Device, WhiteList
from api.models import Device, WhiteList
class DPSTestCase(TestCase):
class ApiTestCase(TestCase):
c = Client()
def setUp(self):
@ -29,17 +29,17 @@ class DPSTestCase(TestCase):
Device.objects.create(serial='test1234')
def test_no_whitelist(self):
response = self.c.post('/dps/device/provision/',
response = self.c.post('/api/device/subscribe/',
{'serial': 'test12345'})
self.assertEqual(response.status_code, 400)
def test_provision_post(self):
def test_subscribe_post(self):
WhiteList.objects.create(serial='test12345')
response = self.c.post('/dps/device/provision/',
response = self.c.post('/api/device/subscribe/',
{'serial': 'test12345'})
self.assertEqual(response.status_code, 201)
def test_provision_get(self):
response = self.c.get('/dps/device/list/')
def test_subscribe_get(self):
response = self.c.get('/api/device/list/')
self.assertEqual(
response.json()[0]['serial'], 'test1234')

View File

@ -33,13 +33,13 @@ Including another URLconf
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from dps.views import DPS
from api.views import APISubscribe
urlpatterns = [
path('device/provision/',
DPS.as_view({'post': 'create'}),
name='device-provision'),
path('device/subscribe/',
APISubscribe.as_view({'post': 'create'}),
name='device-subscribe'),
path('device/list/',
DPS.as_view({'get': 'list'}),
APISubscribe.as_view({'get': 'list'}),
name='device-list'),
]

View File

@ -19,10 +19,10 @@
from rest_framework.viewsets import ModelViewSet
from dps.models import Device
from dps.serializers import DeviceSerializer
from api.models import Device
from api.serializers import DeviceSerializer
class DPS(ModelViewSet):
class APISubscribe(ModelViewSet):
queryset = Device.objects.all()
serializer_class = DeviceSerializer

View File

@ -1,20 +0,0 @@
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'bite',
'USER': 'bite',
'PASSWORD': 'password',
'HOST': 'localhost',
'PORT': '5432',
}
}
MQTT_BROKER = {
'HOST': 'localhost',
'PORT': '1883',
}
KAFKA_BROKER = {
'HOST': 'localhost',
'PORT': '29092',
}

View File

@ -61,7 +61,7 @@ INSTALLED_APPS = [
# 'health_check.storage',
'rest_framework',
'bite',
'dps',
'api',
'telemetry',
]
@ -151,10 +151,6 @@ STATIC_URL = '/static/'
STATIC_ROOT = '/srv/appdata/bite/static'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': []
}
SKIP_WHITELIST = True
MQTT_BROKER = {
@ -162,17 +158,11 @@ MQTT_BROKER = {
'PORT': '1883',
}
KAFKA_BROKER = {
'HOST': 'kafka',
'PORT': '9092',
}
try:
from bite.local_settings import *
except ImportError:
pass
# If no local_settings.py is available in the current folder, let's try to
# load it from the application root
try:
from bite.production import *
except ImportError:
# If a local_settings.py does not exist,
# only the settings in this file will be used
pass
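
Both override hooks follow the same pattern: a module that redefines only the settings that differ from the defaults. As a hypothetical example, a bite/production.py such as the one bind-mounted by the compose files could look like the sketch below; hostnames and credentials are placeholders, and the keys mirror the sample settings shown earlier in this diff.

# bite/production.py -- picked up by the try/import block above.
# Placeholder values; only keys that differ from bite/settings.py are needed.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'bite',
        'USER': 'bite',
        'PASSWORD': 'change-me',
        'HOST': 'timescale',
        'PORT': '5432',
    }
}

MQTT_BROKER = {
    'HOST': 'broker',
    'PORT': '1883',
}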

View File

@ -37,13 +37,13 @@ from django.contrib import admin
from django.conf import settings
from django.urls import include, path
from dps import urls as dps_urls
from api import urls as api_urls
from telemetry import urls as telemetry_urls
urlpatterns = [
path('admin/', admin.site.urls),
path('ht/', include('health_check.urls')),
path('dps/', include(dps_urls)),
path('api/', include(api_urls)),
path('telemetry/', include(telemetry_urls)),
]

View File

@ -1,76 +0,0 @@
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# BITE - A Basic/IoT/Example
# Copyright (C) 2020-2021 Daniele Viganò <daniele@vigano.me>
#
# BITE is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BITE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import time
from kafka import KafkaConsumer
from kafka.errors import NoBrokersAvailable
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.exceptions import ObjectDoesNotExist
from dps.models import Device
from telemetry.models import Telemetry
class Command(BaseCommand):
help = "Telemetry handler"
KAFKA_HOST = settings.KAFKA_BROKER["HOST"]
KAFKA_PORT = int(settings.KAFKA_BROKER["PORT"])
def get_device(self, serial):
try:
return Device.objects.get(serial=serial)
except ObjectDoesNotExist:
return None
def store_telemetry(self, transport, message):
Telemetry.objects.create(
transport=transport,
device=self.get_device(message["device"]),
clock=message["clock"],
payload=message["payload"],
)
def handle(self, *args, **options):
while True:
try:
consumer = KafkaConsumer(
"telemetry",
bootstrap_servers="{}:{}".format(
self.KAFKA_HOST, self.KAFKA_PORT
),
group_id="handler",
value_deserializer=lambda m: json.loads(m.decode("utf8")),
)
break
except NoBrokersAvailable:
self.stdout.write(
self.style.WARNING("WARNING: Kafka broker not available")
)
time.sleep(5)
self.stdout.write(self.style.SUCCESS("INFO: Kafka broker subscribed"))
for message in consumer:
self.store_telemetry(
message.value["transport"], message.value["body"]
)
consumer.unsubscribe()

View File

@ -22,26 +22,22 @@ import asyncio
import json
import time
import paho.mqtt.client as mqtt
from kafka import KafkaProducer
from kafka.errors import NoBrokersAvailable
from asgiref.sync import sync_to_async
from aiomqtt import Client
from asyncio_mqtt import Client
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.exceptions import ObjectDoesNotExist
from dps.models import Device
from api.models import Device
from telemetry.models import Telemetry
MQTT_HOST = settings.MQTT_BROKER['HOST']
MQTT_PORT = int(settings.MQTT_BROKER['PORT'])
class Command(BaseCommand):
help = "Telemetry dispatcher"
MQTT_HOST = settings.MQTT_BROKER["HOST"]
MQTT_PORT = int(settings.MQTT_BROKER["PORT"])
KAFKA_HOST = settings.KAFKA_BROKER["HOST"]
KAFKA_PORT = int(settings.KAFKA_BROKER["PORT"])
producer = None
help = 'MQTT to DB daemon'
@sync_to_async
def get_device(self, serial):
@ -51,54 +47,40 @@ class Command(BaseCommand):
return None
@sync_to_async
def dispatch(self, message):
self.producer.send("telemetry", {"transport": "mqtt", "body": message})
def store_telemetry(self, device, payload):
Telemetry.objects.create(
device=device,
transport='mqtt',
clock=payload['clock'],
payload=payload['payload']
)
async def mqtt_broker(self):
async with Client(self.MQTT_HOST, port=self.MQTT_PORT) as client:
async with Client(MQTT_HOST, port=MQTT_PORT) as client:
# use shared subscription for HA/balancing
await client.subscribe("$share/telemetry/#")
async with client.messages() as messages:
async with client.unfiltered_messages() as messages:
async for message in messages:
payload = json.loads(message.payload.decode('utf-8'))
device = await self.get_device(message.topic)
if device is not None:
message_body = json.loads(
message.payload.decode("utf-8")
)
await self.dispatch(message_body)
await self.store_telemetry(device, payload)
else:
self.stdout.write(
self.style.ERROR("DEBUG: message discarded")
)
self.style.ERROR(
'DEBUG: message discarded'))
def handle(self, *args, **options):
client = mqtt.Client()
while True:
try:
client.connect(self.MQTT_HOST, self.MQTT_PORT)
client.connect(MQTT_HOST, MQTT_PORT)
break
except (socket.gaierror, ConnectionRefusedError):
self.stdout.write(
self.style.WARNING("WARNING: MQTT broker not available")
)
self.style.WARNING('WARNING: Broker not available'))
time.sleep(5)
while True:
try:
self.producer = KafkaProducer(
bootstrap_servers="{}:{}".format(
self.KAFKA_HOST, self.KAFKA_PORT
),
value_serializer=lambda v: json.dumps(v).encode("utf-8"),
retries=5,
)
break
except NoBrokersAvailable:
self.stdout.write(
self.style.WARNING("WARNING: Kafka broker not available")
)
time.sleep(5)
self.stdout.write(self.style.SUCCESS("INFO: Brokers subscribed"))
self.stdout.write(self.style.SUCCESS('INFO: Broker subscribed'))
client.disconnect()
asyncio.run(self.mqtt_broker())

View File

@ -11,7 +11,7 @@ class Migration(migrations.Migration):
initial = True
dependencies = [
('dps', '0001_initial'),
('api', '0001_initial'),
]
operations = [
@ -23,7 +23,7 @@ class Migration(migrations.Migration):
('transport', models.CharField(choices=[('http', 'http'), ('mqtt', 'mqtt')], default='http', max_length=4)),
('clock', models.IntegerField(null=True, validators=[django.core.validators.MinValueValidator(0)])),
('payload', models.JSONField(validators=[telemetry.models.telemetry_validation])),
('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dps.device')),
('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.device')),
],
options={
'verbose_name_plural': 'Telemetry',

View File

@ -21,7 +21,7 @@ from django.db import models
from django.core.validators import MinValueValidator
from django.core.exceptions import ValidationError
from dps.models import Device
from api.models import Device
def telemetry_validation(value):

View File

@ -18,7 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from rest_framework import serializers
from dps.models import Device
from api.models import Device
from telemetry.models import Telemetry

View File

@ -19,7 +19,7 @@
import json
from django.test import TestCase, Client
from dps.models import Device, WhiteList
from api.models import Device, WhiteList
class ApiTestCase(TestCase):

View File

@ -17,20 +17,20 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
FROM python:3.11-alpine AS builder
FROM python:3.9-alpine AS builder
RUN apk update && apk add gcc musl-dev postgresql-dev \
&& pip install psycopg2-binary
# ---
FROM python:3.11-alpine
FROM python:3.9-alpine
ENV PYTHONUNBUFFERED 1
ENV DJANGO_SETTINGS_MODULE "bite.settings"
RUN apk update && apk add --no-cache postgresql-libs \
&& wget https://github.com/jwilder/dockerize/releases/download/v0.7.0/dockerize-alpine-linux-amd64-v0.7.0.tar.gz -qO- \
&& wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-alpine-linux-amd64-v0.6.1.tar.gz -qO- \
| tar -xz -C /usr/local/bin
COPY --from=builder /usr/local/lib/python3.11/site-packages/ /usr/local/lib/python3.11/site-packages/
COPY --from=builder /usr/local/lib/python3.9/site-packages/ /usr/local/lib/python3.9/site-packages/
COPY --chown=1000:1000 requirements.txt /srv/app/bite/requirements.txt
RUN pip3 install --no-cache-dir -r /srv/app/bite/requirements.txt

View File

@ -36,13 +36,6 @@ services:
ports:
- "${CUSTOM_DOCKER_IP:-0.0.0.0}:8000:8000"
kafka:
environment:
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
ports:
- "${CUSTOM_DOCKER_IP:-0.0.0.0}:29092:29092"
data-migration:
volumes:
- ../bite:/srv/app/bite
@ -51,10 +44,6 @@ services:
volumes:
- ../bite:/srv/app/bite
dispatcher:
volumes:
- ../bite:/srv/app/bite
handler:
mqtt-to-db:
volumes:
- ../bite:/srv/app/bite

View File

@ -29,10 +29,6 @@ services:
volumes:
- ./django/production.py.sample:/srv/app/bite/bite/production.py
dispatcher:
volumes:
- ./django/production.py.sample:/srv/app/bite/bite/production.py
handler:
mqtt-to-db:
volumes:
- ./django/production.py.sample:/srv/app/bite/bite/production.py

View File

@ -62,28 +62,6 @@ services:
ports:
- "${CUSTOM_DOCKER_IP:-0.0.0.0}:1883:1883"
zookeeper:
image: confluentinc/cp-zookeeper:latest
networks:
- net
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
kafka:
image: confluentinc/cp-kafka:latest
depends_on:
- zookeeper
networks:
- net
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
ingress:
<<: *service_default
image: nginx:stable-alpine
@ -126,21 +104,13 @@ services:
- "staticdata:/srv/appdata/bite/static:U" # REMOVE ':U' ON MOBY/DOCKER
command: ["python3", "manage.py", "collectstatic", "--noinput"]
dispatcher:
mqtt-to-db:
<<: *service_default
image: daniviga/bite
command: ["python3", "manage.py", "dispatcher"]
networks:
- net
depends_on:
- broker
handler:
<<: *service_default
image: daniviga/bite
command: ["python3", "manage.py", "handler"]
command: ["python3", "manage.py", "mqtt-to-db"]
networks:
- net
depends_on:
- data-migration
- timescale
- broker

View File

@ -17,9 +17,9 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
FROM alpine:3.18
FROM alpine:3.15
RUN apk add --no-cache chrony && \
RUN apk update && apk add chrony && \
chown -R chrony:chrony /var/lib/chrony
COPY ./chrony.conf /etc/chrony/chrony.conf

View File

@ -17,7 +17,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
FROM python:3.11-alpine
FROM python:3.9-alpine
RUN pip3 install urllib3 paho-mqtt
COPY ./device_simulator.py /opt/bite/device_simulator.py

View File

@ -29,7 +29,7 @@ import argparse
from time import sleep
import paho.mqtt.publish as publish
DEBUG = bool(os.environ.get("IOT_DEBUG", False))
DEBUG = bool(os.environ.get('IOT_DEBUG', False))
http = urllib3.PoolManager()
@ -39,16 +39,15 @@ def post_json(endpoint, url, data):
if DEBUG:
print(json_data)
encoded_data = json_data.encode("utf8")
encoded_data = json_data.encode('utf8')
while True:
try:
r = http.request(
"POST",
'POST',
endpoint + url,
body=encoded_data,
headers={"content-type": "application/json"},
)
headers={'content-type': 'application/json'})
return r
except urllib3.exceptions.MaxRetryError:
pass
@ -58,89 +57,74 @@ def post_json(endpoint, url, data):
def publish_json(transport, endpoint, data):
json_data = json.dumps(data)
serial = data["device"]
serial = data['device']
if DEBUG:
print(json_data)
encoded_data = json_data.encode("utf8")
encoded_data = json_data.encode('utf8')
publish.single(
topic=serial,
payload=encoded_data,
hostname=endpoint.split(":")[0],
port=int(endpoint.split(":")[1]),
hostname=endpoint.split(':')[0],
port=int(endpoint.split(':')[1]),
client_id=serial,
transport=("websockets" if transport == "ws" else "tcp"),
transport=('websockets' if transport == 'ws' else 'tcp'),
# auth=auth FIXME
)
def main():
parser = argparse.ArgumentParser(description="IoT simulator options")
parser = argparse.ArgumentParser(
description='IoT simulator options')
parser.add_argument(
"-e",
"--endpoint",
default=os.environ.get("IOT_HTTP", "http://127.0.0.1:8000"),
help="IoT HTTP endpoint",
)
parser.add_argument(
"-m",
"--mqtt",
default=os.environ.get("IOT_MQTT", "127.0.0.1:1883"),
help="IoT MQTT endpoint",
)
parser.add_argument(
"-t",
"--transport",
choices=["mqtt", "ws", "http"],
default=os.environ.get("IOT_TL", "http"),
help="IoT transport layer",
)
parser.add_argument(
"-s",
"--serial",
default=os.environ.get("IOT_SERIAL"),
help="IoT device serial number",
)
parser.add_argument(
"-d",
"--delay",
metavar="s",
type=float,
default=os.environ.get("IOT_DELAY", 10),
help="Delay between requests",
)
parser.add_argument('-e', '--endpoint',
default=os.environ.get('IOT_HTTP',
'http://127.0.0.1:8000'),
help='IoT HTTP endpoint')
parser.add_argument('-m', '--mqtt',
default=os.environ.get('IOT_MQTT',
'127.0.0.1:1883'),
help='IoT MQTT endpoint')
parser.add_argument('-t', '--transport',
choices=['mqtt', 'ws', 'http'],
default=os.environ.get('IOT_TL', 'http'),
help='IoT transport layer')
parser.add_argument('-s', '--serial',
default=os.environ.get('IOT_SERIAL'),
help='IoT device serial number')
parser.add_argument('-d', '--delay', metavar='s', type=int,
default=os.environ.get('IOT_DELAY', 10),
help='Delay between requests')
args = parser.parse_args()
dps = "/dps/device/provision/"
telemetry = "/telemetry/"
subscribe = '/api/device/subscribe/'
telemetry = '/telemetry/'
if args.serial is None:
args.serial = "".join(
random.choices(string.ascii_lowercase + string.digits, k=8)
)
args.serial = ''.join(
random.choices(string.ascii_lowercase + string.digits, k=8))
data = {"serial": args.serial}
post_json(args.endpoint, dps, data)
data = {'serial': args.serial}
post_json(args.endpoint, subscribe, data)
while True:
data = {
"device": args.serial,
"clock": int(datetime.datetime.now().timestamp()),
'device': args.serial,
'clock': int(datetime.datetime.now().timestamp()),
}
payload = {
"id": "device_simulator",
"light": random.randint(300, 500),
"temperature": {"celsius": round(random.uniform(20, 28), 1)},
'id': 'device_simulator',
'light': random.randint(300, 500),
'temperature': {
'celsius': round(random.uniform(20, 28), 1)}
}
if args.transport == "http":
post_json(args.endpoint, telemetry, {**data, "payload": payload})
elif args.transport in ("mqtt", "ws"):
if args.transport == 'http':
post_json(args.endpoint, telemetry, {**data, 'payload': payload})
elif args.transport in ('mqtt', 'ws'):
publish_json(
args.transport, args.mqtt, {**data, "payload": payload}
)
args.transport, args.mqtt, {**data, 'payload': payload})
else:
raise NotImplementedError
sleep(args.delay)
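
For a quick test, the simulator can also be driven through the environment variables it falls back to (IOT_HTTP, IOT_MQTT, IOT_TL, IOT_SERIAL, IOT_DELAY). A hypothetical launcher, assuming device_simulator.py sits in the working directory and the stack listens on the default ports:

import os
import subprocess

env = {
    **os.environ,
    "IOT_HTTP": "http://127.0.0.1:8000",  # HTTP endpoint (subscribe + http transport)
    "IOT_MQTT": "127.0.0.1:1883",         # MQTT endpoint (mqtt/ws transports)
    "IOT_TL": "mqtt",                     # transport: http, mqtt or ws
    "IOT_DELAY": "5",                     # seconds between telemetry samples
}
# Runs until interrupted; a random serial is generated when IOT_SERIAL is unset.
subprocess.run(["python3", "device_simulator.py"], env=env, check=True)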

View File

@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="93.0" height="20"><linearGradient id="smooth" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="round"><rect width="93.0" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#round)"><rect width="65.5" height="20" fill="#555"/><rect x="65.5" width="27.5" height="20" fill="#007ec6"/><rect width="93.0" height="20" fill="url(#smooth)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><image x="5" y="3" width="14" height="14" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIj4KICA8ZGVmcz4KICAgIDxsaW5lYXJHcmFkaWVudCBpZD0icHlZZWxsb3ciIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2ZlNSIgb2Zmc2V0PSIwLjYiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2RhMSIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogICAgPGxpbmVhckdyYWRpZW50IGlkPSJweUJsdWUiIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzY5ZiIgb2Zmc2V0PSIwLjQiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzQ2OCIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogIDwvZGVmcz4KCiAgPHBhdGggZD0iTTI3LDE2YzAtNyw5LTEzLDI0LTEzYzE1LDAsMjMsNiwyMywxM2wwLDIyYzAsNy01LDEyLTExLDEybC0yNCwwYy04LDAtMTQsNi0xNCwxNWwwLDEwbC05LDBjLTgsMC0xMy05LTEzLTI0YzAtMTQsNS0yMywxMy0yM2wzNSwwbDAtM2wtMjQsMGwwLTlsMCwweiBNODgsNTB2MSIgZmlsbD0idXJsKCNweUJsdWUpIi8+CiAgPHBhdGggZD0iTTc0LDg3YzAsNy04LDEzLTIzLDEzYy0xNSwwLTI0LTYtMjQtMTNsMC0yMmMwLTcsNi0xMiwxMi0xMmwyNCwwYzgsMCwxNC03LDE0LTE1bDAtMTBsOSwwYzcsMCwxMyw5LDEzLDIzYzAsMTUtNiwyNC0xMywyNGwtMzUsMGwwLDNsMjMsMGwwLDlsMCwweiBNMTQwLDUwdjEiIGZpbGw9InVybCgjcHlZZWxsb3cpIi8+CgogIDxjaXJjbGUgcj0iNCIgY3g9IjY0IiBjeT0iODgiIGZpbGw9IiNGRkYiLz4KICA8Y2lyY2xlIHI9IjQiIGN4PSIzNyIgY3k9IjE1IiBmaWxsPSIjRkZGIi8+Cjwvc3ZnPgo="/><text x="422.5" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="422.5" y="140" transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="782.5" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="175.0" lengthAdjust="spacing">3.11</text><text x="782.5" y="140" transform="scale(0.1)" textLength="175.0" lengthAdjust="spacing">3.11</text></g></svg>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="93.0" height="20"><linearGradient id="smooth" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="round"><rect width="93.0" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#round)"><rect width="65.5" height="20" fill="#555"/><rect x="65.5" width="27.5" height="20" fill="#007ec6"/><rect width="93.0" height="20" fill="url(#smooth)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><image x="5" y="3" width="14" height="14" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIj4KICA8ZGVmcz4KICAgIDxsaW5lYXJHcmFkaWVudCBpZD0icHlZZWxsb3ciIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2ZlNSIgb2Zmc2V0PSIwLjYiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2RhMSIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogICAgPGxpbmVhckdyYWRpZW50IGlkPSJweUJsdWUiIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzY5ZiIgb2Zmc2V0PSIwLjQiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzQ2OCIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogIDwvZGVmcz4KCiAgPHBhdGggZD0iTTI3LDE2YzAtNyw5LTEzLDI0LTEzYzE1LDAsMjMsNiwyMywxM2wwLDIyYzAsNy01LDEyLTExLDEybC0yNCwwYy04LDAtMTQsNi0xNCwxNWwwLDEwbC05LDBjLTgsMC0xMy05LTEzLTI0YzAtMTQsNS0yMywxMy0yM2wzNSwwbDAtM2wtMjQsMGwwLTlsMCwweiBNODgsNTB2MSIgZmlsbD0idXJsKCNweUJsdWUpIi8+CiAgPHBhdGggZD0iTTc0LDg3YzAsNy04LDEzLTIzLDEzYy0xNSwwLTI0LTYtMjQtMTNsMC0yMmMwLTcsNi0xMiwxMi0xMmwyNCwwYzgsMCwxNC03LDE0LTE1bDAtMTBsOSwwYzcsMCwxMyw5LDEzLDIzYzAsMTUtNiwyNC0xMywyNGwtMzUsMGwwLDNsMjMsMGwwLDlsMCwweiBNMTQwLDUwdjEiIGZpbGw9InVybCgjcHlZZWxsb3cpIi8+CgogIDxjaXJjbGUgcj0iNCIgY3g9IjY0IiBjeT0iODgiIGZpbGw9IiNGRkYiLz4KICA8Y2lyY2xlIHI9IjQiIGN4PSIzNyIgY3k9IjE1IiBmaWxsPSIjRkZGIi8+Cjwvc3ZnPgo="/><text x="422.5" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="422.5" y="140" transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="782.5" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="175.0" lengthAdjust="spacing">3.9</text><text x="782.5" y="140" transform="scale(0.1)" textLength="175.0" lengthAdjust="spacing">3.9</text></g></svg>

Before: 2.4 KiB
After: 2.4 KiB

Binary file not shown.

Binary file not shown.

Before: 122 KiB
After: 101 KiB

View File

@ -55,13 +55,13 @@ struct netConfig {
} config;
char* serial;
const String dpsURL = "/dps/device/subscribe/";
const String apiURL = "/api/device/subscribe/";
const String telemetryURL = "/telemetry/";
void setup(void) {
Serial.begin(115200);
StaticJsonDocument<64> dps;
StaticJsonDocument<64> api;
preferences.begin("iot");
// Get the serial number from flash
@ -117,8 +117,8 @@ void setup(void) {
Serial.println("DEBUG: clock updated via NTP.");
#endif
dps["serial"] = serial;
postData(config, dpsURL, dps);
api["serial"] = serial;
postData(config, apiURL, api);
telemetry["device"] = serial;
// payload["id"] = serverName;

View File

@ -4,4 +4,3 @@ ipython
flake8
pyinstrument
django-debug-toolbar
urllib3

View File

@ -4,8 +4,7 @@ djangorestframework
django-health-check
psycopg2-binary
paho-mqtt
kafka-python
aiomqtt
asyncio-mqtt
PyYAML
uritemplate
pygments