Mirror of https://github.com/daniviga/django-ram.git, synced 2025-08-07 06:37:50 +02:00

Compare commits: better-doc...asset-mqtt (4 commits)

Commits: d8f406d628, 05f7bf582c, c8cc8c5ed0, e80dc604a7

Submodule arduino/CommandStation-EX updated: 13488e1e93...911bbd63be
Submodule arduino/dcc-ex.github.io updated: 9acc446358...a0f886b69f
Submodule arduino/vim-arduino updated: 111db616db...2ded67cdf0

monitoring/README.md (new file, 129 lines)

@@ -0,0 +1,129 @@

# Asset telemetry monitoring

> [!CAUTION]
> This is a PoC, not suitable for real-world use due to the lack of any authentication and security.

## Pre-requisites

- Python 3.12
- Podman (or Docker)

## Architecture

The `dispatcher.py` script collects data (`cab` commands) from a CommandStation and sends it to an MQTT broker.

The command being monitored is the `<l cab reg speedByte functMap>` one, returned by the `<t cab speed dir>` throttle command. See the [DCC-EX command reference](https://dcc-ex.com/reference/software/command-summary-consolidated.html#t-cab-speed-dir-set-cab-loco-speed).
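For example, per the decoding implemented in `dispatcher.py`, an illustrative reply such as `<l 3 0 142 0>` is parsed as cab `3` with speed byte `142`, i.e. speed `13` running forward, and is published on the `telemetry/commandstation` topic as `{"cab": 3, "speed": 13, "dir": "f"}`.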
`mosquitto` is the MQTT broker.

The `handler.py` script subscribes to the MQTT broker and saves relevant data to the Timescale database.

Data is finally saved into a Timescale hypertable.
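To eyeball what the dispatcher publishes before involving the database, a throw-away subscriber can be pointed at the broker. This is a minimal sketch using `paho-mqtt` (already in `requirements.txt`); it assumes the default `localhost:1883` broker and the `telemetry/commandstation` topic used by the scripts:

```python
#!/usr/bin/env python3
# Minimal MQTT eavesdropper: print every telemetry message published by
# dispatcher.py. Assumes the broker runs on localhost:1883 and the topic
# is "telemetry/commandstation", as configured in this PoC.
import paho.mqtt.client as mqtt


def on_message(client, userdata, msg):
    print(msg.topic, msg.payload.decode("utf-8"))


client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
client.on_message = on_message
client.connect("localhost", 1883)
client.subscribe("telemetry/commandstation")
client.loop_forever()
```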
## How to run

### Deploy Timescale

```bash
$ podman run -d -p 5432:5432 -v $(pwd)/data:/var/lib/postgresql/data -e "POSTGRES_USER=dccmonitor" -e "POSTGRES_PASSWORD=dccmonitor" --name timescale timescale/timescaledb:latest-pg17
```

> [!IMPORTANT]
> A volume should be created for persistent data.

Tables and hypertables are automatically created by the `handler.py` script.

### Deploy Mosquitto

```bash
$ podman run --userns=keep-id -d -p 1883:1883 -v $(pwd)/config/mosquitto.conf:/mosquitto/config/mosquitto.conf --name mosquitto eclipse-mosquitto:2.0
```

### Run the dispatcher and the handler

```bash
$ python dispatcher.py
```

```bash
$ python handler.py
```
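Both scripts read the `DCC_LOGLEVEL` environment variable to set their logging verbosity (default `INFO`), so running e.g. `DCC_LOGLEVEL=DEBUG python dispatcher.py` logs every published message.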
## Debug data in Timescale
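The raw samples can also be inspected straight from Python. This is a minimal sketch using `psycopg2` (also in `requirements.txt`); it assumes the `dccmonitor`/`dccmonitor` credentials on `localhost` used throughout this PoC and the `telemetry` table (`timestamp`, `cab`, `speed`, `dir`) created by `handler.py`:

```python
#!/usr/bin/env python3
# Quick check: print the last few telemetry samples stored by handler.py.
# Assumes the dccmonitor/dccmonitor credentials and the localhost host
# used throughout this PoC.
import psycopg2

conn = psycopg2.connect(
    dbname="dccmonitor", user="dccmonitor",
    password="dccmonitor", host="localhost",
)
with conn, conn.cursor() as cur:
    cur.execute(
        "SELECT timestamp, cab, speed, dir FROM telemetry "
        "ORDER BY timestamp DESC LIMIT 10"
    )
    for row in cur.fetchall():
        print(row)
conn.close()
```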
### Create a 10 secs aggregated data table

```sql
CREATE MATERIALIZED VIEW telemetry_10secs
WITH (timescaledb.continuous) AS
SELECT
    time_bucket('10 seconds', timestamp) AS bucket,
    cab,
    ROUND(CAST(AVG(speed) AS NUMERIC), 1) AS avg_speed,
    MIN(speed) AS min_speed,
    MAX(speed) AS max_speed
FROM telemetry
GROUP BY bucket, cab;
```

and set the update policy:

```sql
SELECT add_continuous_aggregate_policy(
    'telemetry_10secs',
    start_offset => INTERVAL '1 hour',        -- Go back 1 hour for updates
    end_offset => INTERVAL '1 minute',        -- Keep the latest minute fresh
    schedule_interval => INTERVAL '1 minute'  -- Run every minute
);
```
### Running statistics from the 10 seconds table

```sql
WITH speed_durations AS (
    SELECT
        cab,
        avg_speed,
        max_speed,
        bucket AS start_time,
        LEAD(bucket) OVER (
            PARTITION BY cab ORDER BY bucket
        ) AS end_time,
        LEAD(bucket) OVER (PARTITION BY cab ORDER BY bucket) - bucket AS duration
    FROM telemetry_10secs
)
SELECT * FROM speed_durations WHERE end_time IS NOT NULL;
```

and filtered by `cab` number, via a function:

```sql
CREATE FUNCTION get_speed_durations(cab_id INT)
RETURNS TABLE (
    cab INT,
    avg_speed NUMERIC,
    max_speed DOUBLE PRECISION,
    start_time TIMESTAMPTZ,
    end_time TIMESTAMPTZ,
    duration INTERVAL
)
AS $$
    WITH speed_durations AS (
        SELECT
            cab,
            avg_speed,
            max_speed,
            bucket AS start_time,
            LEAD(bucket) OVER (
                PARTITION BY cab ORDER BY bucket
            ) AS end_time,
            LEAD(bucket) OVER (PARTITION BY cab ORDER BY bucket) - bucket AS duration
        FROM telemetry_10secs
    )
    SELECT * FROM speed_durations WHERE end_time IS NOT NULL AND cab = cab_id;
$$ LANGUAGE sql;

-- Refresh data
CALL refresh_continuous_aggregate('telemetry_10secs', NULL, NULL);
SELECT * FROM get_speed_durations(1);
```
monitoring/compose.yml (new file, 36 lines)

@@ -0,0 +1,36 @@

# -*- coding: utf-8 -*-
# vim: tabstop=2 shiftwidth=2 softtabstop=2
networks:
  net:

volumes:
  pgdata:
  staticdata:

x-op-service-default: &service_default
  restart: always  # unless-stopped
  init: true

services:
  timescale:
    <<: *service_default
    image: timescale/timescaledb:latest-pg17
    ports:
      - "${CUSTOM_DOCKER_IP:-0.0.0.0}:5432:5432"
    environment:
      POSTGRES_USER: "dccmonitor"
      POSTGRES_PASSWORD: "dccmonitor"
    volumes:
      - "pgdata:/var/lib/postgresql/data"
    networks:
      - net

  broker:
    <<: *service_default
    image: eclipse-mosquitto:2.0
    ports:
      - "${CUSTOM_DOCKER_IP:-0.0.0.0}:1883:1883"
    volumes:
      - "./config/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro"
    networks:
      - net
monitoring/config/mosquitto.conf (new file, 2 lines)

@@ -0,0 +1,2 @@

allow_anonymous true
listener 1883
monitoring/dispatcher.py (new executable file, 107 lines)

@@ -0,0 +1,107 @@

#!/usr/bin/env python3

import os
import time
import json
import socket
import logging
import paho.mqtt.client as mqtt

# FIXME: create a configuration
# TCP Socket Configuration
TCP_HOST = "192.168.10.110"  # Replace with your TCP server IP
TCP_PORT = 2560  # Replace with your TCP server port

# FIXME: create a configuration
# MQTT Broker Configuration
MQTT_BROKER = "localhost"
MQTT_PORT = 1883
MQTT_TOPIC = "telemetry/commandstation"

# Connect to MQTT Broker
mqtt_client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)


# Connect function with automatic reconnection
def connect_mqtt():
    while True:
        try:
            mqtt_client.connect(MQTT_BROKER, MQTT_PORT, keepalive=60)
            mqtt_client.loop_start()  # Start background loop
            logging.info("Connected to MQTT broker!")
            return
        except Exception as e:
            logging.info(f"Connection failed: {e}. Retrying in 5 seconds...")
            time.sleep(5)  # Wait before retrying


# Ensure connection before publishing
def safe_publish(topic, message):
    if not mqtt_client.is_connected():
        logging.warning("MQTT disconnected! Reconnecting...")
        connect_mqtt()  # Reconnect if disconnected

    result = mqtt_client.publish(topic, message, qos=1)
    result.wait_for_publish()  # Ensure message is published
    logging.debug(f"Published: {message}")


def process_message(message):
    """Parse the '<l cab reg speedByte functMap>' format and convert it to JSON."""
    if not message.startswith("<l"):
        return None

    parts = message.strip().split()  # Split by spaces
    if len(parts) != 5:
        logging.debug(f"Invalid speed command: {message}")
        return None

    _, _cab, _, _speed, _ = parts  # Keep only the cab and the speed byte
    cab = int(_cab)
    speed = int(_speed)
    if speed > 1 and speed < 128:
        direction = "r"
        speed = speed - 1
    elif speed > 129 and speed < 256:
        direction = "f"
        speed = speed - 129
    else:
        speed = 0
        direction = "n"

    try:
        json_data = {
            "cab": cab,
            "speed": speed,
            "dir": direction
        }
        return json_data
    except ValueError as e:
        logging.error(f"Error parsing message: {e}")
        return None


def start_tcp_listener():
    """Listen for incoming TCP messages and publish them to MQTT."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.connect((TCP_HOST, TCP_PORT))
        logging.info(
            f"Connected to TCP server at {TCP_HOST}:{TCP_PORT}"
        )

        while True:
            data = sock.recv(1024).decode("utf-8")  # Read a chunk of data
            if not data:
                break

            lines = data.strip().split("\n")  # Handle multiple lines
            for line in lines:
                json_data = process_message(line)
                if json_data:
                    safe_publish(MQTT_TOPIC, json.dumps(json_data))


# Start the listener
if __name__ == "__main__":
    logging.basicConfig(level=os.getenv("DCC_LOGLEVEL", "INFO").upper())
    start_tcp_listener()
monitoring/handler.py (new executable file, 87 lines)

@@ -0,0 +1,87 @@

#!/usr/bin/env python3

import os
import json
import logging
import datetime
import psycopg2
import paho.mqtt.client as mqtt

# MQTT Broker Configuration
MQTT_BROKER = "localhost"
MQTT_PORT = 1883
MQTT_TOPIC = "telemetry/commandstation"

# TimescaleDB Configuration
DB_HOST = "localhost"
DB_NAME = "dccmonitor"
DB_USER = "dccmonitor"
DB_PASSWORD = "dccmonitor"


# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, reason_code, properties):
    logging.info(f"Connected with result code {reason_code}")
    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe(MQTT_TOPIC)


# MQTT Callback: When a new message arrives
def on_message(client, userdata, msg):
    try:
        payload = json.loads(msg.payload.decode("utf-8"))
        cab = payload["cab"]
        speed = payload["speed"]
        direction = payload["dir"]
        timestamp = datetime.datetime.now(datetime.UTC)

        # Insert into TimescaleDB
        cur.execute(
            "INSERT INTO telemetry (timestamp, cab, speed, dir) VALUES (%s, %s, %s, %s)",  # noqa: E501
            (timestamp, cab, speed, direction),
        )
        conn.commit()
        logging.debug(
            f"Inserted: {timestamp} | Cab: {cab} | Speed: {speed} | Dir: {direction}"  # noqa: E501
        )

    except Exception as e:
        logging.error(f"Error processing message: {e}")


if __name__ == "__main__":
    logging.basicConfig(level=os.getenv("DCC_LOGLEVEL", "INFO").upper())

    # Connect to TimescaleDB
    conn = psycopg2.connect(
        dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST
    )
    cur = conn.cursor()

    # Ensure hypertable exists
    cur.execute("""
        CREATE TABLE IF NOT EXISTS telemetry (
            timestamp TIMESTAMPTZ NOT NULL DEFAULT now(),
            cab INT NOT NULL,
            speed DOUBLE PRECISION NOT NULL,
            dir TEXT NOT NULL
        );
    """)
    conn.commit()

    # Convert table to hypertable if not already
    cur.execute("SELECT EXISTS (SELECT 1 FROM timescaledb_information.hypertables WHERE hypertable_name = 'telemetry');")  # noqa: E501
    if not cur.fetchone()[0]:
        cur.execute("SELECT create_hypertable('telemetry', 'timestamp');")
        conn.commit()

    # Setup MQTT Client
    client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
    client.on_connect = on_connect
    client.on_message = on_message
    client.connect(MQTT_BROKER, MQTT_PORT)

    # Start listening for messages
    logging.info(f"Listening for MQTT messages on {MQTT_TOPIC}...")
    client.loop_forever()
monitoring/requirements.txt (new file, 2 lines)

@@ -0,0 +1,2 @@

paho-mqtt
psycopg2-binary
@@ -8,10 +8,10 @@ from adminsortable2.admin import SortableAdminBase, SortableInlineAdminMixin
|
||||
from ram.admin import publish, unpublish
|
||||
from ram.utils import generate_csv
|
||||
from portal.utils import get_site_conf
|
||||
from repository.models import BookDocument, CatalogDocument
|
||||
from bookshelf.models import (
|
||||
BaseBookProperty,
|
||||
BaseBookImage,
|
||||
BaseBookDocument,
|
||||
Book,
|
||||
Author,
|
||||
Publisher,
|
||||
@@ -28,13 +28,6 @@ class BookImageInline(SortableInlineAdminMixin, admin.TabularInline):
|
||||
verbose_name = "Image"
|
||||
|
||||
|
||||
class BookDocInline(admin.TabularInline):
|
||||
model = BaseBookDocument
|
||||
min_num = 0
|
||||
extra = 0
|
||||
classes = ["collapse"]
|
||||
|
||||
|
||||
class BookPropertyInline(admin.TabularInline):
|
||||
model = BaseBookProperty
|
||||
min_num = 0
|
||||
@@ -44,6 +37,17 @@ class BookPropertyInline(admin.TabularInline):
|
||||
verbose_name_plural = "Properties"
|
||||
|
||||
|
||||
class BookDocInline(admin.TabularInline):
|
||||
model = BookDocument
|
||||
min_num = 0
|
||||
extra = 0
|
||||
classes = ["collapse"]
|
||||
|
||||
|
||||
class CatalogDocInline(BookDocInline):
|
||||
model = CatalogDocument
|
||||
|
||||
|
||||
@admin.register(Book)
|
||||
class BookAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
inlines = (
|
||||
@@ -60,7 +64,7 @@ class BookAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
"published",
|
||||
)
|
||||
autocomplete_fields = ("authors", "publisher", "shop")
|
||||
readonly_fields = ("creation_time", "updated_time")
|
||||
readonly_fields = ("invoices", "creation_time", "updated_time")
|
||||
search_fields = ("title", "publisher__name", "authors__last_name")
|
||||
list_filter = ("publisher__name", "authors")
|
||||
|
||||
@@ -89,6 +93,7 @@ class BookAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
"shop",
|
||||
"purchase_date",
|
||||
"price",
|
||||
"invoices",
|
||||
)
|
||||
},
|
||||
),
|
||||
@@ -115,6 +120,17 @@ class BookAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
)
|
||||
return form
|
||||
|
||||
@admin.display(description="Invoices")
|
||||
def invoices(self, obj):
|
||||
if obj.invoice.exists():
|
||||
html = "<br>".join(
|
||||
"<a href=\"{}\" target=\"_blank\">{}</a>".format(
|
||||
i.file.url, i
|
||||
) for i in obj.invoice.all())
|
||||
else:
|
||||
html = "-"
|
||||
return format_html(html)
|
||||
|
||||
@admin.display(description="Publisher")
|
||||
def get_publisher(self, obj):
|
||||
return obj.publisher.name
|
||||
@@ -200,7 +216,7 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
inlines = (
|
||||
BookPropertyInline,
|
||||
BookImageInline,
|
||||
BookDocInline,
|
||||
CatalogDocInline,
|
||||
)
|
||||
list_display = (
|
||||
"__str__",
|
||||
@@ -210,7 +226,7 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
"published",
|
||||
)
|
||||
autocomplete_fields = ("manufacturer",)
|
||||
readonly_fields = ("creation_time", "updated_time")
|
||||
readonly_fields = ("invoices", "creation_time", "updated_time")
|
||||
search_fields = ("manufacturer__name", "years", "scales__scale")
|
||||
list_filter = ("manufacturer__name", "publication_year", "scales__scale")
|
||||
|
||||
@@ -236,8 +252,10 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
"Purchase data",
|
||||
{
|
||||
"fields": (
|
||||
"shop",
|
||||
"purchase_date",
|
||||
"price",
|
||||
"invoices",
|
||||
)
|
||||
},
|
||||
),
|
||||
@@ -264,6 +282,17 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
)
|
||||
return form
|
||||
|
||||
@admin.display(description="Invoices")
|
||||
def invoices(self, obj):
|
||||
if obj.invoice.exists():
|
||||
html = "<br>".join(
|
||||
"<a href=\"{}\" target=\"_blank\">{}</a>".format(
|
||||
i.file.url, i
|
||||
) for i in obj.invoice.all())
|
||||
else:
|
||||
html = "-"
|
||||
return format_html(html)
|
||||
|
||||
def download_csv(modeladmin, request, queryset):
|
||||
header = [
|
||||
"Catalog",
|
||||
|
ram/bookshelf/migrations/0023_delete_basebookdocument.py (new file, 17 lines)

@@ -0,0 +1,17 @@

# Generated by Django 5.1.4 on 2025-02-09 13:47

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookshelf", "0022_basebook_shop"),
        ("repository", "0001_initial"),
    ]

    operations = [
        migrations.DeleteModel(
            name="BaseBookDocument",
        ),
    ]
@@ -6,7 +6,7 @@ from django.urls import reverse
|
||||
from django_countries.fields import CountryField
|
||||
|
||||
from ram.utils import DeduplicatedStorage
|
||||
from ram.models import BaseModel, Image, Document, PropertyInstance
|
||||
from ram.models import BaseModel, Image, PropertyInstance
|
||||
from metadata.models import Scale, Manufacturer, Shop, Tag
|
||||
|
||||
|
||||
@@ -89,21 +89,6 @@ class BaseBookImage(Image):
|
||||
)
|
||||
|
||||
|
||||
class BaseBookDocument(Document):
|
||||
book = models.ForeignKey(
|
||||
BaseBook, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Documents"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["book", "file"],
|
||||
name="unique_book_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class BaseBookProperty(PropertyInstance):
|
||||
book = models.ForeignKey(
|
||||
BaseBook,
|
||||
|
@@ -2,18 +2,16 @@ from django.contrib import admin
|
||||
from django.utils.html import format_html
|
||||
from adminsortable2.admin import SortableAdminMixin
|
||||
|
||||
from ram.admin import publish, unpublish
|
||||
from repository.models import DecoderDocument
|
||||
from metadata.models import (
|
||||
Property,
|
||||
Decoder,
|
||||
DecoderDocument,
|
||||
Scale,
|
||||
Shop,
|
||||
Manufacturer,
|
||||
Company,
|
||||
Tag,
|
||||
RollingStockType,
|
||||
GenericDocument,
|
||||
)
|
||||
|
||||
|
||||
@@ -88,51 +86,6 @@ class RollingStockTypeAdmin(SortableAdminMixin, admin.ModelAdmin):
|
||||
search_fields = ("type", "category")
|
||||
|
||||
|
||||
@admin.register(GenericDocument)
|
||||
class GenericDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size", "creation_time", "updated_time")
|
||||
list_display = (
|
||||
"__str__",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
"tags",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Notes",
|
||||
{"classes": ("collapse",), "fields": ("notes",)},
|
||||
),
|
||||
(
|
||||
"Audit",
|
||||
{
|
||||
"classes": ("collapse",),
|
||||
"fields": (
|
||||
"creation_time",
|
||||
"updated_time",
|
||||
),
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
||||
|
||||
|
||||
@admin.register(Shop)
|
||||
class ShopAdmin(admin.ModelAdmin):
|
||||
list_display = ("name", "on_line", "active")
|
||||
|
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-09 13:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("metadata", "0023_shop"),
|
||||
("repository", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="genericdocument",
|
||||
name="tags",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="DecoderDocument",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="GenericDocument",
|
||||
),
|
||||
]
|
@@ -6,9 +6,6 @@ from django.dispatch.dispatcher import receiver
|
||||
from django.core.exceptions import ValidationError
|
||||
from django_countries.fields import CountryField
|
||||
|
||||
from tinymce import models as tinymce
|
||||
|
||||
from ram.models import Document
|
||||
from ram.utils import DeduplicatedStorage, get_image_preview, slugify
|
||||
from ram.managers import PublicManager
|
||||
|
||||
@@ -132,20 +129,6 @@ class Decoder(models.Model):
|
||||
image_thumbnail.short_description = "Preview"
|
||||
|
||||
|
||||
class DecoderDocument(Document):
|
||||
decoder = models.ForeignKey(
|
||||
Decoder, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["decoder", "file"],
|
||||
name="unique_decoder_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def calculate_ratio(ratio):
|
||||
try:
|
||||
num, den = ratio.split(":")
|
||||
@@ -239,14 +222,6 @@ class Tag(models.Model):
|
||||
)
|
||||
|
||||
|
||||
class GenericDocument(Document):
|
||||
notes = tinymce.HTMLField(blank=True)
|
||||
tags = models.ManyToManyField(Tag, blank=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Generic Documents"
|
||||
|
||||
|
||||
class Shop(models.Model):
|
||||
name = models.CharField(max_length=128, unique=True)
|
||||
country = CountryField(blank=True)
|
||||
|
@@ -43,13 +43,15 @@ a.badge, a.badge:hover {
|
||||
border-top: calc(var(--bs-border-width) * 3) solid var(--bs-border-color);
|
||||
}
|
||||
|
||||
#nav-journal ul, #nav-journal ol {
|
||||
margin: 0;
|
||||
#nav-journal ul,
|
||||
#nav-journal ol {
|
||||
padding-left: 1rem;
|
||||
}
|
||||
|
||||
#nav-journal p {
|
||||
margin: 0;
|
||||
#nav-journal p:last-child,
|
||||
#nav-journal ul:last-child,
|
||||
#nav-journal ol:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
#footer > p {
|
||||
|
@@ -1,4 +1,4 @@
 from ram.utils import git_suffix

-__version__ = "0.16.9"
+__version__ = "0.17.1"
 __version__ += git_suffix(__file__)
ram/ram/db_router.py (new file, 32 lines)

@@ -0,0 +1,32 @@

class TelemetryRouter:
    db_table = "telemetry_10secs"

    def db_for_read(self, model, **hints):
        """Send read operations to the correct database."""
        if model._meta.db_table == self.db_table:
            return "telemetry"  # Replace with your database name
        return None  # Default database

    def db_for_write(self, model, **hints):
        """Send write operations to the correct database."""
        if model._meta.db_table == self.db_table:
            return False  # Prevent Django from writing RO tables
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """Allow relations whenever the telemetry table is involved."""
        if (
            obj1._meta.db_table == self.db_table
            or obj2._meta.db_table == self.db_table
        ):
            return True
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """Prevent Django from migrating models that live on the telemetry database."""
        if db == "telemetry":
            return False  # Prevent Django from creating/modifying tables
        return None
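With this router registered in `DATABASE_ROUTERS` (see the `settings.py` hunk below), reads on the unmanaged `telemetry_10secs` continuous aggregate are routed to the separate `telemetry` Timescale database, while writes and migrations against it are refused; the table itself is created and refreshed outside Django, as described in `monitoring/README.md`.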
@@ -27,11 +27,6 @@ class Document(models.Model):
|
||||
description = models.CharField(max_length=128, blank=True)
|
||||
file = models.FileField(
|
||||
upload_to="files/",
|
||||
storage=DeduplicatedStorage(),
|
||||
)
|
||||
private = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
)
|
||||
creation_time = models.DateTimeField(auto_now_add=True)
|
||||
updated_time = models.DateTimeField(auto_now=True)
|
||||
@@ -61,8 +56,17 @@ class Document(models.Model):
|
||||
'<a href="{0}" target="_blank">Link</a>'.format(self.file.url)
|
||||
)
|
||||
|
||||
|
||||
class PrivateDocument(Document):
|
||||
private = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
)
|
||||
objects = PublicManager()
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
class Image(models.Model):
|
||||
order = models.PositiveIntegerField(default=0, blank=False, null=False)
|
||||
|
@@ -50,6 +50,7 @@ INSTALLED_APPS = [
     "portal",
     # "driver", # uncomment this to enable the "driver" API
     "metadata",
+    "repository",
     "roster",
     "consist",
     "bookshelf",
@@ -94,8 +95,16 @@ DATABASES = {
     "default": {
         "ENGINE": "django.db.backends.sqlite3",
         "NAME": STORAGE_DIR / "db.sqlite3",
-    }
+    },
+    "telemetry": {
+        "ENGINE": "django.db.backends.postgresql",
+        "HOST": "127.0.0.1",
+        "NAME": "dccmonitor",
+        "USER": "dccmonitor",
+        "PASSWORD": "dccmonitor",
+    },
 }
+DATABASE_ROUTERS = ["ram.db_router.TelemetryRouter"]


 # Password validation
ram/repository/__init__.py (new file, empty)
ram/repository/admin.py (new file, 248 lines)
@@ -0,0 +1,248 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from ram.admin import publish, unpublish
|
||||
from repository.models import (
|
||||
GenericDocument,
|
||||
InvoiceDocument,
|
||||
BookDocument,
|
||||
CatalogDocument,
|
||||
DecoderDocument,
|
||||
RollingStockDocument
|
||||
)
|
||||
|
||||
|
||||
@admin.register(GenericDocument)
|
||||
class GenericDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size", "creation_time", "updated_time")
|
||||
list_display = (
|
||||
"__str__",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
"tags",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Notes",
|
||||
{"classes": ("collapse",), "fields": ("notes",)},
|
||||
),
|
||||
(
|
||||
"Audit",
|
||||
{
|
||||
"classes": ("collapse",),
|
||||
"fields": (
|
||||
"creation_time",
|
||||
"updated_time",
|
||||
),
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
||||
|
||||
|
||||
@admin.register(InvoiceDocument)
|
||||
class InvoiceDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size", "creation_time", "updated_time")
|
||||
list_display = (
|
||||
"__str__",
|
||||
"description",
|
||||
"date",
|
||||
"shop",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"rolling_stock__manufacturer__name",
|
||||
"rolling_stock__item_number",
|
||||
"book__title",
|
||||
"catalog__manufacturer__name",
|
||||
"shop__name",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("rolling_stock", "book", "catalog", "shop")
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"rolling_stock",
|
||||
"book",
|
||||
"catalog",
|
||||
"description",
|
||||
"date",
|
||||
"shop",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Notes",
|
||||
{"classes": ("collapse",), "fields": ("notes",)},
|
||||
),
|
||||
(
|
||||
"Audit",
|
||||
{
|
||||
"classes": ("collapse",),
|
||||
"fields": (
|
||||
"creation_time",
|
||||
"updated_time",
|
||||
),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@admin.register(BookDocument)
|
||||
class BookDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size",)
|
||||
list_display = (
|
||||
"__str__",
|
||||
"book",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"book__title",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("book",)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"book",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
||||
|
||||
|
||||
@admin.register(CatalogDocument)
|
||||
class CatalogDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size",)
|
||||
list_display = (
|
||||
"__str__",
|
||||
"catalog",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"catalog__title",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("catalog",)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"catalog",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
||||
|
||||
|
||||
@admin.register(DecoderDocument)
|
||||
class DecoderDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size",)
|
||||
list_display = (
|
||||
"__str__",
|
||||
"decoder",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"decoder__name",
|
||||
"decoder__manufacturer__name",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("decoder",)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"decoder",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
||||
|
||||
|
||||
@admin.register(RollingStockDocument)
|
||||
class RollingStockDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size",)
|
||||
list_display = (
|
||||
"__str__",
|
||||
"rolling_stock",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"rolling_stock__rolling_class__identifier",
|
||||
"rolling_stock__item_number",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("rolling_stock",)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"rolling_stock",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
actions = [publish, unpublish]
|
ram/repository/apps.py (new file, 6 lines)

@@ -0,0 +1,6 @@

from django.apps import AppConfig


class RepositoryConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "repository"
ram/repository/migrations/0001_initial.py (new file, 361 lines)
@@ -0,0 +1,361 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-09 13:04
|
||||
|
||||
import django.db.models.deletion
|
||||
import ram.utils
|
||||
import tinymce.models
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def migrate_document(apps, schema_editor):
|
||||
document = apps.get_model("metadata", "GenericDocument")
|
||||
document_new = apps.get_model("repository", "GenericDocument")
|
||||
for d in document.objects.all():
|
||||
n = document_new.objects.create(
|
||||
notes=d.notes,
|
||||
description=d.description,
|
||||
file=d.file,
|
||||
private=d.private,
|
||||
creation_time=d.creation_time,
|
||||
updated_time=d.updated_time,
|
||||
)
|
||||
for t in d.tags.all():
|
||||
n.tags.add(t)
|
||||
|
||||
|
||||
def migrate_decoder(apps, schema_editor):
|
||||
dcc_document = apps.get_model("metadata", "DecoderDocument")
|
||||
dcc_document_new = apps.get_model("repository", "DecoderDocument")
|
||||
for d in dcc_document.objects.all():
|
||||
dcc_document_new.objects.create(
|
||||
decoder=d.decoder,
|
||||
description=d.description,
|
||||
file=d.file,
|
||||
private=d.private,
|
||||
creation_time=d.creation_time,
|
||||
updated_time=d.updated_time,
|
||||
)
|
||||
|
||||
|
||||
def migrate_rollingstock(apps, schema_editor):
|
||||
rs_document = apps.get_model("roster", "RollingStockDocument")
|
||||
rs_document_new = apps.get_model("repository", "RollingStockDocument")
|
||||
for d in rs_document.objects.all():
|
||||
rs_document_new.objects.create(
|
||||
rolling_stock=d.rolling_stock,
|
||||
description=d.description,
|
||||
file=d.file,
|
||||
private=d.private,
|
||||
creation_time=d.creation_time,
|
||||
updated_time=d.updated_time,
|
||||
)
|
||||
|
||||
|
||||
def migrate_book(apps, schema_editor):
|
||||
book_document = apps.get_model("bookshelf", "BaseBookDocument")
|
||||
book_document_new = apps.get_model("repository", "BaseBookDocument")
|
||||
catalog_document_new = apps.get_model("repository", "CatalogDocument")
|
||||
for d in book_document.objects.all():
|
||||
if hasattr(d.book, "book"):
|
||||
book_document_new.objects.create(
|
||||
book=d.book.book,
|
||||
description=d.description,
|
||||
file=d.file,
|
||||
private=d.private,
|
||||
creation_time=d.creation_time,
|
||||
updated_time=d.updated_time,
|
||||
)
|
||||
else:
|
||||
catalog_document_new.objects.create(
|
||||
catalog=d.book.catalog,
|
||||
description=d.description,
|
||||
file=d.file,
|
||||
private=d.private,
|
||||
creation_time=d.creation_time,
|
||||
updated_time=d.updated_time,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookshelf", "0022_basebook_shop"),
|
||||
("metadata", "0023_shop"),
|
||||
("roster", "0035_alter_rollingstock_shop"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="BaseBookDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"book",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="new_document",
|
||||
to="bookshelf.basebook",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Documents",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="BookDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
(
|
||||
"book",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="document",
|
||||
to="bookshelf.book",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Book documents",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("book", "file"), name="unique_book_file"
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CatalogDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
(
|
||||
"catalog",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="document",
|
||||
to="bookshelf.catalog",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Catalog documents",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("catalog", "file"), name="unique_catalog_file"
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="GenericDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
("notes", tinymce.models.HTMLField(blank=True)),
|
||||
(
|
||||
"tags",
|
||||
models.ManyToManyField(
|
||||
blank=True, related_name="new_document", to="metadata.tag"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Generic Documents",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RollingStockDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"rolling_stock",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="new_document",
|
||||
to="roster.rollingstock",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Documents",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="DecoderDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
(
|
||||
"file",
|
||||
models.FileField(
|
||||
storage=ram.utils.DeduplicatedStorage(), upload_to="files/"
|
||||
),
|
||||
),
|
||||
(
|
||||
"private",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Document will be visible only to logged users",
|
||||
),
|
||||
),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"decoder",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="new_document",
|
||||
to="metadata.decoder",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Documents",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.RunPython(
|
||||
migrate_document,
|
||||
reverse_code=migrations.RunPython.noop
|
||||
),
|
||||
migrations.RunPython(
|
||||
migrate_decoder,
|
||||
reverse_code=migrations.RunPython.noop
|
||||
),
|
||||
migrations.RunPython(
|
||||
migrate_rollingstock,
|
||||
reverse_code=migrations.RunPython.noop
|
||||
),
|
||||
migrations.RunPython(
|
||||
migrate_book,
|
||||
reverse_code=migrations.RunPython.noop
|
||||
),
|
||||
]
|
@@ -0,0 +1,157 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-09 23:10
|
||||
|
||||
import django.db.models.deletion
|
||||
import tinymce.models
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("bookshelf", "0023_delete_basebookdocument"),
|
||||
(
|
||||
"metadata",
|
||||
"0024_remove_genericdocument_tags_delete_decoderdocument_and_more",
|
||||
),
|
||||
("repository", "0001_initial"),
|
||||
("roster", "0036_delete_rollingstockdocument"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="InvoiceDocument",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("description", models.CharField(blank=True, max_length=128)),
|
||||
("creation_time", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_time", models.DateTimeField(auto_now=True)),
|
||||
("private", models.BooleanField(default=True, editable=False)),
|
||||
("date", models.DateField()),
|
||||
("file", models.FileField(upload_to="files/invoices/")),
|
||||
("notes", tinymce.models.HTMLField(blank=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="basebookdocument",
|
||||
name="book",
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="decoderdocument",
|
||||
options={},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="genericdocument",
|
||||
options={"verbose_name_plural": "Generic documents"},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="rollingstockdocument",
|
||||
options={},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="bookdocument",
|
||||
name="file",
|
||||
field=models.FileField(upload_to="files/"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="catalogdocument",
|
||||
name="file",
|
||||
field=models.FileField(upload_to="files/"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="decoderdocument",
|
||||
name="decoder",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="document",
|
||||
to="metadata.decoder",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="decoderdocument",
|
||||
name="file",
|
||||
field=models.FileField(upload_to="files/"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="genericdocument",
|
||||
name="file",
|
||||
field=models.FileField(upload_to="files/"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="genericdocument",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True, related_name="document", to="metadata.tag"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="rollingstockdocument",
|
||||
name="file",
|
||||
field=models.FileField(upload_to="files/"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="rollingstockdocument",
|
||||
name="rolling_stock",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="document",
|
||||
to="roster.rollingstock",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="decoderdocument",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("decoder", "file"), name="unique_decoder_file"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="rollingstockdocument",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("rolling_stock", "file"), name="unique_stock_file"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="invoicedocument",
|
||||
name="book",
|
||||
field=models.ManyToManyField(
|
||||
blank=True, related_name="invoice", to="bookshelf.book"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="invoicedocument",
|
||||
name="catalog",
|
||||
field=models.ManyToManyField(
|
||||
blank=True, related_name="invoice", to="bookshelf.catalog"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="invoicedocument",
|
||||
name="rolling_stock",
|
||||
field=models.ManyToManyField(
|
||||
blank=True, related_name="invoice", to="roster.rollingstock"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="invoicedocument",
|
||||
name="shop",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="metadata.shop",
|
||||
),
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="BaseBookDocument",
|
||||
),
|
||||
]
|
ram/repository/migrations/__init__.py (new file, empty)
ram/repository/models.py (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
from django.db import models
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from tinymce import models as tinymce
|
||||
|
||||
from ram.models import PrivateDocument
|
||||
from metadata.models import Decoder, Shop, Tag
|
||||
from roster.models import RollingStock
|
||||
from bookshelf.models import Book, Catalog
|
||||
|
||||
|
||||
class GenericDocument(PrivateDocument):
|
||||
notes = tinymce.HTMLField(blank=True)
|
||||
tags = models.ManyToManyField(Tag, blank=True, related_name="document")
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Generic documents"
|
||||
|
||||
|
||||
class InvoiceDocument(PrivateDocument):
|
||||
private = models.BooleanField(default=True, editable=False)
|
||||
rolling_stock = models.ManyToManyField(
|
||||
RollingStock, related_name="invoice", blank=True
|
||||
)
|
||||
book = models.ManyToManyField(Book, related_name="invoice", blank=True)
|
||||
catalog = models.ManyToManyField(
|
||||
Catalog, related_name="invoice", blank=True
|
||||
)
|
||||
date = models.DateField()
|
||||
shop = models.ForeignKey(
|
||||
Shop, on_delete=models.SET_NULL, null=True, blank=True
|
||||
)
|
||||
file = models.FileField(
|
||||
upload_to="files/invoices/",
|
||||
)
|
||||
notes = tinymce.HTMLField(blank=True)
|
||||
|
||||
|
||||
class DecoderDocument(PrivateDocument):
|
||||
decoder = models.ForeignKey(
|
||||
Decoder, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["decoder", "file"], name="unique_decoder_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class BookDocument(PrivateDocument):
|
||||
book = models.ForeignKey(
|
||||
Book, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Book documents"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["book", "file"], name="unique_book_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class CatalogDocument(PrivateDocument):
|
||||
catalog = models.ForeignKey(
|
||||
Catalog, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = "Catalog documents"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["catalog", "file"], name="unique_catalog_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class RollingStockDocument(PrivateDocument):
|
||||
rolling_stock = models.ForeignKey(
|
||||
RollingStock, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["rolling_stock", "file"], name="unique_stock_file"
|
||||
)
|
||||
]
|
ram/repository/tests.py (new file, 3 lines)

@@ -0,0 +1,3 @@

from django.test import TestCase

# Create your tests here.
ram/repository/views.py (new file, 3 lines)

@@ -0,0 +1,3 @@

from django.shortcuts import render

# Create your views here.
@@ -8,15 +8,16 @@ from adminsortable2.admin import SortableAdminBase, SortableInlineAdminMixin
|
||||
|
||||
from ram.admin import publish, unpublish
|
||||
from ram.utils import generate_csv
|
||||
from repository.models import RollingStockDocument
|
||||
from portal.utils import get_site_conf
|
||||
from roster.models import (
|
||||
RollingClass,
|
||||
RollingClassProperty,
|
||||
RollingStock,
|
||||
RollingStockImage,
|
||||
RollingStockDocument,
|
||||
RollingStockProperty,
|
||||
RollingStockJournal,
|
||||
RollingStockTelemetry,
|
||||
)
|
||||
|
||||
|
||||
@@ -76,42 +77,8 @@ class RollingStockJournalInline(admin.TabularInline):
|
||||
classes = ["collapse"]
|
||||
|
||||
|
||||
@admin.register(RollingStockDocument)
|
||||
class RollingStockDocumentAdmin(admin.ModelAdmin):
|
||||
readonly_fields = ("size",)
|
||||
list_display = (
|
||||
"__str__",
|
||||
"rolling_stock",
|
||||
"description",
|
||||
"private",
|
||||
"size",
|
||||
"download",
|
||||
)
|
||||
search_fields = (
|
||||
"rolling_stock__rolling_class__identifier",
|
||||
"rolling_stock__item_number",
|
||||
"description",
|
||||
"file",
|
||||
)
|
||||
autocomplete_fields = ("rolling_stock",)
|
||||
fieldsets = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": (
|
||||
"private",
|
||||
"rolling_stock",
|
||||
"description",
|
||||
"file",
|
||||
"size",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@admin.register(RollingStockJournal)
|
||||
class RollingJournalDocumentAdmin(admin.ModelAdmin):
|
||||
class RollingJournalAdmin(admin.ModelAdmin):
|
||||
list_display = (
|
||||
"__str__",
|
||||
"date",
|
||||
@@ -152,7 +119,7 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
RollingStockJournalInline,
|
||||
)
|
||||
autocomplete_fields = ("rolling_class", "shop")
|
||||
readonly_fields = ("preview", "creation_time", "updated_time")
|
||||
readonly_fields = ("preview", "invoices", "creation_time", "updated_time")
|
||||
list_display = (
|
||||
"__str__",
|
||||
"address",
|
||||
@@ -223,6 +190,7 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
"shop",
|
||||
"purchase_date",
|
||||
"price",
|
||||
"invoices",
|
||||
)
|
||||
},
|
||||
),
|
||||
@@ -249,6 +217,17 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
)
|
||||
return form
|
||||
|
||||
@admin.display(description="Invoices")
|
||||
def invoices(self, obj):
|
||||
if obj.invoice.exists():
|
||||
html = "<br>".join(
|
||||
"<a href=\"{}\" target=\"_blank\">{}</a>".format(
|
||||
i.file.url, i
|
||||
) for i in obj.invoice.all())
|
||||
else:
|
||||
html = "-"
|
||||
return format_html(html)
|
||||
|
||||
def download_csv(modeladmin, request, queryset):
|
||||
header = [
|
||||
"Name",
|
||||
@@ -309,3 +288,29 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
|
||||
download_csv.short_description = "Download selected items as CSV"
|
||||
actions = [publish, unpublish, download_csv]
|
||||
|
||||
|
||||
@admin.register(RollingStockTelemetry)
|
||||
class RollingTelemtryAdmin(admin.ModelAdmin):
|
||||
list_filter = ("bucket", "cab")
|
||||
list_display = ("bucket_highres", "cab", "max_speed", "avg_speed")
|
||||
|
||||
def bucket_highres(self, obj):
|
||||
return obj.bucket.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
bucket_highres.admin_order_field = "bucket" # Enable sorting
|
||||
bucket_highres.short_description = "Bucket" # Column name in admin
|
||||
|
||||
def get_changelist_instance(self, request):
|
||||
changelist = super().get_changelist_instance(request)
|
||||
changelist.list_display_links = None # Disable links
|
||||
return changelist
|
||||
|
||||
def has_add_permission(self, request):
|
||||
return False # Disable adding new objects
|
||||
|
||||
def has_change_permission(self, request, obj=None):
|
||||
return False # Disable editing objects
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
return False # Disable deleting objects
|
||||
|
ram/roster/migrations/0036_delete_rollingstockdocument.py (new file, 17 lines)

@@ -0,0 +1,17 @@

# Generated by Django 5.1.4 on 2025-02-09 13:47

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("roster", "0035_alter_rollingstock_shop"),
        ("repository", "0001_initial"),
    ]

    operations = [
        migrations.DeleteModel(
            name="RollingStockDocument",
        ),
    ]
@@ -8,7 +8,7 @@ from django.dispatch import receiver
|
||||
|
||||
from tinymce import models as tinymce
|
||||
|
||||
from ram.models import BaseModel, Document, Image, PropertyInstance
|
||||
from ram.models import BaseModel, Image, PropertyInstance
|
||||
from ram.utils import DeduplicatedStorage, slugify
|
||||
from ram.managers import PublicManager
|
||||
from metadata.models import (
|
||||
@@ -169,20 +169,6 @@ def pre_save_internal_fields(sender, instance, *args, **kwargs):
|
||||
instance.item_number_slug = slugify(instance.item_number)
|
||||
|
||||
|
||||
class RollingStockDocument(Document):
|
||||
rolling_stock = models.ForeignKey(
|
||||
RollingStock, on_delete=models.CASCADE, related_name="document"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["rolling_stock", "file"],
|
||||
name="unique_stock_file"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def rolling_stock_image_upload(instance, filename):
|
||||
return os.path.join(
|
||||
"images",
|
||||
@@ -238,6 +224,20 @@ class RollingStockJournal(models.Model):
|
||||
objects = PublicManager()
|
||||
|
||||
|
||||
# trick: this is technically an abstract class
|
||||
# it is made readonly via db_router and admin to avoid any unwanted change
|
||||
class RollingStockTelemetry(models.Model):
|
||||
bucket = models.DateTimeField(primary_key=True, editable=False)
|
||||
cab = models.PositiveIntegerField(editable=False)
|
||||
avg_speed = models.FloatField(editable=False)
|
||||
max_speed = models.PositiveIntegerField(editable=False)
|
||||
|
||||
class Meta:
|
||||
db_table = "telemetry_10secs"
|
||||
ordering = ["cab", "bucket"]
|
||||
verbose_name_plural = "Telemetries"
|
||||
|
||||
|
||||
# @receiver(models.signals.post_delete, sender=Cab)
|
||||
# def post_save_image(sender, instance, *args, **kwargs):
|
||||
# try:
|
||||
|
@@ -8,7 +8,7 @@ django-countries
 django-health-check
 django-admin-sortable2
 django-tinymce
-# Optional: # psycopg2-binary
+psycopg2-binary
 # Required by django-countries and not always installed
 # by default on modern venvs (like Python 3.12 on Fedora 39)
 setuptools