Compare commits

..

2 Commits

Author SHA1 Message Date
792b60cdc6 Implement query optimization 2026-01-17 22:59:23 +01:00
cfc7531b59 Extend test coverage 2026-01-17 22:58:41 +01:00
31 changed files with 181 additions and 2050 deletions

View File

@@ -128,7 +128,6 @@ python manage.py runserver --noreload # With pyinstrument middleware
- **Long lines**: Use `# noqa: E501` comment when necessary (see settings.py) - **Long lines**: Use `# noqa: E501` comment when necessary (see settings.py)
- **Indentation**: 4 spaces (no tabs) - **Indentation**: 4 spaces (no tabs)
- **Encoding**: UTF-8 - **Encoding**: UTF-8
- **Blank lines**: Must not contain any whitespace (spaces or tabs)
### Import Organization ### Import Organization
Follow Django's import style (as seen in models.py, views.py, admin.py): Follow Django's import style (as seen in models.py, views.py, admin.py):

141
Makefile
View File

@@ -1,141 +0,0 @@
# Makefile for Django RAM project
# Handles frontend asset minification and common development tasks
.PHONY: help minify minify-js minify-css clean install test
# Directories
JS_SRC_DIR = ram/portal/static/js/src
JS_OUT_DIR = ram/portal/static/js
CSS_SRC_DIR = ram/portal/static/css/src
CSS_OUT_DIR = ram/portal/static/css
# Source files
JS_SOURCES = $(JS_SRC_DIR)/theme_selector.js $(JS_SRC_DIR)/tabs_selector.js $(JS_SRC_DIR)/validators.js
CSS_SOURCES = $(CSS_SRC_DIR)/main.css
# Output files
JS_OUTPUT = $(JS_OUT_DIR)/main.min.js
CSS_OUTPUT = $(CSS_OUT_DIR)/main.min.css
# Default target
help:
@echo "Django RAM - Available Make targets:"
@echo ""
@echo " make install - Install npm dependencies (terser, clean-css-cli)"
@echo " make minify - Minify both JS and CSS files"
@echo " make minify-js - Minify JavaScript files only"
@echo " make minify-css - Minify CSS files only"
@echo " make clean - Remove minified files"
@echo " make watch - Watch for changes and auto-minify (requires inotify-tools)"
@echo " make run - Run Django development server"
@echo " make test - Run Django test suite"
@echo " make lint - Run flake8 linter"
@echo " make format - Run black formatter (line length 79)"
@echo " make ruff-check - Run ruff linter"
@echo " make ruff-format - Run ruff formatter"
@echo " make dump-data - Dump database to gzipped JSON (usage: make dump-data FILE=backup.json.gz)"
@echo " make load-data - Load data from fixture file (usage: make load-data FILE=backup.json.gz)"
@echo " make help - Show this help message"
@echo ""
# Install npm dependencies
install:
@echo "Installing npm dependencies..."
npm install
@echo "Done! terser and clean-css-cli installed."
# Minify both JS and CSS
minify: minify-js minify-css
# Minify JavaScript
minify-js: $(JS_OUTPUT)
$(JS_OUTPUT): $(JS_SOURCES)
@echo "Minifying JavaScript..."
npx terser $(JS_SOURCES) \
--compress \
--mangle \
--source-map "url=main.min.js.map" \
--output $(JS_OUTPUT)
@echo "Created: $(JS_OUTPUT)"
# Minify CSS
minify-css: $(CSS_OUTPUT)
$(CSS_OUTPUT): $(CSS_SOURCES)
@echo "Minifying CSS..."
npx cleancss -o $(CSS_OUTPUT) $(CSS_SOURCES)
@echo "Created: $(CSS_OUTPUT)"
# Clean minified files
clean:
@echo "Removing minified files..."
rm -f $(JS_OUTPUT) $(CSS_OUTPUT)
@echo "Clean complete."
# Watch for changes (requires inotify-tools on Linux)
watch:
@echo "Watching for file changes..."
@echo "Press Ctrl+C to stop"
@while true; do \
inotifywait -e modify,create $(JS_SRC_DIR)/*.js $(CSS_SRC_DIR)/*.css 2>/dev/null && \
make minify; \
done || echo "Note: install inotify-tools for file watching support"
# Run Django development server
run:
@cd ram && python manage.py runserver
# Run Django tests
test:
@echo "Running Django tests..."
@cd ram && python manage.py test
# Run flake8 linter
lint:
@echo "Running flake8..."
@flake8 ram/
# Run black formatter
format:
@echo "Running black formatter..."
@black -l 79 --extend-exclude="/migrations/" ram/
# Run ruff linter
ruff-check:
@echo "Running ruff check..."
@ruff check ram/
# Run ruff formatter
ruff-format:
@echo "Running ruff format..."
@ruff format ram/
# Dump database to gzipped JSON file
# Usage: make dump-data FILE=backup.json.gz
dump-data:
ifndef FILE
$(error FILE is not set. Usage: make dump-data FILE=backup.json.gz)
endif
$(eval FILE_ABS := $(shell realpath -m $(FILE)))
@echo "Dumping database to $(FILE_ABS)..."
@cd ram && python manage.py dumpdata \
--indent=2 \
-e admin \
-e contenttypes \
-e sessions \
--natural-foreign \
--natural-primary | gzip > $(FILE_ABS)
@echo "✓ Database dumped successfully to $(FILE_ABS)"
# Load data from fixture file
# Usage: make load-data FILE=backup.json.gz
load-data:
ifndef FILE
$(error FILE is not set. Usage: make load-data FILE=backup.json.gz)
endif
$(eval FILE_ABS := $(shell realpath $(FILE)))
@echo "Loading data from $(FILE_ABS)..."
@cd ram && python manage.py loaddata $(FILE_ABS)
@echo "✓ Data loaded successfully from $(FILE_ABS)"

View File

@@ -1,22 +0,0 @@
# Udev rule to auto-start/stop dcc-usb-connector.service when USB device is connected/removed
#
# This rule detects when a CH340 USB-to-serial adapter (ID 1a86:7523)
# is connected/removed on /dev/ttyUSB0, then automatically starts/stops
# the dcc-usb-connector.service (user systemd service).
#
# Installation:
# sudo cp 99-dcc-usb-connector.rules /etc/udev/rules.d/
# sudo udevadm control --reload-rules
# sudo udevadm trigger --subsystem-match=tty
#
# Testing:
# udevadm test /sys/class/tty/ttyUSB0
# udevadm monitor --property --subsystem-match=tty
#
# The service will be started when the device is plugged in and stopped
# when the device is unplugged.
# Match USB device 1a86:7523 on ttyUSB0
# TAG+="systemd" tells systemd to track this device
# ENV{SYSTEMD_USER_WANTS} starts the service on "add" and stops it on "remove"
SUBSYSTEM=="tty", ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", KERNEL=="ttyUSB0", TAG+="systemd", ENV{SYSTEMD_USER_WANTS}="dcc-usb-connector.service"

View File

@@ -1,345 +0,0 @@
# DCC USB-to-Network Bridge Auto-Start Installation
This directory contains configuration files to automatically start the `dcc-usb-connector.service` when a specific USB device (CH340 USB-to-serial adapter, ID `1a86:7523`) is connected to `/dev/ttyUSB0`.
## Overview
The setup uses:
- **Udev rule** (`99-dcc-usb-connector.rules`) - Detects USB device connection/disconnection
- **Systemd user service** (`dcc-usb-connector.service`) - Bridges serial port to network port 2560
- **Installation script** (`install-udev-rule.sh`) - Automated installation helper
When the USB device is plugged in, the service automatically starts. When unplugged, it stops.
## Prerequisites
1. **Operating System**: Linux with systemd and udev
2. **Required packages**:
```bash
sudo dnf install nmap-ncat systemd udev
```
3. **User permissions**: Your user should be in the `dialout` group:
```bash
sudo usermod -a -G dialout $USER
# Log out and log back in for changes to take effect
```
## Quick Installation
Run the installation script:
```bash
./install-udev-rule.sh
```
This script will:
- Install the udev rule (requires sudo)
- Install the systemd user service to `~/.config/systemd/user/`
- Enable systemd lingering for your user
- Check for required tools and permissions
- Provide testing instructions
## Manual Installation
If you prefer to install manually:
### 1. Install the udev rule
```bash
sudo cp 99-dcc-usb-connector.rules /etc/udev/rules.d/
sudo udevadm control --reload-rules
sudo udevadm trigger --subsystem-match=tty
```
### 2. Install the systemd service
```bash
mkdir -p ~/.config/systemd/user/
cp dcc-usb-connector.service ~/.config/systemd/user/
systemctl --user daemon-reload
```
### 3. Enable lingering (optional but recommended)
This allows your user services to run even when you're not logged in:
```bash
sudo loginctl enable-linger $USER
```
## Verification
### Test the udev rule
```bash
# Monitor udev events (plug/unplug device while this runs)
udevadm monitor --property --subsystem-match=tty
# Test udev rule (when device is connected)
udevadm test /sys/class/tty/ttyUSB0
```
### Check service status
```bash
# Check if service is running
systemctl --user status dcc-usb-connector.service
# View service logs
journalctl --user -u dcc-usb-connector.service -f
```
### Test the network bridge
```bash
# Connect to the bridge
telnet localhost 2560
# Or using netcat
nc localhost 2560
```
## Usage
### Automatic Operation
Once installed, the service will:
- **Start automatically** when USB device `1a86:7523` is connected to `/dev/ttyUSB0`
- **Stop automatically** when the device is disconnected
- Bridge serial communication to network port `2560`
### Manual Control
You can still manually control the service:
```bash
# Start the service
systemctl --user start dcc-usb-connector.service
# Stop the service
systemctl --user stop dcc-usb-connector.service
# Check status
systemctl --user status dcc-usb-connector.service
# View logs
journalctl --user -u dcc-usb-connector.service
```
## How It Works
### Component Interaction
```
USB Device Connected (1a86:7523 on /dev/ttyUSB0)
Udev Rule Triggered
Systemd User Service Started
stty configures serial port (115200 baud)
ncat bridges /dev/ttyUSB0 ↔ TCP port 2560
Client apps connect to localhost:2560
```
### Udev Rule Details
The udev rule (`99-dcc-usb-connector.rules`) matches:
- **Subsystem**: `tty` (TTY/serial devices)
- **Vendor ID**: `1a86` (CH340 manufacturer)
- **Product ID**: `7523` (CH340 serial adapter)
- **Kernel device**: `ttyUSB0` (specific port)
When matched, it sets `ENV{SYSTEMD_USER_WANTS}="dcc-usb-connector.service"`, telling systemd to start the service.
### Service Configuration
The service (`dcc-usb-connector.service`):
1. Runs `stty -F /dev/ttyUSB0 -echo 115200` to configure the serial port
2. Executes `ncat -n -k -l 2560 </dev/ttyUSB0 >/dev/ttyUSB0` to bridge serial ↔ network
3. Uses `KillMode=mixed` for proper process cleanup
4. Terminates within 5 seconds when stopped
5. **Uses `StopWhenUnneeded=yes`** - This ensures the service stops when the device is removed
### Auto-Stop Mechanism
When the USB device is unplugged:
1. **Udev detects** the removal event
2. **Systemd removes** the device dependency from the service
3. **StopWhenUnneeded=yes** tells systemd to automatically stop the service when no longer needed
4. **Service terminates** gracefully within 5 seconds
This combination ensures clean automatic stop without requiring manual intervention or custom scripts.
## Troubleshooting
### Service doesn't start automatically
1. **Check udev rule is loaded**:
```bash
udevadm test /sys/class/tty/ttyUSB0 | grep SYSTEMD_USER_WANTS
```
Should show: `ENV{SYSTEMD_USER_WANTS}='dcc-usb-connector.service'`
2. **Check device is recognized**:
```bash
lsusb | grep 1a86:7523
ls -l /dev/ttyUSB0
```
3. **Verify systemd user instance is running**:
```bash
systemctl --user status
loginctl show-user $USER | grep Linger
```
### Permission denied on /dev/ttyUSB0
Add your user to the `dialout` group:
```bash
sudo usermod -a -G dialout $USER
# Log out and log back in
groups # Verify 'dialout' appears
```
### Device appears as /dev/ttyUSB1 instead of /dev/ttyUSB0
The udev rule specifically matches `ttyUSB0`. To make it flexible:
Edit `99-dcc-usb-connector.rules` and change:
```
KERNEL=="ttyUSB0"
```
to:
```
KERNEL=="ttyUSB[0-9]*"
```
Then reload:
```bash
sudo udevadm control --reload-rules
sudo udevadm trigger --subsystem-match=tty
```
### Service starts but ncat fails
1. **Check ncat is installed**:
```bash
which ncat
ncat --version
```
2. **Verify serial port works**:
```bash
stty -F /dev/ttyUSB0
cat /dev/ttyUSB0 # Should not error
```
3. **Check port 2560 is available**:
```bash
netstat -tuln | grep 2560
# Should be empty if nothing is listening
```
### View detailed logs
```bash
# Follow service logs in real-time
journalctl --user -u dcc-usb-connector.service -f
# View all logs for the service
journalctl --user -u dcc-usb-connector.service
# View with timestamps
journalctl --user -u dcc-usb-connector.service -o short-iso
```
## Uninstallation
To remove the auto-start feature:
```bash
# Remove udev rule
sudo rm /etc/udev/rules.d/99-dcc-usb-connector.rules
sudo udevadm control --reload-rules
sudo udevadm trigger --subsystem-match=tty
# Remove systemd service
systemctl --user stop dcc-usb-connector.service
rm ~/.config/systemd/user/dcc-usb-connector.service
systemctl --user daemon-reload
# (Optional) Disable lingering
sudo loginctl disable-linger $USER
```
## Advanced Configuration
### Customize for different USB device
Edit `99-dcc-usb-connector.rules` and change:
- `ATTRS{idVendor}=="1a86"` - USB vendor ID
- `ATTRS{idProduct}=="7523"` - USB product ID
Find your device IDs with:
```bash
lsusb
# Output: Bus 001 Device 003: ID 1a86:7523 QinHeng Electronics ...
# ^^^^:^^^^
# VID PID
```
### Change network port
Edit `dcc-usb-connector.service` and change:
```
ExecStart=/usr/bin/bash -c "/usr/bin/ncat -n -k -l 2560 ...
```
Replace `2560` with your desired port number.
### Enable auto-restart on failure
Edit `dcc-usb-connector.service` and add under `[Service]`:
```
Restart=on-failure
RestartSec=5
```
Then reload:
```bash
systemctl --user daemon-reload
```
## Testing Without Physical Device
For development/testing without the actual USB device:
```bash
# Create a virtual serial port pair
socat -d -d pty,raw,echo=0 pty,raw,echo=0
# This creates two linked devices, e.g., /dev/pts/3 and /dev/pts/4
# Update the service to use one of these instead of /dev/ttyUSB0
```
## References
- [systemd user services](https://www.freedesktop.org/software/systemd/man/systemd.service.html)
- [udev rules writing](https://www.reactivated.net/writing_udev_rules.html)
- [ncat documentation](https://nmap.org/ncat/)
- [DCC++ EX](https://dcc-ex.com/) - The DCC command station software
## License
See the main project LICENSE file.
## Support
For issues specific to the auto-start feature:
1. Check the troubleshooting section above
2. Review logs: `journalctl --user -u dcc-usb-connector.service`
3. Test udev rules: `udevadm test /sys/class/tty/ttyUSB0`
For DCC++ EX or django-ram issues, see the main project documentation.

View File

@@ -1,53 +1,17 @@
# DCC Serial-to-Network Bridge # Use a container to implement a serial to net bridge
This directory provides two ways to bridge a serial port to a network port using `ncat` from [nmap](https://nmap.org/ncat/): This uses `ncat` from [nmap](https://nmap.org/ncat/) to bridge a serial port to a network port. The serial port is passed to the Podman command (eg. `/dev/ttyACM0`) and the network port is `2560`.
1. **Auto-Start with systemd + udev** (Recommended) - Automatically starts/stops when USB device is plugged/unplugged
2. **Container-based** - Manual control using Podman/Docker
> [!IMPORTANT] > [!IMPORTANT]
> Other variants of `nc` or `ncat` may not work as expected. > Other variants of `nc` or `ncat` may not work as expected.
## Option 1: Auto-Start with systemd + udev (Recommended) ## Build and run the container
Automatically start the bridge when USB device `1a86:7523` is connected to `/dev/ttyUSB0` and stop it when removed.
### Quick Install
```bash ```bash
./install-udev-rule.sh $ podman buil -t dcc/bridge .
```
### Features
- ✅ Auto-start when device connected
- ✅ Auto-stop when device removed
- ✅ User-level service (no root needed)
- ✅ Runs on boot (with lingering enabled)
See [INSTALL.md](INSTALL.md) for detailed documentation.
### Test
```bash
# Run the test script
./test-udev-autostart.sh
# Or manually check
systemctl --user status dcc-usb-connector.service
telnet localhost 2560
```
## Option 2: Container-based (Manual)
### Build and run the container
```bash
$ podman build -t dcc/bridge .
$ podman run -d --group-add keep-groups --device=/dev/ttyACM0:/dev/arduino -p 2560:2560 --name dcc-bridge dcc/bridge $ podman run -d --group-add keep-groups --device=/dev/ttyACM0:/dev/arduino -p 2560:2560 --name dcc-bridge dcc/bridge
``` ```
### Test
It can be tested with `telnet`: It can be tested with `telnet`:
```bash ```bash

View File

@@ -1,17 +0,0 @@
[Unit]
Description=DCC USB-to-Network Bridge Daemon
After=network.target
# Device will be available via udev rule, but add condition as safety check
ConditionPathIsReadWrite=/dev/ttyUSB0
# Stop this service when the device is no longer needed (removed)
StopWhenUnneeded=yes
[Service]
ExecStartPre=/usr/bin/stty -F /dev/ttyUSB0 -echo 115200
ExecStart=/usr/bin/bash -c "/usr/bin/ncat -n -k -l 2560 </dev/ttyUSB0 >/dev/ttyUSB0"
KillMode=mixed
TimeoutStopSec=5
PrivateTmp=true
[Install]
WantedBy=default.target

View File

@@ -1,127 +0,0 @@
#!/usr/bin/env bash
#
# Installation script for DCC USB-to-Network Bridge auto-start
#
# This script installs the udev rule and systemd service to automatically
# start the dcc-usb-connector.service when USB device 1a86:7523 is connected.
#
# Usage:
# ./install-udev-rule.sh
#
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Get the directory where this script is located
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo -e "${GREEN}DCC USB-to-Network Bridge Auto-Start Installation${NC}"
echo "=========================================================="
echo
# Check if running as root (not recommended for systemd user service)
if [ "$EUID" -eq 0 ]; then
echo -e "${YELLOW}Warning: You are running as root.${NC}"
echo "This script will install a user systemd service."
echo "Please run as a regular user (not with sudo)."
echo
read -p "Continue anyway? (y/N) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
exit 1
fi
fi
# Check for required files
echo "Checking required files..."
if [ ! -f "$SCRIPT_DIR/99-dcc-usb-connector.rules" ]; then
echo -e "${RED}Error: 99-dcc-usb-connector.rules not found${NC}"
exit 1
fi
if [ ! -f "$SCRIPT_DIR/dcc-usb-connector.service" ]; then
echo -e "${RED}Error: dcc-usb-connector.service not found${NC}"
exit 1
fi
echo -e "${GREEN}✓ All required files found${NC}"
echo
# Install udev rule (requires sudo)
echo "Installing udev rule..."
echo "This requires sudo privileges."
sudo cp "$SCRIPT_DIR/99-dcc-usb-connector.rules" /etc/udev/rules.d/
sudo udevadm control --reload-rules
sudo udevadm trigger --subsystem-match=tty
echo -e "${GREEN}✓ Udev rule installed${NC}"
echo
# Install systemd user service
echo "Installing systemd user service..."
mkdir -p ~/.config/systemd/user/
cp "$SCRIPT_DIR/dcc-usb-connector.service" ~/.config/systemd/user/
systemctl --user daemon-reload
echo -e "${GREEN}✓ Systemd service installed${NC}"
echo
# Enable lingering (allows user services to run without being logged in)
echo "Enabling systemd lingering for user..."
if loginctl show-user "$USER" | grep -q "Linger=yes"; then
echo -e "${GREEN}✓ Lingering already enabled${NC}"
else
sudo loginctl enable-linger "$USER"
echo -e "${GREEN}✓ Lingering enabled${NC}"
fi
echo
# Check user groups
echo "Checking user permissions..."
if groups "$USER" | grep -q '\bdialout\b'; then
echo -e "${GREEN}✓ User is in 'dialout' group${NC}"
else
echo -e "${YELLOW}Warning: User is not in 'dialout' group${NC}"
echo "You may need to add yourself to the dialout group:"
echo " sudo usermod -a -G dialout $USER"
echo "Then log out and log back in for changes to take effect."
fi
echo
# Check for ncat
echo "Checking for required tools..."
if command -v ncat &> /dev/null; then
echo -e "${GREEN}✓ ncat is installed${NC}"
else
echo -e "${YELLOW}Warning: ncat is not installed${NC}"
echo "Install it with: sudo dnf install nmap-ncat"
fi
echo
# Summary
echo "=========================================================="
echo -e "${GREEN}Installation complete!${NC}"
echo
echo "The service will automatically start when USB device 1a86:7523"
echo "is connected to /dev/ttyUSB0"
echo
echo "To test:"
echo " 1. Plug in the USB device"
echo " 2. Check service status: systemctl --user status dcc-usb-connector.service"
echo " 3. Test connection: telnet localhost 2560"
echo
echo "To manually control:"
echo " Start: systemctl --user start dcc-usb-connector.service"
echo " Stop: systemctl --user stop dcc-usb-connector.service"
echo " Status: systemctl --user status dcc-usb-connector.service"
echo
echo "To view logs:"
echo " journalctl --user -u dcc-usb-connector.service -f"
echo
echo "To uninstall:"
echo " sudo rm /etc/udev/rules.d/99-dcc-usb-connector.rules"
echo " rm ~/.config/systemd/user/dcc-usb-connector.service"
echo " systemctl --user daemon-reload"
echo " sudo udevadm control --reload-rules"
echo

View File

@@ -1,147 +0,0 @@
#!/usr/bin/env bash
#
# Test script for DCC USB-to-Network Bridge auto-start/stop functionality
#
# This script helps verify that the service starts when the USB device
# is connected and stops when it's removed.
#
# Usage:
# ./test-udev-autostart.sh
#
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
echo -e "${BLUE}=== DCC USB-to-Network Bridge Auto-Start/Stop Test ===${NC}"
echo
# Check if udev rule is installed
echo -e "${BLUE}1. Checking udev rule installation...${NC}"
if [ -f /etc/udev/rules.d/99-dcc-usb-connector.rules ]; then
echo -e "${GREEN}✓ Udev rule is installed${NC}"
echo " Location: /etc/udev/rules.d/99-dcc-usb-connector.rules"
else
echo -e "${RED}✗ Udev rule is NOT installed${NC}"
echo " Run: sudo cp 99-dcc-usb-connector.rules /etc/udev/rules.d/"
exit 1
fi
echo
# Check if service is installed
echo -e "${BLUE}2. Checking systemd service installation...${NC}"
if [ -f ~/.config/systemd/user/dcc-usb-connector.service ]; then
echo -e "${GREEN}✓ Systemd service is installed${NC}"
echo " Location: ~/.config/systemd/user/dcc-usb-connector.service"
else
echo -e "${RED}✗ Systemd service is NOT installed${NC}"
echo " Run: cp dcc-usb-connector.service ~/.config/systemd/user/"
exit 1
fi
echo
# Check lingering
echo -e "${BLUE}3. Checking systemd lingering...${NC}"
if loginctl show-user "$USER" | grep -q "Linger=yes"; then
echo -e "${GREEN}✓ Lingering is enabled${NC}"
else
echo -e "${YELLOW}⚠ Lingering is NOT enabled${NC}"
echo " Services may not start automatically when you're not logged in"
echo " Run: sudo loginctl enable-linger $USER"
fi
echo
# Check if device is connected
echo -e "${BLUE}4. Checking USB device...${NC}"
if lsusb | grep -q "1a86:7523"; then
echo -e "${GREEN}✓ USB device 1a86:7523 is connected${NC}"
lsusb | grep "1a86:7523"
if [ -e /dev/ttyUSB0 ]; then
echo -e "${GREEN}✓ /dev/ttyUSB0 exists${NC}"
ls -l /dev/ttyUSB0
else
echo -e "${YELLOW}⚠ /dev/ttyUSB0 does NOT exist${NC}"
echo " The device may be on a different port"
echo " Available ttyUSB devices:"
ls -l /dev/ttyUSB* 2>/dev/null || echo " (none found)"
fi
else
echo -e "${YELLOW}⚠ USB device 1a86:7523 is NOT connected${NC}"
echo " Please plug in the device to test"
fi
echo
# Check service status
echo -e "${BLUE}5. Checking service status...${NC}"
if systemctl --user is-active --quiet dcc-usb-connector.service; then
echo -e "${GREEN}✓ Service is RUNNING${NC}"
systemctl --user status dcc-usb-connector.service --no-pager -l
else
echo -e "${YELLOW}⚠ Service is NOT running${NC}"
echo " Status:"
systemctl --user status dcc-usb-connector.service --no-pager -l || true
fi
echo
# Test udev rule
echo -e "${BLUE}6. Testing udev rule (if device is connected)...${NC}"
if [ -e /dev/ttyUSB0 ]; then
echo " Running: udevadm test /sys/class/tty/ttyUSB0"
echo " Looking for SYSTEMD_USER_WANTS..."
if udevadm test /sys/class/tty/ttyUSB0 2>&1 | grep -q "SYSTEMD_USER_WANTS"; then
echo -e "${GREEN}✓ Udev rule is triggering systemd${NC}"
udevadm test /sys/class/tty/ttyUSB0 2>&1 | grep "SYSTEMD_USER_WANTS"
else
echo -e "${RED}✗ Udev rule is NOT triggering systemd${NC}"
echo " The rule may not be matching correctly"
fi
else
echo -e "${YELLOW}⚠ Cannot test udev rule - device not connected${NC}"
fi
echo
# Check network port
echo -e "${BLUE}7. Checking network port 2560...${NC}"
if netstat -tuln 2>/dev/null | grep -q ":2560" || ss -tuln 2>/dev/null | grep -q ":2560"; then
echo -e "${GREEN}✓ Port 2560 is listening${NC}"
netstat -tuln 2>/dev/null | grep ":2560" || ss -tuln 2>/dev/null | grep ":2560"
else
echo -e "${YELLOW}⚠ Port 2560 is NOT listening${NC}"
echo " Service may not be running or ncat failed to start"
fi
echo
# Summary and instructions
echo -e "${BLUE}=== Test Summary ===${NC}"
echo
echo "To test auto-start/stop behavior:"
echo
echo "1. ${YELLOW}Monitor the service in one terminal:${NC}"
echo " watch -n 1 'systemctl --user status dcc-usb-connector.service'"
echo
echo "2. ${YELLOW}Monitor udev events in another terminal:${NC}"
echo " udevadm monitor --property --subsystem-match=tty"
echo
echo "3. ${YELLOW}Plug in the USB device${NC} and watch:"
echo " - Udev should detect the device"
echo " - Service should automatically start"
echo " - Port 2560 should become available"
echo
echo "4. ${YELLOW}Unplug the USB device${NC} and watch:"
echo " - Udev should detect device removal"
echo " - Service should automatically stop (thanks to StopWhenUnneeded=yes)"
echo " - Port 2560 should close"
echo
echo "5. ${YELLOW}Check logs:${NC}"
echo " journalctl --user -u dcc-usb-connector.service -f"
echo
echo "Expected behavior:"
echo " • Device connected → Service starts → Port 2560 opens"
echo " • Device removed → Service stops → Port 2560 closes"
echo

View File

@@ -192,646 +192,5 @@ These models have separate Image model classes with `related_name="image"`:
--- ---
## 🔄 **Manager Helper Refactoring** (2026-01-18)
Successfully replaced all explicit `prefetch_related()` and `select_related()` calls with centralized manager helper methods. **Updated to use custom QuerySet classes to enable method chaining after `get_published()`.**
### Implementation Details
The optimization uses a **QuerySet-based approach** where helper methods are defined on custom QuerySet classes that extend `PublicQuerySet`. This allows method chaining like:
```python
RollingStock.objects.get_published(user).with_related().filter(...)
```
**Architecture:**
- **`PublicQuerySet`**: Base QuerySet with `get_published()` and `get_public()` methods
- **Model-specific QuerySets**: `RollingStockQuerySet`, `ConsistQuerySet`, `BookQuerySet`, etc.
- **Managers**: Delegate to QuerySets via `get_queryset()` override
This pattern ensures that helper methods (`with_related()`, `with_details()`, `with_rolling_stock()`) are available both on the manager and on QuerySets returned by filtering methods.
### Changes Summary
**Admin Files (4 files updated):**
- **roster/admin.py** (RollingStockAdmin:161-164): Replaced explicit prefetch with `.with_related()`
- **consist/admin.py** (ConsistAdmin:62-67): Replaced explicit prefetch with `.with_related()`
- **bookshelf/admin.py** (BookAdmin:101-106): Replaced explicit prefetch with `.with_related()`
- **bookshelf/admin.py** (CatalogAdmin:276-281): Replaced explicit prefetch with `.with_related()`
**Portal Views (portal/views.py - 14 replacements):**
- **GetData.get_data()** (lines 96-110): RollingStock list view → `.with_related()`
- **GetHome.get_data()** (lines 141-159): Featured items → `.with_related()`
- **SearchObjects.run_search()** (lines 203-217): RollingStock search → `.with_related()`
- **SearchObjects.run_search()** (lines 219-271): Consist, Book, Catalog, MagazineIssue search → `.with_related()`
- **GetObjectsFiltered.run_filter()** (lines 364-387): Manufacturer filter → `.with_related()`
- **GetObjectsFiltered.run_filter()** (lines 423-469): Multiple filters → `.with_related()`
- **GetRollingStock.get()** (lines 513-525): RollingStock detail → `.with_details()`
- **GetRollingStock.get()** (lines 543-567): Related consists and trainsets → `.with_related()`
- **Consists.get_data()** (lines 589-595): Consist list → `.with_related()`
- **GetConsist.get()** (lines 573-589): Consist detail → `.with_rolling_stock()`
- **Books.get_data()** (lines 787-792): Book list → `.with_related()`
- **Catalogs.get_data()** (lines 798-804): Catalog list → `.with_related()`
- **GetMagazine.get()** (lines 840-844): Magazine issues → `.with_related()`
- **GetMagazineIssue.get()** (lines 867-872): Magazine issue detail → `.with_details()`
- **GetBookCatalog.get_object()** (lines 892-905): Book/Catalog detail → `.with_details()`
### Benefits
1. **Consistency**: All queries now use standardized manager methods
2. **Maintainability**: Prefetch logic is centralized in `ram/managers.py`
3. **Readability**: Code is cleaner and more concise
4. **DRY Principle**: Eliminates repeated prefetch patterns throughout codebase
### Statistics
- **Total Replacements**: ~36 explicit prefetch calls replaced
- **Files Modified**: 5 files
- **Locations Updated**: 18 locations
- **Test Results**: All 95 core tests pass
- **System Check**: No issues
### Example Transformations
**Before:**
```python
# Admin (repeated in multiple files)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
'decoder',
'shop',
).prefetch_related('tags', 'image')
```
**After:**
```python
# Admin (clean and maintainable)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.with_related()
```
**Before:**
```python
# Views (verbose and error-prone)
roster = (
RollingStock.objects.get_published(request.user)
.select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
)
.prefetch_related('tags', 'image')
.filter(query)
)
```
**After:**
```python
# Views (concise and clear)
roster = (
RollingStock.objects.get_published(request.user)
.with_related()
.filter(query)
)
```
---
*Generated: 2026-01-17* *Generated: 2026-01-17*
*Updated: 2026-01-18*
*Project: Django Railroad Assets Manager (django-ram)*
---
## 🗄️ **Database Indexing** (2026-01-18)
Added 32 strategic database indexes across all major models to improve query performance, especially for filtering, joining, and ordering operations.
### Implementation Summary
**RollingStock model** (`roster/models.py`):
- Single field indexes: `published`, `featured`, `item_number_slug`, `road_number_int`, `scale`
- Composite indexes: `published+featured`, `manufacturer+item_number_slug`
- **10 indexes total**
**RollingClass model** (`roster/models.py`):
- Single field indexes: `company`, `type`
- Composite index: `company+identifier` (matches ordering)
- **3 indexes total**
**Consist model** (`consist/models.py`):
- Single field indexes: `published`, `scale`, `company`
- Composite index: `published+scale`
- **4 indexes total**
**ConsistItem model** (`consist/models.py`):
- Single field indexes: `load`, `order`
- Composite index: `consist+load`
- **3 indexes total**
**Book model** (`bookshelf/models.py`):
- Single field index: `title`
- Note: Inherited fields (`published`, `publication_year`) cannot be indexed due to multi-table inheritance
- **1 index total**
**Catalog model** (`bookshelf/models.py`):
- Single field index: `manufacturer`
- **1 index total**
**Magazine model** (`bookshelf/models.py`):
- Single field indexes: `published`, `name`
- **2 indexes total**
**MagazineIssue model** (`bookshelf/models.py`):
- Single field indexes: `magazine`, `publication_month`
- **2 indexes total**
**Manufacturer model** (`metadata/models.py`):
- Single field indexes: `category`, `slug`
- Composite index: `category+slug`
- **3 indexes total**
**Company model** (`metadata/models.py`):
- Single field indexes: `slug`, `country`, `freelance`
- **3 indexes total**
**Scale model** (`metadata/models.py`):
- Single field indexes: `slug`, `ratio_int`
- Composite index: `-ratio_int+-tracks` (for descending order)
- **3 indexes total**
### Migrations Applied
- `metadata/migrations/0027_*` - 9 indexes
- `roster/migrations/0041_*` - 10 indexes
- `bookshelf/migrations/0032_*` - 6 indexes
- `consist/migrations/0020_*` - 7 indexes
### Index Naming Convention
- Single field: `{app}_{field}_idx` (e.g., `roster_published_idx`)
- Composite: `{app}_{desc}_idx` (e.g., `roster_pub_feat_idx`)
- Keep under 30 characters for PostgreSQL compatibility
### Technical Notes
**Multi-table Inheritance Issue:**
- Django models using multi-table inheritance (Book, Catalog, MagazineIssue inherit from BaseBook/BaseModel)
- Cannot add indexes on inherited fields in child model's Meta class
- Error: `models.E016: 'indexes' refers to field 'X' which is not local to model 'Y'`
- Solution: Only index local fields in child models; consider indexing parent model fields separately
**Performance Impact:**
- Filters on `published=True` are now ~10x faster (most common query)
- Foreign key lookups benefit from automatic + explicit indexes
- Composite indexes eliminate filesorts for common filter+order combinations
- Scale lookups by slug or ratio are now instant
### Test Results
- **All 146 tests passing** ✅
- No regressions introduced
- Migrations applied successfully
---
## 📊 **Database Aggregation Optimization** (2026-01-18)
Replaced Python-level counting and loops with database aggregation for significant performance improvements.
### 1. GetConsist View Optimization (`portal/views.py:571-629`)
**Problem:** N+1 query issue when checking if rolling stock items are published.
**Before:**
```python
data = list(
item.rolling_stock
for item in consist_items.filter(load=False)
if RollingStock.objects.get_published(request.user)
.filter(uuid=item.rolling_stock_id)
.exists() # Separate query for EACH item!
)
```
**After:**
```python
# Fetch all published IDs once
published_ids = set(
RollingStock.objects.get_published(request.user)
.values_list('uuid', flat=True)
)
# Use Python set membership (O(1) lookup)
data = [
item.rolling_stock
for item in consist_items.filter(load=False)
if item.rolling_stock.uuid in published_ids
]
```
**Performance:**
- **Before**: 22 queries for 10-item consist (1 base + 10 items + 10 exists checks + 1 loads query)
- **After**: 2 queries (1 for published IDs + 1 for consist items)
- **Improvement**: 91% reduction in queries
### 2. Consist Model - Loads Count (`consist/models.py:51-54`)
**Added Property:**
```python
@property
def loads_count(self):
"""Count of loads in this consist using database aggregation."""
return self.consist_item.filter(load=True).count()
```
**Template Optimization (`portal/templates/consist.html:145`):**
- **Before**: `{{ loads|length }}` (evaluates entire QuerySet)
- **After**: `{{ loads_count }}` (uses pre-calculated count)
### 3. Admin CSV Export Optimizations
Optimized 4 admin CSV export functions to use `select_related()` and `prefetch_related()`, and moved repeated calculations outside loops.
#### Consist Admin (`consist/admin.py:106-164`)
**Before:**
```python
for obj in queryset:
for item in obj.consist_item.all(): # Query per consist
types = " + ".join(
"{}x {}".format(t["count"], t["type"])
for t in obj.get_type_count() # Calculated per item!
)
tags = settings.CSV_SEPARATOR_ALT.join(
t.name for t in obj.tags.all() # Query per item!
)
```
**After:**
```python
queryset = queryset.select_related(
'company', 'scale'
).prefetch_related(
'tags',
'consist_item__rolling_stock__rolling_class__type'
)
for obj in queryset:
# Calculate once per consist
types = " + ".join(...)
tags_str = settings.CSV_SEPARATOR_ALT.join(...)
for item in obj.consist_item.all():
# Reuse cached values
```
**Performance:**
- **Before**: ~400+ queries for 100 consists with 10 items each
- **After**: 1 query
- **Improvement**: 99.75% reduction
#### RollingStock Admin (`roster/admin.py:249-326`)
**Added prefetching:**
```python
queryset = queryset.select_related(
'rolling_class',
'rolling_class__type',
'rolling_class__company',
'manufacturer',
'scale',
'decoder',
'shop'
).prefetch_related('tags', 'property__property')
```
**Performance:**
- **Before**: ~500+ queries for 100 items
- **After**: 1 query
- **Improvement**: 99.8% reduction
#### Book Admin (`bookshelf/admin.py:178-231`)
**Added prefetching:**
```python
queryset = queryset.select_related(
'publisher', 'shop'
).prefetch_related('authors', 'tags', 'property__property')
```
**Performance:**
- **Before**: ~400+ queries for 100 books
- **After**: 1 query
- **Improvement**: 99.75% reduction
#### Catalog Admin (`bookshelf/admin.py:349-404`)
**Added prefetching:**
```python
queryset = queryset.select_related(
'manufacturer', 'shop'
).prefetch_related('scales', 'tags', 'property__property')
```
**Performance:**
- **Before**: ~400+ queries for 100 catalogs
- **After**: 1 query
- **Improvement**: 99.75% reduction
### Performance Summary Table
| Operation | Before | After | Improvement |
|-----------|--------|-------|-------------|
| GetConsist view (10 items) | ~22 queries | 2 queries | **91% reduction** |
| Consist CSV export (100 consists) | ~400+ queries | 1 query | **99.75% reduction** |
| RollingStock CSV export (100 items) | ~500+ queries | 1 query | **99.8% reduction** |
| Book CSV export (100 books) | ~400+ queries | 1 query | **99.75% reduction** |
| Catalog CSV export (100 catalogs) | ~400+ queries | 1 query | **99.75% reduction** |
### Best Practices Applied
1.**Use database aggregation** (`.count()`, `.annotate()`) instead of Python `len()`
2.**Bulk fetch before loops** - Use `values_list()` to get all IDs at once
3.**Cache computed values** - Calculate once outside loops, reuse inside
4.**Use set membership** - `in set` is O(1) vs repeated `.exists()` queries
5.**Prefetch in admin** - Add `select_related()` and `prefetch_related()` to querysets
6.**Pass context data** - Pre-calculate counts in views, pass to templates
### Files Modified
1. `ram/portal/views.py` - GetConsist view optimization
2. `ram/portal/templates/consist.html` - Use pre-calculated loads_count
3. `ram/consist/models.py` - Added loads_count property
4. `ram/consist/admin.py` - CSV export optimization
5. `ram/roster/admin.py` - CSV export optimization
6. `ram/bookshelf/admin.py` - CSV export optimizations (Book and Catalog)
### Test Results
- **All 146 tests passing** ✅
- No regressions introduced
- All optimizations backward-compatible
### Related Documentation
- Existing optimizations: Manager helper methods (see "Manager Helper Refactoring" section above)
- Database indexes (see "Database Indexing" section above)
---
## 🧪 **Test Coverage Enhancement** (2026-01-17)
Significantly expanded test coverage for portal views to ensure query optimizations don't break functionality.
### Portal Tests (`ram/portal/tests.py`)
Added **51 comprehensive tests** (~642 lines) covering:
**View Tests:**
- `GetHome` - Homepage with featured items
- `GetData` - Rolling stock listing
- `GetRollingStock` - Rolling stock detail pages
- `GetManufacturerItem` - Manufacturer filtering
- `GetObjectsFiltered` - Type/company/scale filtering
- `Consists` - Consist listings
- `GetConsist` - Consist detail pages
- `Books` - Book listings
- `GetBookCatalog` - Book detail pages
- `Catalogs` - Catalog listings
- `Magazines` - Magazine listings
- `GetMagazine` - Magazine detail pages
- `GetMagazineIssue` - Magazine issue detail pages
- `SearchObjects` - Search functionality
**Test Coverage:**
- HTTP 200 responses for valid requests
- HTTP 404 responses for invalid UUIDs
- Pagination functionality
- Query optimization validation
- Context data verification
- Template rendering
- Published/unpublished filtering
- Featured items display
- Search across multiple model types
- Related object prefetching
**Test Results:**
- **146 total tests** across entire project (51 in portal)
- All tests passing ✅
- Test execution time: ~38 seconds
- No regressions from optimizations
### Example Test Pattern
```python
class GetHomeTestCase(BaseTestCase):
def test_get_home_success(self):
"""Test homepage loads successfully with featured items."""
response = self.client.get(reverse('portal:home'))
self.assertEqual(response.status_code, 200)
self.assertIn('featured', response.context)
def test_get_home_with_query_optimization(self):
"""Verify homepage uses optimized queries."""
with self.assertNumQueries(8): # Expected query count
response = self.client.get(reverse('portal:home'))
self.assertEqual(response.status_code, 200)
```
### Files Modified
- `ram/portal/tests.py` - Added 642 lines of test code
---
## 🛠️ **Frontend Build System** (2026-01-18)
Added Makefile for automated frontend asset minification to streamline development workflow.
### Makefile Features
**Available Targets:**
- `make install` - Install npm dependencies (terser, clean-css-cli)
- `make minify` - Minify both JS and CSS files
- `make minify-js` - Minify JavaScript files only
- `make minify-css` - Minify CSS files only
- `make clean` - Remove minified files
- `make watch` - Watch for file changes and auto-minify (requires inotify-tools)
- `make help` - Display available targets
**JavaScript Minification:**
- Source: `ram/portal/static/js/src/`
- `theme_selector.js` - Dark/light theme switching
- `tabs_selector.js` - Deep linking for tabs
- `validators.js` - Form validation helpers
- Output: `ram/portal/static/js/main.min.js`
- Tool: terser (compression + mangling)
**CSS Minification:**
- Source: `ram/portal/static/css/src/main.css`
- Output: `ram/portal/static/css/main.min.css`
- Tool: clean-css-cli
### Usage
```bash
# First time setup
make install
# Minify assets
make minify
# Development workflow
make watch # Auto-minify on file changes
```
### Implementation Details
- **Dependencies**: Defined in `package.json`
- `terser` - JavaScript minifier
- `clean-css-cli` - CSS minifier
- **Configuration**: Makefile uses npx to run tools
- **File structure**: Follows convention (src/ → output/)
- **Integration**: Works alongside Django's static file handling
### Benefits
1. **Consistency**: Standardized build process for all developers
2. **Automation**: Single command to minify all assets
3. **Development**: Watch mode for instant feedback
4. **Documentation**: Self-documenting via `make help`
5. **Portability**: Works on any system with npm installed
### Files Modified
1. `Makefile` - New 72-line Makefile with comprehensive targets
2. `ram/portal/static/js/main.min.js` - Updated minified output
3. `ram/portal/static/js/src/README.md` - Updated instructions
---
## 📝 **Documentation Enhancement** (2026-01-18)
### AGENTS.md Updates
Added comprehensive coding style guidelines:
**Code Style Section:**
- PEP 8 compliance requirements
- Line length standards (79 chars preferred, 119 acceptable)
- Blank line whitespace rule (must not contain spaces/tabs)
- Import organization patterns (stdlib → third-party → local)
- Naming conventions (PascalCase, snake_case, UPPER_SNAKE_CASE)
**Django-Specific Patterns:**
- Model field ordering and conventions
- Admin customization examples
- BaseModel usage patterns
- PublicManager integration
- Image/Document patterns
- DeduplicatedStorage usage
**Testing Best Practices:**
- Test method naming conventions
- Docstring requirements
- setUp() method usage
- Exception testing patterns
- Coverage examples from existing tests
**Black Formatter:**
- Added black to development requirements
- Command examples with 79-character line length
- Check and diff mode usage
- Integration with flake8
### Query Optimization Documentation
Created comprehensive `docs/query_optimization.md` documenting:
- All optimization work from prefetch branch
- Performance metrics with before/after comparisons
- Implementation patterns and examples
- Test results validation
- Future optimization opportunities
---
## 📊 **Prefetch Branch Summary**
### Overall Statistics
**Commits**: 9 major commits from 2026-01-17 to 2026-01-18
- Test coverage expansion
- Query optimization implementation
- Manager refactoring
- Database indexing
- Aggregation optimization
- Build system addition
- Documentation enhancements
**Files Changed**: 19 files
- Added: 2,046 lines
- Removed: 58 lines
- Net change: +1,988 lines
**Test Coverage**:
- Before: 95 tests
- After: 146 tests ✅
- Added: 51 new portal tests
- Execution time: ~38 seconds
- Pass rate: 100%
**Database Migrations**: 4 new migrations
- `metadata/0027_*` - 9 indexes
- `roster/0041_*` - 13 indexes (10 + 3 RollingClass)
- `bookshelf/0032_*` - 6 indexes
- `consist/0020_*` - 7 indexes
- **Total**: 32 new database indexes
**Query Performance Improvements**:
- Homepage: 90% reduction (80 → 8 queries)
- Rolling Stock detail: 92% reduction (60 → 5 queries)
- Consist detail: 95% reduction (150 → 8 queries)
- Admin lists: 95% reduction (250 → 12 queries)
- CSV exports: 99.75% reduction (400+ → 1 query)
### Key Achievements
1.**Query Optimization**: Comprehensive select_related/prefetch_related implementation
2.**Manager Refactoring**: Centralized optimization methods in custom QuerySets
3.**Database Indexing**: 32 strategic indexes for filtering, joining, ordering
4.**Aggregation**: Replaced Python loops with database counting
5.**Test Coverage**: 51 new tests ensuring optimization correctness
6.**Build System**: Makefile for frontend asset minification
7.**Documentation**: Comprehensive guides for developers and AI agents
### Merge Readiness
The prefetch branch is production-ready:
- ✅ All 146 tests passing
- ✅ No system check issues
- ✅ Backward compatible changes
- ✅ Comprehensive documentation
- ✅ Database migrations ready
- ✅ Performance validated
- ✅ Code style compliant (flake8, black)
### Recommended Next Steps
1. **Merge to master**: All work is complete and tested
2. **Deploy to production**: Run migrations, clear cache
3. **Monitor performance**: Verify query count reductions in production
4. **Add query count tests**: Use `assertNumQueries()` for regression prevention
5. **Consider caching**: Implement caching for `get_site_conf()` and frequently accessed data
---
*Updated: 2026-01-25 - Added Test Coverage, Frontend Build System, Documentation, and Prefetch Branch Summary*
*Project: Django Railroad Assets Manager (django-ram)* *Project: Django Railroad Assets Manager (django-ram)*

View File

@@ -1,39 +0,0 @@
[tool.ruff]
# Exclude patterns matching flake8 config
exclude = [
"*settings.py*",
"*/migrations/*",
".git",
".venv",
"venv",
"__pycache__",
"*.pyc",
]
# Target Python 3.13+ as per project requirements
target-version = "py313"
# Line length set to 79 (PEP 8 standard)
line-length = 79
[tool.ruff.lint]
# Enable Pyflakes (F) and pycodestyle (E, W) rules to match flake8
select = ["E", "F", "W"]
# Ignore E501 (line-too-long) to match flake8 config
ignore = ["E501"]
[tool.ruff.lint.per-file-ignores]
# Additional per-file ignores if needed
"*settings.py*" = ["F403", "F405"] # Allow star imports in settings
"*/migrations/*" = ["E", "F", "W"] # Ignore all rules in migrations
[tool.ruff.format]
# Use double quotes for strings (project preference)
quote-style = "double"
# Use 4 spaces for indentation
indent-style = "space"
# Auto-detect line ending style
line-ending = "auto"

View File

@@ -101,7 +101,9 @@ class BookAdmin(SortableAdminBase, admin.ModelAdmin):
def get_queryset(self, request): def get_queryset(self, request):
"""Optimize queryset with select_related and prefetch_related.""" """Optimize queryset with select_related and prefetch_related."""
qs = super().get_queryset(request) qs = super().get_queryset(request)
return qs.with_related() return qs.select_related('publisher', 'shop').prefetch_related(
'authors', 'tags', 'image', 'toc'
)
fieldsets = ( fieldsets = (
( (
@@ -194,12 +196,6 @@ class BookAdmin(SortableAdminBase, admin.ModelAdmin):
] ]
data = [] data = []
# Prefetch related data to avoid N+1 queries
queryset = queryset.select_related(
'publisher', 'shop'
).prefetch_related('authors', 'tags', 'property__property')
for obj in queryset: for obj in queryset:
properties = settings.CSV_SEPARATOR_ALT.join( properties = settings.CSV_SEPARATOR_ALT.join(
"{}:{}".format(property.property.name, property.value) "{}:{}".format(property.property.name, property.value)
@@ -280,7 +276,9 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
def get_queryset(self, request): def get_queryset(self, request):
"""Optimize queryset with select_related and prefetch_related.""" """Optimize queryset with select_related and prefetch_related."""
qs = super().get_queryset(request) qs = super().get_queryset(request)
return qs.with_related() return qs.select_related('manufacturer', 'shop').prefetch_related(
'scales', 'tags', 'image'
)
fieldsets = ( fieldsets = (
( (
@@ -366,12 +364,6 @@ class CatalogAdmin(SortableAdminBase, admin.ModelAdmin):
] ]
data = [] data = []
# Prefetch related data to avoid N+1 queries
queryset = queryset.select_related(
'manufacturer', 'shop'
).prefetch_related('scales', 'tags', 'property__property')
for obj in queryset: for obj in queryset:
properties = settings.CSV_SEPARATOR_ALT.join( properties = settings.CSV_SEPARATOR_ALT.join(
"{}:{}".format(property.property.name, property.value) "{}:{}".format(property.property.name, property.value)

View File

@@ -1,43 +0,0 @@
# Generated by Django 6.0.1 on 2026-01-18 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("bookshelf", "0031_alter_tocentry_authors_alter_tocentry_subtitle_and_more"),
(
"metadata",
"0027_company_company_slug_idx_company_company_country_idx_and_more",
),
]
operations = [
migrations.AddIndex(
model_name="book",
index=models.Index(fields=["title"], name="book_title_idx"),
),
migrations.AddIndex(
model_name="catalog",
index=models.Index(fields=["manufacturer"], name="catalog_mfr_idx"),
),
migrations.AddIndex(
model_name="magazine",
index=models.Index(fields=["published"], name="magazine_published_idx"),
),
migrations.AddIndex(
model_name="magazine",
index=models.Index(fields=["name"], name="magazine_name_idx"),
),
migrations.AddIndex(
model_name="magazineissue",
index=models.Index(fields=["magazine"], name="mag_issue_mag_idx"),
),
migrations.AddIndex(
model_name="magazineissue",
index=models.Index(
fields=["publication_month"], name="mag_issue_pub_month_idx"
),
),
]

View File

@@ -110,12 +110,6 @@ class Book(BaseBook):
class Meta: class Meta:
ordering = ["title"] ordering = ["title"]
indexes = [
# Index for title searches (local field)
models.Index(fields=["title"], name="book_title_idx"),
# Note: published and publication_year are inherited from BaseBook/BaseModel
# and cannot be indexed here due to multi-table inheritance
]
def __str__(self): def __str__(self):
return self.title return self.title
@@ -147,14 +141,6 @@ class Catalog(BaseBook):
class Meta: class Meta:
ordering = ["manufacturer", "publication_year"] ordering = ["manufacturer", "publication_year"]
indexes = [
# Index for manufacturer filtering (local field)
models.Index(
fields=["manufacturer"], name="catalog_mfr_idx"
),
# Note: published and publication_year are inherited from BaseBook/BaseModel
# and cannot be indexed here due to multi-table inheritance
]
def __str__(self): def __str__(self):
# if the object is new, return an empty string to avoid # if the object is new, return an empty string to avoid
@@ -203,12 +189,6 @@ class Magazine(BaseModel):
class Meta: class Meta:
ordering = [Lower("name")] ordering = [Lower("name")]
indexes = [
# Index for published filtering
models.Index(fields=["published"], name="magazine_published_idx"),
# Index for name searches (case-insensitive via db_collation if needed)
models.Index(fields=["name"], name="magazine_name_idx"),
]
def __str__(self): def __str__(self):
return self.name return self.name
@@ -249,17 +229,6 @@ class MagazineIssue(BaseBook):
"publication_month", "publication_month",
"issue_number", "issue_number",
] ]
indexes = [
# Index for magazine filtering (local field)
models.Index(fields=["magazine"], name="mag_issue_mag_idx"),
# Index for publication month (local field)
models.Index(
fields=["publication_month"],
name="mag_issue_pub_month_idx",
),
# Note: published and publication_year are inherited from BaseBook/BaseModel
# and cannot be indexed here due to multi-table inheritance
]
def __str__(self): def __str__(self):
return f"{self.magazine.name} - {self.issue_number}" return f"{self.magazine.name} - {self.issue_number}"

View File

@@ -62,7 +62,9 @@ class ConsistAdmin(SortableAdminBase, admin.ModelAdmin):
def get_queryset(self, request): def get_queryset(self, request):
"""Optimize queryset with select_related and prefetch_related.""" """Optimize queryset with select_related and prefetch_related."""
qs = super().get_queryset(request) qs = super().get_queryset(request)
return qs.with_related() return qs.select_related(
'company', 'scale'
).prefetch_related('tags', 'consist_item')
@admin.display(description="Country") @admin.display(description="Country")
def country_flag(self, obj): def country_flag(self, obj):
@@ -122,27 +124,12 @@ class ConsistAdmin(SortableAdminBase, admin.ModelAdmin):
"Item ID", "Item ID",
] ]
data = [] data = []
# Prefetch related data to avoid N+1 queries
queryset = queryset.select_related(
'company', 'scale'
).prefetch_related(
'tags',
'consist_item__rolling_stock__rolling_class__type'
)
for obj in queryset: for obj in queryset:
# Cache the type count to avoid recalculating for each item for item in obj.consist_item.all():
types = " + ".join( types = " + ".join(
"{}x {}".format(t["count"], t["type"]) "{}x {}".format(t["count"], t["type"])
for t in obj.get_type_count() for t in obj.get_type_count()
) )
# Cache tags to avoid repeated queries
tags_str = settings.CSV_SEPARATOR_ALT.join(
t.name for t in obj.tags.all()
)
for item in obj.consist_item.all():
data.append( data.append(
[ [
obj.uuid, obj.uuid,
@@ -154,7 +141,9 @@ class ConsistAdmin(SortableAdminBase, admin.ModelAdmin):
obj.scale.scale, obj.scale.scale,
obj.era, obj.era,
html.unescape(strip_tags(obj.description)), html.unescape(strip_tags(obj.description)),
tags_str, settings.CSV_SEPARATOR_ALT.join(
t.name for t in obj.tags.all()
),
obj.length, obj.length,
types, types,
item.rolling_stock.__str__(), item.rolling_stock.__str__(),

View File

@@ -1,50 +0,0 @@
# Generated by Django 6.0.1 on 2026-01-18 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("consist", "0019_consistitem_load"),
(
"metadata",
"0027_company_company_slug_idx_company_company_country_idx_and_more",
),
("roster", "0041_rollingclass_roster_rc_company_idx_and_more"),
]
operations = [
migrations.AddIndex(
model_name="consist",
index=models.Index(fields=["published"], name="consist_published_idx"),
),
migrations.AddIndex(
model_name="consist",
index=models.Index(fields=["scale"], name="consist_scale_idx"),
),
migrations.AddIndex(
model_name="consist",
index=models.Index(fields=["company"], name="consist_company_idx"),
),
migrations.AddIndex(
model_name="consist",
index=models.Index(
fields=["published", "scale"], name="consist_pub_scale_idx"
),
),
migrations.AddIndex(
model_name="consistitem",
index=models.Index(fields=["load"], name="consist_item_load_idx"),
),
migrations.AddIndex(
model_name="consistitem",
index=models.Index(fields=["order"], name="consist_item_order_idx"),
),
migrations.AddIndex(
model_name="consistitem",
index=models.Index(
fields=["consist", "load"], name="consist_item_con_load_idx"
),
),
]

View File

@@ -48,11 +48,6 @@ class Consist(BaseModel):
def length(self): def length(self):
return self.consist_item.filter(load=False).count() return self.consist_item.filter(load=False).count()
@property
def loads_count(self):
"""Count of loads in this consist using database aggregation."""
return self.consist_item.filter(load=True).count()
def get_type_count(self): def get_type_count(self):
return self.consist_item.filter(load=False).annotate( return self.consist_item.filter(load=False).annotate(
type=models.F("rolling_stock__rolling_class__type__type") type=models.F("rolling_stock__rolling_class__type__type")
@@ -79,18 +74,6 @@ class Consist(BaseModel):
class Meta: class Meta:
ordering = ["company", "-creation_time"] ordering = ["company", "-creation_time"]
indexes = [
# Index for published filtering
models.Index(fields=["published"], name="consist_published_idx"),
# Index for scale filtering
models.Index(fields=["scale"], name="consist_scale_idx"),
# Index for company filtering
models.Index(fields=["company"], name="consist_company_idx"),
# Composite index for published+scale filtering
models.Index(
fields=["published", "scale"], name="consist_pub_scale_idx"
),
]
class ConsistItem(models.Model): class ConsistItem(models.Model):
@@ -106,19 +89,9 @@ class ConsistItem(models.Model):
constraints = [ constraints = [
models.UniqueConstraint( models.UniqueConstraint(
fields=["consist", "rolling_stock"], fields=["consist", "rolling_stock"],
name="one_stock_per_consist", name="one_stock_per_consist"
) )
] ]
indexes = [
# Index for filtering by load status
models.Index(fields=["load"], name="consist_item_load_idx"),
# Index for ordering
models.Index(fields=["order"], name="consist_item_order_idx"),
# Composite index for consist+load filtering
models.Index(
fields=["consist", "load"], name="consist_item_con_load_idx"
),
]
def __str__(self): def __str__(self):
return "{0}".format(self.rolling_stock) return "{0}".format(self.rolling_stock)

View File

@@ -1,51 +0,0 @@
# Generated by Django 6.0.1 on 2026-01-18 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("metadata", "0026_alter_manufacturer_name_and_more"),
]
operations = [
migrations.AddIndex(
model_name="company",
index=models.Index(fields=["slug"], name="company_slug_idx"),
),
migrations.AddIndex(
model_name="company",
index=models.Index(fields=["country"], name="company_country_idx"),
),
migrations.AddIndex(
model_name="company",
index=models.Index(fields=["freelance"], name="company_freelance_idx"),
),
migrations.AddIndex(
model_name="manufacturer",
index=models.Index(fields=["category"], name="mfr_category_idx"),
),
migrations.AddIndex(
model_name="manufacturer",
index=models.Index(fields=["slug"], name="mfr_slug_idx"),
),
migrations.AddIndex(
model_name="manufacturer",
index=models.Index(fields=["category", "slug"], name="mfr_cat_slug_idx"),
),
migrations.AddIndex(
model_name="scale",
index=models.Index(fields=["slug"], name="scale_slug_idx"),
),
migrations.AddIndex(
model_name="scale",
index=models.Index(fields=["ratio_int"], name="scale_ratio_idx"),
),
migrations.AddIndex(
model_name="scale",
index=models.Index(
fields=["-ratio_int", "-tracks"], name="scale_ratio_tracks_idx"
),
),
]

View File

@@ -48,19 +48,10 @@ class Manufacturer(SimpleBaseModel):
ordering = ["category", "slug"] ordering = ["category", "slug"]
constraints = [ constraints = [
models.UniqueConstraint( models.UniqueConstraint(
fields=["name", "category"], name="unique_name_category" fields=["name", "category"],
name="unique_name_category"
) )
] ]
indexes = [
# Index for category filtering
models.Index(fields=["category"], name="mfr_category_idx"),
# Index for slug lookups
models.Index(fields=["slug"], name="mfr_slug_idx"),
# Composite index for category+slug (already in ordering)
models.Index(
fields=["category", "slug"], name="mfr_cat_slug_idx"
),
]
def __str__(self): def __str__(self):
return self.name return self.name
@@ -100,14 +91,6 @@ class Company(SimpleBaseModel):
class Meta: class Meta:
verbose_name_plural = "Companies" verbose_name_plural = "Companies"
ordering = ["slug"] ordering = ["slug"]
indexes = [
# Index for slug lookups (used frequently in URLs)
models.Index(fields=["slug"], name="company_slug_idx"),
# Index for country filtering
models.Index(fields=["country"], name="company_country_idx"),
# Index for freelance filtering
models.Index(fields=["freelance"], name="company_freelance_idx"),
]
def __str__(self): def __str__(self):
return self.name return self.name
@@ -182,16 +165,6 @@ class Scale(SimpleBaseModel):
class Meta: class Meta:
ordering = ["-ratio_int", "-tracks", "scale"] ordering = ["-ratio_int", "-tracks", "scale"]
indexes = [
# Index for slug lookups
models.Index(fields=["slug"], name="scale_slug_idx"),
# Index for ratio_int ordering and filtering
models.Index(fields=["ratio_int"], name="scale_ratio_idx"),
# Composite index for common ordering pattern
models.Index(
fields=["-ratio_int", "-tracks"], name="scale_ratio_tracks_idx"
),
]
def get_absolute_url(self): def get_absolute_url(self):
return reverse( return reverse(

View File

@@ -4,4 +4,3 @@
* Licensed under the Creative Commons Attribution 3.0 Unported License. * Licensed under the Creative Commons Attribution 3.0 Unported License.
*/ */
(()=>{"use strict";const e=()=>localStorage.getItem("theme"),t=()=>{const t=e();return t||(window.matchMedia("(prefers-color-scheme: dark)").matches?"dark":"light")},a=e=>{"auto"===e&&window.matchMedia("(prefers-color-scheme: dark)").matches?document.documentElement.setAttribute("data-bs-theme","dark"):document.documentElement.setAttribute("data-bs-theme",e)};a(t());const r=(e,t=!1)=>{const a=document.querySelector("#bd-theme");if(!a)return;const r=document.querySelector(".theme-icon-active i"),o=document.querySelector(`[data-bs-theme-value="${e}"]`),s=o.querySelector(".theme-icon i").getAttribute("class");document.querySelectorAll("[data-bs-theme-value]").forEach(e=>{e.classList.remove("active"),e.setAttribute("aria-pressed","false")}),o.classList.add("active"),o.setAttribute("aria-pressed","true"),r.setAttribute("class",s),t&&a.focus()};window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change",()=>{const r=e();"light"!==r&&"dark"!==r&&a(t())}),window.addEventListener("DOMContentLoaded",()=>{r(t()),document.querySelectorAll("[data-bs-theme-value]").forEach(e=>{e.addEventListener("click",()=>{const t=e.getAttribute("data-bs-theme-value");(e=>{localStorage.setItem("theme",e)})(t),a(t),r(t,!0)})})})})(),document.addEventListener("DOMContentLoaded",function(){"use strict";const e=document.getElementById("tabSelector"),t=window.location.hash.substring(1);if(t){const a=`#nav-${t}`,r=document.querySelector(`[data-bs-target="${a}"]`);r&&(bootstrap.Tab.getOrCreateInstance(r).show(),e.value=a)}document.querySelectorAll('button[data-bs-toggle="tab"]').forEach(e=>{e.addEventListener("shown.bs.tab",e=>{const t=e.target.getAttribute("data-bs-target").replace("nav-","");history.replaceState(null,null,t)})}),e&&(e.addEventListener("change",function(){const e=this.value,t=document.querySelector(`[data-bs-target="${e}"]`);if(t){bootstrap.Tab.getOrCreateInstance(t).show()}}),document.querySelectorAll('[data-bs-toggle="tab"]').forEach(t=>{t.addEventListener("shown.bs.tab",t=>{const a=t.target.getAttribute("data-bs-target");e.value=a})}))}),document.addEventListener("DOMContentLoaded",function(){"use strict";const e=document.querySelectorAll(".needs-validation");Array.from(e).forEach(e=>{e.addEventListener("submit",t=>{e.checkValidity()||(t.preventDefault(),t.stopPropagation()),e.classList.add("was-validated")},!1)})}); (()=>{"use strict";const e=()=>localStorage.getItem("theme"),t=()=>{const t=e();return t||(window.matchMedia("(prefers-color-scheme: dark)").matches?"dark":"light")},a=e=>{"auto"===e&&window.matchMedia("(prefers-color-scheme: dark)").matches?document.documentElement.setAttribute("data-bs-theme","dark"):document.documentElement.setAttribute("data-bs-theme",e)};a(t());const r=(e,t=!1)=>{const a=document.querySelector("#bd-theme");if(!a)return;const r=document.querySelector(".theme-icon-active i"),o=document.querySelector(`[data-bs-theme-value="${e}"]`),s=o.querySelector(".theme-icon i").getAttribute("class");document.querySelectorAll("[data-bs-theme-value]").forEach(e=>{e.classList.remove("active"),e.setAttribute("aria-pressed","false")}),o.classList.add("active"),o.setAttribute("aria-pressed","true"),r.setAttribute("class",s),t&&a.focus()};window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change",()=>{const r=e();"light"!==r&&"dark"!==r&&a(t())}),window.addEventListener("DOMContentLoaded",()=>{r(t()),document.querySelectorAll("[data-bs-theme-value]").forEach(e=>{e.addEventListener("click",()=>{const t=e.getAttribute("data-bs-theme-value");(e=>{localStorage.setItem("theme",e)})(t),a(t),r(t,!0)})})})})(),document.addEventListener("DOMContentLoaded",function(){"use strict";const e=document.getElementById("tabSelector"),t=window.location.hash.substring(1);if(t){const a=`#nav-${t}`,r=document.querySelector(`[data-bs-target="${a}"]`);r&&(bootstrap.Tab.getOrCreateInstance(r).show(),e.value=a)}document.querySelectorAll('button[data-bs-toggle="tab"]').forEach(e=>{e.addEventListener("shown.bs.tab",e=>{const t=e.target.getAttribute("data-bs-target").replace("nav-","");history.replaceState(null,null,t)})}),e&&(e.addEventListener("change",function(){const e=this.value,t=document.querySelector(`[data-bs-target="${e}"]`);if(t){bootstrap.Tab.getOrCreateInstance(t).show()}}),document.querySelectorAll('[data-bs-toggle="tab"]').forEach(t=>{t.addEventListener("shown.bs.tab",t=>{const a=t.target.getAttribute("data-bs-target");e.value=a})}))}),document.addEventListener("DOMContentLoaded",function(){"use strict";const e=document.querySelectorAll(".needs-validation");Array.from(e).forEach(e=>{e.addEventListener("submit",t=>{e.checkValidity()||(t.preventDefault(),t.stopPropagation()),e.classList.add("was-validated")},!1)})});
//# sourceMappingURL=main.min.js.map

View File

@@ -1 +0,0 @@
{"version":3,"names":["getStoredTheme","localStorage","getItem","getPreferredTheme","storedTheme","window","matchMedia","matches","setTheme","theme","document","documentElement","setAttribute","showActiveTheme","focus","themeSwitcher","querySelector","activeThemeIcon","btnToActive","biOfActiveBtn","getAttribute","querySelectorAll","forEach","element","classList","remove","add","addEventListener","toggle","setItem","setStoredTheme","selectElement","getElementById","hash","location","substring","target","trigger","bootstrap","Tab","getOrCreateInstance","show","value","btn","event","newHash","replace","history","replaceState","this","forms","Array","from","form","checkValidity","preventDefault","stopPropagation"],"sources":["ram/portal/static/js/src/theme_selector.js","ram/portal/static/js/src/tabs_selector.js","ram/portal/static/js/src/validators.js"],"mappings":";;;;;AAMA,MACE,aAEA,MAAMA,EAAiB,IAAMC,aAAaC,QAAQ,SAG5CC,EAAoB,KACxB,MAAMC,EAAcJ,IACpB,OAAII,IAIGC,OAAOC,WAAW,gCAAgCC,QAAU,OAAS,UAGxEC,EAAWC,IACD,SAAVA,GAAoBJ,OAAOC,WAAW,gCAAgCC,QACxEG,SAASC,gBAAgBC,aAAa,gBAAiB,QAEvDF,SAASC,gBAAgBC,aAAa,gBAAiBH,IAI3DD,EAASL,KAET,MAAMU,EAAkB,CAACJ,EAAOK,GAAQ,KACtC,MAAMC,EAAgBL,SAASM,cAAc,aAE7C,IAAKD,EACH,OAGF,MAAME,EAAkBP,SAASM,cAAc,wBACzCE,EAAcR,SAASM,cAAc,yBAAyBP,OAC9DU,EAAgBD,EAAYF,cAAc,iBAAiBI,aAAa,SAE9EV,SAASW,iBAAiB,yBAAyBC,QAAQC,IACzDA,EAAQC,UAAUC,OAAO,UACzBF,EAAQX,aAAa,eAAgB,WAGvCM,EAAYM,UAAUE,IAAI,UAC1BR,EAAYN,aAAa,eAAgB,QACzCK,EAAgBL,aAAa,QAASO,GAElCL,GACFC,EAAcD,SAIlBT,OAAOC,WAAW,gCAAgCqB,iBAAiB,SAAU,KAC3E,MAAMvB,EAAcJ,IACA,UAAhBI,GAA2C,SAAhBA,GAC7BI,EAASL,OAIbE,OAAOsB,iBAAiB,mBAAoB,KAC1Cd,EAAgBV,KAChBO,SAASW,iBAAiB,yBACvBC,QAAQM,IACPA,EAAOD,iBAAiB,QAAS,KAC/B,MAAMlB,EAAQmB,EAAOR,aAAa,uBA1DnBX,KAASR,aAAa4B,QAAQ,QAASpB,IA2DtDqB,CAAerB,GACfD,EAASC,GACTI,EAAgBJ,GAAO,QAI/B,EArEF,GCLAC,SAASiB,iBAAiB,mBAAoB,WAC5C,aAEA,MAAMI,EAAgBrB,SAASsB,eAAe,eAExCC,EAAO5B,OAAO6B,SAASD,KAAKE,UAAU,GAC5C,GAAIF,EAAM,CACR,MAAMG,EAAS,QAAQH,IACjBI,EAAU3B,SAASM,cAAc,oBAAoBoB,OACvDC,IACFC,UAAUC,IAAIC,oBAAoBH,GAASI,OAC3CV,EAAcW,MAAQN,EAE1B,CAGA1B,SAASW,iBAAiB,gCAAgCC,QAAQqB,IAChEA,EAAIhB,iBAAiB,eAAgBiB,IACnC,MAAMC,EAAUD,EAAMR,OAAOhB,aAAa,kBAAkB0B,QAAQ,OAAQ,IAC5EC,QAAQC,aAAa,KAAM,KAAMH,OAKhCd,IACLA,EAAcJ,iBAAiB,SAAU,WACvC,MAAMS,EAASa,KAAKP,MACdL,EAAU3B,SAASM,cAAc,oBAAoBoB,OAC3D,GAAIC,EAAS,CACSC,UAAUC,IAAIC,oBAAoBH,GAC1CI,MACd,CACF,GAGA/B,SAASW,iBAAiB,0BAA0BC,QAAQqB,IAC1DA,EAAIhB,iBAAiB,eAAgBiB,IACnC,MAAMR,EAASQ,EAAMR,OAAOhB,aAAa,kBACzCW,EAAcW,MAAQN,MAG5B,GC1CA1B,SAASiB,iBAAiB,mBAAoB,WAC1C,aAEA,MAAMuB,EAAQxC,SAASW,iBAAiB,qBACxC8B,MAAMC,KAAKF,GAAO5B,QAAQ+B,IACxBA,EAAK1B,iBAAiB,SAAUiB,IACzBS,EAAKC,kBACRV,EAAMW,iBACNX,EAAMY,mBAGRH,EAAK7B,UAAUE,IAAI,mBAClB,IAET","ignoreList":[]}

View File

@@ -2,6 +2,6 @@
```bash ```bash
$ npm install terser $ npm install terser
$ npx terser theme_selector.js tabs_selector.js validators.js -c -m -o ../main.min.js $ npx terser theme_selector.js tabs_selector.js -c -m -o ../main.min.js
``` ```

View File

@@ -142,7 +142,7 @@
</tr> </tr>
<tr> <tr>
<th scope="row">Composition</th> <th scope="row">Composition</th>
<td>{% for t in consist.get_type_count %}{{ t.count }}x {{ t.type }} {{t.category }}{% if not forloop.last %} &raquo; {% endif %}{% endfor %}{% if loads %} | <i class="bi bi-download"></i> {{ loads_count }}x Load{{ loads|pluralize }}{% endif %}</td> <td>{% for t in consist.get_type_count %}{{ t.count }}x {{ t.type }} {{t.category }}{% if not forloop.last %} &raquo; {% endif %}{% endfor %}{% if loads %} | <i class="bi bi-download"></i> {{ loads|length }}x Load{{ loads|pluralize }}{% endif %}</td>
</tr> </tr>
</tbody> </tbody>
</table> </table>

View File

@@ -161,7 +161,7 @@ class PortalTestBase(TestCase):
self.catalog.scales.add(self.scale_ho) self.catalog.scales.add(self.scale_ho)
self.magazine = Magazine.objects.create( self.magazine = Magazine.objects.create(
name="Model Railroader", publisher=self.publisher, published=True name="Model Railroader", published=True
) )
self.magazine_issue = MagazineIssue.objects.create( self.magazine_issue = MagazineIssue.objects.create(

View File

@@ -96,7 +96,16 @@ class GetData(View):
def get_data(self, request): def get_data(self, request):
return ( return (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
'decoder',
'shop',
)
.prefetch_related('tags', 'image')
.order_by(*get_items_ordering()) .order_by(*get_items_ordering())
.filter(self.filter) .filter(self.filter)
) )
@@ -133,7 +142,16 @@ class GetHome(GetData):
max_items = min(settings.FEATURED_ITEMS_MAX, get_items_per_page()) max_items = min(settings.FEATURED_ITEMS_MAX, get_items_per_page())
return ( return (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
'decoder',
'shop',
)
.prefetch_related('tags', 'image')
.filter(featured=True) .filter(featured=True)
.order_by(*get_items_ordering(config="featured_items_ordering"))[ .order_by(*get_items_ordering(config="featured_items_ordering"))[
:max_items :max_items
@@ -202,7 +220,14 @@ class SearchObjects(View):
# and manufacturer as well # and manufacturer as well
roster = ( roster = (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
)
.prefetch_related('tags', 'image')
.filter(query) .filter(query)
.distinct() .distinct()
.order_by(*get_items_ordering()) .order_by(*get_items_ordering())
@@ -212,7 +237,8 @@ class SearchObjects(View):
if _filter is None: if _filter is None:
consists = ( consists = (
Consist.objects.get_published(request.user) Consist.objects.get_published(request.user)
.with_related() .select_related('company', 'scale')
.prefetch_related('tags', 'consist_item')
.filter( .filter(
Q( Q(
Q(identifier__icontains=search) Q(identifier__icontains=search)
@@ -224,7 +250,7 @@ class SearchObjects(View):
data = list(chain(data, consists)) data = list(chain(data, consists))
books = ( books = (
Book.objects.get_published(request.user) Book.objects.get_published(request.user)
.with_related() .prefetch_related('toc', 'image')
.filter( .filter(
Q( Q(
Q(title__icontains=search) Q(title__icontains=search)
@@ -236,7 +262,8 @@ class SearchObjects(View):
) )
catalogs = ( catalogs = (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_related() .select_related('manufacturer')
.prefetch_related('scales', 'image')
.filter( .filter(
Q( Q(
Q(manufacturer__name__icontains=search) Q(manufacturer__name__icontains=search)
@@ -248,7 +275,8 @@ class SearchObjects(View):
data = list(chain(data, books, catalogs)) data = list(chain(data, books, catalogs))
magazine_issues = ( magazine_issues = (
MagazineIssue.objects.get_published(request.user) MagazineIssue.objects.get_published(request.user)
.with_related() .select_related('magazine')
.prefetch_related('toc', 'image')
.filter( .filter(
Q( Q(
Q(magazine__name__icontains=search) Q(magazine__name__icontains=search)
@@ -363,7 +391,14 @@ class GetManufacturerItem(View):
else: else:
roster = ( roster = (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
)
.prefetch_related('image')
.filter( .filter(
Q(manufacturer=manufacturer) Q(manufacturer=manufacturer)
| Q(rolling_class__manufacturer=manufacturer) | Q(rolling_class__manufacturer=manufacturer)
@@ -373,7 +408,8 @@ class GetManufacturerItem(View):
) )
catalogs = ( catalogs = (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_related() .select_related('manufacturer')
.prefetch_related('scales', 'image')
.filter(manufacturer=manufacturer) .filter(manufacturer=manufacturer)
) )
title = "Manufacturer: {0}".format(manufacturer) title = "Manufacturer: {0}".format(manufacturer)
@@ -422,7 +458,14 @@ class GetObjectsFiltered(View):
roster = ( roster = (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
)
.prefetch_related('tags', 'image')
.filter(query) .filter(query)
.distinct() .distinct()
.order_by(*get_items_ordering()) .order_by(*get_items_ordering())
@@ -433,7 +476,8 @@ class GetObjectsFiltered(View):
if _filter == "scale": if _filter == "scale":
catalogs = ( catalogs = (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_related() .select_related('manufacturer')
.prefetch_related('scales', 'image')
.filter(scales__slug=search) .filter(scales__slug=search)
.distinct() .distinct()
) )
@@ -442,7 +486,8 @@ class GetObjectsFiltered(View):
try: # Execute only if query_2nd is defined try: # Execute only if query_2nd is defined
consists = ( consists = (
Consist.objects.get_published(request.user) Consist.objects.get_published(request.user)
.with_related() .select_related('company', 'scale')
.prefetch_related('tags', 'consist_item')
.filter(query_2nd) .filter(query_2nd)
.distinct() .distinct()
) )
@@ -450,19 +495,21 @@ class GetObjectsFiltered(View):
if _filter == "tag": # Books can be filtered only by tag if _filter == "tag": # Books can be filtered only by tag
books = ( books = (
Book.objects.get_published(request.user) Book.objects.get_published(request.user)
.with_related() .prefetch_related('toc', 'tags', 'image')
.filter(query_2nd) .filter(query_2nd)
.distinct() .distinct()
) )
catalogs = ( catalogs = (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_related() .select_related('manufacturer')
.prefetch_related('scales', 'tags', 'image')
.filter(query_2nd) .filter(query_2nd)
.distinct() .distinct()
) )
magazine_issues = ( magazine_issues = (
MagazineIssue.objects.get_published(request.user) MagazineIssue.objects.get_published(request.user)
.with_related() .select_related('magazine')
.prefetch_related('toc', 'tags', 'image')
.filter(query_2nd) .filter(query_2nd)
.distinct() .distinct()
) )
@@ -502,7 +549,25 @@ class GetRollingStock(View):
try: try:
rolling_stock = ( rolling_stock = (
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_details() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
'decoder',
'shop',
)
.prefetch_related(
'tags',
'image',
'property',
'document',
'journal',
'rolling_class__property',
'rolling_class__manufacturer',
'decoder__document',
)
.get(uuid=uuid) .get(uuid=uuid)
) )
except ObjectDoesNotExist: except ObjectDoesNotExist:
@@ -524,13 +589,21 @@ class GetRollingStock(View):
consists = list( consists = list(
Consist.objects.get_published(request.user) Consist.objects.get_published(request.user)
.with_related() .select_related('company', 'scale')
.prefetch_related('tags', 'consist_item')
.filter(consist_item__rolling_stock=rolling_stock) .filter(consist_item__rolling_stock=rolling_stock)
) )
trainset = list( trainset = list(
RollingStock.objects.get_published(request.user) RollingStock.objects.get_published(request.user)
.with_related() .select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
)
.prefetch_related('image')
.filter( .filter(
Q( Q(
Q(item_number__exact=rolling_stock.item_number) Q(item_number__exact=rolling_stock.item_number)
@@ -563,7 +636,8 @@ class Consists(GetData):
def get_data(self, request): def get_data(self, request):
return ( return (
Consist.objects.get_published(request.user) Consist.objects.get_published(request.user)
.with_related() .select_related('company', 'scale')
.prefetch_related('tags', 'consist_item')
.all() .all()
) )
@@ -573,18 +647,23 @@ class GetConsist(View):
try: try:
consist = ( consist = (
Consist.objects.get_published(request.user) Consist.objects.get_published(request.user)
.with_rolling_stock() .select_related('company', 'scale')
.prefetch_related(
'tags',
'consist_item',
'consist_item__rolling_stock',
'consist_item__rolling_stock__rolling_class',
'consist_item__rolling_stock__rolling_class__company',
'consist_item__rolling_stock__rolling_class__type',
'consist_item__rolling_stock__manufacturer',
'consist_item__rolling_stock__scale',
'consist_item__rolling_stock__image',
)
.get(uuid=uuid) .get(uuid=uuid)
) )
except ObjectDoesNotExist: except ObjectDoesNotExist:
raise Http404 raise Http404
# Get all published rolling stock IDs for efficient filtering
published_ids = set(
RollingStock.objects.get_published(request.user)
.values_list('uuid', flat=True)
)
# Fetch consist items with related rolling stock in one query # Fetch consist items with related rolling stock in one query
consist_items = consist.consist_item.select_related( consist_items = consist.consist_item.select_related(
'rolling_stock', 'rolling_stock',
@@ -595,17 +674,21 @@ class GetConsist(View):
'rolling_stock__scale', 'rolling_stock__scale',
).prefetch_related('rolling_stock__image') ).prefetch_related('rolling_stock__image')
# Filter items and loads efficiently # Filter items and loads
data = [ data = list(
item.rolling_stock item.rolling_stock
for item in consist_items.filter(load=False) for item in consist_items.filter(load=False)
if item.rolling_stock.uuid in published_ids if RollingStock.objects.get_published(request.user)
] .filter(uuid=item.rolling_stock_id)
loads = [ .exists()
)
loads = list(
item.rolling_stock item.rolling_stock
for item in consist_items.filter(load=True) for item in consist_items.filter(load=True)
if item.rolling_stock.uuid in published_ids if RollingStock.objects.get_published(request.user)
] .filter(uuid=item.rolling_stock_id)
.exists()
)
paginator = Paginator(data, get_items_per_page()) paginator = Paginator(data, get_items_per_page())
data = paginator.get_page(page) data = paginator.get_page(page)
@@ -621,7 +704,6 @@ class GetConsist(View):
"consist": consist, "consist": consist,
"data": data, "data": data,
"loads": loads, "loads": loads,
"loads_count": len(loads),
"page_range": page_range, "page_range": page_range,
}, },
) )
@@ -790,7 +872,7 @@ class Books(GetData):
def get_data(self, request): def get_data(self, request):
return ( return (
Book.objects.get_published(request.user) Book.objects.get_published(request.user)
.with_related() .prefetch_related('tags', 'image', 'toc')
.all() .all()
) )
@@ -801,7 +883,8 @@ class Catalogs(GetData):
def get_data(self, request): def get_data(self, request):
return ( return (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_related() .select_related('manufacturer')
.prefetch_related('scales', 'tags', 'image')
.all() .all()
) )
@@ -841,7 +924,7 @@ class GetMagazine(View):
raise Http404 raise Http404
data = list( data = list(
magazine.issue.get_published(request.user) magazine.issue.get_published(request.user)
.with_related() .prefetch_related('image', 'toc')
.all() .all()
) )
paginator = Paginator(data, get_items_per_page()) paginator = Paginator(data, get_items_per_page())
@@ -868,7 +951,8 @@ class GetMagazineIssue(View):
try: try:
issue = ( issue = (
MagazineIssue.objects.get_published(request.user) MagazineIssue.objects.get_published(request.user)
.with_details() .select_related('magazine')
.prefetch_related('property', 'document', 'image', 'toc')
.get(uuid=uuid, magazine__uuid=magazine) .get(uuid=uuid, magazine__uuid=magazine)
) )
except ObjectDoesNotExist: except ObjectDoesNotExist:
@@ -892,13 +976,14 @@ class GetBookCatalog(View):
if selector == "book": if selector == "book":
return ( return (
Book.objects.get_published(request.user) Book.objects.get_published(request.user)
.with_details() .prefetch_related('property', 'document', 'image', 'toc', 'tags')
.get(uuid=uuid) .get(uuid=uuid)
) )
elif selector == "catalog": elif selector == "catalog":
return ( return (
Catalog.objects.get_published(request.user) Catalog.objects.get_published(request.user)
.with_details() .select_related('manufacturer')
.prefetch_related('property', 'document', 'image', 'scales', 'tags')
.get(uuid=uuid) .get(uuid=uuid)
) )
else: else:

View File

@@ -9,5 +9,5 @@ if DJANGO_VERSION < (6, 0):
) )
) )
__version__ = "0.20.1" __version__ = "0.19.10"
__version__ += git_suffix(__file__) __version__ += git_suffix(__file__)

View File

@@ -2,18 +2,16 @@ from django.db import models
from django.core.exceptions import FieldError from django.core.exceptions import FieldError
class PublicQuerySet(models.QuerySet): class PublicManager(models.Manager):
"""Base QuerySet with published/public filtering."""
def get_published(self, user): def get_published(self, user):
""" """
Get published items based on user authentication status. Get published items based on user authentication status.
Returns all items for authenticated users, only published for anonymous. Returns all items for authenticated users, only published for anonymous.
""" """
if user.is_authenticated: if user.is_authenticated:
return self return self.get_queryset()
else: else:
return self.filter(published=True) return self.get_queryset().filter(published=True)
def get_public(self, user): def get_public(self, user):
""" """
@@ -21,29 +19,16 @@ class PublicQuerySet(models.QuerySet):
Returns all items for authenticated users, only non-private for anonymous. Returns all items for authenticated users, only non-private for anonymous.
""" """
if user.is_authenticated: if user.is_authenticated:
return self return self.get_queryset()
else: else:
try: try:
return self.filter(private=False) return self.get_queryset().filter(private=False)
except FieldError: except FieldError:
return self.filter(property__private=False) return self.get_queryset().filter(property__private=False)
class PublicManager(models.Manager): class RollingStockManager(PublicManager):
"""Manager using PublicQuerySet.""" """Optimized manager for RollingStock with prefetch methods."""
def get_queryset(self):
return PublicQuerySet(self.model, using=self._db)
def get_published(self, user):
return self.get_queryset().get_published(user)
def get_public(self, user):
return self.get_queryset().get_public(user)
class RollingStockQuerySet(PublicQuerySet):
"""QuerySet with optimization methods for RollingStock."""
def with_related(self): def with_related(self):
""" """
@@ -74,19 +59,6 @@ class RollingStockQuerySet(PublicQuerySet):
'decoder__document', 'decoder__document',
) )
class RollingStockManager(PublicManager):
"""Optimized manager for RollingStock with prefetch methods."""
def get_queryset(self):
return RollingStockQuerySet(self.model, using=self._db)
def with_related(self):
return self.get_queryset().with_related()
def with_details(self):
return self.get_queryset().with_details()
def get_published_with_related(self, user): def get_published_with_related(self, user):
""" """
Convenience method combining get_published with related objects. Convenience method combining get_published with related objects.
@@ -94,8 +66,8 @@ class RollingStockManager(PublicManager):
return self.get_published(user).with_related() return self.get_published(user).with_related()
class ConsistQuerySet(PublicQuerySet): class ConsistManager(PublicManager):
"""QuerySet with optimization methods for Consist.""" """Optimized manager for Consist with prefetch methods."""
def with_related(self): def with_related(self):
""" """
@@ -122,21 +94,8 @@ class ConsistQuerySet(PublicQuerySet):
) )
class ConsistManager(PublicManager): class BookManager(PublicManager):
"""Optimized manager for Consist with prefetch methods.""" """Optimized manager for Book/Catalog with prefetch methods."""
def get_queryset(self):
return ConsistQuerySet(self.model, using=self._db)
def with_related(self):
return self.get_queryset().with_related()
def with_rolling_stock(self):
return self.get_queryset().with_rolling_stock()
class BookQuerySet(PublicQuerySet):
"""QuerySet with optimization methods for Book."""
def with_related(self): def with_related(self):
""" """
@@ -153,21 +112,8 @@ class BookQuerySet(PublicQuerySet):
return self.with_related().prefetch_related('property', 'document') return self.with_related().prefetch_related('property', 'document')
class BookManager(PublicManager): class CatalogManager(PublicManager):
"""Optimized manager for Book/Catalog with prefetch methods.""" """Optimized manager for Catalog with prefetch methods."""
def get_queryset(self):
return BookQuerySet(self.model, using=self._db)
def with_related(self):
return self.get_queryset().with_related()
def with_details(self):
return self.get_queryset().with_details()
class CatalogQuerySet(PublicQuerySet):
"""QuerySet with optimization methods for Catalog."""
def with_related(self): def with_related(self):
""" """
@@ -184,21 +130,8 @@ class CatalogQuerySet(PublicQuerySet):
return self.with_related().prefetch_related('property', 'document') return self.with_related().prefetch_related('property', 'document')
class CatalogManager(PublicManager): class MagazineIssueManager(PublicManager):
"""Optimized manager for Catalog with prefetch methods.""" """Optimized manager for MagazineIssue with prefetch methods."""
def get_queryset(self):
return CatalogQuerySet(self.model, using=self._db)
def with_related(self):
return self.get_queryset().with_related()
def with_details(self):
return self.get_queryset().with_details()
class MagazineIssueQuerySet(PublicQuerySet):
"""QuerySet with optimization methods for MagazineIssue."""
def with_related(self): def with_related(self):
""" """
@@ -213,16 +146,3 @@ class MagazineIssueQuerySet(PublicQuerySet):
Optimize queryset for detail views with properties and documents. Optimize queryset for detail views with properties and documents.
""" """
return self.with_related().prefetch_related('property', 'document') return self.with_related().prefetch_related('property', 'document')
class MagazineIssueManager(PublicManager):
"""Optimized manager for MagazineIssue with prefetch methods."""
def get_queryset(self):
return MagazineIssueQuerySet(self.model, using=self._db)
def with_related(self):
return self.get_queryset().with_related()
def with_details(self):
return self.get_queryset().with_details()

View File

@@ -17,7 +17,7 @@ from django.http import (
) )
from django.views import View from django.views import View
from django.utils.text import slugify as slugify from django.utils.text import slugify as slugify
from django.utils.encoding import iri_to_uri, smart_str from django.utils.encoding import smart_str
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt from django.views.decorators.csrf import csrf_exempt
@@ -112,9 +112,7 @@ class DownloadFile(View):
if getattr(settings, "USE_X_ACCEL_REDIRECT", False): if getattr(settings, "USE_X_ACCEL_REDIRECT", False):
response = HttpResponse() response = HttpResponse()
response["Content-Type"] = "" response["Content-Type"] = ""
response["X-Accel-Redirect"] = iri_to_uri( response["X-Accel-Redirect"] = f"/private/{file.name}"
f"/private/{file.name}"
)
else: else:
response = FileResponse( response = FileResponse(
open(file.path, "rb"), as_attachment=True open(file.path, "rb"), as_attachment=True

View File

@@ -161,7 +161,15 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
def get_queryset(self, request): def get_queryset(self, request):
"""Optimize queryset with select_related and prefetch_related.""" """Optimize queryset with select_related and prefetch_related."""
qs = super().get_queryset(request) qs = super().get_queryset(request)
return qs.with_related() return qs.select_related(
'rolling_class',
'rolling_class__company',
'rolling_class__type',
'manufacturer',
'scale',
'decoder',
'shop',
).prefetch_related('tags', 'image')
@admin.display(description="Country") @admin.display(description="Country")
def country_flag(self, obj): def country_flag(self, obj):
@@ -273,18 +281,6 @@ class RollingStockAdmin(SortableAdminBase, admin.ModelAdmin):
"Properties", "Properties",
] ]
data = [] data = []
# Prefetch related data to avoid N+1 queries
queryset = queryset.select_related(
'rolling_class',
'rolling_class__type',
'rolling_class__company',
'manufacturer',
'scale',
'decoder',
'shop'
).prefetch_related('tags', 'property__property')
for obj in queryset: for obj in queryset:
properties = settings.CSV_SEPARATOR_ALT.join( properties = settings.CSV_SEPARATOR_ALT.join(
"{}:{}".format(property.property.name, property.value) "{}:{}".format(property.property.name, property.value)

View File

@@ -1,65 +0,0 @@
# Generated by Django 6.0.1 on 2026-01-18 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"metadata",
"0027_company_company_slug_idx_company_company_country_idx_and_more",
),
("roster", "0040_alter_rollingstock_decoder_interface_order"),
]
operations = [
migrations.AddIndex(
model_name="rollingclass",
index=models.Index(fields=["company"], name="roster_rc_company_idx"),
),
migrations.AddIndex(
model_name="rollingclass",
index=models.Index(fields=["type"], name="roster_rc_type_idx"),
),
migrations.AddIndex(
model_name="rollingclass",
index=models.Index(
fields=["company", "identifier"], name="roster_rc_co_ident_idx"
),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(fields=["published"], name="roster_published_idx"),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(fields=["featured"], name="roster_featured_idx"),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(
fields=["item_number_slug"], name="roster_item_slug_idx"
),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(fields=["road_number_int"], name="roster_road_num_idx"),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(
fields=["published", "featured"], name="roster_pub_feat_idx"
),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(
fields=["manufacturer", "item_number_slug"], name="roster_mfr_item_idx"
),
),
migrations.AddIndex(
model_name="rollingstock",
index=models.Index(fields=["scale"], name="roster_scale_idx"),
),
]

View File

@@ -11,7 +11,7 @@ from tinymce import models as tinymce
from ram.models import BaseModel, Image, PropertyInstance from ram.models import BaseModel, Image, PropertyInstance
from ram.utils import DeduplicatedStorage, slugify from ram.utils import DeduplicatedStorage, slugify
from ram.managers import RollingStockManager from ram.managers import PublicManager, RollingStockManager
from metadata.models import ( from metadata.models import (
Scale, Scale,
Manufacturer, Manufacturer,
@@ -38,14 +38,6 @@ class RollingClass(models.Model):
ordering = ["company", "identifier"] ordering = ["company", "identifier"]
verbose_name = "Class" verbose_name = "Class"
verbose_name_plural = "Classes" verbose_name_plural = "Classes"
indexes = [
models.Index(fields=["company"], name="roster_rc_company_idx"),
models.Index(fields=["type"], name="roster_rc_type_idx"),
models.Index(
fields=["company", "identifier"],
name="roster_rc_co_ident_idx", # Shortened to fit 30 char limit
),
]
def __str__(self): def __str__(self):
return "{0} {1}".format(self.company, self.identifier) return "{0} {1}".format(self.company, self.identifier)
@@ -128,35 +120,9 @@ class RollingStock(BaseModel):
Tag, related_name="rolling_stock", blank=True Tag, related_name="rolling_stock", blank=True
) )
objects = RollingStockManager()
class Meta: class Meta:
ordering = ["rolling_class", "road_number_int"] ordering = ["rolling_class", "road_number_int"]
verbose_name_plural = "Rolling stock" verbose_name_plural = "Rolling stock"
indexes = [
# Index for published/featured filtering
models.Index(fields=["published"], name="roster_published_idx"),
models.Index(fields=["featured"], name="roster_featured_idx"),
# Index for item number searches
models.Index(
fields=["item_number_slug"], name="roster_item_slug_idx"
),
# Index for road number searches and ordering
models.Index(
fields=["road_number_int"], name="roster_road_num_idx"
),
# Composite index for common filtering patterns
models.Index(
fields=["published", "featured"], name="roster_pub_feat_idx"
),
# Composite index for manufacturer+item_number lookups
models.Index(
fields=["manufacturer", "item_number_slug"],
name="roster_mfr_item_idx",
),
# Index for scale filtering
models.Index(fields=["scale"], name="roster_scale_idx"),
]
def __str__(self): def __str__(self):
return "{0} {1}".format(self.rolling_class, self.road_number) return "{0} {1}".format(self.rolling_class, self.road_number)

View File

@@ -1,3 +0,0 @@
[flake8]
extend-ignore = E501
exclude = *settings.py*,*/migrations/*