# lm-chat-backend.service — systemd service unit (NOT a desktop file)
# Runs the LM Studio proxy FastAPI backend with uvicorn.
# Install to /etc/systemd/system/, then:
#   systemctl daemon-reload && systemctl enable --now <unit-name>
[Unit]
Description=LM Studio proxy Python Backend Service
After=network.target

[Service]
User=samuele
WorkingDirectory=/home/samuele/lm-chat-app/backend
# NOTE(review): --reload is uvicorn's development auto-reloader; it forks a
# watcher process and is not recommended for a production service — confirm
# intent. 0.0.0.0 binds all interfaces; restrict to 127.0.0.1 if only local
# clients should connect.
ExecStart=/usr/bin/env uvicorn main:app --host 0.0.0.0 --port 8000 --reload
Restart=always

[Install]
WantedBy=multi-user.target