knox / Llama-Deployment-23.sh
0 likes
0 forks
1 files
Last active
1 | # Install htop for interactive process monitoring |
2 | sudo apt install -y htop |
3 | # Download the Netdata installer first so it can be inspected before running; |
4 | # -L follows the redirect the kickstart URL issues, -f fails on HTTP errors |
5 | curl -SsfL -o /tmp/netdata-kickstart.sh https://my-netdata.io/kickstart.sh |
6 | bash /tmp/netdata-kickstart.sh |
knox / Llama-Deployment-22.sh
0 likes
0 forks
1 files
Last active
1 | # Reload Caddy so it picks up the edited /etc/caddy/Caddyfile |
2 | sudo systemctl restart caddy |
3 | # Start Caddy automatically on boot |
4 | sudo systemctl enable caddy |
knox / Llama-Deployment-21.Caddyfile
0 likes
0 forks
1 files
Last active
1 | your_domain.com { |
2 | # Proxy all requests to gunicorn over its unix socket (unix/ + absolute path) |
3 | reverse_proxy unix//home/deployer/llama/llama.sock |
4 | encode gzip |
5 | |
6 | log { |
7 | output file /var/log/caddy/access.log |
8 | # NOTE(review): 'single_field' was removed in Caddy v2.5 — on newer |
9 | # versions drop this line to use the default JSON encoder |
10 | format single_field common_log |
11 | } |
12 | |
13 | # Email used for ACME/Let's Encrypt certificate registration |
14 | tls your_email@example.com |
15 | } |
knox / Llama-Deployment-20.sh
0 likes
0 forks
1 files
Last active
1 | sudo nano /etc/caddy/Caddyfile |
knox / Llama-Deployment-19.sh
0 likes
0 forks
1 files
Last active
1 | # Add Caddy's official apt repository. apt-key is deprecated (removed on |
2 | # modern Debian/Ubuntu); store the signing key in a dedicated keyring instead. |
3 | sudo apt install -y debian-keyring debian-archive-keyring apt-transport-https curl |
4 | curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | sudo gpg --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg |
5 | curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | sudo tee /etc/apt/sources.list.d/caddy-stable.list |
6 | sudo apt update |
7 | sudo apt install caddy |
knox / Llama-Deployment-18.sh
0 likes
0 forks
1 files
Last active
1 | # Start the gunicorn service defined in /etc/systemd/system/gunicorn.service |
2 | sudo systemctl start gunicorn |
3 | # Start gunicorn automatically on boot (requires an [Install] section in the unit) |
4 | sudo systemctl enable gunicorn |
knox / Llama-Deployment-17.ini
0 likes
0 forks
1 files
Last active
1 | [Unit] |
2 | Description=gunicorn daemon |
3 | # Wait until networking is up before starting the app server |
4 | After=network.target |
5 | |
6 | [Service] |
7 | User=deployer |
8 | Group=www-data |
9 | WorkingDirectory=/home/deployer/llama |
10 | # 3 workers, access log to journal, bind on the unix socket Caddy proxies to |
11 | ExecStart=/home/deployer/llama/venv/bin/gunicorn --access-logfile - --workers 3 --bind unix:/home/deployer/llama/llama.sock llama.wsgi:application |
12 | # Restart the app server if it crashes |
13 | Restart=on-failure |
14 | |
15 | # Required so 'systemctl enable gunicorn' can hook the unit into boot |
16 | [Install] |
17 | WantedBy=multi-user.target |
knox / Llama-Deployment-16.sh
0 likes
0 forks
1 files
Last active
1 | sudo nano /etc/systemd/system/gunicorn.service |
knox / Llama-Deployment-15.sh
0 likes
0 forks
1 files
Last active
1 | pip install gunicorn |
knox / Llama-Deployment-14.sh
0 likes
0 forks
1 files
Last active
1 | python manage.py collectstatic |