Welcome to my little corner of the internet! I'm thrilled to be starting this journey of sharing my adventures and helpful tips with you. For my very first post, I thought it would be fitting to cover something that empowered me: easily setting up my own self-hosted Ghost blog. If you've been curious about having your own blogging space but felt overwhelmed by the technicalities, this guide is for you. We'll explore how Docker, Caddy, and Litestream work together to make deploying Ghost and backing up its SQLite database to S3 a breeze.
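The whole setup is three small files sitting side by side in one directory; docker-compose.yml bind-mounts ./Caddyfile and ./litestream.yml, so keep them together (the blog/ folder name below is just an example):

blog/
├── Caddyfile
├── docker-compose.yml
└── litestream.yml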
Caddyfile
{
    email "YOUR_EMAIL"
}

rawb1.xyz {
    reverse_proxy ghost:2368
}
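Nothing exotic here: the global block sets the email Caddy uses when requesting Let's Encrypt certificates (swap in your own address), and the site block terminates TLS for rawb1.xyz and proxies requests to the Ghost container on port 2368. If you want to sanity-check the file before starting anything, Caddy ships a validator; a quick way to run it from the same image used below, assuming the Caddyfile sits in your current directory, is:

# Validate the Caddyfile without starting the proxy
docker run --rm -v "$PWD/Caddyfile:/etc/caddy/Caddyfile" caddy:2-alpine \
    caddy validate --config /etc/caddy/Caddyfile --adapter caddyfile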
litestream.yml
dbs:
  - path: /ghost/data/ghost.db
    replicas:
      - type: s3
        bucket: blogdb
        path: ghost.db
        endpoint: s3.eu-central-003.backblazeb2.com
        force-path-style: true
        access-key-id: <S3_KEY>
        secret-access-key: <S3_SECRET>
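Litestream watches the SQLite file and continuously streams changes to the blogdb bucket on Backblaze B2 (any S3-compatible endpoint works); replace <S3_KEY> and <S3_SECRET> with your own application key. The payoff comes the day you lose the volume: litestream restore pulls the latest replica back down. A minimal sketch, assuming the same litestream.yml and an empty ghost volume:

# Restore the newest replica from S3 into the ghost volume
docker run --rm \
    -v ghost:/ghost \
    -v "$PWD/litestream.yml:/etc/litestream.yml" \
    litestream/litestream restore -if-replica-exists /ghost/data/ghost.db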
docker-compose.yml
services:
  caddy:
    container_name: caddy
    image: caddy:2-alpine
    restart: unless-stopped
    ports:
      - 80:80
      - 443:443
    volumes:
      - caddy_data:/data
      - caddy_config:/config
      - ./Caddyfile:/etc/caddy/Caddyfile

  ghost:
    container_name: ghost
    image: ghost:5-alpine
    restart: unless-stopped
    # Only reachable by the other containers; Caddy publishes 80/443 to the host
    expose:
      - 2368
    environment:
      url: https://rawb1.xyz
      database__client: sqlite3
      database__connection__filename: /var/lib/ghost/content/data/ghost.db
      database__useNullAsDefault: true
      database__debug: false
    volumes:
      - ghost:/var/lib/ghost/content

  litestream:
    container_name: litestream
    image: litestream/litestream
    restart: unless-stopped
    command: replicate
    volumes:
      # Same volume as Ghost, so /ghost/data/ghost.db matches the litestream.yml path
      - ghost:/ghost
      - ./litestream.yml:/etc/litestream.yml

volumes:
  # External volumes must already exist (see the commands below)
  ghost:
    external: true
  caddy_data:
    external: true
  caddy_config:
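Because the ghost and caddy_data volumes are declared external, Compose won't create them for you; they have to exist before the first docker compose up. Roughly, getting the whole stack running from the directory holding these three files looks like this:

# Create the external volumes once
docker volume create ghost
docker volume create caddy_data

# Start Caddy, Ghost, and Litestream in the background
docker compose up -d

# Watch Caddy obtain its certificate and Ghost finish booting
docker compose logs -f

Once Caddy has its certificate, the blog is live at https://rawb1.xyz, and every change to the SQLite database is quietly replicated to S3 in the background.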