Compare commits
3 Commits
5e9cde8cb2
...
f460703e37
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f460703e37 | ||
|
|
30303aac3d | ||
| 30b2fe346d |
41
audiomuse/.env.example
Normal file
41
audiomuse/.env.example
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Copy this file to `.env` and fill in the values that match your setup.
|
||||||
|
# Docker Compose files under deployment/ read these variables to keep settings in one place.
|
||||||
|
#
|
||||||
|
# IMPORTANT:
|
||||||
|
# 1. This file must be named exactly ".env" (not .env.txt or .env.example)
|
||||||
|
# 2. It must be in the SAME directory as your docker-compose-*.yaml file
|
||||||
|
# 3. Do NOT use spaces around the = sign
|
||||||
|
# 4. Do NOT use quotes around values (unless required by the value itself)
|
||||||
|
# 5. After editing, restart containers: docker-compose down && docker-compose up -d
|
||||||
|
#
|
||||||
|
# SPECIAL CHARACTERS IN VALUES:
|
||||||
|
# If your password or API key contains special characters like: $ ` " ' \ # ! & * ( ) [ ] { } | ; < > ?
|
||||||
|
# you may need to:
|
||||||
|
# - Avoid quotes entirely: GEMINI_API_KEY=AIza$pecial!Key (usually works)
|
||||||
|
# - OR use single quotes if the value has $: GEMINI_API_KEY='AIza$pecial!Key'
|
||||||
|
# - OR escape with backslash: GEMINI_API_KEY=AIza\$pecial\!Key
|
||||||
|
# Most problematic characters: $ (variable expansion), ` (command substitution), " (string delimiter)
|
||||||
|
#
|
||||||
|
# TROUBLESHOOTING:
|
||||||
|
# If API keys don't work, verify:
|
||||||
|
# - File is named ".env" exactly (check with: ls -la)
|
||||||
|
# - No spaces: GEMINI_API_KEY=AIza... (not GEMINI_API_KEY = "AIza...")
|
||||||
|
# - No unescaped special characters (especially $ ` " ')
|
||||||
|
# - Restart containers after changing this file
|
||||||
|
# If all else fails, try hardcoding the value directly in docker-compose-*.yaml to isolate the issue
|
||||||
|
|
||||||
|
# --- Jellyfin ---
|
||||||
|
JELLYFIN_USER_ID=
|
||||||
|
JELLYFIN_TOKEN=
|
||||||
|
JELLYFIN_URL=https://jellyfin.smittenfeld.nl
|
||||||
|
|
||||||
|
# --- Shared backend configuration ---
|
||||||
|
AUDIOMUSE_POSTGRES_USER=audiomuse
|
||||||
|
AUDIOMUSE_POSTGRES_PASSWORD=
|
||||||
|
AUDIOMUSE_POSTGRES_DB=audiomusedb
|
||||||
|
#
|
||||||
|
## --- Remote worker integration ---
|
||||||
|
#WORKER_URL=http://worker.example.com:8029/worker
|
||||||
|
#WORKER_POSTGRES_HOST=server.example.com
|
||||||
|
#WORKER_REDIS_URL=redis://server.example.com:6379/0
|
||||||
|
|
||||||
127
audiomuse/docker-compose.yml
Normal file
127
audiomuse/docker-compose.yml
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
version: '3.8'
|
||||||
|
services:
|
||||||
|
# Redis service for RQ (task queue)
|
||||||
|
audiomuse-ai-redis:
|
||||||
|
image: redis:7-alpine
|
||||||
|
container_name: audiomuse-redis
|
||||||
|
ports:
|
||||||
|
- "6379:6379" # Expose Redis port to the host
|
||||||
|
volumes:
|
||||||
|
- redis-data:/data # Persistent storage for Redis data
|
||||||
|
networks:
|
||||||
|
- audiomuse
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
# PostgreSQL database service
|
||||||
|
audiomuse-ai-postgres:
|
||||||
|
image: postgres:15-alpine
|
||||||
|
container_name: audiomuse-postgres
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${AUDIOMUSE_POSTGRES_USER}
|
||||||
|
POSTGRES_PASSWORD: ${AUDIOMUSE_POSTGRES_PASSWORD}
|
||||||
|
POSTGRES_DB: ${AUDIOMUSE_POSTGRES_DB}
|
||||||
|
# ports:
|
||||||
|
# - "5432:5432" # Expose PostgreSQL port to the host
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data # Persistent storage for PostgreSQL data
|
||||||
|
networks:
|
||||||
|
- audiomuse
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
# AudioMuse-AI Flask application service
|
||||||
|
audiomuse-ai-flask:
|
||||||
|
image: ghcr.io/neptunehub/audiomuse-ai:latest # Reflects deployment.yaml
|
||||||
|
container_name: audiomuse-ai-flask-app
|
||||||
|
ports:
|
||||||
|
- "8013:8000"
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
SERVICE_TYPE: "flask" # Tells the container to run the Flask app
|
||||||
|
MEDIASERVER_TYPE: "jellyfin" # Specify the media server type
|
||||||
|
POSTGRES_USER: ${AUDIOMUSE_POSTGRES_USER}
|
||||||
|
POSTGRES_PASSWORD: ${AUDIOMUSE_POSTGRES_PASSWORD}
|
||||||
|
POSTGRES_DB: ${AUDIOMUSE_POSTGRES_DB}
|
||||||
|
POSTGRES_PORT: "5432"
|
||||||
|
POSTGRES_HOST: "audiomuse-ai-postgres" # Service name of the postgres container
|
||||||
|
REDIS_URL: "redis://audiomuse-ai-redis:6379/0" # Connects to the 'redis' service
|
||||||
|
AI_MODEL_PROVIDER: "OPENAI"
|
||||||
|
OPENAI_API_KEY: "any-random-string" # Dummy key to enable local model usage
|
||||||
|
OPENAI_SERVER_URL: "http://172.17.0.1:12434/engines/llama.cpp/v1/chat/completions" # This is the API endpoint for the local DMR model from within the Docker container.
|
||||||
|
OPENAI_MODEL_NAME: "ai/qwen3:0.6B-Q4_0"
|
||||||
|
TEMP_DIR: "/app/temp_audio"
|
||||||
|
# Use tmpfs to process audio files in memory for better performance. This reduces disk I/O but might use more RAM.
|
||||||
|
# Mounted directories are not shared between containers, so each container gets its own tmpfs instance.
|
||||||
|
# Increase tmpfs size for very large audio files as needed.
|
||||||
|
# If host RAM is limited, use a Docker volume instead of tmpfs.
|
||||||
|
# For more info on tmpfs: https://docs.docker.com/engine/storage/tmpfs/
|
||||||
|
tmpfs:
|
||||||
|
- /app/temp_audio:rw,size=1000m
|
||||||
|
depends_on:
|
||||||
|
- audiomuse-ai-redis
|
||||||
|
- audiomuse-ai-postgres
|
||||||
|
restart: unless-stopped
|
||||||
|
networks:
|
||||||
|
- audiomuse
|
||||||
|
models:
|
||||||
|
- llm # Specify that LLM models are used in this service
|
||||||
|
|
||||||
|
# AudioMuse-AI RQ Worker service
|
||||||
|
audiomuse-ai-worker:
|
||||||
|
image: ghcr.io/neptunehub/audiomuse-ai:latest # Reflects deployment.yaml
|
||||||
|
container_name: audiomuse-ai-worker-instance
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
SERVICE_TYPE: "worker" # Tells the container to run the RQ worker
|
||||||
|
MEDIASERVER_TYPE: "jellyfin" # Specify the media server type
|
||||||
|
JELLYFIN_USER_ID: "${JELLYFIN_USER_ID}"
|
||||||
|
JELLYFIN_TOKEN: "${JELLYFIN_TOKEN}"
|
||||||
|
JELLYFIN_URL: "${JELLYFIN_URL}"
|
||||||
|
# DATABASE_URL is now constructed by config.py from the following:
|
||||||
|
POSTGRES_USER: ${AUDIOMUSE_POSTGRES_USER}
|
||||||
|
POSTGRES_PASSWORD: ${AUDIOMUSE_POSTGRES_PASSWORD}
|
||||||
|
POSTGRES_DB: ${AUDIOMUSE_POSTGRES_DB}
|
||||||
|
POSTGRES_PORT: "5432"
|
||||||
|
POSTGRES_HOST: "audiomuse-ai-postgres" # Service name of the postgres container
|
||||||
|
REDIS_URL: "redis://audiomuse-ai-redis:6379/0" # Connects to the 'redis' service
|
||||||
|
AI_MODEL_PROVIDER: "OPENAI"
|
||||||
|
OPENAI_API_KEY: "any-random-string" # Dummy key to enable local model usage
|
||||||
|
OPENAI_SERVER_URL: "http://172.17.0.1:12434/engines/llama.cpp/v1/chat/completions" # This is the API endpoint for the local DMR model from within the Docker container.
|
||||||
|
OPENAI_MODEL_NAME: "ai/qwen3:0.6B-Q4_0"
|
||||||
|
TEMP_DIR: "/app/temp_audio"
|
||||||
|
# Use tmpfs to process audio files in memory for better performance. This reduces disk I/O but might use more RAM.
|
||||||
|
# Mounted directories are not shared between containers, so each container gets its own tmpfs instance.
|
||||||
|
# Increase tmpfs size for very large audio files as needed.
|
||||||
|
# If host RAM is limited, use a Docker volume instead of tmpfs.
|
||||||
|
# For more info on tmpfs: https://docs.docker.com/engine/storage/tmpfs/
|
||||||
|
tmpfs:
|
||||||
|
- /app/temp_audio:rw,size=1000m
|
||||||
|
depends_on:
|
||||||
|
- audiomuse-ai-redis
|
||||||
|
- audiomuse-ai-postgres
|
||||||
|
restart: unless-stopped
|
||||||
|
networks:
|
||||||
|
- audiomuse
|
||||||
|
models:
|
||||||
|
- llm # Specify that LLM models are used in this service
|
||||||
|
|
||||||
|
# Using Docker Model Runner (DMR)
|
||||||
|
# - Make sure your Docker Engine version supports the AI features and that the docker-model-plugin is installed.
|
||||||
|
# - Follow Docker's setup guide: https://docs.docker.com/ai/model-runner/get-started/#docker-engine
|
||||||
|
# - Once DMR is configured, you can download and run AI models locally just like Docker images — no code changes to this compose file are required.
|
||||||
|
# - For model integration with docker-compose, see: https://docs.docker.com/ai/compose/models-and-compose/
|
||||||
|
models:
|
||||||
|
llm:
|
||||||
|
model: ai/qwen3:0.6B-Q4_0 # Lightweight local model for testing. Change as needed; if changed, ensure it matches OPENAI_MODEL_NAME.
|
||||||
|
|
||||||
|
|
||||||
|
# Define volumes for persistent data and temporary files
|
||||||
|
volumes:
|
||||||
|
redis-data:
|
||||||
|
postgres-data:
|
||||||
|
|
||||||
|
networks:
|
||||||
|
audiomuse:
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
include:
|
include:
|
||||||
- audiobookshelf/docker-compose.yml
|
- audiobookshelf/docker-compose.yml
|
||||||
|
- audiomuse/docker-compose.yml
|
||||||
- baikal/docker-compose.yml
|
- baikal/docker-compose.yml
|
||||||
- gitea/docker-compose.yml
|
- gitea/docker-compose.yml
|
||||||
- immich/docker-compose.yml
|
- immich/docker-compose.yml
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ services:
|
|||||||
- 5434:5432
|
- 5434:5432
|
||||||
|
|
||||||
paperless:
|
paperless:
|
||||||
image: ghcr.io/paperless-ngx/paperless-ngx:2.19.4
|
image: ghcr.io/paperless-ngx/paperless-ngx:2.20.2
|
||||||
restart: always
|
restart: always
|
||||||
depends_on:
|
depends_on:
|
||||||
- db
|
- db
|
||||||
@@ -49,7 +49,7 @@ services:
|
|||||||
PAPERLESS_REDIRECT_LOGIN_TO_SSO: true
|
PAPERLESS_REDIRECT_LOGIN_TO_SSO: true
|
||||||
|
|
||||||
gotenberg:
|
gotenberg:
|
||||||
image: docker.io/gotenberg/gotenberg:8.24.0
|
image: docker.io/gotenberg/gotenberg:8.25.1
|
||||||
restart: always
|
restart: always
|
||||||
|
|
||||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
services:
|
services:
|
||||||
spliit:
|
spliit:
|
||||||
image: ghcr.io/spliit-app/spliit:1.19.0
|
image: ghcr.io/spliit-app/spliit:1.19.1
|
||||||
restart: always
|
restart: always
|
||||||
ports:
|
ports:
|
||||||
- 3001:3000
|
- 3001:3000
|
||||||
|
|||||||
Reference in New Issue
Block a user