-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdocker-compose.smarthome.yaml
134 lines (128 loc) · 4.63 KB
/
docker-compose.smarthome.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
#-------------------------------------------------------------
# Docker compose related to smart home services
#-------------------------------------------------------------
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification — recent `docker compose` releases ignore it and print a
# warning. Kept for compatibility with legacy docker-compose v1 tooling.
version: '3.8'
services:
# Frigate
# A complete and local NVR designed for Home Assistant with AI object detection. Uses OpenCV and Tensorflow to perform realtime object detection locally for IP cameras.
# https://frigate.video/
frigate:
container_name: frigate
privileged: true # this may not be necessary for all setups
restart: unless-stopped
image: ghcr.io/blakeblackshear/frigate:stable
shm_size: "128mb" # update for your cameras based on calculation above
# profiles:
# - donotstart #do not start service
devices:
# - /dev/bus/usb:/dev/bus/usb # passes the USB Coral, needs to be modified for other versions
- /dev/dri/renderD128 # for intel hwaccel, needs to be updated for your hardware
volumes:
- /etc/localtime:/etc/localtime:ro
- ${SELF_HOME_DIR}/frigate/config/:/config/
- "/Users/macmini/data/frigate/media:/media/frigate"
- type: tmpfs # Optional: 1GB of memory, reduces SSD/SD Card wear
target: /tmp/cache
tmpfs:
size: 1000000000
ports:
- "5000:5000"
#- "8554:8554" # RTSP feeds
#- "8555:8555/tcp" # WebRTC over tcp
#- "8555:8555/udp" # WebRTC over udp
# depends_on:
# - codeproject-ai-server-cpu
env_file:
- .env
# Mosquitto
# Message broker that implements the MQTT protocol
mosquitto:
container_name: mosquitto
image: eclipse-mosquitto
restart: unless-stopped
profiles:
- donotstart #do not start service
ports:
- "1883:1883"
- "1884:1884"
volumes:
- "${SELF_HOME_DIR}/mosquitto/config:/mosquitto/config"
- "${SELF_HOME_DIR}/mosquitto/data:/mosquitto/data"
- "${SELF_HOME_DIR}/mosquitto/log:/mosquitto/log"
env_file:
- .env
# CodeProject.AI Server is a self-hosted, free and Open Source Artificial Intelligence Server for any platform, any language.
# Just like you would install a database server to provide data storage, you install CodeProject.AI Server to provide AI services.
# https://www.codeproject.com/
CodeProjectAI:
container_name: codeproject-ai-server-cpu
image: codeproject/ai-server:2.5.4
restart: unless-stopped
profiles:
- donotstart #do not start service
volumes:
- ${SELF_HOME_DIR}/codeprojectai/settings:/etc/codeproject/ai
- ${SELF_HOME_DIR}/codeprojectai/modules:/app/modules
ports:
- "32168:32168"
env_file:
- .env
# go2rtc
# Ultimate camera streaming application with support RTSP, WebRTC, MJPEG, HomeKit, FFmpeg, etc.
# https://github.com/AlexxIT/go2rtc
go2rtc:
container_name: go2rtc
image: alexxit/go2rtc
#network_mode: host # important for WebRTC, HomeKit, UDP cameras
privileged: true # only for FFmpeg hardware transcoding
restart: unless-stopped # autorestart on fail or config change from WebUI
volumes:
- "${SELF_HOME_DIR}/go2rtc:/config" # folder for go2rtc.yaml file (edit from WebUI)
ports:
- "1984:1984" # API
- "8554:8554" # RTSP feeds
- "8555:8555/tcp" # WebRTC over tcp
- "8555:8555/udp" # WebRTC over udp
env_file:
- .env
# Ollama
# With Ollama, all your interactions with large language models happen locally without sending private data to third-party services
# https://ollama.com/
ollama:
container_name: ollama
image: ollama/ollama:latest
#pull_policy: always
restart: always
ports:
- 11434:11434
volumes:
- "${SELF_HOME_DIR}/ollama:/root/.ollama"
env_file:
- .env
# Open WebUI (Formerly Ollama WebUI)
# Open WebUI is an extensible, feature-rich, and user-friendly self-hosted WebUI designed to operate entirely offline. It supports various LLM runners, including Ollama and OpenAI-compatible APIs.
# https://github.com/open-webui/open-webui
open-webui:
container_name: open-webui
image: ghcr.io/open-webui/open-webui:main
volumes:
- "${SELF_HOME_DIR}/open-webui:/app/backend/data"
depends_on:
- ollama
ports:
- 3002:8080
environment:
- '/ollama/api=http://ollama:11434/api'
restart: unless-stopped
# Node-RED is a programming tool for wiring together hardware devices, APIs and online services in new and interesting ways.
# https://nodered.org/
nodered:
container_name: nodered
image: nodered/node-red
restart: unless-stopped
ports:
- "1880:1880/tcp"
environment:
- TZ=Europe/Brussels
volumes:
- "${SELF_HOME_DIR}/nodered/data:/data"