diff --git a/data/Dockerfiles/dockerapi/dockerapi.py b/data/Dockerfiles/dockerapi/dockerapi.py
index 1ce47265..965fc906 100644
--- a/data/Dockerfiles/dockerapi/dockerapi.py
+++ b/data/Dockerfiles/dockerapi/dockerapi.py
@@ -1,635 +1,622 @@
from fastapi import FastAPI, Response, Request
import aiodocker
import psutil
import sys
import re
import time
import os
import json
import asyncio
import redis
from datetime import datetime
containerIds_to_update = []
host_stats_isUpdating = False
app = FastAPI()
@app.get("/host/stats")
async def get_host_update_stats():
global host_stats_isUpdating
if host_stats_isUpdating == False:
print("start host stats task")
asyncio.create_task(get_host_stats())
host_stats_isUpdating = True
while True:
if redis_client.exists('host_stats'):
break
print("wait for host_stats results")
await asyncio.sleep(1.5)
print("host stats pulled")
stats = json.loads(redis_client.get('host_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
@app.get("/containers/{container_id}/json")
async def get_container(container_id : str):
if container_id and container_id.isalnum():
try:
for container in (await async_docker_client.containers.list()):
if container._id == container_id:
container_info = await container.show()
return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
res = {
"type": "danger",
"msg": "no container found"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.get("/containers/json")
async def get_containers():
containers = {}
try:
for container in (await async_docker_client.containers.list()):
container_info = await container.show()
containers.update({container_info['Id']: container_info})
return Response(content=json.dumps(containers, indent=4), media_type="application/json")
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/containers/{container_id}/{post_action}")
async def post_containers(container_id : str, post_action : str, request: Request):
try :
request_json = await request.json()
except Exception as err:
request_json = {}
if container_id and container_id.isalnum() and post_action:
try:
"""Dispatch container_post api call"""
if post_action == 'exec':
if not request_json or not 'cmd' in request_json:
res = {
"type": "danger",
"msg": "cmd is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if not request_json or not 'task' in request_json:
res = {
"type": "danger",
"msg": "task is missing"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
else:
api_call_method_name = '__'.join(['container_post', str(post_action) ])
docker_utils = DockerUtils(async_docker_client)
api_call_method = getattr(docker_utils, api_call_method_name, None)
print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
if not api_call_method:
    return Response(content=json.dumps({'type': 'danger', 'msg': 'container_post - unknown api call'}, indent=4), media_type="application/json")
return await api_call_method(container_id, request_json)
except Exception as e:
print("error - container_post: %s" % str(e))
res = {
"type": "danger",
"msg": str(e)
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "danger",
"msg": "invalid container id or missing action"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
@app.post("/container/{container_id}/stats/update")
async def post_container_update_stats(container_id : str):
global containerIds_to_update
# start update task for container if no task is running
if container_id not in containerIds_to_update:
asyncio.create_task(get_container_stats(container_id))
containerIds_to_update.append(container_id)
while True:
if redis_client.exists(container_id + '_stats'):
break
await asyncio.sleep(1.5)
stats = json.loads(redis_client.get(container_id + '_stats'))
return Response(content=json.dumps(stats, indent=4), media_type="application/json")
class DockerUtils:
def __init__(self, docker_client):
self.docker_client = docker_client
# api call: container_post - post_action: stop
async def container_post__stop(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.stop()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start
async def container_post__start(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.start()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: restart
async def container_post__restart(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.restart()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: top
async def container_post__top(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
ps_exec = await container.exec("ps")
async with ps_exec.start(detach=False) as stream:
ps_return = await stream.read_out()
exec_details = await ps_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': ps_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': ''
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: delete
async def container_post__exec__mailq__delete(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: hold
async def container_post__exec__mailq__hold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: cat
async def container_post__exec__mailq__cat(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
sanitized_string = str(' '.join(filtered_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
return await exec_run_handler('utf8_text_only', postcat_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: unhold
async def container_post__exec__mailq__unhold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: deliver
async def container_post__exec__mailq__deliver(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids]
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
for i in flagged_qids:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
async with postsuper_r_exec.start(detach=False) as stream:
postsuper_r_return = await stream.read_out()
# todo: check each exit code
res = {
'type': 'success',
'msg': 'Scheduled immediate delivery'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: list
async def container_post__exec__mailq__list(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
return await exec_run_handler('utf8_text_only', mailq_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: flush
async def container_post__exec__mailq__flush(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete
async def container_post__exec__mailq__super_delete(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan
async def container_post__exec__system__fts_rescan(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if 'all' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: df
async def container_post__exec__system__df(self, container_id, request_json):
if 'dir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
async with df_exec.start(detach=False) as stream:
df_return = await stream.read_out()
print(df_return)
print(await df_exec.inspect())
exec_details = await df_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
return df_return.data.decode('utf-8').rstrip()
else:
return "0,0,0,0,0,0"
# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
matched = False
for line in sql_return.data.decode('utf-8').split("\n"):
if 'is already upgraded to' in line:
matched = True
if matched:
res = {
'type': 'success',
'msg': 'mysql_upgrade: already upgraded',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
await container.restart()
res = {
'type': 'warning',
'msg': 'mysql_upgrade: upgrade was applied',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_upgrade: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'info',
'msg': 'mysql_tzinfo_to_sql: command completed successfully',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_tzinfo_to_sql: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: reload - task: dovecot
async def container_post__exec__reload__dovecot(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: postfix
async def container_post__exec__reload__postfix(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: nginx
async def container_post__exec__reload__nginx(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: list
async def container_post__exec__sieve__list(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: print
async def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request_json and 'script_name' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_exec = await container.exec(cmd)
return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup
async def container_post__exec__maildir__cleanup(self, container_id, request_json):
if 'maildir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sane_name = re.sub(r'\W+', '', request_json['maildir'])
cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
maildir_cleanup_exec = await container.exec(cmd, user='vmail')
return await exec_run_handler('generic', maildir_cleanup_exec)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
if 'raw' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
- cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
+ cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
rspamd_password_exec = await container.exec(cmd, user='_rspamd')
async with rspamd_password_exec.start(detach=False) as stream:
rspamd_password_return = await stream.read_out()
-
- matched = False
- for line in rspamd_password_return.data.decode('utf-8').split("\n"):
- if '$2$' in line:
- hash = line.strip()
- hash_out = re.search('\$2\$.+$', hash).group(0)
- rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
-
- rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
- cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
- rspamd_password_exec = await container.exec(cmd, user='_rspamd')
- async with rspamd_password_exec.start(detach=False) as stream:
- rspamd_password_return = await stream.read_out()
-
- if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in rspamd_password_return.data.decode('utf-8'):
- await container.restart()
- matched = True
+
+ if "OK" in rspamd_password_return.data.decode('utf-8'):
+ matched = True
+ await container.restart()
if matched:
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': 'command did not complete'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
async def exec_run_handler(type, exec_obj):
async with exec_obj.start(detach=False) as stream:
exec_return = await stream.read_out()
if exec_return == None:
exec_return = ""
else:
exec_return = exec_return.data.decode('utf-8')
if type == 'generic':
exec_details = await exec_obj.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
"type": "success",
"msg": "command completed successfully"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "success",
"msg": "'command failed: " + exec_return
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only':
return Response(content=exec_return, media_type="text/plain")
async def get_host_stats(wait=5):
global host_stats_isUpdating
try:
system_time = datetime.now()
host_stats = {
"cpu": {
"cores": psutil.cpu_count(),
"usage": psutil.cpu_percent()
},
"memory": {
"total": psutil.virtual_memory().total,
"usage": psutil.virtual_memory().percent,
"swap": psutil.swap_memory()
},
"uptime": time.time() - psutil.boot_time(),
"system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
}
redis_client.set('host_stats', json.dumps(host_stats), ex=10)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))
await asyncio.sleep(wait)
host_stats_isUpdating = False
async def get_container_stats(container_id, wait=5, stop=False):
global containerIds_to_update
if container_id and container_id.isalnum():
try:
for container in (await async_docker_client.containers.list()):
if container._id == container_id:
res = await container.stats(stream=False)
if redis_client.exists(container_id + '_stats'):
stats = json.loads(redis_client.get(container_id + '_stats'))
else:
stats = []
stats.append(res[0])
if len(stats) > 3:
del stats[0]
redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
except Exception as e:
res = {
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
print(json.dumps(res, indent=4))
await asyncio.sleep(wait)
if stop == True:
# update task was called second time, stop
containerIds_to_update.remove(container_id)
else:
# call update task a second time
await get_container_stats(container_id, wait=0, stop=True)
if os.environ['REDIS_SLAVEOF_IP'] != "":
redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
else:
redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
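
For reference, the worker_password change above is reached through the generic POST route: post_containers() builds the handler name container_post__<action>__<cmd>__<task> from the request body, so the new set_worker_password.sh path is exercised by an exec call with cmd "rspamd" and task "worker_password". A minimal sketch of such a call follows; DOCKERAPI_BASE, the container id and the password are placeholders, and the scheme/port of the dockerapi service are not defined in this diff, so they are assumptions here.

# Hypothetical call against the dockerapi route defined above; adjust
# DOCKERAPI_BASE and the rspamd container id for the actual deployment.
curl -s -X POST "$DOCKERAPI_BASE/containers/<rspamd_container_id>/exec" \
  -H 'Content-Type: application/json' \
  -d '{"cmd": "rspamd", "task": "worker_password", "raw": "new-controller-password"}'
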
diff --git a/data/Dockerfiles/rspamd/Dockerfile b/data/Dockerfiles/rspamd/Dockerfile
index 23fcbb3f..2520ddcc 100644
--- a/data/Dockerfiles/rspamd/Dockerfile
+++ b/data/Dockerfiles/rspamd/Dockerfile
@@ -1,35 +1,36 @@
FROM debian:bullseye-slim
LABEL maintainer "Andre Peters <andre.peters@tinc.gmbh>"
ARG DEBIAN_FRONTEND=noninteractive
ARG CODENAME=bullseye
ENV LC_ALL C
RUN apt-get update && apt-get install -y \
tzdata \
ca-certificates \
gnupg2 \
apt-transport-https \
dnsutils \
netcat \
&& apt-key adv --fetch-keys https://rspamd.com/apt-stable/gpg.key \
&& echo "deb [arch=amd64] https://rspamd.com/apt-stable/ $CODENAME main" > /etc/apt/sources.list.d/rspamd.list \
&& apt-get update \
&& apt-get --no-install-recommends -y install rspamd redis-tools procps nano \
&& rm -rf /var/lib/apt/lists/* \
&& apt-get autoremove --purge \
&& apt-get clean \
&& mkdir -p /run/rspamd \
&& chown _rspamd:_rspamd /run/rspamd \
&& echo 'alias ll="ls -la --color"' >> ~/.bashrc \
&& sed -i 's/#analysis_keyword_table > 0/analysis_cat_table.macro_exist == "M"/g' /usr/share/rspamd/lualib/lua_scanners/oletools.lua
COPY settings.conf /etc/rspamd/settings.conf
COPY metadata_exporter.lua /usr/share/rspamd/plugins/metadata_exporter.lua
+COPY set_worker_password.sh /set_worker_password.sh
COPY docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["/docker-entrypoint.sh"]
STOPSIGNAL SIGTERM
CMD ["/usr/bin/rspamd", "-f", "-u", "_rspamd", "-g", "_rspamd"]
diff --git a/data/Dockerfiles/rspamd/set_worker_password.sh b/data/Dockerfiles/rspamd/set_worker_password.sh
new file mode 100755
index 00000000..7205e888
--- /dev/null
+++ b/data/Dockerfiles/rspamd/set_worker_password.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+password_file='/etc/rspamd/override.d/worker-controller-password.inc'
+password_hash=`/usr/bin/rspamadm pw -e -p "$1"`
+
+echo 'enable_password = "'$password_hash'";' > $password_file
+
+if grep -q "$password_hash" "$password_file"; then
+ echo "OK"
+else
+ echo "ERROR"
+fi
\ No newline at end of file
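
The script added above prints OK when the freshly written hash can be read back from worker-controller-password.inc, which is the string the updated dockerapi handler now checks for before restarting the container. A quick way to try it in isolation is sketched below; running it assumes a started stack, the `docker compose` CLI form, and the mailcow root as working directory, and the example password is a placeholder.

# Run the new helper inside the rspamd container as the user dockerapi uses,
# then confirm the hash landed in the bind-mounted override file (paths per
# the compose file below).
docker compose exec -u _rspamd rspamd-mailcow /set_worker_password.sh 'example-password'
grep enable_password data/conf/rspamd/override.d/worker-controller-password.inc
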
diff --git a/docker-compose.yml b/docker-compose.yml
index da2b7e0d..5ae00ade 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,639 +1,639 @@
version: '2.1'
services:
unbound-mailcow:
image: mailcow/unbound:1.16
environment:
- TZ=${TZ}
volumes:
- ./data/hooks/unbound:/hooks:Z
- ./data/conf/unbound/unbound.conf:/etc/unbound/unbound.conf:ro,Z
restart: always
tty: true
networks:
mailcow-network:
ipv4_address: ${IPV4_NETWORK:-172.22.1}.254
aliases:
- unbound
mysql-mailcow:
image: mariadb:10.5
depends_on:
- unbound-mailcow
stop_grace_period: 45s
volumes:
- mysql-vol-1:/var/lib/mysql/
- mysql-socket-vol-1:/var/run/mysqld/
- ./data/conf/mysql/:/etc/mysql/conf.d/:ro,Z
environment:
- TZ=${TZ}
- MYSQL_ROOT_PASSWORD=${DBROOT}
- MYSQL_DATABASE=${DBNAME}
- MYSQL_USER=${DBUSER}
- MYSQL_PASSWORD=${DBPASS}
- MYSQL_INITDB_SKIP_TZINFO=1
restart: always
ports:
- "${SQL_PORT:-127.0.0.1:13306}:3306"
networks:
mailcow-network:
aliases:
- mysql
redis-mailcow:
image: redis:6-alpine
volumes:
- redis-vol-1:/data/
restart: always
ports:
- "${REDIS_PORT:-127.0.0.1:7654}:6379"
environment:
- TZ=${TZ}
sysctls:
- net.core.somaxconn=4096
networks:
mailcow-network:
ipv4_address: ${IPV4_NETWORK:-172.22.1}.249
aliases:
- redis
clamd-mailcow:
image: mailcow/clamd:1.54
restart: always
depends_on:
- unbound-mailcow
dns:
- ${IPV4_NETWORK:-172.22.1}.254
environment:
- TZ=${TZ}
- SKIP_CLAMD=${SKIP_CLAMD:-n}
volumes:
- ./data/conf/clamav/:/etc/clamav/:Z
- clamd-db-vol-1:/var/lib/clamav
networks:
mailcow-network:
aliases:
- clamd
rspamd-mailcow:
- image: mailcow/rspamd:1.90
+ image: mailcow/rspamd:1.91
stop_grace_period: 30s
depends_on:
- dovecot-mailcow
environment:
- TZ=${TZ}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- IPV6_NETWORK=${IPV6_NETWORK:-fd4d:6169:6c63:6f77::/64}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
volumes:
- ./data/hooks/rspamd:/hooks:Z
- ./data/conf/rspamd/custom/:/etc/rspamd/custom:z
- ./data/conf/rspamd/override.d/:/etc/rspamd/override.d:Z
- ./data/conf/rspamd/local.d/:/etc/rspamd/local.d:Z
- ./data/conf/rspamd/plugins.d/:/etc/rspamd/plugins.d:Z
- ./data/conf/rspamd/lua/:/etc/rspamd/lua/:ro,Z
- ./data/conf/rspamd/rspamd.conf.local:/etc/rspamd/rspamd.conf.local:Z
- ./data/conf/rspamd/rspamd.conf.override:/etc/rspamd/rspamd.conf.override:Z
- rspamd-vol-1:/var/lib/rspamd
restart: always
hostname: rspamd
dns:
- ${IPV4_NETWORK:-172.22.1}.254
networks:
mailcow-network:
aliases:
- rspamd
php-fpm-mailcow:
image: mailcow/phpfpm:1.79
command: "php-fpm -d date.timezone=${TZ} -d expose_php=0"
depends_on:
- redis-mailcow
volumes:
- ./data/hooks/phpfpm:/hooks:Z
- ./data/web:/web:z
- ./data/conf/rspamd/dynmaps:/dynmaps:ro,z
- ./data/conf/rspamd/custom/:/rspamd_custom_maps:z
- rspamd-vol-1:/var/lib/rspamd
- mysql-socket-vol-1:/var/run/mysqld/
- ./data/conf/sogo/:/etc/sogo/:z
- ./data/conf/rspamd/meta_exporter:/meta_exporter:ro,z
- ./data/conf/phpfpm/sogo-sso/:/etc/sogo-sso/:z
- ./data/conf/phpfpm/php-fpm.d/pools.conf:/usr/local/etc/php-fpm.d/z-pools.conf:Z
- ./data/conf/phpfpm/php-conf.d/opcache-recommended.ini:/usr/local/etc/php/conf.d/opcache-recommended.ini:Z
- ./data/conf/phpfpm/php-conf.d/upload.ini:/usr/local/etc/php/conf.d/upload.ini:Z
- ./data/conf/phpfpm/php-conf.d/other.ini:/usr/local/etc/php/conf.d/zzz-other.ini:Z
- ./data/conf/dovecot/global_sieve_before:/global_sieve/before:z
- ./data/conf/dovecot/global_sieve_after:/global_sieve/after:z
- ./data/assets/templates:/tpls:z
- ./data/conf/nginx/:/etc/nginx/conf.d/:z
dns:
- ${IPV4_NETWORK:-172.22.1}.254
environment:
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
- LOG_LINES=${LOG_LINES:-9999}
- TZ=${TZ}
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- MAILCOW_PASS_SCHEME=${MAILCOW_PASS_SCHEME:-BLF-CRYPT}
- IMAP_PORT=${IMAP_PORT:-143}
- IMAPS_PORT=${IMAPS_PORT:-993}
- POP_PORT=${POP_PORT:-110}
- POPS_PORT=${POPS_PORT:-995}
- SIEVE_PORT=${SIEVE_PORT:-4190}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- IPV6_NETWORK=${IPV6_NETWORK:-fd4d:6169:6c63:6f77::/64}
- SUBMISSION_PORT=${SUBMISSION_PORT:-587}
- SMTPS_PORT=${SMTPS_PORT:-465}
- SMTP_PORT=${SMTP_PORT:-25}
- API_KEY=${API_KEY:-invalid}
- API_KEY_READ_ONLY=${API_KEY_READ_ONLY:-invalid}
- API_ALLOW_FROM=${API_ALLOW_FROM:-invalid}
- COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME:-mailcow-dockerized}
- SKIP_SOLR=${SKIP_SOLR:-y}
- SKIP_CLAMD=${SKIP_CLAMD:-n}
- SKIP_SOGO=${SKIP_SOGO:-n}
- ALLOW_ADMIN_EMAIL_LOGIN=${ALLOW_ADMIN_EMAIL_LOGIN:-n}
- MASTER=${MASTER:-y}
- DEV_MODE=${DEV_MODE:-n}
- WEBAUTHN_ONLY_TRUSTED_VENDORS=${WEBAUTHN_ONLY_TRUSTED_VENDORS:-n}
restart: always
networks:
mailcow-network:
aliases:
- phpfpm
sogo-mailcow:
image: mailcow/sogo:1.109
environment:
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- TZ=${TZ}
- LOG_LINES=${LOG_LINES:-9999}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- MAILCOW_PASS_SCHEME=${MAILCOW_PASS_SCHEME:-BLF-CRYPT}
- ACL_ANYONE=${ACL_ANYONE:-disallow}
- ALLOW_ADMIN_EMAIL_LOGIN=${ALLOW_ADMIN_EMAIL_LOGIN:-n}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- SOGO_EXPIRE_SESSION=${SOGO_EXPIRE_SESSION:-480}
- SKIP_SOGO=${SKIP_SOGO:-n}
- MASTER=${MASTER:-y}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
dns:
- ${IPV4_NETWORK:-172.22.1}.254
volumes:
- ./data/hooks/sogo:/hooks:Z
- ./data/conf/sogo/:/etc/sogo/:z
- ./data/web/inc/init_db.inc.php:/init_db.inc.php:Z
- ./data/conf/sogo/custom-favicon.ico:/usr/lib/GNUstep/SOGo/WebServerResources/img/sogo.ico:z
- ./data/conf/sogo/custom-theme.js:/usr/lib/GNUstep/SOGo/WebServerResources/js/theme.js:z
- ./data/conf/sogo/custom-sogo.js:/usr/lib/GNUstep/SOGo/WebServerResources/js/custom-sogo.js:z
- mysql-socket-vol-1:/var/run/mysqld/
- sogo-web-vol-1:/sogo_web
- sogo-userdata-backup-vol-1:/sogo_backup
labels:
ofelia.enabled: "true"
ofelia.job-exec.sogo_sessions.schedule: "@every 1m"
ofelia.job-exec.sogo_sessions.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu sogo /usr/sbin/sogo-tool expire-sessions $${SOGO_EXPIRE_SESSION} || exit 0\""
ofelia.job-exec.sogo_ealarms.schedule: "@every 1m"
ofelia.job-exec.sogo_ealarms.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu sogo /usr/sbin/sogo-ealarms-notify -p /etc/sogo/sieve.creds || exit 0\""
ofelia.job-exec.sogo_eautoreply.schedule: "@every 5m"
ofelia.job-exec.sogo_eautoreply.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu sogo /usr/sbin/sogo-tool update-autoreply -p /etc/sogo/sieve.creds || exit 0\""
ofelia.job-exec.sogo_backup.schedule: "@every 24h"
ofelia.job-exec.sogo_backup.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu sogo /usr/sbin/sogo-tool backup /sogo_backup ALL || exit 0\""
restart: always
networks:
mailcow-network:
ipv4_address: ${IPV4_NETWORK:-172.22.1}.248
aliases:
- sogo
dovecot-mailcow:
image: mailcow/dovecot:1.18
depends_on:
- mysql-mailcow
dns:
- ${IPV4_NETWORK:-172.22.1}.254
cap_add:
- NET_BIND_SERVICE
volumes:
- ./data/hooks/dovecot:/hooks:Z
- ./data/conf/dovecot:/etc/dovecot:z
- ./data/assets/ssl:/etc/ssl/mail/:ro,z
- ./data/conf/sogo/:/etc/sogo/:z
- ./data/conf/phpfpm/sogo-sso/:/etc/phpfpm/:z
- vmail-vol-1:/var/vmail
- vmail-index-vol-1:/var/vmail_index
- crypt-vol-1:/mail_crypt/
- ./data/conf/rspamd/custom/:/etc/rspamd/custom:z
- ./data/assets/templates:/templates:z
- rspamd-vol-1:/var/lib/rspamd
- mysql-socket-vol-1:/var/run/mysqld/
environment:
- DOVECOT_MASTER_USER=${DOVECOT_MASTER_USER:-}
- DOVECOT_MASTER_PASS=${DOVECOT_MASTER_PASS:-}
- LOG_LINES=${LOG_LINES:-9999}
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- TZ=${TZ}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- MAILCOW_PASS_SCHEME=${MAILCOW_PASS_SCHEME:-BLF-CRYPT}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- ALLOW_ADMIN_EMAIL_LOGIN=${ALLOW_ADMIN_EMAIL_LOGIN:-n}
- MAILDIR_GC_TIME=${MAILDIR_GC_TIME:-7200}
- ACL_ANYONE=${ACL_ANYONE:-disallow}
- SKIP_SOLR=${SKIP_SOLR:-y}
- MAILDIR_SUB=${MAILDIR_SUB:-}
- MASTER=${MASTER:-y}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
- COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME:-mailcow-dockerized}
ports:
- "${DOVEADM_PORT:-127.0.0.1:19991}:12345"
- "${IMAP_PORT:-143}:143"
- "${IMAPS_PORT:-993}:993"
- "${POP_PORT:-110}:110"
- "${POPS_PORT:-995}:995"
- "${SIEVE_PORT:-4190}:4190"
restart: always
tty: true
labels:
ofelia.enabled: "true"
ofelia.job-exec.dovecot_imapsync_runner.schedule: "@every 1m"
ofelia.job-exec.dovecot_imapsync_runner.no-overlap: "true"
ofelia.job-exec.dovecot_imapsync_runner.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu nobody /usr/local/bin/imapsync_runner.pl || exit 0\""
ofelia.job-exec.dovecot_trim_logs.schedule: "@every 1m"
ofelia.job-exec.dovecot_trim_logs.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu vmail /usr/local/bin/trim_logs.sh || exit 0\""
ofelia.job-exec.dovecot_quarantine.schedule: "@every 20m"
ofelia.job-exec.dovecot_quarantine.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu vmail /usr/local/bin/quarantine_notify.py || exit 0\""
ofelia.job-exec.dovecot_clean_q_aged.schedule: "@every 24h"
ofelia.job-exec.dovecot_clean_q_aged.command: "/bin/bash -c \"[[ $${MASTER} == y ]] && /usr/local/bin/gosu vmail /usr/local/bin/clean_q_aged.sh || exit 0\""
ofelia.job-exec.dovecot_maildir_gc.schedule: "@every 30m"
ofelia.job-exec.dovecot_maildir_gc.command: "/bin/bash -c \"source /source_env.sh ; /usr/local/bin/gosu vmail /usr/local/bin/maildir_gc.sh\""
ofelia.job-exec.dovecot_sarules.schedule: "@every 24h"
ofelia.job-exec.dovecot_sarules.command: "/bin/bash -c \"/usr/local/bin/sa-rules.sh\""
ofelia.job-exec.dovecot_fts.schedule: "@every 24h"
ofelia.job-exec.dovecot_fts.command: "/usr/bin/curl http://solr:8983/solr/dovecot-fts/update?optimize=true"
ofelia.job-exec.dovecot_repl_health.schedule: "@every 5m"
ofelia.job-exec.dovecot_repl_health.command: "/bin/bash -c \"/usr/local/bin/gosu vmail /usr/local/bin/repl_health.sh\""
ulimits:
nproc: 65535
nofile:
soft: 20000
hard: 40000
networks:
mailcow-network:
ipv4_address: ${IPV4_NETWORK:-172.22.1}.250
aliases:
- dovecot
postfix-mailcow:
image: mailcow/postfix:1.67
depends_on:
- mysql-mailcow
volumes:
- ./data/hooks/postfix:/hooks:Z
- ./data/conf/postfix:/opt/postfix/conf:z
- ./data/assets/ssl:/etc/ssl/mail/:ro,z
- postfix-vol-1:/var/spool/postfix
- crypt-vol-1:/var/lib/zeyple
- rspamd-vol-1:/var/lib/rspamd
- mysql-socket-vol-1:/var/run/mysqld/
environment:
- LOG_LINES=${LOG_LINES:-9999}
- TZ=${TZ}
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
cap_add:
- NET_BIND_SERVICE
ports:
- "${SMTP_PORT:-25}:25"
- "${SMTPS_PORT:-465}:465"
- "${SUBMISSION_PORT:-587}:587"
restart: always
dns:
- ${IPV4_NETWORK:-172.22.1}.254
networks:
mailcow-network:
ipv4_address: ${IPV4_NETWORK:-172.22.1}.253
aliases:
- postfix
memcached-mailcow:
image: memcached:alpine
restart: always
environment:
- TZ=${TZ}
networks:
mailcow-network:
aliases:
- memcached
nginx-mailcow:
depends_on:
- sogo-mailcow
- php-fpm-mailcow
- redis-mailcow
image: nginx:mainline-alpine
dns:
- ${IPV4_NETWORK:-172.22.1}.254
command: /bin/sh -c "envsubst < /etc/nginx/conf.d/templates/listen_plain.template > /etc/nginx/conf.d/listen_plain.active &&
envsubst < /etc/nginx/conf.d/templates/listen_ssl.template > /etc/nginx/conf.d/listen_ssl.active &&
envsubst < /etc/nginx/conf.d/templates/sogo.template > /etc/nginx/conf.d/sogo.active &&
. /etc/nginx/conf.d/templates/server_name.template.sh > /etc/nginx/conf.d/server_name.active &&
. /etc/nginx/conf.d/templates/sites.template.sh > /etc/nginx/conf.d/sites.active &&
. /etc/nginx/conf.d/templates/sogo_eas.template.sh > /etc/nginx/conf.d/sogo_eas.active &&
nginx -qt &&
until ping phpfpm -c1 > /dev/null; do sleep 1; done &&
until ping sogo -c1 > /dev/null; do sleep 1; done &&
until ping redis -c1 > /dev/null; do sleep 1; done &&
until ping rspamd -c1 > /dev/null; do sleep 1; done &&
exec nginx -g 'daemon off;'"
environment:
- HTTPS_PORT=${HTTPS_PORT:-443}
- HTTP_PORT=${HTTP_PORT:-80}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- TZ=${TZ}
- SKIP_SOGO=${SKIP_SOGO:-n}
- ALLOW_ADMIN_EMAIL_LOGIN=${ALLOW_ADMIN_EMAIL_LOGIN:-n}
- ADDITIONAL_SERVER_NAMES=${ADDITIONAL_SERVER_NAMES:-}
volumes:
- ./data/web:/web:ro,z
- ./data/conf/rspamd/dynmaps:/dynmaps:ro,z
- ./data/assets/ssl/:/etc/ssl/mail/:ro,z
- ./data/conf/nginx/:/etc/nginx/conf.d/:z
- ./data/conf/rspamd/meta_exporter:/meta_exporter:ro,z
- sogo-web-vol-1:/usr/lib/GNUstep/SOGo/
ports:
- "${HTTPS_BIND:-}:${HTTPS_PORT:-443}:${HTTPS_PORT:-443}"
- "${HTTP_BIND:-}:${HTTP_PORT:-80}:${HTTP_PORT:-80}"
restart: always
networks:
mailcow-network:
aliases:
- nginx
acme-mailcow:
depends_on:
- nginx-mailcow
image: mailcow/acme:1.82
dns:
- ${IPV4_NETWORK:-172.22.1}.254
environment:
- LOG_LINES=${LOG_LINES:-9999}
- ACME_CONTACT=${ACME_CONTACT:-}
- ADDITIONAL_SAN=${ADDITIONAL_SAN}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- SKIP_LETS_ENCRYPT=${SKIP_LETS_ENCRYPT:-n}
- COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME:-mailcow-dockerized}
- DIRECTORY_URL=${DIRECTORY_URL:-}
- ENABLE_SSL_SNI=${ENABLE_SSL_SNI:-n}
- SKIP_IP_CHECK=${SKIP_IP_CHECK:-n}
- SKIP_HTTP_VERIFICATION=${SKIP_HTTP_VERIFICATION:-n}
- ONLY_MAILCOW_HOSTNAME=${ONLY_MAILCOW_HOSTNAME:-n}
- LE_STAGING=${LE_STAGING:-n}
- TZ=${TZ}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
- SNAT_TO_SOURCE=${SNAT_TO_SOURCE:-n}
- SNAT6_TO_SOURCE=${SNAT6_TO_SOURCE:-n}
volumes:
- ./data/web/.well-known/acme-challenge:/var/www/acme:z
- ./data/assets/ssl:/var/lib/acme/:z
- ./data/assets/ssl-example:/var/lib/ssl-example/:ro,Z
- mysql-socket-vol-1:/var/run/mysqld/
restart: always
networks:
mailcow-network:
aliases:
- acme
netfilter-mailcow:
image: mailcow/netfilter:1.48
stop_grace_period: 30s
depends_on:
- dovecot-mailcow
- postfix-mailcow
- sogo-mailcow
- php-fpm-mailcow
- redis-mailcow
restart: always
privileged: true
environment:
- TZ=${TZ}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- IPV6_NETWORK=${IPV6_NETWORK:-fd4d:6169:6c63:6f77::/64}
- SNAT_TO_SOURCE=${SNAT_TO_SOURCE:-n}
- SNAT6_TO_SOURCE=${SNAT6_TO_SOURCE:-n}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
network_mode: "host"
volumes:
- /lib/modules:/lib/modules:ro
watchdog-mailcow:
image: mailcow/watchdog:1.96
dns:
- ${IPV4_NETWORK:-172.22.1}.254
tmpfs:
- /tmp
volumes:
- rspamd-vol-1:/var/lib/rspamd
- mysql-socket-vol-1:/var/run/mysqld/
- postfix-vol-1:/var/spool/postfix
- ./data/assets/ssl:/etc/ssl/mail/:ro,z
restart: always
environment:
- IPV6_NETWORK=${IPV6_NETWORK:-fd4d:6169:6c63:6f77::/64}
- LOG_LINES=${LOG_LINES:-9999}
- TZ=${TZ}
- DBNAME=${DBNAME}
- DBUSER=${DBUSER}
- DBPASS=${DBPASS}
- DBROOT=${DBROOT}
- USE_WATCHDOG=${USE_WATCHDOG:-n}
- WATCHDOG_NOTIFY_EMAIL=${WATCHDOG_NOTIFY_EMAIL:-}
- WATCHDOG_NOTIFY_BAN=${WATCHDOG_NOTIFY_BAN:-y}
- WATCHDOG_SUBJECT=${WATCHDOG_SUBJECT:-Watchdog ALERT}
- WATCHDOG_EXTERNAL_CHECKS=${WATCHDOG_EXTERNAL_CHECKS:-n}
- WATCHDOG_MYSQL_REPLICATION_CHECKS=${WATCHDOG_MYSQL_REPLICATION_CHECKS:-n}
- WATCHDOG_VERBOSE=${WATCHDOG_VERBOSE:-n}
- MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME}
- COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME:-mailcow-dockerized}
- IPV4_NETWORK=${IPV4_NETWORK:-172.22.1}
- IP_BY_DOCKER_API=${IP_BY_DOCKER_API:-0}
- CHECK_UNBOUND=${CHECK_UNBOUND:-1}
- SKIP_CLAMD=${SKIP_CLAMD:-n}
- SKIP_LETS_ENCRYPT=${SKIP_LETS_ENCRYPT:-n}
- SKIP_SOGO=${SKIP_SOGO:-n}
- HTTPS_PORT=${HTTPS_PORT:-443}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
- EXTERNAL_CHECKS_THRESHOLD=${EXTERNAL_CHECKS_THRESHOLD:-1}
- NGINX_THRESHOLD=${NGINX_THRESHOLD:-5}
- UNBOUND_THRESHOLD=${UNBOUND_THRESHOLD:-5}
- REDIS_THRESHOLD=${REDIS_THRESHOLD:-5}
- MYSQL_THRESHOLD=${MYSQL_THRESHOLD:-5}
- MYSQL_REPLICATION_THRESHOLD=${MYSQL_REPLICATION_THRESHOLD:-1}
- SOGO_THRESHOLD=${SOGO_THRESHOLD:-3}
- POSTFIX_THRESHOLD=${POSTFIX_THRESHOLD:-8}
- CLAMD_THRESHOLD=${CLAMD_THRESHOLD:-15}
- DOVECOT_THRESHOLD=${DOVECOT_THRESHOLD:-12}
- DOVECOT_REPL_THRESHOLD=${DOVECOT_REPL_THRESHOLD:-20}
- PHPFPM_THRESHOLD=${PHPFPM_THRESHOLD:-5}
- RATELIMIT_THRESHOLD=${RATELIMIT_THRESHOLD:-1}
- FAIL2BAN_THRESHOLD=${FAIL2BAN_THRESHOLD:-1}
- ACME_THRESHOLD=${ACME_THRESHOLD:-1}
- RSPAMD_THRESHOLD=${RSPAMD_THRESHOLD:-5}
- OLEFY_THRESHOLD=${OLEFY_THRESHOLD:-5}
- MAILQ_THRESHOLD=${MAILQ_THRESHOLD:-20}
- MAILQ_CRIT=${MAILQ_CRIT:-30}
networks:
mailcow-network:
aliases:
- watchdog
dockerapi-mailcow:
- image: mailcow/dockerapi:1.43
+ image: mailcow/dockerapi:1.44
security_opt:
- label=disable
restart: always
oom_kill_disable: true
dns:
- ${IPV4_NETWORK:-172.22.1}.254
environment:
- DBROOT=${DBROOT}
- TZ=${TZ}
- REDIS_SLAVEOF_IP=${REDIS_SLAVEOF_IP:-}
- REDIS_SLAVEOF_PORT=${REDIS_SLAVEOF_PORT:-}
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
networks:
mailcow-network:
aliases:
- dockerapi
solr-mailcow:
image: mailcow/solr:1.8.1
restart: always
volumes:
- solr-vol-1:/opt/solr/server/solr/dovecot-fts/data
ports:
- "${SOLR_PORT:-127.0.0.1:18983}:8983"
environment:
- TZ=${TZ}
- SOLR_HEAP=${SOLR_HEAP:-1024}
- SKIP_SOLR=${SKIP_SOLR:-y}
networks:
mailcow-network:
aliases:
- solr
olefy-mailcow:
image: mailcow/olefy:1.10
restart: always
environment:
- TZ=${TZ}
- OLEFY_BINDADDRESS=0.0.0.0
- OLEFY_BINDPORT=10055
- OLEFY_TMPDIR=/tmp
- OLEFY_PYTHON_PATH=/usr/bin/python3
- OLEFY_OLEVBA_PATH=/usr/bin/olevba
- OLEFY_LOGLVL=20
- OLEFY_MINLENGTH=500
- OLEFY_DEL_TMP=1
networks:
mailcow-network:
aliases:
- olefy
ofelia-mailcow:
image: mcuadros/ofelia:latest
restart: always
command: daemon --docker
environment:
- TZ=${TZ}
depends_on:
- sogo-mailcow
- dovecot-mailcow
labels:
ofelia.enabled: "true"
security_opt:
- label=disable
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
networks:
mailcow-network:
aliases:
- ofelia
ipv6nat-mailcow:
depends_on:
- unbound-mailcow
- mysql-mailcow
- redis-mailcow
- clamd-mailcow
- rspamd-mailcow
- php-fpm-mailcow
- sogo-mailcow
- dovecot-mailcow
- postfix-mailcow
- memcached-mailcow
- nginx-mailcow
- acme-mailcow
- netfilter-mailcow
- watchdog-mailcow
- dockerapi-mailcow
- solr-mailcow
environment:
- TZ=${TZ}
image: robbertkl/ipv6nat
security_opt:
- label=disable
restart: always
privileged: true
network_mode: "host"
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
- /lib/modules:/lib/modules:ro
networks:
mailcow-network:
driver: bridge
driver_opts:
com.docker.network.bridge.name: br-mailcow
enable_ipv6: true
ipam:
driver: default
config:
- subnet: ${IPV4_NETWORK:-172.22.1}.0/24
- subnet: ${IPV6_NETWORK:-fd4d:6169:6c63:6f77::/64}
volumes:
vmail-vol-1:
vmail-index-vol-1:
mysql-vol-1:
mysql-socket-vol-1:
redis-vol-1:
rspamd-vol-1:
solr-vol-1:
postfix-vol-1:
crypt-vol-1:
sogo-web-vol-1:
sogo-userdata-backup-vol-1:
clamd-db-vol-1:
