
dockerapi.py
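
# dockerapi.py exposes a small FastAPI service for inspecting and controlling
# Docker containers: GET endpoints list containers and report host/container
# stats (cached in Redis), and POST endpoints dispatch actions (start, stop,
# restart, and exec tasks for postfix, dovecot, rspamd, mysql, nginx) against a
# container selected by id. Two Docker clients are used: the synchronous
# docker SDK for exec/control calls and aiodocker for async listing and stats.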

from fastapi import FastAPI, Response, Request
import aiodocker
import docker
import psutil
import sys
import re
import time
import os
import json
import asyncio
import redis
import traceback
from datetime import datetime
import logging
from logging.config import dictConfig

log_config = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(levelprefix)s %(asctime)s %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "handlers": {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    },
    "loggers": {
        "api-logger": {"handlers": ["default"], "level": "INFO"},
    },
}
dictConfig(log_config)

containerIds_to_update = []
host_stats_isUpdating = False
app = FastAPI()
logger = logging.getLogger('api-logger')

@app.get("/host/stats")
async def get_host_update_stats():
    global host_stats_isUpdating

    if not host_stats_isUpdating:
        asyncio.create_task(get_host_stats())
        host_stats_isUpdating = True

    while True:
        if redis_client.exists('host_stats'):
            break
        await asyncio.sleep(1.5)

    stats = json.loads(redis_client.get('host_stats'))
    return Response(content=json.dumps(stats, indent=4), media_type="application/json")
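
# /host/stats spawns get_host_stats() once (guarded by host_stats_isUpdating),
# then polls Redis until that background task has written the 'host_stats' key.
# Illustrative response shape (keys as built in get_host_stats below):
#   {"cpu": {"cores": ..., "usage": ...},
#    "memory": {"total": ..., "usage": ..., "swap": ...},
#    "uptime": ..., "system_time": "dd.mm.YYYY HH:MM:SS"}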

@app.get("/containers/{container_id}/json")
async def get_container(container_id: str):
    if container_id and container_id.isalnum():
        try:
            for container in (await async_docker_client.containers.list()):
                if container._id == container_id:
                    container_info = await container.show()
                    return Response(content=json.dumps(container_info, indent=4), media_type="application/json")

            res = {
                "type": "danger",
                "msg": "no container found"
            }
            return Response(content=json.dumps(res, indent=4), media_type="application/json")
        except Exception as e:
            res = {
                "type": "danger",
                "msg": str(e)
            }
            return Response(content=json.dumps(res, indent=4), media_type="application/json")
    else:
        res = {
            "type": "danger",
            "msg": "no or invalid id defined"
        }
        return Response(content=json.dumps(res, indent=4), media_type="application/json")

@app.get("/containers/json")
async def get_containers():
    containers = {}
    try:
        for container in (await async_docker_client.containers.list()):
            container_info = await container.show()
            containers.update({container_info['Id']: container_info})
        return Response(content=json.dumps(containers, indent=4), media_type="application/json")
    except Exception as e:
        res = {
            "type": "danger",
            "msg": str(e)
        }
        return Response(content=json.dumps(res, indent=4), media_type="application/json")
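
# Note: the GET endpoints above use the async aiodocker client
# (async_docker_client), while the POST actions below go through the
# synchronous docker SDK client (sync_docker_client) via DockerUtils.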

@app.post("/containers/{container_id}/{post_action}")
async def post_containers(container_id: str, post_action: str, request: Request):
    try:
        request_json = await request.json()
    except Exception:
        request_json = {}

    if container_id and container_id.isalnum() and post_action:
        try:
            # dispatch container_post api call
            if post_action == 'exec':
                if not request_json or 'cmd' not in request_json:
                    res = {
                        "type": "danger",
                        "msg": "cmd is missing"
                    }
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                if not request_json or 'task' not in request_json:
                    res = {
                        "type": "danger",
                        "msg": "task is missing"
                    }
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task'])])
            else:
                api_call_method_name = '__'.join(['container_post', str(post_action)])

            docker_utils = DockerUtils(sync_docker_client)
            api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id, request_json: Response(content=json.dumps({'type': 'danger', 'msg': 'container_post - unknown api call'}, indent=4), media_type="application/json"))

            logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
            return api_call_method(container_id, request_json)
        except Exception as e:
            logger.error("error - container_post: %s" % str(e))
            res = {
                "type": "danger",
                "msg": str(e)
            }
            return Response(content=json.dumps(res, indent=4), media_type="application/json")
    else:
        res = {
            "type": "danger",
            "msg": "invalid container id or missing action"
        }
        return Response(content=json.dumps(res, indent=4), media_type="application/json")
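
# Dispatch example (derived from the handler above): a request such as
#   POST /containers/<id>/exec  with JSON body {"cmd": "mailq", "task": "list"}
# builds the method name 'container_post__exec__mailq__list' and resolves it on
# DockerUtils via getattr(); non-exec actions map to e.g. 'container_post__restart'.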

@app.post("/container/{container_id}/stats/update")
async def post_container_update_stats(container_id: str):
    global containerIds_to_update

    # start update task for container if no task is running
    if container_id not in containerIds_to_update:
        asyncio.create_task(get_container_stats(container_id))
        containerIds_to_update.append(container_id)

    while True:
        if redis_client.exists(container_id + '_stats'):
            break
        await asyncio.sleep(1.5)

    stats = json.loads(redis_client.get(container_id + '_stats'))
    return Response(content=json.dumps(stats, indent=4), media_type="application/json")
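
# /container/<id>/stats/update starts get_container_stats() for an id at most
# once (tracked in containerIds_to_update) and then polls Redis for the
# '<id>_stats' key, which holds a short rolling window of stats samples.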

class DockerUtils:
    def __init__(self, docker_client):
        self.docker_client = docker_client

    # api call: container_post - post_action: stop
    def container_post__stop(self, container_id, request_json):
        for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
            container.stop()
        res = {'type': 'success', 'msg': 'command completed successfully'}
        return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: start
    def container_post__start(self, container_id, request_json):
        for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
            container.start()
        res = {'type': 'success', 'msg': 'command completed successfully'}
        return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: restart
    def container_post__restart(self, container_id, request_json):
        for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
            container.restart()
        res = {'type': 'success', 'msg': 'command completed successfully'}
        return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: top
    def container_post__top(self, container_id, request_json):
        for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
            res = {'type': 'success', 'msg': container.top()}
            return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: stats
    def container_post__stats(self, container_id, request_json):
        for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
            for stat in container.stats(decode=True, stream=True):
                res = {'type': 'success', 'msg': stat}
                return Response(content=json.dumps(res, indent=4), media_type="application/json")
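
    # The mailq tasks below validate queue ids against ^[0-9a-fA-F]+$ before
    # interpolating them into postsuper/postqueue/postcat command lines
    # (-d delete, -h hold, -H unhold, -i deliver).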

    # api call: container_post - post_action: exec - cmd: mailq - task: delete
    def container_post__exec__mailq__delete(self, container_id, request_json):
        if 'items' in request_json:
            r = re.compile("^[0-9a-fA-F]+$")
            filtered_qids = list(filter(r.match, request_json['items']))
            if filtered_qids:
                flagged_qids = ['-d %s' % i for i in filtered_qids]
                sanitized_string = ' '.join(flagged_qids)
                for container in self.docker_client.containers.list(filters={"id": container_id}):
                    postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
                    return exec_run_handler('generic', postsuper_r)

    # api call: container_post - post_action: exec - cmd: mailq - task: hold
    def container_post__exec__mailq__hold(self, container_id, request_json):
        if 'items' in request_json:
            r = re.compile("^[0-9a-fA-F]+$")
            filtered_qids = list(filter(r.match, request_json['items']))
            if filtered_qids:
                flagged_qids = ['-h %s' % i for i in filtered_qids]
                sanitized_string = ' '.join(flagged_qids)
                for container in self.docker_client.containers.list(filters={"id": container_id}):
                    postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
                    return exec_run_handler('generic', postsuper_r)

    # api call: container_post - post_action: exec - cmd: mailq - task: cat
    def container_post__exec__mailq__cat(self, container_id, request_json):
        if 'items' in request_json:
            r = re.compile("^[0-9a-fA-F]+$")
            filtered_qids = list(filter(r.match, request_json['items']))
            if filtered_qids:
                sanitized_string = ' '.join(filtered_qids)
                for container in self.docker_client.containers.list(filters={"id": container_id}):
                    postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
                if not postcat_return:
                    postcat_return = 'err: invalid'
                return exec_run_handler('utf8_text_only', postcat_return)

    # api call: container_post - post_action: exec - cmd: mailq - task: unhold
    def container_post__exec__mailq__unhold(self, container_id, request_json):
        if 'items' in request_json:
            r = re.compile("^[0-9a-fA-F]+$")
            filtered_qids = list(filter(r.match, request_json['items']))
            if filtered_qids:
                flagged_qids = ['-H %s' % i for i in filtered_qids]
                sanitized_string = ' '.join(flagged_qids)
                for container in self.docker_client.containers.list(filters={"id": container_id}):
                    postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
                    return exec_run_handler('generic', postsuper_r)

    # api call: container_post - post_action: exec - cmd: mailq - task: deliver
    def container_post__exec__mailq__deliver(self, container_id, request_json):
        if 'items' in request_json:
            r = re.compile("^[0-9a-fA-F]+$")
            filtered_qids = list(filter(r.match, request_json['items']))
            if filtered_qids:
                flagged_qids = ['-i %s' % i for i in filtered_qids]
                for container in self.docker_client.containers.list(filters={"id": container_id}):
                    for i in flagged_qids:
                        postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
                        # todo: check each exit code
                    res = {'type': 'success', 'msg': 'Scheduled immediate delivery'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: exec - cmd: mailq - task: list
    def container_post__exec__mailq__list(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
            return exec_run_handler('utf8_text_only', mailq_return)

    # api call: container_post - post_action: exec - cmd: mailq - task: flush
    def container_post__exec__mailq__flush(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
            return exec_run_handler('generic', postqueue_r)

    # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
    def container_post__exec__mailq__super_delete(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
            return exec_run_handler('generic', postsuper_r)

    # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
    def container_post__exec__system__fts_rescan(self, container_id, request_json):
        if 'username' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
                if rescan_return.exit_code == 0:
                    res = {'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                else:
                    res = {'type': 'warning', 'msg': 'fts_rescan error'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")

        if 'all' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
                if rescan_return.exit_code == 0:
                    res = {'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                else:
                    res = {'type': 'warning', 'msg': 'fts_rescan error'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: exec - cmd: system - task: df
    def container_post__exec__system__df(self, container_id, request_json):
        if 'dir' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
                if df_return.exit_code == 0:
                    return df_return.output.decode('utf-8').rstrip()
                else:
                    return "0,0,0,0,0,0"

    # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
    def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
            if sql_return.exit_code == 0:
                matched = False
                for line in sql_return.output.decode('utf-8').split("\n"):
                    if 'is already upgraded to' in line:
                        matched = True
                if matched:
                    res = {'type': 'success', 'msg': 'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                else:
                    container.restart()
                    res = {'type': 'warning', 'msg': 'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
            else:
                res = {'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
                return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
    def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
            if sql_return.exit_code == 0:
                res = {'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
                return Response(content=json.dumps(res, indent=4), media_type="application/json")
            else:
                res = {'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
                return Response(content=json.dumps(res, indent=4), media_type="application/json")

    # api call: container_post - post_action: exec - cmd: reload - task: dovecot
    def container_post__exec__reload__dovecot(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
            return exec_run_handler('generic', reload_return)

    # api call: container_post - post_action: exec - cmd: reload - task: postfix
    def container_post__exec__reload__postfix(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
            return exec_run_handler('generic', reload_return)

    # api call: container_post - post_action: exec - cmd: reload - task: nginx
    def container_post__exec__reload__nginx(self, container_id, request_json):
        for container in self.docker_client.containers.list(filters={"id": container_id}):
            reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
            return exec_run_handler('generic', reload_return)

    # api call: container_post - post_action: exec - cmd: sieve - task: list
    def container_post__exec__sieve__list(self, container_id, request_json):
        if 'username' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
                return exec_run_handler('utf8_text_only', sieve_return)

    # api call: container_post - post_action: exec - cmd: sieve - task: print
    def container_post__exec__sieve__print(self, container_id, request_json):
        if 'username' in request_json and 'script_name' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
                sieve_return = container.exec_run(cmd)
                return exec_run_handler('utf8_text_only', sieve_return)

    # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
    def container_post__exec__maildir__cleanup(self, container_id, request_json):
        if 'maildir' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                sane_name = re.sub(r'\W+', '', request_json['maildir'])
                vmail_name = request_json['maildir'].replace("'", "'\\''")
                cmd_vmail = "if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"
                index_name = request_json['maildir'].split("/")
                if len(index_name) > 1:
                    index_name = index_name[1].replace("'", "'\\''") + "@" + index_name[0].replace("'", "'\\''")
                    cmd_vmail_index = "if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" + index_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "_index'; fi"
                    cmd = ["/bin/bash", "-c", cmd_vmail + " && " + cmd_vmail_index]
                else:
                    cmd = ["/bin/bash", "-c", cmd_vmail]
                maildir_cleanup = container.exec_run(cmd, user='vmail')
                return exec_run_handler('generic', maildir_cleanup)

    # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
    def container_post__exec__rspamd__worker_password(self, container_id, request_json):
        if 'raw' in request_json:
            for container in self.docker_client.containers.list(filters={"id": container_id}):
                cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
                cmd_response = exec_cmd_container(container, cmd, user="_rspamd")

                matched = False
                for line in cmd_response.split("\n"):
                    if '$2$' in line:
                        hash = line.strip()
                        hash_out = re.search(r'\$2\$.+$', hash).group(0)
                        rspamd_passphrase_hash = re.sub(r'[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
                        rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
                        cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
                        cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
                        if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
                            container.restart()
                            matched = True

                if matched:
                    res = {'type': 'success', 'msg': 'command completed successfully'}
                    logger.info('success changing Rspamd password')
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
                else:
                    logger.error('failed changing Rspamd password')
                    res = {'type': 'danger', 'msg': 'command did not complete'}
                    return Response(content=json.dumps(res, indent=4), media_type="application/json")
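
# exec_cmd_container() runs a command through an interactive shell inside the
# container: it attaches to the exec instance's raw socket, writes the command,
# and reads replies until the socket has been quiet for `timeout` seconds
# (or 2*timeout overall). It is used by the rspamd worker_password task above.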

def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):

    def recv_socket_data(c_socket, timeout):
        c_socket.setblocking(0)
        total_data = []
        data = ''
        begin = time.time()
        while True:
            if total_data and time.time() - begin > timeout:
                break
            elif time.time() - begin > timeout * 2:
                break
            try:
                data = c_socket.recv(8192)
                if data:
                    total_data.append(data.decode('utf-8'))
                    # change the beginning time for measurement
                    begin = time.time()
                else:
                    # sleep for some time to indicate a gap
                    time.sleep(0.1)
                    break
            except:
                pass
        return ''.join(total_data)

    try:
        socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
        if not cmd.endswith("\n"):
            cmd = cmd + "\n"
        socket.send(cmd.encode('utf-8'))
        data = recv_socket_data(socket, timeout)
        socket.close()
        return data
    except Exception as e:
        logger.error("error - exec_cmd_container: %s" % str(e))
        traceback.print_exc(file=sys.stdout)

def exec_run_handler(type, output):
    if type == 'generic':
        if output.exit_code == 0:
            res = {'type': 'success', 'msg': 'command completed successfully'}
            return Response(content=json.dumps(res, indent=4), media_type="application/json")
        else:
            res = {'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8')}
            return Response(content=json.dumps(res, indent=4), media_type="application/json")
    if type == 'utf8_text_only':
        return Response(content=output.output.decode('utf-8'), media_type="text/plain")
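
# exec_run_handler() expects a docker SDK ExecResult (exit_code, output):
# 'generic' wraps success/failure as JSON, 'utf8_text_only' returns the raw
# decoded output as text/plain.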

async def get_host_stats(wait=5):
    global host_stats_isUpdating

    try:
        system_time = datetime.now()
        host_stats = {
            "cpu": {
                "cores": psutil.cpu_count(),
                "usage": psutil.cpu_percent()
            },
            "memory": {
                "total": psutil.virtual_memory().total,
                "usage": psutil.virtual_memory().percent,
                "swap": psutil.swap_memory()
            },
            "uptime": time.time() - psutil.boot_time(),
            "system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
        }
        redis_client.set('host_stats', json.dumps(host_stats), ex=10)
    except Exception as e:
        res = {
            "type": "danger",
            "msg": str(e)
        }

    await asyncio.sleep(wait)
    host_stats_isUpdating = False

async def get_container_stats(container_id, wait=5, stop=False):
    global containerIds_to_update

    if container_id and container_id.isalnum():
        try:
            for container in (await async_docker_client.containers.list()):
                if container._id == container_id:
                    res = await container.stats(stream=False)

                    if redis_client.exists(container_id + '_stats'):
                        stats = json.loads(redis_client.get(container_id + '_stats'))
                    else:
                        stats = []
                    stats.append(res[0])
                    if len(stats) > 3:
                        del stats[0]
                    redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
        except Exception as e:
            res = {
                "type": "danger",
                "msg": str(e)
            }
    else:
        res = {
            "type": "danger",
            "msg": "no or invalid id defined"
        }

    await asyncio.sleep(wait)
    if stop:
        # update task was called second time, stop
        containerIds_to_update.remove(container_id)
    else:
        # call update task a second time
        await get_container_stats(container_id, wait=0, stop=True)
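
# get_container_stats() samples once, sleeps, samples again with stop=True and
# then removes the container id from containerIds_to_update so a later request
# can start a fresh task; each Redis entry keeps at most the 3 newest samples.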

if os.environ['REDIS_SLAVEOF_IP'] != "":
    redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
else:
    redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)

sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')

logger.info('DockerApi started')
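
# Illustrative only (not part of the original file): given the uvicorn log
# formatter configured above, the app is presumably served with uvicorn, e.g.
#   uvicorn dockerapi:app --host 0.0.0.0 --port 8080
# Module name, host and port here are assumptions, not taken from this file.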