# dockerapi.py
import asyncio
import json
import logging
import os
import platform
import re
import sys
import time
import traceback
from datetime import datetime
from logging.config import dictConfig

import aiodocker
import docker
import psutil
import redis
from fastapi import FastAPI, Response, Request
  16. log_config = {
  17. "version": 1,
  18. "disable_existing_loggers": False,
  19. "formatters": {
  20. "default": {
  21. "()": "uvicorn.logging.DefaultFormatter",
  22. "fmt": "%(levelprefix)s %(asctime)s %(message)s",
  23. "datefmt": "%Y-%m-%d %H:%M:%S",
  24. },
  25. },
  26. "handlers": {
  27. "default": {
  28. "formatter": "default",
  29. "class": "logging.StreamHandler",
  30. "stream": "ext://sys.stderr",
  31. },
  32. },
  33. "loggers": {
  34. "api-logger": {"handlers": ["default"], "level": "INFO"},
  35. },
  36. }
  37. dictConfig(log_config)
  38. containerIds_to_update = []
  39. host_stats_isUpdating = False
  40. app = FastAPI()
  41. logger = logging.getLogger('api-logger')
  42. @app.get("/host/stats")
  43. async def get_host_update_stats():
  44. global host_stats_isUpdating
  45. if host_stats_isUpdating == False:
  46. asyncio.create_task(get_host_stats())
  47. host_stats_isUpdating = True
  48. while True:
  49. if redis_client.exists('host_stats'):
  50. break
  51. await asyncio.sleep(1.5)
  52. stats = json.loads(redis_client.get('host_stats'))
  53. return Response(content=json.dumps(stats, indent=4), media_type="application/json")
  54. @app.get("/containers/{container_id}/json")
  55. async def get_container(container_id : str):
  56. if container_id and container_id.isalnum():
  57. try:
  58. for container in (await async_docker_client.containers.list()):
  59. if container._id == container_id:
  60. container_info = await container.show()
  61. return Response(content=json.dumps(container_info, indent=4), media_type="application/json")
  62. res = {
  63. "type": "danger",
  64. "msg": "no container found"
  65. }
  66. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  67. except Exception as e:
  68. res = {
  69. "type": "danger",
  70. "msg": str(e)
  71. }
  72. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  73. else:
  74. res = {
  75. "type": "danger",
  76. "msg": "no or invalid id defined"
  77. }
  78. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  79. @app.get("/containers/json")
  80. async def get_containers():
  81. containers = {}
  82. try:
  83. for container in (await async_docker_client.containers.list()):
  84. container_info = await container.show()
  85. containers.update({container_info['Id']: container_info})
  86. return Response(content=json.dumps(containers, indent=4), media_type="application/json")
  87. except Exception as e:
  88. res = {
  89. "type": "danger",
  90. "msg": str(e)
  91. }
  92. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  93. @app.post("/containers/{container_id}/{post_action}")
  94. async def post_containers(container_id : str, post_action : str, request: Request):
  95. try :
  96. request_json = await request.json()
  97. except Exception as err:
  98. request_json = {}
  99. if container_id and container_id.isalnum() and post_action:
  100. try:
  101. """Dispatch container_post api call"""
  102. if post_action == 'exec':
  103. if not request_json or not 'cmd' in request_json:
  104. res = {
  105. "type": "danger",
  106. "msg": "cmd is missing"
  107. }
  108. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  109. if not request_json or not 'task' in request_json:
  110. res = {
  111. "type": "danger",
  112. "msg": "task is missing"
  113. }
  114. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  115. api_call_method_name = '__'.join(['container_post', str(post_action), str(request_json['cmd']), str(request_json['task']) ])
  116. else:
  117. api_call_method_name = '__'.join(['container_post', str(post_action) ])
  118. docker_utils = DockerUtils(sync_docker_client)
  119. api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))
  120. logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
  121. return api_call_method(container_id, request_json)
  122. except Exception as e:
  123. logger.error("error - container_post: %s" % str(e))
  124. res = {
  125. "type": "danger",
  126. "msg": str(e)
  127. }
  128. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  129. else:
  130. res = {
  131. "type": "danger",
  132. "msg": "invalid container id or missing action"
  133. }
  134. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  135. @app.post("/container/{container_id}/stats/update")
  136. async def post_container_update_stats(container_id : str):
  137. global containerIds_to_update
  138. # start update task for container if no task is running
  139. if container_id not in containerIds_to_update:
  140. asyncio.create_task(get_container_stats(container_id))
  141. containerIds_to_update.append(container_id)
  142. while True:
  143. if redis_client.exists(container_id + '_stats'):
  144. break
  145. await asyncio.sleep(1.5)
  146. stats = json.loads(redis_client.get(container_id + '_stats'))
  147. return Response(content=json.dumps(stats, indent=4), media_type="application/json")
  148. class DockerUtils:
  149. def __init__(self, docker_client):
  150. self.docker_client = docker_client
  151. # api call: container_post - post_action: stop
  152. def container_post__stop(self, container_id, request_json):
  153. for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
  154. container.stop()
  155. res = { 'type': 'success', 'msg': 'command completed successfully'}
  156. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  157. # api call: container_post - post_action: start
  158. def container_post__start(self, container_id, request_json):
  159. for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
  160. container.start()
  161. res = { 'type': 'success', 'msg': 'command completed successfully'}
  162. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  163. # api call: container_post - post_action: restart
  164. def container_post__restart(self, container_id, request_json):
  165. for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
  166. container.restart()
  167. res = { 'type': 'success', 'msg': 'command completed successfully'}
  168. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  169. # api call: container_post - post_action: top
  170. def container_post__top(self, container_id, request_json):
  171. for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
  172. res = { 'type': 'success', 'msg': container.top()}
  173. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  174. # api call: container_post - post_action: stats
  175. def container_post__stats(self, container_id, request_json):
  176. for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
  177. for stat in container.stats(decode=True, stream=True):
  178. res = { 'type': 'success', 'msg': stat}
  179. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  180. # api call: container_post - post_action: exec - cmd: mailq - task: delete
  181. def container_post__exec__mailq__delete(self, container_id, request_json):
  182. if 'items' in request_json:
  183. r = re.compile("^[0-9a-fA-F]+$")
  184. filtered_qids = filter(r.match, request_json['items'])
  185. if filtered_qids:
  186. flagged_qids = ['-d %s' % i for i in filtered_qids]
  187. sanitized_string = str(' '.join(flagged_qids));
  188. for container in self.docker_client.containers.list(filters={"id": container_id}):
  189. postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
  190. return exec_run_handler('generic', postsuper_r)
  191. # api call: container_post - post_action: exec - cmd: mailq - task: hold
  192. def container_post__exec__mailq__hold(self, container_id, request_json):
  193. if 'items' in request_json:
  194. r = re.compile("^[0-9a-fA-F]+$")
  195. filtered_qids = filter(r.match, request_json['items'])
  196. if filtered_qids:
  197. flagged_qids = ['-h %s' % i for i in filtered_qids]
  198. sanitized_string = str(' '.join(flagged_qids));
  199. for container in self.docker_client.containers.list(filters={"id": container_id}):
  200. postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
  201. return exec_run_handler('generic', postsuper_r)
  202. # api call: container_post - post_action: exec - cmd: mailq - task: cat
  203. def container_post__exec__mailq__cat(self, container_id, request_json):
  204. if 'items' in request_json:
  205. r = re.compile("^[0-9a-fA-F]+$")
  206. filtered_qids = filter(r.match, request_json['items'])
  207. if filtered_qids:
  208. sanitized_string = str(' '.join(filtered_qids));
  209. for container in self.docker_client.containers.list(filters={"id": container_id}):
  210. postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
  211. if not postcat_return:
  212. postcat_return = 'err: invalid'
  213. return exec_run_handler('utf8_text_only', postcat_return)
  214. # api call: container_post - post_action: exec - cmd: mailq - task: unhold
  215. def container_post__exec__mailq__unhold(self, container_id, request_json):
  216. if 'items' in request_json:
  217. r = re.compile("^[0-9a-fA-F]+$")
  218. filtered_qids = filter(r.match, request_json['items'])
  219. if filtered_qids:
  220. flagged_qids = ['-H %s' % i for i in filtered_qids]
  221. sanitized_string = str(' '.join(flagged_qids));
  222. for container in self.docker_client.containers.list(filters={"id": container_id}):
  223. postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
  224. return exec_run_handler('generic', postsuper_r)
  225. # api call: container_post - post_action: exec - cmd: mailq - task: deliver
  226. def container_post__exec__mailq__deliver(self, container_id, request_json):
  227. if 'items' in request_json:
  228. r = re.compile("^[0-9a-fA-F]+$")
  229. filtered_qids = filter(r.match, request_json['items'])
  230. if filtered_qids:
  231. flagged_qids = ['-i %s' % i for i in filtered_qids]
  232. for container in self.docker_client.containers.list(filters={"id": container_id}):
  233. for i in flagged_qids:
  234. postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
  235. # todo: check each exit code
  236. res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
  237. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  238. # api call: container_post - post_action: exec - cmd: mailq - task: list
  239. def container_post__exec__mailq__list(self, container_id, request_json):
  240. for container in self.docker_client.containers.list(filters={"id": container_id}):
  241. mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
  242. return exec_run_handler('utf8_text_only', mailq_return)
  243. # api call: container_post - post_action: exec - cmd: mailq - task: flush
  244. def container_post__exec__mailq__flush(self, container_id, request_json):
  245. for container in self.docker_client.containers.list(filters={"id": container_id}):
  246. postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
  247. return exec_run_handler('generic', postqueue_r)
  248. # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
  249. def container_post__exec__mailq__super_delete(self, container_id, request_json):
  250. for container in self.docker_client.containers.list(filters={"id": container_id}):
  251. postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
  252. return exec_run_handler('generic', postsuper_r)
  253. # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
  254. def container_post__exec__system__fts_rescan(self, container_id, request_json):
  255. if 'username' in request_json:
  256. for container in self.docker_client.containers.list(filters={"id": container_id}):
  257. rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
  258. if rescan_return.exit_code == 0:
  259. res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
  260. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  261. else:
  262. res = { 'type': 'warning', 'msg': 'fts_rescan error'}
  263. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  264. if 'all' in request_json:
  265. for container in self.docker_client.containers.list(filters={"id": container_id}):
  266. rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
  267. if rescan_return.exit_code == 0:
  268. res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
  269. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  270. else:
  271. res = { 'type': 'warning', 'msg': 'fts_rescan error'}
  272. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  273. # api call: container_post - post_action: exec - cmd: system - task: df
  274. def container_post__exec__system__df(self, container_id, request_json):
  275. if 'dir' in request_json:
  276. for container in self.docker_client.containers.list(filters={"id": container_id}):
  277. df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
  278. if df_return.exit_code == 0:
  279. return df_return.output.decode('utf-8').rstrip()
  280. else:
  281. return "0,0,0,0,0,0"
  282. # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
  283. def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
  284. for container in self.docker_client.containers.list(filters={"id": container_id}):
  285. sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
  286. if sql_return.exit_code == 0:
  287. matched = False
  288. for line in sql_return.output.decode('utf-8').split("\n"):
  289. if 'is already upgraded to' in line:
  290. matched = True
  291. if matched:
  292. res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
  293. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  294. else:
  295. container.restart()
  296. res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
  297. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  298. else:
  299. res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
  300. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  301. # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
  302. def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
  303. for container in self.docker_client.containers.list(filters={"id": container_id}):
  304. sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
  305. if sql_return.exit_code == 0:
  306. res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
  307. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  308. else:
  309. res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
  310. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  311. # api call: container_post - post_action: exec - cmd: reload - task: dovecot
  312. def container_post__exec__reload__dovecot(self, container_id, request_json):
  313. for container in self.docker_client.containers.list(filters={"id": container_id}):
  314. reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
  315. return exec_run_handler('generic', reload_return)
  316. # api call: container_post - post_action: exec - cmd: reload - task: postfix
  317. def container_post__exec__reload__postfix(self, container_id, request_json):
  318. for container in self.docker_client.containers.list(filters={"id": container_id}):
  319. reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
  320. return exec_run_handler('generic', reload_return)
  321. # api call: container_post - post_action: exec - cmd: reload - task: nginx
  322. def container_post__exec__reload__nginx(self, container_id, request_json):
  323. for container in self.docker_client.containers.list(filters={"id": container_id}):
  324. reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
  325. return exec_run_handler('generic', reload_return)
  326. # api call: container_post - post_action: exec - cmd: sieve - task: list
  327. def container_post__exec__sieve__list(self, container_id, request_json):
  328. if 'username' in request_json:
  329. for container in self.docker_client.containers.list(filters={"id": container_id}):
  330. sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
  331. return exec_run_handler('utf8_text_only', sieve_return)
  332. # api call: container_post - post_action: exec - cmd: sieve - task: print
  333. def container_post__exec__sieve__print(self, container_id, request_json):
  334. if 'username' in request_json and 'script_name' in request_json:
  335. for container in self.docker_client.containers.list(filters={"id": container_id}):
  336. cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
  337. sieve_return = container.exec_run(cmd)
  338. return exec_run_handler('utf8_text_only', sieve_return)
  339. # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
  340. def container_post__exec__maildir__cleanup(self, container_id, request_json):
  341. if 'maildir' in request_json:
  342. for container in self.docker_client.containers.list(filters={"id": container_id}):
  343. sane_name = re.sub(r'\W+', '', request_json['maildir'])
  344. vmail_name = request_json['maildir'].replace("'", "'\\''")
  345. cmd_vmail = "if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"
  346. index_name = request_json['maildir'].split("/")
  347. if len(index_name) > 1:
  348. index_name = index_name[1].replace("'", "'\\''") + "@" + index_name[0].replace("'", "'\\''")
  349. cmd_vmail_index = "if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" + index_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "_index'; fi"
  350. cmd = ["/bin/bash", "-c", cmd_vmail + " && " + cmd_vmail_index]
  351. else:
  352. cmd = ["/bin/bash", "-c", cmd_vmail]
  353. maildir_cleanup = container.exec_run(cmd, user='vmail')
  354. return exec_run_handler('generic', maildir_cleanup)
  355. # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
  356. def container_post__exec__rspamd__worker_password(self, container_id, request_json):
  357. if 'raw' in request_json:
  358. for container in self.docker_client.containers.list(filters={"id": container_id}):
  359. cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
  360. cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
  361. matched = False
  362. for line in cmd_response.split("\n"):
  363. if '$2$' in line:
  364. hash = line.strip()
  365. hash_out = re.search('\$2\$.+$', hash).group(0)
  366. rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
  367. rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
  368. cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
  369. cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
  370. if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
  371. container.restart()
  372. matched = True
  373. if matched:
  374. res = { 'type': 'success', 'msg': 'command completed successfully' }
  375. logger.info('success changing Rspamd password')
  376. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  377. else:
  378. logger.error('failed changing Rspamd password')
  379. res = { 'type': 'danger', 'msg': 'command did not complete' }
  380. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  381. def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
  382. def recv_socket_data(c_socket, timeout):
  383. c_socket.setblocking(0)
  384. total_data=[]
  385. data=''
  386. begin=time.time()
  387. while True:
  388. if total_data and time.time()-begin > timeout:
  389. break
  390. elif time.time()-begin > timeout*2:
  391. break
  392. try:
  393. data = c_socket.recv(8192)
  394. if data:
  395. total_data.append(data.decode('utf-8'))
  396. #change the beginning time for measurement
  397. begin=time.time()
  398. else:
  399. #sleep for sometime to indicate a gap
  400. time.sleep(0.1)
  401. break
  402. except:
  403. pass
  404. return ''.join(total_data)
  405. try :
  406. socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
  407. if not cmd.endswith("\n"):
  408. cmd = cmd + "\n"
  409. socket.send(cmd.encode('utf-8'))
  410. data = recv_socket_data(socket, timeout)
  411. socket.close()
  412. return data
  413. except Exception as e:
  414. logger.error("error - exec_cmd_container: %s" % str(e))
  415. traceback.print_exc(file=sys.stdout)
  416. def exec_run_handler(type, output):
  417. if type == 'generic':
  418. if output.exit_code == 0:
  419. res = { 'type': 'success', 'msg': 'command completed successfully' }
  420. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  421. else:
  422. res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
  423. return Response(content=json.dumps(res, indent=4), media_type="application/json")
  424. if type == 'utf8_text_only':
  425. return Response(content=output.output.decode('utf-8'), media_type="text/plain")
  426. async def get_host_stats(wait=5):
  427. global host_stats_isUpdating
  428. try:
  429. system_time = datetime.now()
  430. host_stats = {
  431. "cpu": {
  432. "cores": psutil.cpu_count(),
  433. "usage": psutil.cpu_percent()
  434. },
  435. "memory": {
  436. "total": psutil.virtual_memory().total,
  437. "usage": psutil.virtual_memory().percent,
  438. "swap": psutil.swap_memory()
  439. },
  440. "uptime": time.time() - psutil.boot_time(),
  441. "system_time": system_time.strftime("%d.%m.%Y %H:%M:%S"),
  442. "architecture": platform.machine()
  443. }
  444. redis_client.set('host_stats', json.dumps(host_stats), ex=10)
  445. except Exception as e:
  446. res = {
  447. "type": "danger",
  448. "msg": str(e)
  449. }
  450. await asyncio.sleep(wait)
  451. host_stats_isUpdating = False
  452. async def get_container_stats(container_id, wait=5, stop=False):
  453. global containerIds_to_update
  454. if container_id and container_id.isalnum():
  455. try:
  456. for container in (await async_docker_client.containers.list()):
  457. if container._id == container_id:
  458. res = await container.stats(stream=False)
  459. if redis_client.exists(container_id + '_stats'):
  460. stats = json.loads(redis_client.get(container_id + '_stats'))
  461. else:
  462. stats = []
  463. stats.append(res[0])
  464. if len(stats) > 3:
  465. del stats[0]
  466. redis_client.set(container_id + '_stats', json.dumps(stats), ex=60)
  467. except Exception as e:
  468. res = {
  469. "type": "danger",
  470. "msg": str(e)
  471. }
  472. else:
  473. res = {
  474. "type": "danger",
  475. "msg": "no or invalid id defined"
  476. }
  477. await asyncio.sleep(wait)
  478. if stop == True:
  479. # update task was called second time, stop
  480. containerIds_to_update.remove(container_id)
  481. else:
  482. # call update task a second time
  483. await get_container_stats(container_id, wait=0, stop=True)
  484. if os.environ['REDIS_SLAVEOF_IP'] != "":
  485. redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
  486. else:
  487. redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)
  488. sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
  489. async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')
  490. logger.info('DockerApi started')