#!/usr/bin/env python

import errno
import hashlib
import io
import logging
import os
import pwd
import signal
import subprocess
import sys
import time

try:
    import dns.resolver
    import dns.reversename
except ImportError:
    pass


def get_uid_gid(username):
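    """Return the (uid, gid) of `username` from the passwd database."""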
    passwd_entry = pwd.getpwnam(username)
    return passwd_entry.pw_uid, passwd_entry.pw_gid


def set_logger(level=logging.DEBUG):
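    """Configure basic logging and return a module logger set to `level`."""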
    LOG_DATEFMT = "%Y-%m-%d %H:%M:%S"
    LOG_FORMAT = "%(asctime)s.%(msecs)03d - %(levelname)s - %(message)s"
    logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATEFMT)
    LOG = logging.getLogger(__name__)
    LOG.setLevel(level)
    return LOG


def send_signal(pid, sig):
    LOG.debug('Sending signal %d to pid %d', sig, pid)
    os.kill(pid, sig)


def demote(demote_uid, demote_gid):
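    """Return a callable for subprocess.Popen's preexec_fn that drops
    privileges to the given uid/gid.

    The gid is set first; once the uid has been dropped the process may
    no longer be allowed to change its group.
    """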
    def set_ids():
        os.setgid(demote_gid)
        os.setuid(demote_uid)

    return set_ids


def execute_cmd(cmd, directory, uid, gid):
    LOG.debug("Executing cmd '%s' with uid/gid %d/%d", cmd, uid, gid)
    # Drop privileges in the child process before the command is executed.
    return subprocess.Popen(cmd, cwd=directory, preexec_fn=demote(uid, gid))


def dns_query(address, record_type, retries=10, interval=5):
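    """Resolve `address` for the given record type, retrying on failure.

    Sleeps `interval` seconds before every attempt and raises once all
    `retries` attempts have failed.
    """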
    dns_response = None
    LOG.debug("DNS query for address '%s' record type '%s'", address, record_type)
    while retries > 0:
        # Wait before every attempt to give the resolver time to publish the record.
        time.sleep(interval)
        try:
            dns_response = dns.resolver.query(address, record_type)
        except Exception:
            LOG.error('Failed to get DNS records for autodiscovery, retries %d', retries)
            retries -= 1
        else:
            break

    if dns_response:
        LOG.debug('Got DNS response %s', dns_response.__dict__)
        return dns_response
    else:
        raise Exception('Failed to get DNS response')


def get_data_dir(node_ip, retries=10, interval=5):
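    """Derive the per-replica data directory from the PTR record of `node_ip`.

    The replica number embedded in the PTR name (see the example below)
    becomes the directory name: 'data/<replica number>/'.
    """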
    address = dns.reversename.from_address(node_ip)
    ptr = dns_query(address, 'PTR', retries, interval)

    if ptr:
        # Example PTR name:
        # monitoring_server.2.c81t242vtv4qobpv75mckqwob.monitoring_monitoring.
        ptr_str = str(ptr[0]).split('.')
        # ptr_str[0] = service name, monitoring_server
        # ptr_str[1] = replica number, 2
        # ptr_str[2] = docker id, c81t242vtv4qobpv75mckqwob
        # ptr_str[3] = network name, monitoring_monitoring
        if ptr_str[1]:
            return 'data/{}/'.format(ptr_str[1])

    LOG.error('Failed to discover data dir')
    raise Exception('Failed to discover data dir')


def create_data_dir(data_dir_prefix, data_dir, uid, gid):
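    """Create `data_dir` under `data_dir_prefix` if it does not already
    exist, chown it to uid/gid and return the full path."""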
    try:
        os.makedirs(os.path.join(data_dir_prefix, data_dir))
    except OSError as e:
        # An already-existing directory is fine; anything else is fatal.
        if e.errno != errno.EEXIST:
            LOG.error('Failed to create data dir, exiting')
            raise Exception('Failed to create data dir')

    os.chown(os.path.join(data_dir_prefix, data_dir), uid, gid)
    return os.path.join(data_dir_prefix, data_dir)


def calculate_hash(directory):
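    """Return a SHA-256 hex digest of the contents of the files directly
    inside `directory`; unreadable files are logged and skipped."""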
    cur_hash = hashlib.sha256()
    try:
        for filename in os.listdir(directory):
            try:
                with open(os.path.join(directory, filename), "rb") as f:
                    for chunk in iter(lambda: f.read(io.DEFAULT_BUFFER_SIZE), b""):
                        cur_hash.update(chunk)
            except IOError as e:
                LOG.error('Failed to read file: %s', e)
    except OSError as e:
        LOG.error('Failed to calculate hash: %s', e)

    return cur_hash.hexdigest()


def wait_for_process_watch_config(proc, config_dir, interval=10):
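    """Block until `proc` exits, re-hashing `config_dir` every `interval`
    seconds and sending SIGHUP to the process whenever the hash changes.

    Exits the interpreter with the process's return code.
    """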
    config_hash = calculate_hash(config_dir)
    while proc.poll() is None:
        new_hash = calculate_hash(config_dir)
        if config_hash != new_hash:
            LOG.info('Reloading service, new hash %s', new_hash)
            config_hash = new_hash
            send_signal(proc.pid, signal.SIGHUP)

        time.sleep(interval)

    code = proc.poll()
    LOG.info("Process exited with code %d", code)
    sys.exit(code)


def wait_for_process(proc):
    code = proc.wait()
    LOG.info("Process exited with code %d", code)
    sys.exit(code)


def run_service(cmd, cwd, uid, gid):
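    """Start `cmd` in `cwd` as the given uid/gid and return (Popen, pid)."""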
    proc = execute_cmd(cmd, cwd, uid, gid)
    return proc, proc.pid


LOG = set_logger()
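
# A minimal usage sketch (hypothetical, not part of the real entrypoints that
# import this module): the service name, command, paths and environment
# variable below are illustrative assumptions only.
#
#   uid, gid = get_uid_gid('grafana')
#   data_dir = create_data_dir('/var/lib/grafana',
#                              get_data_dir(os.environ['NODE_IP']),
#                              uid, gid)
#   proc, _ = run_service(['/usr/sbin/grafana-server'], data_dir, uid, gid)
#   wait_for_process_watch_config(proc, '/etc/grafana')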