#!/usr/bin/env python
#
# Copyright (C) 2012-2014 Fanout, Inc.
#
# This file is part of Pushpin.
#
# Pushpin is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Pushpin is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import sys
import os
import ConfigParser

version = '1.0.0'

config_file = "/etc/pushpin/pushpin.conf"
log_file = None
verbose = False
for arg in sys.argv:
	if arg.startswith("--config="):
		config_file = arg[9:]
	elif arg.startswith("--logfile="):
		log_file = arg[10:]
	elif arg == "--verbose":
		verbose = True
	elif arg == "--version":
		print 'pushpin-handler %s' % version
		sys.exit(1)

class ConfigWithInclude(object):
	"""Config reader honoring an optional [global] include= directive.

	Lookups consult the main config file first and fall back to the
	included file, so the main file always wins on conflicts.
	"""

	def __init__(self, fname):
		self.config_dir = os.path.dirname(fname)
		self.main = ConfigParser.ConfigParser()
		self.main.read([fname])

		self.include = None
		if self.main.has_option('global', 'include'):
			include_fname = self.main.get('global', 'include')
			if not os.path.isabs(include_fname):
				# relative include paths resolve against the main
				# config file's directory
				include_fname = os.path.join(self.config_dir, include_fname)
			self.include = ConfigParser.ConfigParser()
			self.include.read([include_fname])

	def _sources(self):
		# main config first, then the include (when present)
		if self.include is not None:
			return (self.main, self.include)
		return (self.main,)

	def has_option(self, section, key):
		for c in self._sources():
			if c.has_option(section, key):
				return True
		return False

	def get(self, section, key):
		for c in self._sources():
			if c.has_option(section, key):
				return c.get(section, key)
		raise ConfigParser.NoOptionError(key, section)

config = ConfigWithInclude(config_file)

# if configured, prefer bundled libraries over system-installed ones
libdir = config.get("global", "libdir") if config.has_option("global", "libdir") else None
if libdir:
	sys.path.insert(0, os.path.join(libdir, "handler"))

import time
import threading
import json
import copy
import logging
from logging.handlers import WatchedFileHandler
from base64 import b64decode
from setproctitle import setproctitle
import zmq
import tnetstring
import httpinterface
from validation import validate_publish, validate_http_publish, ValidationError
from conversion import ensure_utf8, convert_json_transport
from statusreasons import get_reason
import rpc

# show a friendly process name in ps output
setproctitle("pushpin-handler")

# reopen stdout file descriptor with write mode
# and 0 as the buffer size (unbuffered)
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)

logger = logging.getLogger('handler')
logger_handler = WatchedFileHandler(log_file) if log_file else logging.StreamHandler(stream=sys.stdout)
level = logging.DEBUG if verbose else logging.INFO
logger.setLevel(level)
logger_handler.setLevel(level)
formatter = logging.Formatter(fmt='%(levelname)s %(asctime)s.%(msecs)03d %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logger_handler.setFormatter(formatter)
logger.addHandler(logger_handler)

# connection tracking parameters (seconds)
CONNECTION_TTL = 600
CONNECTION_REFRESH = 540
CONNECTION_LINGER = 60

# unique id for this handler process, used as the "from" address in messages
instance_id = "pushpin-handler_%d" % os.getpid()

def _optional(section, key, default=None):
	# return a config value, or default when the option is absent
	if config.has_option(section, key):
		return config.get(section, key)
	return default

# required zmq endpoint lists
m2a_in_stream_specs = config.get("handler", "m2a_in_stream_specs").split(",")
m2a_out_specs = config.get("handler", "m2a_out_specs").split(",")

# whether identical requests may share a single proxied response
share_all = (_optional("handler", "share_all") == "true")

# optional zmq endpoints
stats_spec = _optional("handler", "stats_spec")
command_spec = _optional("handler", "command_spec")
ws_control_in_spec = _optional("handler", "proxy_ws_control_in_spec")
ws_control_out_spec = _optional("handler", "proxy_ws_control_out_spec")
proxy_stats_spec = _optional("handler", "proxy_stats_spec")
proxy_command_spec = _optional("handler", "proxy_command_spec")

ctx = zmq.Context()

class Hold(object):
	# A held (long-polling or streaming) HTTP request awaiting published data.
	def __init__(self, rid, request, mode, response, auto_cross_origin, jsonp_callback):
		self.rid = rid  # (sender address, request id) tuple identifying the request
		self.request = request  # original request data, echoed for CORS handling
		self.mode = mode  # 'response' or 'stream'
		self.response = response  # response template to inherit headers/body from
		self.auto_cross_origin = auto_cross_origin  # apply CORS headers automatically
		self.jsonp_callback = jsonp_callback  # wrap replies in JSONP when set
		self.expire_time = None  # absolute unix time when the hold times out
		self.last_keepalive = None  # unix time of the last keepalive

class WsSession(object):
	# A tracked websocket session, keyed by connection id.
	def __init__(self, cid):
		self.cid = cid  # connection id assigned by the proxy
		self.expire_time = None  # absolute unix time after which the session is stale
		self.channel_prefix = None  # prefix applied to channel names for this session

class Subscription(object):
	# A (mode, channel) subscription reported to the stats system.
	def __init__(self, mode, channel):
		self.mode = mode  # 'response', 'stream', or 'ws'
		self.channel = channel
		self.expire_time = None  # when set, the sub is flagged for deletion
		self.last_keepalive = None  # unix time of the last keepalive

class ConnectionInfo(object):
	# Per-connection metadata reported to the stats system.
	# note: 'id'/'type' shadow builtins but are kept for caller compatibility
	def __init__(self, id, type):
		self.id = id  # "sender:request-id" connection identifier
		self.type = type  # e.g. "http"
		self.route = None  # proxy route the connection arrived on
		self.peer_address = None  # client address, if known
		self.ssl = False  # whether the client connection used https
		self.linger = False  # connection kept briefly for retry bookkeeping
		self.last_keepalive = None  # unix time of the last keepalive

# guards the hold/session/channel tables below
lock = threading.Lock()
response_channels = dict()  # channel -> {rid: Hold} for 'response'-mode holds
response_lastids = dict()  # channel -> last published item id (prev-id checks)
stream_channels = dict()  # channel -> {rid: Hold} for 'stream'-mode holds
channels_by_req = dict()  # rid -> set of (mode, channel) the request is bound to
ws_sessions = dict()  # cid -> WsSession
ws_channels = dict()  # channel -> {cid: ...} websocket subscriptions

# key=(type, channel)
subs = dict()

# guards the stats tables below
stats_lock = threading.Lock()
stats_activity = dict() # route, count
conns = dict() # conn id, ConnectionInfo

# assumes state is locked
def remove_from_req_channels(rid, response=True, stream=True):
	"""Unbind request *rid* from its channels.

	*response*/*stream* select which hold tables to clean. Drained
	per-channel tables and the request's binding set are deleted.
	"""
	req_channels = channels_by_req.get(rid)
	if req_channels is None:
		return

	# map each mode we were asked to clean to its hold table
	tables = dict()
	if response:
		tables['response'] = response_channels
	if stream:
		tables['stream'] = stream_channels

	removed = set()
	for mode, channel in req_channels:
		table = tables.get(mode)
		if table is None:
			continue
		hchannels = table.get(channel)
		if hchannels is not None and rid in hchannels:
			del hchannels[rid]
			if not hchannels:
				del table[channel]
		removed.add((mode, channel))

	req_channels -= removed
	if not req_channels:
		del channels_by_req[rid]

# assumes state is locked
def remove_from_response_channels(rid):
	# convenience wrapper: drop only the 'response'-mode bindings
	remove_from_req_channels(rid, stream=False)

def headernames_contains(headers, name):
	"""Case-insensitively test whether *name* appears in an iterable of header names."""
	wanted = name.lower()
	return any(candidate.lower() == wanted for candidate in headers)

def header_get(headers, name):
	"""Return the first value of header *name* (case-insensitive), or None.

	*headers* may be a list of [name, value] pairs or a dict.
	"""
	lname = name.lower()
	if isinstance(headers, list):
		for entry in headers:
			if entry[0].lower() == lname:
				return entry[1]
		return None
	for k, v in headers.items():
		if k.lower() == lname:
			return v
	return None

# return list of strings
def header_get_all(headers, name):
	"""Return every comma-separated value of header *name*, case-insensitive.

	Each matching header value is split on commas and stripped; empty
	pieces are discarded. *headers* may be a list of pairs or a dict.
	"""
	lname = name.lower()
	if isinstance(headers, list):
		raw = [entry[1] for entry in headers if entry[0].lower() == lname]
	else:
		raw = [v for k, v in headers.items() if k.lower() == lname]
	out = list()
	for value in raw:
		out.extend(p.strip() for p in value.split(',') if p.strip())
	return out

def header_remove(headers, name):
	"""Delete the first case-variant of header *name*, if present.

	Only the first match is removed, mirroring single-valued usage.
	"""
	lname = name.lower()
	if isinstance(headers, list):
		for idx, entry in enumerate(headers):
			if entry[0].lower() == lname:
				del headers[idx]
				return
	else:
		for k in list(headers.keys()):
			if k.lower() == lname:
				del headers[k]
				return

def header_set(headers, name, value):
	"""Set header *name* to *value* in a dict, removing any case-variant first."""
	header_remove(headers, name)
	headers[name] = value

def header_names_contains(header_names, name):
	"""Case-insensitively test whether *name* appears in *header_names*."""
	# note: duplicates headernames_contains(); kept as a separate entry point
	target = name.lower()
	return any(n.lower() == target for n in header_names)

# return (initial value, params dict)
def _parse_header_params(value):
	parts = value.split(';')
	v = parts[0].strip()
	params = dict()
	for n in range(1, len(parts)):
		part = parts[n].strip()
		at = part.find('=')
		if at != -1:
			pname = part[:at]
			pval = part[at + 1:]
		else:
			pname = part
			pval = None
		if pname:
			params[pname] = pval
	return (v, params)

# return (initial value, params dict) or None
def header_get_parsed(headers, name):
	"""Fetch header *name* and parse its value/params, or None if absent."""
	h = header_get(headers, name)
	return _parse_header_params(h) if h is not None else None

# return list of (initial value, params dict)
def header_get_all_parsed(headers, name):
	"""Fetch all values of header *name* and parse each into (value, params)."""
	return [_parse_header_params(h) for h in header_get_all(headers, name)]

# templates for serializing responses in the old (raw HTTP) transport format
HTTP_FORMAT = "HTTP/1.1 %(code)s %(status)s\r\n%(headers)s\r\n\r\n%(body)s"
HTTP_FORMAT_NOHEADERS = "HTTP/1.1 %(code)s %(status)s\r\n\r\n%(body)s"

def http_response(body, code, status, headers):
	"""Serialize a full HTTP/1.1 response string, forcing Content-Length.

	*headers* (a dict) is modified in place to carry the body length.
	"""
	header_set(headers, "Content-Length", str(len(body)))
	hblock = "\r\n".join("%s: %s" % (k, v) for k, v in headers.items())
	return HTTP_FORMAT % {"code": code, "status": status, "headers": hblock, "body": body}

def http_response_nolen(body, code, status, headers):
	"""Serialize an HTTP/1.1 response with no Content-Length (streaming).

	Any existing Content-Length header is stripped from *headers*.
	"""
	header_remove(headers, "Content-Length")
	payload = {"code": code, "status": status, "body": body}
	if not headers:
		return HTTP_FORMAT_NOHEADERS % payload
	payload["headers"] = "\r\n".join("%s: %s" % (k, v) for k, v in headers.items())
	return HTTP_FORMAT % payload

def reply_http_old(sock, rid, code, status, headers, body, nolen=False):
	"""Publish a full HTTP response for *rid* using the old raw-HTTP framing."""
	def _u8(value):
		# the wire format requires bytes; encode any unicode values
		if isinstance(value, unicode):
			return value.encode("utf-8")
		return value

	status = _u8(status)
	headers = dict((_u8(k), _u8(v)) for k, v in headers.items())
	body = _u8(body)

	if nolen:
		msg = http_response_nolen(body, code, status, headers)
	else:
		msg = http_response(body, code, status, headers)

	# old-style framing: "<address> <len>:<id>, <raw http>"
	header = "%s %d:%s," % (rid[0], len(rid[1]), rid[1])
	m_raw = header + " " + msg
	logger.debug("OUT publish: %s" % m_raw)
	sock.send(m_raw)

def reply_http_chunk_old(sock, rid, content):
	"""Publish a raw body chunk for *rid* using the old framing."""
	prefix = "%s %d:%s," % (rid[0], len(rid[1]), rid[1])
	sock.send(prefix + " " + content)

# return body size sent
def reply_http(sock, rid, code, reason, headers, body, nolen=False, seq=None):
	"""Publish an HTTP response for *rid* in the tnetstring transport.

	When *nolen* is set the reply is the start of a stream (no
	Content-Length, "more" flag set). Returns the number of body bytes
	sent so callers can track send credits.
	"""
	def _u8(value):
		# the wire format requires bytes; encode any unicode values
		if isinstance(value, unicode):
			return value.encode("utf-8")
		return value

	reason = _u8(reason)
	headers = dict((_u8(k), _u8(v)) for k, v in headers.items())
	body = _u8(body)

	if nolen:
		# streaming: no length, more data will follow
		header_remove(headers, "Content-Length")
	else:
		header_set(headers, "Content-Length", str(len(body)))

	out = dict()
	out["from"] = instance_id
	out["id"] = rid[1]
	if seq is not None:
		out["seq"] = seq
	out["code"] = code
	out["reason"] = reason
	out["headers"] = [[k, v] for k, v in headers.items()]
	if body:
		out["body"] = body
	if nolen:
		out["more"] = True

	m_raw = rid[0] + " T" + tnetstring.dumps(out)
	logger.debug("OUT publish: %s" % m_raw)
	sock.send(m_raw)
	return len(body)

def reply_http_chunk(sock, rid, content, seq=None):
	"""Publish a streaming body chunk for *rid* in the tnetstring transport."""
	out = dict()
	out["from"] = instance_id
	out["id"] = rid[1]
	if seq is not None:
		out["seq"] = seq
	out["body"] = content
	# always part of an ongoing stream
	out["more"] = True

	m_raw = rid[0] + " T" + tnetstring.dumps(out)
	logger.debug("OUT publish: %s" % m_raw)
	sock.send(m_raw)

# CORS "simple response headers": always readable by clients, so they are
# never listed in Access-Control-Expose-Headers
simple_headers = set([
	"Cache-control",
	"Content-Language",
	"Content-Length",
	"Content-Type",
	"Expires",
	"Last-Modified",
	"Pragma",
])

# modifies response_headers as needed
def apply_cors_headers(request_headers, response_headers):
	"""Fill in CORS response headers without overriding ones already set."""
	if not header_get(response_headers, "Access-Control-Allow-Methods"):
		acr_method = header_get(request_headers, "Access-Control-Request-Method")
		# echo the requested method, or advertise everything we support
		allow = acr_method if acr_method else "OPTIONS, HEAD, GET, POST, PUT, DELETE"
		header_set(response_headers, "Access-Control-Allow-Methods", allow)

	if not header_get(response_headers, "Access-Control-Allow-Headers"):
		acr_headers = header_get(request_headers, "Access-Control-Request-Headers")
		allow_headers = list()
		if acr_headers:
			allow_headers = [n.strip() for n in acr_headers.split(",") if n.strip()]
		if allow_headers:
			header_set(response_headers, "Access-Control-Allow-Headers", ", ".join(allow_headers))

	if not header_get(response_headers, "Access-Control-Expose-Headers"):
		# expose every non-simple, non-CORS header we are sending
		expose_headers = list()
		for name in response_headers.keys():
			if header_names_contains(simple_headers, name):
				continue
			if name.lower().startswith("access-control-"):
				continue
			if header_names_contains(expose_headers, name):
				continue
			expose_headers.append(name)
		if expose_headers:
			header_set(response_headers, "Access-Control-Expose-Headers", ", ".join(expose_headers))

	if not header_get(response_headers, "Access-Control-Allow-Credentials"):
		header_set(response_headers, "Access-Control-Allow-Credentials", "true")

	if not header_get(response_headers, "Access-Control-Allow-Origin"):
		header_set(response_headers, "Access-Control-Allow-Origin",
			header_get(request_headers, "Origin") or "*")

def inspect_worker():
	"""Answer proxy inspect requests, always telling the proxy to proxy."""
	sock = ctx.socket(zmq.REP)
	sock.connect(config.get("handler", "proxy_inspect_spec"))

	while True:
		req = tnetstring.loads(sock.recv())
		logger.debug("IN inspect: %s" % req)

		# reply saying to always proxy
		req_id = req["id"]
		method = req["method"]
		uri = req["uri"]

		resp = dict()
		resp["id"] = req_id
		resp["no-proxy"] = False
		if share_all:
			# identical requests may share a single proxied response
			resp["sharing-key"] = method + '|' + uri

		logger.debug("OUT inspect: %s" % resp)
		sock.send(tnetstring.dumps(resp))

	sock.close()  # not reached; loop above runs forever

def accept_worker():
	"""Accept requests handed off by the proxy and register holds.

	Parses GRIP instructions (response headers or an
	application/grip-instruct body), binds each request to its channels,
	and acks (response mode) or sends the initial reply (stream mode).
	On parse failure, every request in the batch gets a 502.
	"""
	sock = ctx.socket(zmq.PULL)
	sock.connect(config.get("handler", "proxy_accept_in_spec"))

	out_sock = ctx.socket(zmq.PUB)
	for spec in m2a_out_specs:
		out_sock.connect(spec)

	retry_sock = ctx.socket(zmq.PUSH)
	retry_sock.connect(config.get("handler", "proxy_retry_out_spec"))

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		m_raw = sock.recv()
		m = tnetstring.loads(m_raw)
		logger.debug("IN accept: %s" % m)

		reqs = m["requests"]

		try:
			if "route" in m:
				route = m["route"]
			else:
				route = ""
			if "channel-prefix" in m:
				channel_prefix = m["channel-prefix"]
			else:
				channel_prefix = ""

			mode = None
			channels = list()
			timeout = 55
			resp_headers = m['response'].get('headers') or {}

			# headers-based grip
			grip_hold = header_get_parsed(resp_headers, 'Grip-Hold')
			if grip_hold:
				mode = grip_hold[0]
			grip_channels = header_get_all_parsed(resp_headers, 'Grip-Channel')
			if grip_channels:
				for c in grip_channels:
					name = channel_prefix + c[0]
					prev_id = c[1].get('prev-id')
					channels.append((name, prev_id))
			grip_timeout = header_get_parsed(resp_headers, 'Grip-Timeout')
			if grip_timeout:
				timeout = int(grip_timeout[0])
			grip_expose_headers = header_get_all(resp_headers, 'Grip-Expose-Headers')

			instruct = None
			ctype = header_get_parsed(resp_headers, 'Content-Type')
			# bug fix: a response with no Content-Type header used to raise
			# here (ctype is None), turning every such request into a 502
			if ctype is not None and ctype[0] == 'application/grip-instruct':
				# body-based grip: instructions are JSON in the response body
				if m['response']['code'] != 200:
					raise ValueError('response code for grip-instruct must be 200')
				instruct = json.loads(m['response']['body'])
				hold = instruct['hold']
				mode = hold.get('mode')
				if mode is None:
					mode = 'response'
				for hc in hold['channels']:
					name = channel_prefix + hc['name']
					prev_id = hc.get('prev-id')
					channels.append((name, prev_id))
				if 'timeout' in hold:
					timeout = int(hold['timeout'])
				response = instruct.get('response')
				if response is None:
					response = dict()
					response['body'] = ''
				if "headers" in response and isinstance(response["headers"], list):
					d = dict()
					for i in response["headers"]:
						d[i[0]] = i[1]
					response["headers"] = d
				if "body-bin" in response:
					response["body"] = b64decode(response["body-bin"])
					del response["body-bin"]
				elif "body" in response:
					response["body"] = response["body"].encode("utf-8")
				else:
					response["body"] = ""
			else:
				# plain response: strip grip headers, honor Grip-Expose-Headers
				response = m['response']
				if "headers" in response and isinstance(response["headers"], list):
					d = dict()
					for i in response["headers"]:
						if i[0].lower().startswith('grip-'):
							continue
						if grip_expose_headers and not headernames_contains(grip_expose_headers, i[0]):
							continue
						d[i[0]] = i[1]
					response["headers"] = d
				if 'body' not in response:
					response['body'] = ''

			if mode != 'response' and mode != 'stream':
				raise ValueError('bad mode')
		except:
			logger.debug("failed to parse accept instructions")
			for req in reqs:
				rid = (req["rid"]["sender"], req["rid"]["id"])
				rheaders = dict()
				rheaders['Content-Type'] = 'text/plain'
				reply_http(out_sock, rid, 502, 'Bad Gateway', rheaders, 'Error while proxying to origin.\n')
			continue

		logger.debug("accepting %d requests" % len(reqs))

		for req in reqs:
			rid = (req["rid"]["sender"], req["rid"]["id"])

			# track the connection for stats reporting
			stats_lock.acquire()
			ci = ConnectionInfo("%s:%s" % (rid[0], rid[1]), "http")
			ci.route = route
			if "peer-address" in req:
				ci.peer_address = req["peer-address"]
			if "https" in req:
				ci.ssl = req["https"]
			ci.last_keepalive = int(time.time())
			# note: if we had a lingering connection, this will replace it
			conns[ci.id] = ci
			stats_lock.release()

			h = Hold(rid, m["request-data"], mode, response, req.get("auto-cross-origin"), req.get("jsonp-callback"))
			now = int(time.time())
			h.last_keepalive = now
			h.out_seq = req["out-seq"]
			h.out_credits = req["out-credits"]

			notify_sub = False
			if mode == "response":
				# bind channels
				quit = False
				lock.acquire()
				for channel, prev_id in channels:
					logger.debug("adding response hold on %s" % channel)
					h.expire_time = int(time.time()) + timeout
					if prev_id is not None:
						last_id = response_lastids.get(channel)
						if last_id is not None and last_id != prev_id:
							del response_lastids[channel]
							lock.release()
							stats_lock.acquire()
							ci.linger = True
							ci.last_keepalive = int(time.time())
							stats_lock.release()
							# note: we don't need to do a handoff here because we didn't ack to take over yet
							logger.debug("lastid inconsistency (got=%s, expected=%s), retrying" % (prev_id, last_id))
							r = dict()
							r["requests"] = [req] # only retry the request that failed the check
							r["request-data"] = m["request-data"]
							if "inspect" in m:
								r["inspect"] = m["inspect"]
							logger.debug("OUT retry: %s" % r)
							r_raw = tnetstring.dumps(r)
							retry_sock.send(r_raw)
							quit = True
							break
					hchannel = response_channels.get(channel)
					if not hchannel:
						hchannel = dict()
						response_channels[channel] = hchannel
					hchannel[rid] = h
					req_channels = channels_by_req.get(rid)
					if not req_channels:
						req_channels = set()
						channels_by_req[rid] = req_channels
					req_channels.add((mode, channel))
					sub_key = (mode, channel)
					sub = subs.get(sub_key)
					if not sub:
						sub = Subscription(mode, channel)
						sub.last_keepalive = now
						subs[sub_key] = sub
						notify_sub = True
					sub.expire_time = None
				if quit:
					# we already unlocked if this is set
					continue
				lock.release()

				# ack
				out = dict()
				out['from'] = instance_id
				out['id'] = rid[1]
				out['type'] = 'keep-alive'
				m_raw = rid[0] + ' T' + tnetstring.dumps(out)
				logger.debug('OUT publish (ack response accept): %s' % m_raw)
				out_sock.send(m_raw)
			else: # stream
				# initial reply
				if "code" in response:
					rcode = response["code"]
				else:
					rcode = 200

				if "reason" in response:
					rreason = response["reason"]
				else:
					rreason = get_reason(rcode)

				if "headers" in response:
					rheaders = response["headers"]
				else:
					rheaders = dict()

				body_size = reply_http(out_sock, rid, rcode, rreason, rheaders, response.get("body"), True, h.out_seq)

				# bind channels
				lock.acquire()
				h.out_credits -= body_size
				h.out_seq += 1
				for channel, prev_id in channels:
					logger.debug("adding stream hold on %s" % channel)
					hchannel = stream_channels.get(channel)
					if not hchannel:
						hchannel = dict()
						stream_channels[channel] = hchannel
					hchannel[rid] = h
					req_channels = channels_by_req.get(rid)
					if not req_channels:
						req_channels = set()
						channels_by_req[rid] = req_channels
					req_channels.add((mode, channel))
					sub_key = (mode, channel)
					sub = subs.get(sub_key)
					if not sub:
						sub = Subscription(mode, channel)
						sub.last_keepalive = now
						subs[sub_key] = sub
						notify_sub = True
					sub.expire_time = None
				lock.release()

			if notify_sub:
				# FIXME: only the channel left over from the last loop
				# iteration is reported, even if several new subscriptions
				# were created above
				out = dict()
				out['from'] = instance_id
				out['mode'] = ensure_utf8(mode)
				out['channel'] = ensure_utf8(channel)
				out['ttl'] = 60
				stats_sock.send('sub ' + tnetstring.dumps(out))

	sock.close()  # not reached; loop above runs forever

def push_in_zmq_worker():
	"""Receive published items over zmq, validate, and forward internally.

	Invalid items (unparseable tnetstrings or failing validate_publish)
	are logged and dropped.
	"""
	in_sock = ctx.socket(zmq.PULL)
	in_sock.bind(config.get("handler", "push_in_spec"))

	out_sock = ctx.socket(zmq.PUSH)
	out_sock.connect("inproc://push_in")

	while True:
		m_raw = in_sock.recv()
		try:
			try:
				m = tnetstring.loads(m_raw)
			except:
				raise ValidationError("bad format (not a tnetstring)")

			m = validate_publish(m)

		except ValidationError as e:
			logger.debug("warning: %s, dropping" % e)
			# bug fix: actually drop the invalid message; previously this
			# fell through and forwarded an unvalidated (or stale) message
			continue

		out_sock.send(tnetstring.dumps(m))

	out_sock.linger = 0
	out_sock.close()

# return None for success or string on error
def push_in_http_handler(context, m):
	"""Validate an HTTP-published message and forward each item internally."""
	out_sock = context["out_sock"]

	try:
		m = validate_http_publish(m)
	except ValidationError as e:
		return e.message

	for item in m["items"]:
		out = dict()

		# copy the optional identity fields, utf-8 encoded
		for field in ("channel", "id", "prev-id"):
			value = item.get(field)
			if value is not None:
				out[field] = ensure_utf8(value)

		for transport in ("http-response", "http-stream", "ws-message"):
			if transport in item:
				out[transport] = convert_json_transport(transport, item[transport])

		out_sock.send(tnetstring.dumps(out))

def push_in_http_worker():
	"""Run the HTTP publish interface, feeding items to the internal push socket."""
	out_sock = ctx.socket(zmq.PUSH)
	out_sock.connect("inproc://push_in")

	httpinterface.run(
		config.get("handler", "push_in_http_addr"),
		int(config.get("handler", "push_in_http_port")),
		push_in_http_handler,
		{"out_sock": out_sock})

	out_sock.linger = 0
	out_sock.close()

def push_in_worker(c):
	"""Main publish loop: fan received items out to holds and ws sessions.

	*c* is a condition variable notified once the inproc pull socket is
	bound, so the feeder workers can safely connect to it.
	"""
	in_sock = ctx.socket(zmq.PULL)
	in_sock.bind("inproc://push_in")
	c.acquire()
	c.notify()
	c.release()

	out_sock = ctx.socket(zmq.PUB)
	for spec in m2a_out_specs:
		out_sock.connect(spec)

	ws_control_out_sock = ctx.socket(zmq.PUSH)
	ws_control_out_sock.connect(ws_control_out_spec)

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		m_raw = in_sock.recv()
		m = tnetstring.loads(m_raw)
		logger.debug("IN publish: %s" % m)
		channel = m["channel"]

		if "http-response" in m:
			# a response item answers and closes every hold on the channel
			lock.acquire()
			hchannel = response_channels.get(channel)
			if hchannel:
				holds = hchannel.values()
				del response_channels[channel]
				for h in holds:
					req_channels = channels_by_req[h.rid]
					req_channels.remove(('response', channel))
					remove_from_response_channels(h.rid)
			else:
				holds = list()
			sub_key = ('response', channel)
			sub = subs.get(sub_key)
			if sub and sub.expire_time is None:
				# flag for deletion soon
				sub.expire_time = int(time.time()) + 60
			item_id = m.get("id")
			if item_id is not None:
				response_lastids[channel] = item_id
			lock.release()
			logger.debug("relaying to %d subscribers" % len(holds))
			# note: local name shadows the module-level http_response()
			# helper, which is not used in this function
			http_response = m["http-response"]

			if "code" in http_response:
				pcode = http_response["code"]
			else:
				pcode = 200

			if "reason" in http_response:
				preason = http_response["reason"]
			else:
				preason = get_reason(pcode)

			if "headers" in http_response:
				pheaders = http_response["headers"]
				if isinstance(pheaders, list):
					d = dict()
					for i in pheaders:
						d[i[0]] = i[1]
					pheaders = d
			else:
				pheaders = dict()

			if "body" in http_response:
				pbody = http_response["body"]
			else:
				pbody = ""

			grip_expose_headers = header_get_all(pheaders, 'Grip-Expose-Headers')
			header_remove(pheaders, 'Grip-Expose-Headers')

			for n, h in enumerate(holds):
				# inherit any headers from the timeout response
				if 'headers' in h.response:
					rheaders = copy.deepcopy(h.response['headers'])
				else:
					rheaders = dict()

				# apply the headers from the pushed message
				for k, v in pheaders.iteritems():
					rheaders[k] = v

				# if Grip-Expose-Headers was provided in the pushed message, filter the results
				if grip_expose_headers:
					rkeys = rheaders.keys()
					for k in rkeys:
						if not headernames_contains(grip_expose_headers, k):
							del rheaders[k]

				headers = dict()
				if h.jsonp_callback:
					# wrap the whole response in a JSONP callback invocation
					result = dict()
					result["code"] = pcode
					result["reason"] = preason
					result["headers"] = dict()
					if rheaders:
						for k, v in rheaders.iteritems():
							result["headers"][k] = v
					header_set(result["headers"], "Content-Length", str(len(pbody)))
					result["body"] = pbody

					body = h.jsonp_callback + "(" + json.dumps(result) + ");\n"
					header_set(headers, "Content-Type", "application/javascript")
					header_set(headers, "Content-Length", str(len(body)))
					reply_http(out_sock, h.rid, 200, "OK", headers, body)
				else:
					if rheaders:
						for k, v in rheaders.iteritems():
							headers[k] = v

					if h.auto_cross_origin:
						apply_cors_headers(h.request["headers"], headers)

					reply_http(out_sock, h.rid, pcode, preason, headers, pbody)

				# brief pause every 10 sends to avoid flooding the socket
				if n % 10 == 0:
					time.sleep(0.0005)

				# report request done

				stats_lock.acquire()
				ci = conns.get("%s:%s" % (h.rid[0], h.rid[1]))
				if ci is not None:
					ci = copy.deepcopy(ci)
					del conns[ci.id]
				stats_lock.release()

				if ci is not None:
					out = dict()
					out['from'] = instance_id
					if ci.route:
						out['route'] = ci.route
					out['id'] = ci.id
					out['unavailable'] = True
					stats_sock.send('conn ' + tnetstring.dumps(out))

			rcount = len(holds)
			if rcount > 0:
				out = dict()
				out['from'] = instance_id
				out['channel'] = ensure_utf8(channel)
				if 'id' in m:
					out['item-id'] = ensure_utf8(m['id'])
				out['count'] = rcount
				out['transport'] = 'http-response'
				stats_sock.send('message ' + tnetstring.dumps(out))

		if "http-stream" in m:
			# TODO: support close action and report immediate unsubscribe via stats
			lock.acquire()
			hchannel = stream_channels.get(channel)
			if hchannel:
				holds = hchannel.values()
			else:
				holds = list()
			lock.release()
			logger.debug("relaying to %d subscribers" % len(holds))
			for h in holds:
				content = m["http-stream"]["content"]
				if content:
					if h.out_credits < len(content):
						logger.debug('not enough send credits, dropping')
						continue
					reply_http_chunk(out_sock, h.rid, content, h.out_seq)
					lock.acquire()
					h.out_credits -= len(content)
					h.out_seq += 1
					lock.release()
			rcount = len(holds)
			if rcount > 0:
				out = dict()
				out['from'] = instance_id
				out['channel'] = ensure_utf8(channel)
				if 'id' in m:
					out['item-id'] = ensure_utf8(m['id'])
				out['count'] = rcount
				out['transport'] = 'http-stream'
				stats_sock.send('message ' + tnetstring.dumps(out))

		if "ws-message" in m:
			lock.acquire()
			hchannel = ws_channels.get(channel)
			if hchannel:
				cids = hchannel.keys()
			else:
				cids = list()
			lock.release()
			logger.debug("relaying to %d subscribers" % len(cids))
			for cid in cids:
				t = m['ws-message']
				if 'content-bin' in t:
					content_type = 'binary'
					# bug fix: binary payloads live under 'content-bin';
					# this previously read 'content', raising KeyError
					content = t['content-bin']
				elif "content" in t:
					content_type = 'text'
					content = t['content']
				else:
					content = None

				if content is not None:
					item = dict()
					item['cid'] = cid
					item['type'] = 'send'
					item['content-type'] = content_type
					item['message'] = content
					out = dict()
					out['items'] = [item]
					logger.debug('OUT wscontrol: %s' % out)
					ws_control_out_sock.send(tnetstring.dumps(out))
			rcount = len(cids)
			if rcount > 0:
				out = dict()
				out['from'] = instance_id
				out['channel'] = ensure_utf8(channel)
				if 'id' in m:
					out['item-id'] = ensure_utf8(m['id'])
				out['count'] = rcount
				out['transport'] = 'ws-message'
				stats_sock.send('message ' + tnetstring.dumps(out))

	in_sock.close()

def session_worker():
	"""Handle per-session control messages arriving on the m2a stream socket.

	'error'/'cancel' messages tear down any holds for the request; 'credit'
	messages replenish a hold's send budget; messages for unknown sessions
	are answered with a cancel.
	"""
	in_sock = ctx.socket(zmq.DEALER)
	in_sock.identity = instance_id
	for spec in m2a_in_stream_specs:
		in_sock.connect(spec)

	out_sock = ctx.socket(zmq.PUB)
	for spec in m2a_out_specs:
		out_sock.connect(spec)

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		m_list = in_sock.recv_multipart()
		# second part is the payload; first byte is the format marker
		m = tnetstring.loads(m_list[1][1:])
		logger.debug('IN session: %s' % m)
		mtype = m.get('type')
		notify_unsubs = set()
		if mtype is not None and (mtype == 'error' or mtype == 'cancel'):
			rid = (m['from'], m['id'])
			logger.debug('cleaning up subscriber %s' % repr(rid))
			now = int(time.time())
			lock.acquire()
			# FIXME: use channels_by_req for lookup
			channels = set()
			for channel, hchannels in response_channels.iteritems():
				channels.add(channel)
				if rid in hchannels:
					del hchannels[rid]
			# drop drained response channels; their subs linger briefly
			for channel in channels:
				if channel in response_channels and len(response_channels[channel]) == 0:
					del response_channels[channel]
					sub_key = ('response', channel)
					sub = subs.get(sub_key)
					if sub and sub.expire_time is None:
						# flag for deletion soon
						sub.expire_time = now + 60
			channels = set()
			for channel, hchannels in stream_channels.iteritems():
				channels.add(channel)
				if rid in hchannels:
					del hchannels[rid]
			# drop drained stream channels; their subs are removed at once
			for channel in channels:
				if channel in stream_channels and len(stream_channels[channel]) == 0:
					del stream_channels[channel]
					sub_key = ('stream', channel)
					sub = subs.get(sub_key)
					if sub:
						del subs[sub_key]
						notify_unsubs.add(sub_key)
			remove_from_req_channels(rid)
			lock.release()

			# report the connection as gone to the stats system
			stats_lock.acquire()
			ci = conns.get("%s:%s" % (rid[0], rid[1]))
			if ci is not None:
				ci = copy.deepcopy(ci)
				del conns[ci.id]
			stats_lock.release()

			if ci is not None:
				out = dict()
				out['from'] = instance_id
				if ci.route:
					out['route'] = ci.route
				out['id'] = ci.id
				out['unavailable'] = True
				stats_sock.send('conn ' + tnetstring.dumps(out))

		elif mtype is not None:
			# is this a known session?
			rid = (m['from'], m['id'])

			h = None
			lock.acquire()
			for hchannels in response_channels.itervalues():
				if rid in hchannels:
					h = hchannels[rid]
					break
			if h is None:
				for hchannels in stream_channels.itervalues():
					if rid in hchannels:
						h = hchannels[rid]
						break
			lock.release()

			if h is not None:
				if mtype == 'credit':
					credits = m['credits']
					lock.acquire()
					h.out_credits += credits
					lock.release()
					logger.debug('received %d credits, now %d' % (credits, h.out_credits))
			else:
				# no such session, send cancel
				out = dict()
				out['from'] = instance_id
				out['id'] = m['id']
				out['type'] = 'cancel'
				m_raw = m['from'] + ' T' + tnetstring.dumps(out)
				logger.debug('OUT publish: %s' % m_raw)
				out_sock.send(m_raw)

		# report any subscriptions removed above
		for sub_key in notify_unsubs:
			out = dict()
			out['from'] = instance_id
			out['mode'] = ensure_utf8(sub_key[0])
			out['channel'] = ensure_utf8(sub_key[1])
			out['unavailable'] = True
			stats_sock.send('sub ' + tnetstring.dumps(out))

def ws_control_worker():
	"""Process WebSocket control messages from the proxy.

	Pulls tnetstring-encoded batches from ws_control_in_spec and handles
	three item types:
	  - 'here': register/refresh a WsSession (60s expiry).
	  - 'gone'/'cancel': drop the session, remove it from all ws channels,
	    and report any now-empty subscriptions as unavailable.
	  - 'grip': a JSON GRIP message from the session; subscribe/unsubscribe
	    update ws_channels/subs, detach is echoed back out the control socket.
	Subscription changes are reported to the in-process stats worker.
	"""
	in_sock = ctx.socket(zmq.PULL)
	in_sock.connect(ws_control_in_spec)

	out_sock = ctx.socket(zmq.PUSH)
	out_sock.connect(ws_control_out_spec)

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		m_raw = in_sock.recv()
		m = tnetstring.loads(m_raw)
		logger.debug('IN wscontrol: %s' % m)

		now = int(time.time())

		for item in m['items']:
			mtype = item.get('type')
			if mtype == 'here':
				cid = item['cid']
				channel_prefix = item.get('channel-prefix')
				lock.acquire()
				s = ws_sessions.get(cid)
				if not s:
					s = WsSession(cid)
					s.channel_prefix = channel_prefix
					ws_sessions[cid] = s
					logger.debug('added ws session: %s' % cid)
				# refresh expiry on every 'here', new session or not
				s.expire_time = now + 60
				lock.release()
			elif mtype == 'gone' or mtype == 'cancel':
				cid = item['cid']

				notify_unsubs = set()

				lock.acquire()
				s = ws_sessions.get(cid)
				if s:
					del ws_sessions[cid]
					# remove the session from every channel, then prune
					# channels that became empty
					channels = set()
					for channel, hchannels in ws_channels.iteritems():
						channels.add(channel)
						if cid in hchannels:
							del hchannels[cid]
					for channel in channels:
						if channel in ws_channels and len(ws_channels[channel]) == 0:
							del ws_channels[channel]
							sub_key = ('ws', channel)
							sub = subs.get(sub_key)
							if sub:
								del subs[sub_key]
								notify_unsubs.add(sub_key)
					logger.debug('removed ws session: %s' % cid)
				lock.release()

				# notify stats outside the lock
				for sub_key in notify_unsubs:
					out = dict()
					out['from'] = instance_id
					out['mode'] = ensure_utf8(sub_key[0])
					out['channel'] = ensure_utf8(sub_key[1])
					out['unavailable'] = True
					stats_sock.send('sub ' + tnetstring.dumps(out))
			elif mtype == 'grip':
				cid = item['cid']

				# malformed GRIP messages are ignored (gm stays None)
				try:
					gm = json.loads(item['message'])
					gtype = gm['type']
					if gtype == 'subscribe' or gtype == 'unsubscribe':
						channel = gm['channel']
					else:
						channel = None
				except Exception:
					gm = None

				notify_sub = False
				notify_unsub = False
				notify_detach = False

				if gm:
					lock.acquire()
					s = ws_sessions.get(cid)
					if s:
						if channel is not None and s.channel_prefix:
							channel = s.channel_prefix + channel
						if gtype == 'subscribe':
							hchannel = ws_channels.get(channel)
							if not hchannel:
								hchannel = dict()
								ws_channels[channel] = hchannel
							hchannel[cid] = s
							sub_key = ('ws', channel)
							sub = subs.get(sub_key)
							if not sub:
								sub = Subscription('ws', channel)
								sub.last_keepalive = now
								subs[sub_key] = sub
								notify_sub = True
							# clear any pending expiration on the subscription
							sub.expire_time = None
						elif gtype == 'unsubscribe':
							hchannel = ws_channels.get(channel)
							if hchannel:
								if cid in hchannel:
									del hchannel[cid]
									if len(hchannel) == 0:
										del ws_channels[channel]
									sub_key = ('ws', channel)
									sub = subs.get(sub_key)
									if sub:
										del subs[sub_key]
										notify_unsub = True
						elif gtype == 'detach':
							notify_detach = True
					lock.release()

				if notify_sub:
					logger.debug('ws session %s subscribed to %s' % (cid, channel))
					out = dict()
					out['from'] = instance_id
					out['mode'] = 'ws'
					out['channel'] = ensure_utf8(channel)
					out['ttl'] = 60
					stats_sock.send('sub ' + tnetstring.dumps(out))

				if notify_unsub:
					out = dict()
					out['from'] = instance_id
					out['mode'] = 'ws'
					out['channel'] = ensure_utf8(channel)
					out['unavailable'] = True
					stats_sock.send('sub ' + tnetstring.dumps(out))

				if notify_detach:
					# use a distinct name so we don't shadow the loop's 'item'
					ditem = dict()
					ditem['cid'] = cid
					ditem['type'] = 'detach'
					out = dict()
					out['items'] = [ditem]
					logger.debug('OUT wscontrol: %s' % out)
					out_sock.send(tnetstring.dumps(out))
def timeout_worker():
	"""Periodic (1s) housekeeping loop.

	Each pass:
	- times out expired response-mode holds and replies to their requests
	- keep-alives long-held response/stream subscribers
	- expires idle ws sessions and flags their subscriptions for removal
	- reports subscription keep-alives/unsubscribes to the stats worker
	- refreshes or removes tracked connections and flushes activity counts
	"""
	out_sock = ctx.socket(zmq.PUB)
	for spec in m2a_out_specs:
		out_sock.connect(spec)

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		# phase 1: collect and remove expired response-mode holds
		now = int(time.time())
		lock.acquire()
		# FIXME: use channels_by_req for lookup
		holds = list()
		channels = set()
		for channel, hchannels in response_channels.iteritems():
			channels.add(channel)
			channel_holds = list()
			for h in hchannels.values():
				if h.expire_time and now >= h.expire_time:
					channel_holds.append(h)
			# delete after iterating to avoid mutating during iteration
			for h in channel_holds:
				del hchannels[h.rid]
			holds.extend(channel_holds)
		for channel in channels:
			# prune channels emptied by the removals above
			if channel in response_channels and len(response_channels[channel]) == 0:
				del response_channels[channel]
				sub_key = ('response', channel)
				sub = subs.get(sub_key)
				if sub and sub.expire_time is None:
					# flag for deletion soon
					sub.expire_time = now + 60
		for h in holds:
			remove_from_response_channels(h.rid)
		lock.release()

		if len(holds) > 0:
			logger.debug("timing out %d subscribers" % len(holds))

			# reply to each timed-out request with its stored response
			# (defaults: 200 / standard reason / no headers / empty body)
			for h in holds:
				if "code" in h.response:
					pcode = h.response["code"]
				else:
					pcode = 200

				if "reason" in h.response:
					preason = h.response["reason"]
				else:
					preason = get_reason(pcode)

				if "headers" in h.response:
					pheaders = h.response["headers"]
				else:
					pheaders = dict()

				if "body" in h.response:
					pbody = h.response["body"]
				else:
					pbody = ""

				headers = dict()
				if h.jsonp_callback:
					# wrap the response in a JSONP callback; outer reply is
					# always 200 with the real status carried in the JSON
					result = dict()
					result["code"] = pcode
					result["reason"] = preason
					result["headers"] = dict()
					if pheaders:
						for k, v in pheaders.iteritems():
							result["headers"][k] = v
					header_set(result["headers"], "Content-Length", str(len(pbody)))
					result["body"] = pbody

					body = h.jsonp_callback + "(" + json.dumps(result) + ");\n"
					header_set(headers, "Content-Type", "application/javascript")
					header_set(headers, "Content-Length", str(len(body)))
					reply_http(out_sock, h.rid, 200, "OK", headers, body)
				else:
					if pheaders:
						for k, v in pheaders.iteritems():
							headers[k] = v

					if h.auto_cross_origin:
						apply_cors_headers(h.request["headers"], headers)

					reply_http(out_sock, h.rid, pcode, preason, headers, pbody)

				# drop the connection from stats tracking; deepcopy so we
				# can report it after releasing the lock
				stats_lock.acquire()
				ci = conns.get("%s:%s" % (h.rid[0], h.rid[1]))
				if ci is not None:
					ci = copy.deepcopy(ci)
					del conns[ci.id]
				stats_lock.release()

				if ci is not None:
					out = dict()
					out['from'] = instance_id
					if ci.route:
						out['route'] = ci.route
					out['id'] = ci.id
					out['unavailable'] = True
					stats_sock.send('conn ' + tnetstring.dumps(out))

		# phase 2: keep-alive held requests not refreshed in the last 30s
		now = int(time.time())
		rids = set()
		lock.acquire()
		for channel, hchannels in response_channels.iteritems():
			for h in hchannels.values():
				if h.last_keepalive is None or h.last_keepalive + 30 < now:
					if h.rid not in rids:
						h.last_keepalive = now
						rids.add(h.rid)
		for channel, hchannels in stream_channels.iteritems():
			for h in hchannels.values():
				if h.last_keepalive is None or h.last_keepalive + 30 < now:
					if h.rid not in rids:
						h.last_keepalive = now
						rids.add(h.rid)
		lock.release()

		if len(rids) > 0:
			logger.debug("keep-aliving %d subscribers" % len(rids))
			for rid in rids:
				out = dict()
				out['from'] = instance_id
				out['id'] = rid[1]
				out['type'] = 'keep-alive'
				m_raw = rid[0] + ' T' + tnetstring.dumps(out)
				logger.debug('OUT publish: %s' % m_raw)
				out_sock.send(m_raw)

		# phase 3: expire idle ws sessions (two-pass: collect, then delete)
		now = int(time.time())
		cids = set()
		lock.acquire()
		for cid, s in ws_sessions.iteritems():
			if s.expire_time and now >= s.expire_time:
				cids.add(cid)
		for cid in cids:
			del ws_sessions[cid]
			channels = set()
			for channel, hchannels in ws_channels.iteritems():
				channels.add(channel)
				if cid in hchannels:
					del hchannels[cid]
			for channel in channels:
				if channel in ws_channels and len(ws_channels[channel]) == 0:
					del ws_channels[channel]
					sub_key = ('ws', channel)
					sub = subs.get(sub_key)
					if sub:
						# use expire_time to flag for removal
						sub.expire_time = now
		lock.release()
		if len(cids) > 0:
			logger.debug("timing out %d ws sessions" % len(cids))

		# phase 4: expire flagged subscriptions, keep-alive the rest
		notify_subs = set()
		notify_unsubs = set()
		lock.acquire()
		for sub_key, sub in subs.iteritems():
			if sub.expire_time is not None and now >= sub.expire_time:
				notify_unsubs.add(sub_key)
			elif sub.last_keepalive is None or sub.last_keepalive + 30 < now:
				sub.last_keepalive = now
				notify_subs.add(sub_key)
		for sub_key in notify_unsubs:
			del subs[sub_key]
		lock.release()

		# stats notifications happen outside the lock
		for sub_key in notify_unsubs:
			out = dict()
			out['from'] = instance_id
			out['mode'] = ensure_utf8(sub_key[0])
			out['channel'] = ensure_utf8(sub_key[1])
			out['unavailable'] = True
			stats_sock.send('sub ' + tnetstring.dumps(out))

		if len(notify_subs) > 0:
			logger.debug('keep-aliving %d subscriptions' % len(notify_subs))
			for sub_key in notify_subs:
				out = dict()
				out['from'] = instance_id
				out['mode'] = ensure_utf8(sub_key[0])
				out['channel'] = ensure_utf8(sub_key[1])
				out['ttl'] = 60
				stats_sock.send('sub ' + tnetstring.dumps(out))

		# phase 5: refresh connections (or drop lingering ones) and flush
		# accumulated activity counters; copy state out under the lock,
		# send after releasing it
		refresh_conns = list()
		send_activity = list()
		stats_lock.acquire()
		remove_cids = set()
		for cid, ci in conns.iteritems():
			if ci.last_keepalive is None or (not ci.linger and ci.last_keepalive + CONNECTION_REFRESH < now) or (ci.linger and ci.last_keepalive + CONNECTION_LINGER < now):
				if ci.linger:
					remove_cids.add(cid)
				else:
					ci.last_keepalive = now
					refresh_conns.append(copy.deepcopy(ci))
		for cid in remove_cids:
			del conns[cid]
		for route, activity in stats_activity.iteritems():
			send_activity.append((route, activity))
		stats_activity.clear()
		stats_lock.release()

		for ci in refresh_conns:
			out = dict()
			out['from'] = instance_id
			if ci.route:
				out['route'] = ci.route
			out['id'] = ci.id
			out['type'] = ci.type
			if ci.peer_address:
				out['peer-address'] = ci.peer_address
			if ci.ssl:
				out['ssl'] = True
			out['ttl'] = CONNECTION_TTL
			stats_sock.send('conn ' + tnetstring.dumps(out))

		for i in send_activity:
			out = dict()
			out['from'] = instance_id
			if i[0]:
				out['route'] = i[0]
			out['count'] = i[1]
			stats_sock.send('activity ' + tnetstring.dumps(out))

		time.sleep(1)

def command_handler(method, args, data):
	"""RPC method dispatcher for the command server.

	Supported methods:
	  conncheck: args must contain 'ids' (list). Returns the subset of ids
	  that are known connections, checking the local conns table first and
	  falling back to the proxy for any not found locally.

	Raises rpc.CallError('bad-format'), ('proxy-request-failed') or
	('method-not-found').
	"""
	proxy_client = data.get('proxy_client')

	logger.debug('IN command: %s args=%s' % (method, args))

	if method == 'conncheck':
		if 'ids' not in args or not isinstance(args['ids'], list):
			raise rpc.CallError('bad-format')
		cids = set(args['ids'])

		# ids not present in the local conns table
		stats_lock.acquire()
		missing = set()
		for cid in cids:
			if cid not in conns:
				missing.add(cid)
		stats_lock.release()

		if len(missing) > 0:
			# ask the proxy about ids we don't know locally
			try:
				found = proxy_client.call('conncheck', {'ids': list(missing)})
			except Exception:
				# narrowed from bare except so SystemExit/KeyboardInterrupt
				# aren't swallowed
				raise rpc.CallError('proxy-request-failed')
			for cid in found:
				missing.remove(cid)

		# anything still missing is not a known connection anywhere
		for cid in missing:
			cids.remove(cid)

		return list(cids)
	else:
		raise rpc.CallError('method-not-found')

# intended to hold the running RpcServer so the shutdown code at the bottom
# of the file can stop it; None when no command server was started
command_server = None

def command_worker():
	"""Run the RPC command server (blocks in run()).

	Publishes the server instance via the module-level command_server so
	the shutdown path can call command_server.stop().
	"""
	# without this declaration the assignment below creates a local and the
	# module-level command_server stays None, so stop() is never called on
	# shutdown
	global command_server
	data = dict()
	if proxy_command_spec:
		data['proxy_client'] = rpc.RpcClient([proxy_command_spec])
	command_server = rpc.RpcServer(command_spec, context=ctx)
	command_server.run(command_handler, data)

def proxy_stats_worker():
	"""Consume stats messages published by the proxy.

	'activity' messages are accumulated into stats_activity (per route);
	'conn' messages are relayed to the in-process stats worker with 'from'
	rewritten to this instance's id.
	"""
	in_sock = ctx.socket(zmq.SUB)
	in_sock.setsockopt(zmq.SUBSCRIBE, '')
	in_sock.connect(proxy_stats_spec)

	stats_sock = ctx.socket(zmq.PUSH)
	stats_sock.connect('inproc://stats_in')

	while True:
		raw = in_sock.recv()
		# message format: "<type> <tnetstring payload>"
		sep = raw.find(' ')
		mtype = raw[:sep]
		m = tnetstring.loads(raw[sep + 1:])
		if mtype == 'activity':
			# missing/empty route is accumulated under ''
			route = m.get('route') or ''
			count = m['count']
			stats_lock.acquire()
			stats_activity[route] = stats_activity.get(route, 0) + count
			stats_lock.release()
		elif mtype == 'conn':
			# relay
			m['from'] = instance_id
			stats_sock.send(mtype + ' ' + tnetstring.dumps(m))

def stats_worker(c):
	"""Forward internal stats messages to the external stats PUB socket.

	Binds the inproc collector first and notifies condition c so callers
	know the bind succeeded before any worker connects to it. If no
	stats_spec is configured, messages are drained and dropped.
	"""
	in_sock = ctx.socket(zmq.PULL)
	in_sock.bind('inproc://stats_in')
	c.acquire()
	c.notify()
	c.release()

	out_sock = None
	if stats_spec:
		out_sock = ctx.socket(zmq.PUB)
		out_sock.bind(stats_spec)

	while True:
		raw = in_sock.recv()
		sep = raw.find(' ')
		if out_sock:
			# rewrite "<type> <data>" as "<type> T<data>" for publishing
			msg = raw[:sep] + ' T' + raw[sep + 1:]
			logger.debug('OUT stats: %s' % msg)
			out_sock.send(msg)

# --- startup: launch worker threads, then idle until interrupted ---

inspect_thread = threading.Thread(target=inspect_worker)
inspect_thread.start()

# we use a condition here to ensure the inproc bind succeeds before progressing
c = threading.Condition()
c.acquire()
stats_thread = threading.Thread(target=stats_worker, args=(c,))
stats_thread.daemon = True
stats_thread.start()
c.wait()
c.release()

accept_thread = threading.Thread(target=accept_worker)
accept_thread.start()

# we use a condition here to ensure the inproc bind succeeds before progressing
c = threading.Condition()
c.acquire()
push_in_thread = threading.Thread(target=push_in_worker, args=(c,))
push_in_thread.start()
c.wait()
c.release()

push_in_zmq_thread = threading.Thread(target=push_in_zmq_worker)
push_in_zmq_thread.start()

push_in_http_thread = threading.Thread(target=push_in_http_worker)
push_in_http_thread.start()

session_thread = threading.Thread(target=session_worker)
session_thread.daemon = True
session_thread.start()

# optional workers, started only when their specs are configured
if ws_control_in_spec and ws_control_out_spec:
	ws_control_thread = threading.Thread(target=ws_control_worker)
	ws_control_thread.daemon = True
	ws_control_thread.start()

if proxy_stats_spec:
	proxy_stats_thread = threading.Thread(target=proxy_stats_worker)
	proxy_stats_thread.daemon = True
	proxy_stats_thread.start()

if command_spec:
	command_thread = threading.Thread(target=command_worker)
	command_thread.daemon = True
	command_thread.start()

timeout_thread = threading.Thread(target=timeout_worker)
timeout_thread.daemon = True
timeout_thread.start()

# main thread idles; Ctrl-C falls through to shutdown
try:
	while True:
		time.sleep(60)
except KeyboardInterrupt:
	pass

# stop the RPC server if one was published by command_worker
if command_server:
	command_server.stop()

httpinterface.stop()
ctx.term()
