From 3794fce02440a4022291858d84c2f30b333fd739 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Fri, 5 Jan 2018 12:08:36 -0600 Subject: [PATCH 01/13] Bugfix ProxyListener - Initialize dport to none if diverter.getOriginalDestPort does not return a destination port. Feature enhancements - Added ability to send logs to Splunk using HEC - Modified the listeners to use the same logger instance - Modified listeners to log key session information using json format Known issues - logger.name is not unique to each listener, which makes following the logs difficult - Logging json formatted logs to streamhandler/filehandler is not very useful. --- fakenet/configs/default.ini | 32 +++++++++++------ fakenet/diverters/diverterbase.py | 11 +++--- fakenet/diverters/linux.py | 4 +-- fakenet/fakenet.py | 24 ++++++++++--- fakenet/listeners/BITSListener.py | 48 +++++++++++++++++++++---- fakenet/listeners/FTPListener.py | 37 +++++++++++++++---- fakenet/listeners/HTTPListener.py | 58 ++++++++++++++++++++++++------ fakenet/listeners/ProxyListener.py | 34 ++++++++++++++---- fakenet/listeners/RawListener.py | 35 ++++++++++++++---- 9 files changed, 227 insertions(+), 56 deletions(-) diff --git a/fakenet/configs/default.ini b/fakenet/configs/default.ini index 606ab1f..9494716 100644 --- a/fakenet/configs/default.ini +++ b/fakenet/configs/default.ini @@ -5,7 +5,7 @@ # Specify whether or not FakeNet should divert traffic. Disable if you want to # just start listeners and direct traffic manually (e.g. modify DNS server) -DivertTraffic: Yes +DivertTraffic: No ############################################################################### # Diverter Configuration @@ -29,7 +29,7 @@ NetworkMode: Auto # DebugLevel (Linux only as of this writing): specify fine-grained debug print # flags to enable. Enabling all logging when verbose mode is selected results # in an unacceptable overhead cost, hence this setting. -DebugLevel: Off +DebugLevel: On # MultiHost mode only: Specify what interfaces the Linux Diverter should create # an iptables rule for to redirect traffic destined for other hosts to the @@ -112,6 +112,18 @@ BlackListPortsUDP: 67, 68, 137, 138, 443, 1900, 5355 # Specify hosts to ignore when diverting traffic. 
# HostBlackList: 6.6.6.6 +############################################################################### +# Splunk remote logging configuration +# http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector +[Splunk] +LogToSplunk : No +SplunkHost : splunk_hostname +Port: 8080 +Cert_verify : True +HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +Source : fakenet:daemon +Sourcetype : json + ############################################################################### # Listener Configuration # @@ -239,7 +251,7 @@ HostBlackList: 5.5.5.5 Hidden: False [DNS Server] -Enabled: True +Enabled: False Port: 53 Protocol: UDP Listener: DNSListener @@ -251,7 +263,7 @@ Hidden: False [HTTPListener80] Enabled: True -Port: 80 +Port: 8080 Protocol: TCP Listener: HTTPListener UseSSL: No @@ -264,7 +276,7 @@ Hidden: False [HTTPListener443] Enabled: True -Port: 443 +Port: 8443 Protocol: TCP Listener: HTTPListener UseSSL: Yes @@ -274,7 +286,7 @@ DumpHTTPPostsFilePrefix: http Hidden: False [SMTPListener] -Enabled: True +Enabled: False Port: 25 Protocol: TCP Listener: SMTPListener @@ -283,7 +295,7 @@ Hidden: False [FTPListener21] Enabled: True -Port: 21 +Port: 2121 Protocol: TCP Listener: FTPListener UseSSL: No @@ -300,7 +312,7 @@ Protocol: TCP Hidden: False [IRCServer] -Enabled: True +Enabled: False Port: 6667 Protocol: TCP Listener: IRCListener @@ -311,7 +323,7 @@ Timeout: 30 Hidden: False [TFTPListener] -Enabled: True +Enabled: False Port: 69 Protocol: UDP Listener: TFTPListener @@ -320,7 +332,7 @@ Hidden: False TFTPFilePrefix: tftp [POPServer] -Enabled: True +Enabled: False Port: 110 Protocol: TCP Listener: POPListener diff --git a/fakenet/diverters/diverterbase.py b/fakenet/diverters/diverterbase.py index 73461c4..72d3cae 100644 --- a/fakenet/diverters/diverterbase.py +++ b/fakenet/diverters/diverterbase.py @@ -16,7 +16,7 @@ class DiverterBase(fnconfig.Config): def init_base(self, diverter_config, listeners_config, ip_addrs, - logging_level=logging.INFO): + logger=None, logging_level=logging.DEBUG): # For fine-grained control of subclass debug output. Does not control # debug output from DiverterBase. To see DiverterBase debug output, # pass logging.DEBUG as the logging_level argument to init_base. @@ -31,7 +31,10 @@ def init_base(self, diverter_config, listeners_config, ip_addrs, self.pcap_filename = '' self.pcap_lock = None - self.logger = logging.getLogger('Diverter') + if logger is not None: + self.logger = logger + else: + self.logger = logging.getLogger('Diverter') self.logger.setLevel(logging_level) portlists = ['BlackListPortsTCP', 'BlackListPortsUDP'] @@ -410,12 +413,12 @@ def parse_diverter_config(self): else: self.default_listener['TCP'] = int( self.listeners_config[self.getconfigval('defaulttcplistener').lower()]['port']) - self.logger.error('Using default listener %s on port %d', self.getconfigval( + self.logger.error('Using default listener %s on port %d.', self.getconfigval( 'defaulttcplistener').lower(), self.default_listener['TCP']) self.default_listener['UDP'] = int( self.listeners_config[self.getconfigval('defaultudplistener').lower()]['port']) - self.logger.error('Using default listener %s on port %d', self.getconfigval( + self.logger.error('Using default listener %s on port %d.', self.getconfigval( 'defaultudplistener').lower(), self.default_listener['UDP']) # Re-marshall these into a readily usable form... 
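A minimal usage sketch (illustration only, not part of the patch) of the shared-logger pattern the init_base() change above enables: one logging.Logger instance carries a Splunk HTTP Event Collector handler and is handed to both the diverter and the listeners. It assumes the splunk_http_handler package used later in this patch; the host, port, and HEC token values are placeholders.

    import logging
    from splunk_http_handler import SplunkHttpHandler

    # One shared logger for the whole FakeNet-NG process.
    logger = logging.getLogger('FakeNet')
    logger.setLevel(logging.INFO)
    logger.addHandler(logging.StreamHandler())

    # Attach a Splunk HEC handler (placeholder host/token), mirroring the
    # SplunkHttpHandler call made in fakenet.py below.
    logger.addHandler(SplunkHttpHandler('splunk_hostname',
                                        'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
                                        port=8080,
                                        source='fakenet:daemon',
                                        sourcetype='json',
                                        ssl_verify=True))

    # The same instance is then passed down, e.g.:
    #   Diverter(diverter_config, listeners_config, ip_addrs, logger=logger)
    #   HTTPListener(listener_config, 'HTTPListener80', logger=logger)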
diff --git a/fakenet/diverters/linux.py b/fakenet/diverters/linux.py index 4d88e54..4a52edc 100644 --- a/fakenet/diverters/linux.py +++ b/fakenet/diverters/linux.py @@ -200,9 +200,9 @@ class Diverter(DiverterBase, LinUtilMixin): def __init__(self, diverter_config, listeners_config, ip_addrs, - logging_level=logging.INFO): + logger=None, logging_level=logging.INFO): self.init_base(diverter_config, listeners_config, ip_addrs, - logging_level) + logger, logging_level) self.init_linux_mixin() self.init_diverter_linux() diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py index d56da61..54adf5f 100644 --- a/fakenet/fakenet.py +++ b/fakenet/fakenet.py @@ -6,6 +6,7 @@ # Developed by Peter Kacherginsky import logging +from splunk_http_handler import SplunkHttpHandler import os import sys @@ -43,6 +44,9 @@ def __init__(self, logging_level = logging.INFO): # Diverter used to intercept and redirect traffic self.diverter = None + # Splunk logging options and parameters + self.splunk_config = dict() + # FakeNet options and parameters self.fakenet_config = dict() @@ -84,6 +88,18 @@ def parse_config(self, config_filename): elif section == 'Diverter': self.diverter_config = dict(config.items(section)) + elif section == 'Splunk' and config.getboolean(section, 'LogToSplunk'): + self.splunk_config = dict(config.items(section)) + try: + self.logger.addHandler( + SplunkHttpHandler(self.splunk_config['splunkhost'], self.splunk_config['hectoken'], + port=int(self.splunk_config['port']), + source=self.splunk_config['source'], sourcetype=self.splunk_config['sourcetype'], + ssl_verify=bool(self.splunk_config['cert_verify']))) + self.logger.debug("Splunk handler config successful") + except Exception as e: + self.logger.error("Failed to set Splunk log handler. Exception: %s" % e) + elif config.getboolean(section, 'enabled'): self.listeners_config[section] = dict(config.items(section)) @@ -144,21 +160,21 @@ def start(self): # Check Windows version if platform.release() in ['2000', 'XP', '2003Server', 'post2003']: self.logger.error('Error: FakeNet-NG only supports Windows Vista+.') - self.logger.error(' Please use the original Fakenet for older versions of Windows.') + self.logger.error('Please use the original Fakenet for older versions of Windows.') sys.exit(1) if self.diverter_config['networkmode'].lower() == 'auto': self.diverter_config['networkmode'] = 'singlehost' from diverters.windows import Diverter - self.diverter = Diverter(self.diverter_config, self.listeners_config, self.logging_level) + self.diverter = Diverter(self.diverter_config, self.listeners_config, self.logger) elif platform_name.lower().startswith('linux'): if self.diverter_config['networkmode'].lower() == 'auto': self.diverter_config['networkmode'] = 'multihost' from diverters.linux import Diverter - self.diverter = Diverter(self.diverter_config, self.listeners_config, ip_addrs, self.logging_level) + self.diverter = Diverter(self.diverter_config, self.listeners_config, ip_addrs, self.logger) else: self.logger.error('Error: Your system %s is currently not supported.', platform_name) @@ -186,7 +202,7 @@ def start(self): else: listener_provider_instance = listener_provider( - listener_config, listener_name, self.logging_level) + listener_config, listener_name, self.logger, self.logging_level) # Store listener provider object self.running_listener_providers.append(listener_provider_instance) diff --git a/fakenet/listeners/BITSListener.py b/fakenet/listeners/BITSListener.py index c284e47..89a8795 100644 --- a/fakenet/listeners/BITSListener.py +++ 
b/fakenet/listeners/BITSListener.py @@ -203,6 +203,7 @@ class SimpleBITSRequestHandler(SimpleHTTPRequestHandler): supported_protocols = ["{7df0354d-249b-430f-820d-3d2a9bef4931}"] # The only existing protocol version to date fragment_size_limit = 100*1024*1024 # bytes + def do_HEAD(self): self.server.logger.info('Received HEAD request') @@ -434,6 +435,40 @@ def do_BITS_POST(self): repr(e.internal_exception)) self.__send_response(headers, status_code = status_code) + def log_message(self, format, *args): + """Construct CIM compliant log message as a dict object which would be indexed in splunk as json""" + + # http://docs.splunk.com/Documentation/CIM/4.9.1/User/Web + if 'user-agent' in self.headers.dict.keys(): + self.headers.dict['http_user_agent'] = self.headers.dict.pop('user-agent') + self.headers.dict['http_user_agent_length'] = len(self.headers.dict['http_user_agent']) + + if 'referrer' in self.headers.dict.keys(): + self.headers.dict['http_referrer'] = self.headers.dict.pop('referrer') + + if 'host' in self.headers.dict.keys(): + self.headers.dict['site'] = self.headers.dict.pop('host') + + try: + # Advertised fake web server signature + self.headers.dict['vendor'] = self.server.config.version + except: + pass + + try: + self.headers.dict['protocol'] = self.server.config.protocol.lower() + except: + self.headers.dict['protocol'] = 'tcp' + + logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1], 'dest_port': self.server.server_port, + 'ssl':self.server.config['usessl'], 'http_method': self.command, 'http_header': self.headers.dict, + 'uri_query': self.path, 'http_protocol_version': self.protocol_version, 'listener': __name__}) + if self.command == 'POST': + logmsg['post_body'] = self.post_body + + self.server.logger.info(logmsg) + return + class BITSListener(): def taste(self, data, dport): @@ -449,8 +484,9 @@ def taste(self, data, dport): return confidence def __init__(self, config={}, name='BITSListener', - logging_level=logging.DEBUG, running_listeners=None): - self.logger = logging.getLogger(name) + logger=None, logging_level=logging.DEBUG, running_listeners=None): + + self.logger = logger or logging.getLogger(name) self.logger.setLevel(logging_level) self.config = config @@ -461,8 +497,6 @@ def __init__(self, config={}, name='BITSListener', self.NAME = 'BITS' self.PORT = self.config.get('port') - self.logger.info('Starting...') - self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -470,7 +504,8 @@ def __init__(self, config={}, name='BITSListener', self.bits_file_prefix = self.config.get('bitsfileprefix', 'bits') def start(self): - self.logger.debug('Starting...') + http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Starting %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) self.server = ThreadedHTTPServer((self.local_ip, int(self.config.get('port'))), SimpleBITSRequestHandler) self.server.logger = self.logger @@ -497,7 +532,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.info('Stopping...') + http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Stopping %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) if self.server: self.server.shutdown() self.server.server_close() diff --git a/fakenet/listeners/FTPListener.py b/fakenet/listeners/FTPListener.py index 8c8ab9d..18c4b94 100644 --- a/fakenet/listeners/FTPListener.py +++ 
b/fakenet/listeners/FTPListener.py @@ -15,6 +15,7 @@ from pyftpdlib.handlers import FTPHandler, TLS_FTPHandler from pyftpdlib.filesystems import AbstractedFS from pyftpdlib.servers import ThreadedFTPServer +from pyftpdlib.log import logger as pyftpdlib_logger import BannerFactory @@ -150,8 +151,29 @@ class FakeFTPHandler(FTPHandler, object): - def ftp_PASS(self, line): + def log(self, msg, logfun=pyftpdlib_logger.info): + logmsg = dict({'src': self.remote_ip, 'src_port':self.remote_port, 'dest_port': self.server.address[1], + 'user': self.username, 'listener': __name__}) + if msg.__class__ is dict: + # for log_cmd, preserve format + logmsg.update(msg) + self.logger.info(logmsg) + else: + logmsg['message'] = msg + self.logger.debug(logmsg) + + # Finally, call superclass log method + super(FakeFTPHandler, self).log(msg, logfun) + + def log_cmd(self, cmd, arg, respcode, respstr): + """ Log all ftp command and arguments to remote logger """ + msg = dict({'ftp_cmd': cmd, 'ftp_cmd_args': arg, 'ftp_respcode': respcode}) + if str(respcode)[0] in ('4', '5'): + msg['ftp_respmsg'] = respstr + self.log(msg) + + def ftp_PASS(self, line): # Dynamically add user to authorizer if not self.authorizer.has_user(self.username): self.authorizer.add_user(self.username, line, self.ftproot_path, 'elradfmwM') @@ -226,15 +248,17 @@ def taste(self, data, dport): def __init__(self, config, - name='FTPListener', + name='FTPListener', + logger = None, logging_level=logging.INFO, running_listeners=None, diverter=None ): - self.logger = logging.getLogger(name) + self.logger = logger or logging.getLogger(name) + #self.logger.name = name self.logger.setLevel(logging_level) - + self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -300,7 +324,8 @@ def start(self): # Override pyftpdlib logger name logging.getLogger('pyftpdlib').name = self.name - + self.handler.logger = self.logger + self.handler.logger.name = self.name self.server_thread = threading.Thread(target=self.server.serve_forever) self.server_thread.daemon = True @@ -333,7 +358,7 @@ def test(config): def main(): logging.basicConfig(format='%(asctime)s [%(name)15s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p', level=logging.DEBUG) - config = {'port': '21', 'usessl': 'No', 'protocol': 'tcp', 'ftproot': os.path.join('..', 'defaultFiles')} + config = {'port': '2121', 'usessl': 'No', 'protocol': 'tcp', 'ftproot': os.path.join('..', 'defaultFiles')} listener = FTPListener(config) listener.start() diff --git a/fakenet/listeners/HTTPListener.py b/fakenet/listeners/HTTPListener.py index c60990c..39a74ba 100644 --- a/fakenet/listeners/HTTPListener.py +++ b/fakenet/listeners/HTTPListener.py @@ -1,4 +1,5 @@ import logging +import json import os import sys @@ -56,12 +57,15 @@ def __init__( self, config={}, name='HTTPListener', - logging_level=logging.DEBUG, + logger=None, + logging_level=logging.DEBUG ): - self.logger = logging.getLogger(name) + self.logger = logger or logging.getLogger(name) + + #self.logger.name = name self.logger.setLevel(logging_level) - + self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -82,8 +86,9 @@ def __init__( def start(self): - self.logger.debug('Starting...') - + http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Starting %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) + self.server = ThreadedHTTPServer((self.local_ip, int(self.config.get('port'))), ThreadedHTTPRequestHandler) self.server.logger = self.logger self.server.config = self.config @@ -112,7 
+117,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.info('Stopping...') + http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Stopping %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) if self.server: self.server.shutdown() self.server.server_close() @@ -176,17 +182,17 @@ def do_GET(self): def do_POST(self): self.server.logger.info('Received a POST request') - post_body = '' + self.post_body = '' content_len = int(self.headers.get('content-length', 0)) - post_body = self.rfile.read(content_len) + self.post_body = self.rfile.read(content_len) # Process request self.server.logger.info('%s', '-'*80) self.server.logger.info(self.requestline) for line in str(self.headers).split("\n"): self.server.logger.info(line) - for line in post_body.split("\n"): + for line in self.post_body.split("\n"): self.server.logger.info(line) self.server.logger.info('%s', '-'*80) @@ -200,7 +206,7 @@ def do_POST(self): if http_f: http_f.write(self.requestline + "\r\n") http_f.write(str(self.headers) + "\r\n") - http_f.write(post_body) + http_f.write(self.post_body) http_f.close() else: @@ -259,6 +265,38 @@ def get_response(self, path): return (response, response_type) def log_message(self, format, *args): + '''Construct CIM compliant log message as a dict object which would be indexed in splunk as json''' + + # http://docs.splunk.com/Documentation/CIM/4.9.1/User/Web + if 'user-agent' in self.headers.dict.keys(): + self.headers.dict['http_user_agent'] = self.headers.dict.pop('user-agent') + self.headers.dict['http_user_agent_length'] = len(self.headers.dict['http_user_agent']) + + if 'referrer' in self.headers.dict.keys(): + self.headers.dict['http_referrer'] = self.headers.dict.pop('referrer') + + if 'host' in self.headers.dict.keys(): + self.headers.dict['site'] = self.headers.dict.pop('host') + + try: + # Advertised fake web server signature + self.headers.dict['vendor'] = self.server.config.version + except: + pass + + try: + self.headers.dict['protocol'] = self.server.config.protocol.lower() + except: + self.headers.dict['protocol'] = 'tcp' + + logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1], 'dest_port': self.server.server_port, + 'ssl':self.server.config['usessl'], 'http_method': self.command, 'http_header': self.headers.dict, + 'uri_query': self.path, 'http_protocol_version': self.protocol_version, 'listener': __name__}) + if self.command == 'POST': + logmsg['post_body'] = self.post_body + + #self.server.logger.info(json.dumps(logmsg, indent=2, sort_keys=True)) + self.server.logger.info(logmsg) return diff --git a/fakenet/listeners/ProxyListener.py b/fakenet/listeners/ProxyListener.py index d88ac45..4cc6576 100644 --- a/fakenet/listeners/ProxyListener.py +++ b/fakenet/listeners/ProxyListener.py @@ -23,11 +23,13 @@ class ProxyListener(): def __init__( self, config={}, - name ='ProxyListener', + name ='ProxyListener', + logger=None, logging_level=logging.DEBUG, ): - self.logger = logging.getLogger(name) + self.logger = logger or logging.getLogger(name) + self.logger.setLevel(logging_level) self.config = config @@ -35,8 +37,6 @@ def __init__( self.server = None self.udp_fwd_table = dict() - self.logger.info('Starting...') - self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -76,13 +76,16 @@ def start(self): self.server_thread = threading.Thread( target=self.server.serve_forever) self.server_thread.daemon = True + 
self.logger.info('Starting %s %s on %s:%s' + % (self.config['protocol'], self.name, self.server.server_address[0], self.server.server_address[1])) self.server_thread.start() server_ip, server_port = self.server.server_address self.logger.info("%s Server(%s:%d) thread: %s" % (proto, server_ip, server_port, self.server_thread.name)) def stop(self): - self.logger.debug('Stopping...') + self.logger.info('Starting %s %s on %s:%s' + % (self.config['protocol'], self.name, self.server.server_address[0], self.server.server_address[1])) if self.server: self.server.shutdown() self.server.server_close() @@ -200,7 +203,10 @@ def get_top_listener(config, data, listeners, diverter, orig_src_ip, top_listener = None top_confidence = 0 - dport = diverter.getOriginalDestPort(orig_src_ip, orig_src_port, proto) + if diverter is not None: + dport = diverter.getOriginalDestPort(orig_src_ip, orig_src_port, proto) + else: + dport = None for listener in listeners: @@ -216,8 +222,13 @@ def get_top_listener(config, data, listeners, diverter, orig_src_ip, return top_listener class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): + def log_mesage(self, hexdump): + logmsg = dict({'protocol':'tcp', 'src': self.client_address[0], 'src_port': self.client_address[1], + 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) + + self.server.logger.info(logmsg) + return - def handle(self): remote_sock = self.request @@ -251,6 +262,8 @@ def handle(self): for line in hexdump_table(data): self.server.logger.debug(line) self.server.logger.debug('%s', '-'*80,) + # Log message in json format + self.log_mesage(hexdump_table(data)) except Exception as e: self.server.logger.info('recv() error: %s' % e.message) @@ -317,7 +330,12 @@ def handle(self): remote_sock.send(data) class ThreadedUDPRequestHandler(SocketServer.BaseRequestHandler): + def log_mesage(self, hexdump): + logmsg = dict({'protocol':'udp', 'src': self.client_address[0], 'src_port': self.client_address[1], + 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) + self.server.logger.info(logmsg) + return def handle(self): @@ -334,6 +352,8 @@ def handle(self): for line in hexdump_table(data): self.server.logger.debug(line) self.server.logger.debug('%s', '-'*80,) + # Log message in json format + self.log_mesage(hexdump_table(data)) orig_src_ip = self.client_address[0] orig_src_port = self.client_address[1] diff --git a/fakenet/listeners/RawListener.py b/fakenet/listeners/RawListener.py index 99d1f31..6377ebc 100644 --- a/fakenet/listeners/RawListener.py +++ b/fakenet/listeners/RawListener.py @@ -18,13 +18,16 @@ def taste(self, data, dport): def __init__(self, config, - name='RawListener', + name='RawListener', + logger=None, logging_level=logging.INFO, ): - self.logger = logging.getLogger(name) + self.logger = logger or logging.getLogger(name) + + #self.logger.name = name self.logger.setLevel(logging_level) - + self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -32,8 +35,6 @@ def __init__(self, self.name = 'Raw' self.port = self.config.get('port', 1337) - self.logger.info('Starting...') - self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -80,18 +81,27 @@ def start(self): self.server_thread = threading.Thread(target=self.server.serve_forever) self.server_thread.daemon = True + self.logger.info('Starting %s %s Listener (SSL:%s) on %s:%s' + % (self.name, self.config['protocol'], self.config.get('usessl'), 
self.local_ip, self.port)) self.server_thread.start() def stop(self): - self.logger.debug('Stopping...') + self.logger.info('Stopping %s %s Listener (SSL:%s) on %s:%s' + % (self.name, self.config['protocol'], self.config.get('usessl'), self.local_ip, self.port)) if self.server: self.server.shutdown() self.server.server_close() class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): + def log_mesage(self, hexdump): + logmsg = dict({'protocol':'tcp', 'src': self.client_address[0], 'src_port': self.client_address[1], + 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) - def handle(self): + self.server.logger.info(logmsg) + return + + def handle(self): # Timeout connection to prevent hanging self.request.settimeout(int(self.server.config.get('timeout', 5))) @@ -109,6 +119,8 @@ def handle(self): for line in hexdump_table(data): self.server.logger.info(line) self.server.logger.info('%s', '-'*80,) + # Log message in json format + self.log_mesage(hexdump_table(data)) self.request.sendall(data) @@ -122,6 +134,13 @@ def handle(self): self.server.logger.error('Error: %s', e) class ThreadedUDPRequestHandler(SocketServer.BaseRequestHandler): + def log_mesage(self, hexdump): + logmsg = dict({'protocol':'udp', 'src': self.client_address[0], 'src_port': self.client_address[1], + 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) + + self.server.logger.info(logmsg) + return + def handle(self): @@ -136,6 +155,8 @@ def handle(self): for line in hexdump_table(data): self.server.logger.debug(line) self.server.logger.debug('%s', '-'*80,) + # Log message in json format + self.log_mesage(hexdump_table(data)) socket.sendto(data, self.client_address) From 3b9535ae50b38f64708ae31da1a33576aebb590c Mon Sep 17 00:00:00 2001 From: vavarachen Date: Sun, 7 Jan 2018 02:03:22 -0600 Subject: [PATCH 02/13] Enhancements and new features - Extended logging capabilities to include remote syslog and Splunk loggers. 
- Implemented remote logger feature in ListenerBase
- Splunk loggers are configured with a filter to drop all non-JSON messages
- Log messages for remote loggers are created using the Common Information Model when applicable
- Improved logging consistency across most listeners
- Each listener instance is created with its own stream and remote logger (if configured)
- Added log and log_cmd to TLS_FakeFTPHandler

Bugfix
- Fixed SSL configuration of SMTP listener

ToDo
- Improve banner and server configuration for SMTP listener, similar to FTPListener
---
 fakenet/configs/default.ini        |  39 +++++++---
 fakenet/fakenet.py                 |  35 ++++++---
 fakenet/listeners/BITSListener.py  |  13 ++--
 fakenet/listeners/FTPListener.py   |  40 +++++++---
 fakenet/listeners/HTTPListener.py  |  20 ++---
 fakenet/listeners/ListenerBase.py  | 114 ++++++++++++++++++++++++++++-
 fakenet/listeners/ProxyListener.py |  19 +++--
 fakenet/listeners/RawListener.py   |  14 ++--
 fakenet/listeners/SMTPListener.py  |  44 +++++++++--
 fakenet/listeners/TFTPListener.py  |  52 ++++++++++---
 10 files changed, 299 insertions(+), 91 deletions(-)

diff --git a/fakenet/configs/default.ini b/fakenet/configs/default.ini
index 9494716..53ede9e 100644
--- a/fakenet/configs/default.ini
+++ b/fakenet/configs/default.ini
@@ -113,22 +113,43 @@ BlackListPortsUDP: 67, 68, 137, 138, 443, 1900, 5355
 # HostBlackList: 6.6.6.6
 ###############################################################################
+#[RemoteLogger]
+#Enabled : Yes
+# logger_type: [syslog,splunk]
+#Logger_Type : syslog
+# logger_host: [ip,name,fqdn]
+#Logger_Host : localhost
+#Logger_Port : 514
+# logger_proto: [udp,tcp]
+#Logger_Protocol : TCP
 # Splunk remote logging configuration
 # http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector
-[Splunk]
-LogToSplunk : No
-SplunkHost : splunk_hostname
-Port: 8080
-Cert_verify : True
-HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-Source : fakenet:daemon
-Sourcetype : json
+#Splunk_Cert_verify : True
+#Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+#Splunk_Source : fakenet:daemon
+#Splunk_Sourcetype : _json
+[RemoteLogger]
+EnableRemoteLogger : False
+Logger_Type: splunk
+Logger_Host : localhost
+Logger_Port: 8080
+Splunk_Cert_verify : True
+Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+Splunk_Source : fakenet:daemon
+
+#[RemoteLogger]
+#Enabled : Yes
+#Logger_Type : syslog
+#Logger_Host : localhost
+#Logger_Port : 514
+#Logger_Protocol : TCP
+
 ###############################################################################
 # Listener Configuration
 #
 # Listener configuration consists of generic settings used by the diverter which
-# are the same for all listeners and listener specific settings. 
+# are the same for all listeners and listener specific settings.
 #
 # NOTE: Listener section names will be used for logging.
# diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py index 54adf5f..559afea 100644 --- a/fakenet/fakenet.py +++ b/fakenet/fakenet.py @@ -19,6 +19,8 @@ from optparse import OptionParser,OptionGroup from ConfigParser import ConfigParser +from listeners import ListenerBase + import platform from optparse import OptionParser @@ -88,17 +90,28 @@ def parse_config(self, config_filename): elif section == 'Diverter': self.diverter_config = dict(config.items(section)) - elif section == 'Splunk' and config.getboolean(section, 'LogToSplunk'): - self.splunk_config = dict(config.items(section)) + elif section == 'RemoteLogger': + self.remotelogger_config = dict(config.items(section)) try: - self.logger.addHandler( - SplunkHttpHandler(self.splunk_config['splunkhost'], self.splunk_config['hectoken'], - port=int(self.splunk_config['port']), - source=self.splunk_config['source'], sourcetype=self.splunk_config['sourcetype'], - ssl_verify=bool(self.splunk_config['cert_verify']))) - self.logger.debug("Splunk handler config successful") + if self.remotelogger_config['logger_type'] == 'splunk' and config.getboolean(section, 'enableremotelogger'): + ListenerBase.add_splunk_logger( + self.remotelogger_config['logger_host'], + self.remotelogger_config['splunk_hectoken'], + self.logger, + self.remotelogger_config['logger_port'], + self.remotelogger_config['splunk_cert_verify'], + self.remotelogger_config['splunk_source'] + ) + elif self.remotelogger_config['logger_type'] == 'syslog' and config.getboolean(section, 'enableremotelogger'): + ListenerBase.add_remote_logger( + self.remotelogger_config['logger_host'], + self.logger, + int(self.remotelogger_config['logger_port']), + self.remotelogger_config['logger_protocol'] + ) + self.logger.info("%s handler configured successfully." % self.remotelogger_config['logger_type']) except Exception as e: - self.logger.error("Failed to set Splunk log handler. Exception: %s" % e) + self.logger.warning("Failed to set remote log handler. 
Exception: %s" % e) elif config.getboolean(section, 'enabled'): self.listeners_config[section] = dict(config.items(section)) @@ -184,6 +197,8 @@ def start(self): for listener_name in self.listeners_config: listener_config = self.listeners_config[listener_name] + # Pass remote logger configs in case we want to enable it for listener + listener_config.update(self.remotelogger_config) # Anonymous listener if not 'listener' in listener_config: @@ -202,7 +217,7 @@ def start(self): else: listener_provider_instance = listener_provider( - listener_config, listener_name, self.logger, self.logging_level) + listener_config, listener_name, self.logging_level) # Store listener provider object self.running_listener_providers.append(listener_provider_instance) diff --git a/fakenet/listeners/BITSListener.py b/fakenet/listeners/BITSListener.py index 89a8795..775dc53 100644 --- a/fakenet/listeners/BITSListener.py +++ b/fakenet/listeners/BITSListener.py @@ -1,6 +1,7 @@ # Based on a simple BITS server by Dor Azouri import logging +import ListenerBase import os import sys @@ -484,11 +485,9 @@ def taste(self, data, dport): return confidence def __init__(self, config={}, name='BITSListener', - logger=None, logging_level=logging.DEBUG, running_listeners=None): + logging_level=logging.DEBUG, running_listeners=None): - self.logger = logger or logging.getLogger(name) - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -497,6 +496,9 @@ def __init__(self, config={}, name='BITSListener', self.NAME = 'BITS' self.PORT = self.config.get('port') + ssl_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) + self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -504,9 +506,6 @@ def __init__(self, config={}, name='BITSListener', self.bits_file_prefix = self.config.get('bitsfileprefix', 'bits') def start(self): - http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' - self.logger.info('Starting %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) - self.server = ThreadedHTTPServer((self.local_ip, int(self.config.get('port'))), SimpleBITSRequestHandler) self.server.logger = self.logger self.server.bits_file_prefix = self.bits_file_prefix diff --git a/fakenet/listeners/FTPListener.py b/fakenet/listeners/FTPListener.py index 18c4b94..165415d 100644 --- a/fakenet/listeners/FTPListener.py +++ b/fakenet/listeners/FTPListener.py @@ -1,4 +1,5 @@ import logging +import ListenerBase import os import sys @@ -172,7 +173,6 @@ def log_cmd(self, cmd, arg, respcode, respstr): msg['ftp_respmsg'] = respstr self.log(msg) - def ftp_PASS(self, line): # Dynamically add user to authorizer if not self.authorizer.has_user(self.username): @@ -182,6 +182,27 @@ def ftp_PASS(self, line): class TLS_FakeFTPHandler(TLS_FTPHandler, object): + def log(self, msg, logfun=pyftpdlib_logger.info): + logmsg = dict({'src': self.remote_ip, 'src_port':self.remote_port, 'dest_port': self.server.address[1], + 'user': self.username, 'listener': __name__}) + if msg.__class__ is dict: + # for log_cmd, preserve format + logmsg.update(msg) + self.logger.info(logmsg) + else: + logmsg['message'] = msg + self.logger.debug(logmsg) + + # Finally, call superclass log method + super(TLS_FakeFTPHandler, 
self).log(msg, logfun) + + def log_cmd(self, cmd, arg, respcode, respstr): + """ Log all ftp command and arguments to remote logger """ + msg = dict({'ftp_cmd': cmd, 'ftp_cmd_args': arg, 'ftp_respcode': respcode}) + if str(respcode)[0] in ('4', '5'): + msg['ftp_respmsg'] = respstr + self.log(msg) + def ftp_PASS(self, line): # Dynamically add user to authorizer @@ -249,16 +270,12 @@ def taste(self, data, dport): def __init__(self, config, name='FTPListener', - logger = None, - logging_level=logging.INFO, + logging_level=logging.INFO, running_listeners=None, diverter=None ): - self.logger = logger or logging.getLogger(name) - #self.logger.name = name - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -268,7 +285,8 @@ def __init__(self, self.name = 'FTP' self.port = self.config.get('port', 21) - self.logger.info('Starting...') + ssl_str = 'FTPS' if self.config.get('usessl') == 'Yes' else 'FTP' + self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) self.logger.debug('Initialized with config:') for key, value in config.iteritems(): @@ -292,10 +310,8 @@ def expand_ports(self, ports_list): return ports def start(self): - self.authorizer = DummyAuthorizer() - if self.config.get('usessl') == 'Yes': self.logger.debug('Using SSL socket.') @@ -319,7 +335,6 @@ def start(self): self.handler.authorizer = self.authorizer self.handler.passive_ports = self.expand_ports(self.config.get('pasvports', '60000-60010')) - self.server = ThreadedFTPServer((self.local_ip, int(self.config['port'])), self.handler) # Override pyftpdlib logger name @@ -332,7 +347,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.debug('Stopping...') + ssl_str = 'FTPS' if self.config.get('usessl') == 'Yes' else 'FTP' + self.logger.info('Stopping %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) if self.server: self.server.close_all() diff --git a/fakenet/listeners/HTTPListener.py b/fakenet/listeners/HTTPListener.py index 39a74ba..2c33b81 100644 --- a/fakenet/listeners/HTTPListener.py +++ b/fakenet/listeners/HTTPListener.py @@ -1,5 +1,5 @@ import logging -import json +import ListenerBase import os import sys @@ -57,15 +57,10 @@ def __init__( self, config={}, name='HTTPListener', - logger=None, logging_level=logging.DEBUG ): - self.logger = logger or logging.getLogger(name) - - #self.logger.name = name - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -73,6 +68,9 @@ def __init__( self.name = 'HTTP' self.port = self.config.get('port', 80) + ssl_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) + self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -86,9 +84,6 @@ def __init__( def start(self): - http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' - self.logger.info('Starting %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) - self.server = ThreadedHTTPServer((self.local_ip, int(self.config.get('port'))), ThreadedHTTPRequestHandler) self.server.logger = self.logger self.server.config = self.config @@ -117,8 +112,8 
@@ def start(self): self.server_thread.start() def stop(self): - http_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' - self.logger.info('Stopping %s server on %s:%s' % (http_str, self.local_ip, self.config.get('port'))) + ssl_str = 'HTTPS' if self.config.get('usessl') == 'Yes' else 'HTTP' + self.logger.info('Stopping %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) if self.server: self.server.shutdown() self.server.server_close() @@ -295,7 +290,6 @@ def log_message(self, format, *args): if self.command == 'POST': logmsg['post_body'] = self.post_body - #self.server.logger.info(json.dumps(logmsg, indent=2, sort_keys=True)) self.server.logger.info(logmsg) return diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index d315607..25726f9 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -1,6 +1,9 @@ import logging +import logging.handlers +from socket import SOCK_DGRAM, SOCK_STREAM import os + def safe_join(root, path): """ Joins a path to a root path, even if path starts with '/', using os.sep @@ -14,6 +17,7 @@ def safe_join(root, path): return root + normpath + def abs_config_path(path): """ Attempts to return the absolute path of a path from a configuration @@ -36,4 +40,112 @@ def abs_config_path(path): if os.path.exists(relpath): return os.path.abspath(relpath) - return None \ No newline at end of file + return None + + +def add_remote_logger(host, logger=logging.getLogger('FakeNet Listener'), port=514, proto='TCP'): + """ + Attach a remote syslog handler to existing logger + + :param host: IP, hostname or remote logger. Can also be 'localhost' + :param logger: logging instance + :param port: Network port to send logs to + :param proto: Network protocol supported by remote logger + :return: Modified logger with remote handler attached + """ + socket_type = {'UDP': SOCK_DGRAM, 'TCP': SOCK_STREAM } + return logger.addHandler( + logging.handlers.SysLogHandler( + (host, port), + logging.handlers.SysLogHandler.LOG_DAEMON, + socket_type[proto.upper()] + ) + ) + + +def add_splunk_logger(host, hectoken, logger=logging.getLogger('FakeNet Listener'), port=8080, verify=True, source='FakeNet', sourcetype='_json'): + """ + Attach a remote Splunk HTTP Event Collector handler to existing logger + http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector + + :param host: IP, hostname of splunk search head, forwarder or indexer + :param hectoken: HTTP Event Collector token for authentication. + :param logger: logging instance + :param port: HEC port + :param verify: SSL verification + :param source: Splunk event source + :param sourcetype: Splunk event sourcetype. + :return: Modified logger with remote handler attached + """ + class JSONFilter(logging.Filter): + """ + Logging filter to filter out any non-json formatted events. + """ + def filter(self, record): + return record.getMessage().startswith('{') and record.getMessage().endswith('}') + + try: + from splunk_http_handler import SplunkHttpHandler + try: + splunk_handler = SplunkHttpHandler( + host, + hectoken, + port=port, + source=source, + sourcetype=sourcetype, + ssl_verify=bool(verify) + ) + splunk_handler.addFilter(JSONFilter()) + logger.addHandler(splunk_handler) + except Exception as e: + logger.error("Failed to set Splunk log handler. 
Exception: %s" % e) + except Exception as e: + logger.error("Failed to import Splunk python module (splunk_http_handler), Try 'pip install splunk_http_handler'") + logger.debug("Exception raised: %s" % e) + finally: + return logger + + +def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): + """ + Set default logger for listeners + + :param name: Unique string to identify the Listener + :param config: listener_config object updated with containing remotelogger_config + :param logging_level: logging verbosity + :return: logger with either splunk or syslog handlers + """ + logger = logging.getLogger(name) + logger.setLevel(logging_level) + logger.propagate = False + stream_handler = logging.StreamHandler() + stream_handler.setLevel(logging_level) + stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') + stream_handler.setFormatter(stream_formatter) + logger.addHandler(stream_handler) + + if config['enableremotelogger']: + try: + if config['logger_type'] == 'splunk': + add_splunk_logger( + config['logger_host'], + config['splunk_hectoken'], + logger, + config['logger_port'], + config['splunk_cert_verify'], + source=name + ) + elif config['logger_type'] == 'syslog': + add_remote_logger( + config['logger_host'], + logger, + int(config['logger_port']), + config['logger_protocol'] + ) + except Exception as e: + logger.warning("Failed to add %s log handler for %s" % (config['logger_type'], name)) + logger.debug("Exception raised: %s") % e + logger.debug("Config: \n%s") % config + + return logger + diff --git a/fakenet/listeners/ProxyListener.py b/fakenet/listeners/ProxyListener.py index 4cc6576..ec6047b 100644 --- a/fakenet/listeners/ProxyListener.py +++ b/fakenet/listeners/ProxyListener.py @@ -13,6 +13,7 @@ from ssl_utils import ssl_detector from . 
import * import os +import ListenerBase BUF_SZ = 1024 IP = '0.0.0.0' @@ -24,19 +25,19 @@ def __init__( self, config={}, name ='ProxyListener', - logger=None, - logging_level=logging.DEBUG, + logging_level=logging.DEBUG, ): - self.logger = logger or logging.getLogger(name) - - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name + self.local_ip = '0.0.0.0' self.server = None self.udp_fwd_table = dict() + self.logger.info('Starting %s %s on %s:%s' + % (self.config['protocol'], self.name, self.local_ip, self.config['port'])) + self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -76,16 +77,14 @@ def start(self): self.server_thread = threading.Thread( target=self.server.serve_forever) self.server_thread.daemon = True - self.logger.info('Starting %s %s on %s:%s' - % (self.config['protocol'], self.name, self.server.server_address[0], self.server.server_address[1])) self.server_thread.start() server_ip, server_port = self.server.server_address self.logger.info("%s Server(%s:%d) thread: %s" % (proto, server_ip, server_port, self.server_thread.name)) def stop(self): - self.logger.info('Starting %s %s on %s:%s' - % (self.config['protocol'], self.name, self.server.server_address[0], self.server.server_address[1])) + self.logger.info('Stopping %s %s on %s:%s' + % (self.config['protocol'], self.name, self.local_ip, self.config['port'])) if self.server: self.server.shutdown() self.server.server_close() diff --git a/fakenet/listeners/RawListener.py b/fakenet/listeners/RawListener.py index 6377ebc..284a904 100644 --- a/fakenet/listeners/RawListener.py +++ b/fakenet/listeners/RawListener.py @@ -1,4 +1,5 @@ import logging +import ListenerBase import os import sys @@ -19,15 +20,10 @@ def taste(self, data, dport): def __init__(self, config, name='RawListener', - logger=None, - logging_level=logging.INFO, + logging_level=logging.INFO, ): - self.logger = logger or logging.getLogger(name) - - #self.logger.name = name - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -35,6 +31,8 @@ def __init__(self, self.name = 'Raw' self.port = self.config.get('port', 1337) + self.logger.info('Starting %s %s Listener (SSL:%s) on %s:%s' + % (self.name, self.config['protocol'], self.config.get('usessl'), self.local_ip, self.port)) self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -81,8 +79,6 @@ def start(self): self.server_thread = threading.Thread(target=self.server.serve_forever) self.server_thread.daemon = True - self.logger.info('Starting %s %s Listener (SSL:%s) on %s:%s' - % (self.name, self.config['protocol'], self.config.get('usessl'), self.local_ip, self.port)) self.server_thread.start() def stop(self): diff --git a/fakenet/listeners/SMTPListener.py b/fakenet/listeners/SMTPListener.py index 137af59..56862cb 100755 --- a/fakenet/listeners/SMTPListener.py +++ b/fakenet/listeners/SMTPListener.py @@ -8,7 +8,9 @@ import ssl import socket +import ListenerBase +import urllib from . 
import *

 class SMTPListener():
@@ -40,9 +42,7 @@ def __init__(
             logging_level=logging.INFO,
             ):
-        self.logger = logging.getLogger(name)
-        self.logger.setLevel(logging_level)
-
+        self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level)
         self.config = config
         self.name = name
         self.local_ip = '0.0.0.0'
@@ -50,15 +50,14 @@ def __init__(
         self.name = 'SMTP'
         self.port = self.config.get('port', 25)

-        self.logger.info('Starting...')
+        ssl_str = 'SMTPS' if self.config.get('usessl') == 'Yes' else 'SMTP'
+        self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port')))

         self.logger.debug('Initialized with config:')
         for key, value in config.iteritems():
             self.logger.debug('  %10s: %s', key, value)

     def start(self):
-        self.logger.debug('Starting...')
-
         self.server = ThreadedTCPServer((self.local_ip, int(self.config['port'])), ThreadedTCPRequestHandler)

         if self.config.get('usessl') == 'Yes':
@@ -76,7 +75,7 @@ def start(self):
                 self.logger.error('Could not locate %s', certfile_path)
                 sys.exit(1)

-            self.server.socket = ssl.wrap_socket(self.server.socket, keyfile='privkey.pem', certfile='server.pem', server_side=True, ciphers='RSA')
+            self.server.socket = ssl.wrap_socket(self.server.socket, keyfile=keyfile_path, certfile=certfile_path, server_side=True, ciphers='RSA')

         self.server.logger = self.logger
         self.server.config = self.config
@@ -86,7 +85,8 @@ def start(self):
         self.server_thread.start()

     def stop(self):
-        self.logger.info('Stopping...')
+        ssl_str = 'SMTPS' if self.config.get('usessl') == 'Yes' else 'SMTP'
+        self.logger.info('Stopping %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port')))
         if self.server:
             self.server.shutdown()
             self.server.server_close()
@@ -108,6 +108,31 @@ def handle(self):
                 command = data[:4].upper()

+                escaped_data = urllib.quote(data, '@()<>{}[]:\/ ')
+                logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1],
+                               'dest_port': self.server.server_address[1], 'smtp_cmd': command, 'listener': __name__})
+
+                if command.startswith('MAIL'):
+                    logmsg['src_user'] = escaped_data
+                elif command.startswith('RCPT'):
+                    logmsg['recipient'] = escaped_data
+                elif command.startswith('NOOP'):
+                    logmsg['keepalive'] = escaped_data
+                elif command.startswith('RSET'):
+                    logmsg['reset'] = escaped_data
+                elif command.startswith('HELO') or command.startswith('EHLO'):
+                    logmsg['domain'] = escaped_data
+                elif command.startswith('VRFY'):
+                    logmsg['verify'] = escaped_data
+                elif command.startswith('AUTH'):
+                    logmsg['authentication'] = escaped_data
+                elif command.startswith('DATA'):
+                    # capture entire mail data
+                    logmsg['message'] = ""
+                else:
+                    logmsg['message'] = escaped_data
+
+
                 if command == '':
                     break
@@ -136,6 +161,7 @@ def handle(self):
                             break

                     self.server.logger.info('Received mail data.')
+                    logmsg['message'] = urllib.quote(mail_data, '@()<>{}[]:\/ ')
                     for line in mail_data.split("\n"):
                         self.server.logger.info(line)
@@ -144,6 +170,8 @@ def handle(self):
                 else:
                     self.request.sendall("503 Command not supported\r\n")

+                self.server.logger.info(logmsg)
+
         except socket.timeout:
             self.server.logger.warning('Connection timeout')

diff --git a/fakenet/listeners/TFTPListener.py b/fakenet/listeners/TFTPListener.py
index 0efa7bf..3684074 100644
--- a/fakenet/listeners/TFTPListener.py
+++ b/fakenet/listeners/TFTPListener.py
@@ -1,4 +1,5 @@
 import logging
+import ListenerBase

 import os
 import sys
@@ -72,9 +73,7 @@ def __init__(self,
                  logging_level=logging.INFO,
                  ):
-        self.logger = logging.getLogger(name)
- 
self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -82,6 +81,8 @@ def __init__(self, self.name = 'TFTP' self.port = self.config.get('port', 69) + self.logger.info('Starting %s on %s:%s' + % (self.name, self.local_ip, self.config['port'])) self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) @@ -95,8 +96,6 @@ def __init__(self, self.tftp_file_prefix = self.config.get('tftpfileprefix', 'tftp') def start(self): - self.logger.info('Starting...') - # Start listener self.server = ThreadedUDPServer((self.local_ip, int(self.config['port'])), ThreadedUDPRequestHandler) @@ -110,7 +109,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.debug('Stopping...') + self.logger.info('Stopping %s on %s:%s' + % (self.name, self.local_ip, self.config['port'])) if self.server: self.server.shutdown() self.server.server_close() @@ -127,24 +127,43 @@ def handle(self): opcode = data[:2] + logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1], + 'dest_port': self.server.server_address[1], 'listener': __name__}) + if opcode == OPCODE_RRQ: - + filename, mode = self.parse_rrq_wrq_packet(data) - self.server.logger.info('Received request to download %s', filename) + msg = 'Received request to download %s' % filename + self.server.logger.info(msg) + logmsg['tftp_cmd'] = 'get' + logmsg['tftp_cmd_args'] = dict({'filename':filename, 'mode':mode}) + self.server.logger.info(logmsg) self.handle_rrq(socket, filename) elif opcode == OPCODE_WRQ: filename, mode = self.parse_rrq_wrq_packet(data) - self.server.logger.info('Received request to upload %s', filename) + msg = 'Received request to upload %s' % filename + self.server.logger.info(msg) + safe_file = self.server.tftp_file_prefix + "_" + urllib.quote(filename, '') + output_file = ListenerBase.safe_join(os.getcwd(),safe_file) + logmsg['message'] = msg + "(%s)" % output_file + logmsg['tftp_cmd'] = 'put' + logmsg['tftp_cmd_args'] = dict({'filename':filename, 'mode':mode}) + self.server.logger.info(logmsg) self.handle_wrq(socket, filename) elif opcode == OPCODE_ACK: block_num = struct.unpack('!H', data[2:4])[0] - self.server.logger.debug('Received ACK for block %d', block_num) + msg = 'Received ACK for block %d' % block_num + self.server.logger.debug(msg) + logmsg['message'] = msg + logmsg['tftp_cmd'] = 'ack' + logmsg['tftp_cmd_args'] = dict({'blocknum':block_num}) + self.server.logger.debug(logmsg) elif opcode == OPCODE_DATA: @@ -155,11 +174,20 @@ def handle(self): error_num = struct.unpack('!H', data[2:4])[0] error_msg = data[4:] - self.server.logger.info('Received error message %d:%s', error_num, error_msg) + msg = 'Received error message %d:%s' % (error_num, error_msg) + self.server.logger.info(msg) + logmsg['tftp_cmd'] = 'error' + logmsg['tftp_cmd_args'] = dict({'errornum': error_num, 'errormsg': error_msg}) + logmsg['message'] = msg + self.server.logger.info(logmsg) else: - self.server.logger.error('Unknown opcode: %d', struct.unpack('!H', data[:2])[0]) + msg = 'Unknown opcode: %d' % struct.unpack('!H', data[:2])[0] + self.server.logger.error(msg) + logmsg['tftp_cmd'] = struct.unpack('!H', data[:2])[0] + logmsg['message'] = msg + self.server.logger.info(logmsg) except Exception, e: self.server.logger.error('Error: %s', e) From 2a2298f805938d7662d219fe98c6ebe970041288 Mon Sep 17 00:00:00 2001 From: 
vavarachen Date: Mon, 15 Jan 2018 23:04:43 -0600 Subject: [PATCH 03/13] Simplified the remote logger code by moving most of the logic to ListenerBase. - Added ability to specify more than one remote loggers - Added json_only flag to offer the ability to avoid logging JSON to syslog. --- fakenet/configs/default.ini | 95 ++++++++++++--------- fakenet/fakenet.py | 32 ++----- fakenet/listeners/ListenerBase.py | 134 ++++++++++++++++++++---------- 3 files changed, 155 insertions(+), 106 deletions(-) diff --git a/fakenet/configs/default.ini b/fakenet/configs/default.ini index 53ede9e..22542dd 100644 --- a/fakenet/configs/default.ini +++ b/fakenet/configs/default.ini @@ -5,7 +5,7 @@ # Specify whether or not FakeNet should divert traffic. Disable if you want to # just start listeners and direct traffic manually (e.g. modify DNS server) -DivertTraffic: No +DivertTraffic: Yes ############################################################################### # Diverter Configuration @@ -29,7 +29,7 @@ NetworkMode: Auto # DebugLevel (Linux only as of this writing): specify fine-grained debug print # flags to enable. Enabling all logging when verbose mode is selected results # in an unacceptable overhead cost, hence this setting. -DebugLevel: On +DebugLevel: Off # MultiHost mode only: Specify what interfaces the Linux Diverter should create # an iptables rule for to redirect traffic destined for other hosts to the @@ -113,36 +113,53 @@ BlackListPortsUDP: 67, 68, 137, 138, 443, 1900, 5355 # HostBlackList: 6.6.6.6 ############################################################################### -#[RemoteLogger] -#Enabled : Yes -# logger_type: [syslog,splunk] -#Logger_Type : syslog -# logger_host: [ip,name,fqdn] -#Logger_Host : localhost -#Logger_Port : 514 -# logger_proto: [udp,tcp] -#Logger_Protocol : TCP -# Splunk remote logging configuration +# Multiple remote loggers can be specified. +# Each remote logger section name must be unique and start with 'RemoteLogger'. 
+# +# [RemoteLogger:description] +# Logger_Type - [syslog|splunk] (required) + +# logger_host - [ip,name,fqdn] (required) +# Logger_Port - 0-65535 (optional) +# Logger_Protocol - [TCP|UDP] (optional) +# https://docs.python.org/2/library/logging.html#logging-levels +# Logger_Level - [DEBUG|INFO|WARNING|ERROR|CRITICAL] (optional) +# +# Splunk specific configurations +# Requires splunk_http_handler python module ('pip install splunk_http_handler') +# If SSL capability is required, see https://github.com/vavarachen/splunk_http_handler # http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector -#Splunk_Cert_verify : True -#Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -#Splunk_Source : fakenet:daemon -#Splunk_Sourcetype : _json -[RemoteLogger] -EnableRemoteLogger : False -Logger_Type: splunk -Logger_Host : localhost -Logger_Port: 8080 -Splunk_Cert_verify : True -Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -Splunk_Source : fakenet:daemon - -#[RemoteLogger] -#Enabled : Yes -#Logger_Type : syslog -#Logger_Host : localhost -#Logger_Port : 514 -#Logger_Protocol : TCP +# Splunk_HECToken - xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx (required) +# Splunk_Source - Any valid splunk source name (optional) +# Replace with custom sourcetype as needed +# Splunk_Sourcetype - _json (optional) +# Splunk_Cert_verify - [0|1] (optional) +# +# JSON_Only - [0|1] +# 0 - Log all events +# 1 - Only log events in JSON format +###### +# Splunk Example +# +# [RemoteLogger:splunk] +# Logger_Type: splunk +# Logger_Host : localhost +# Logger_Port: 8080 +# Logger_Level: INFO +# Splunk_Cert_verify : True +# Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +# Splunk_Source : FakeNet +# JSON_Only : 1 +###### +# Syslog Example +# +# [RemoteLogger:syslogUDP] +# Logger_Type : syslog +# Logger_Host : localhost +# Logger_Port : 514 +# Logger_Level: INFO +# Logger_Protocol : UDP +# JSON_Only : 0 ############################################################################### @@ -272,7 +289,7 @@ HostBlackList: 5.5.5.5 Hidden: False [DNS Server] -Enabled: False +Enabled: True Port: 53 Protocol: UDP Listener: DNSListener @@ -284,7 +301,7 @@ Hidden: False [HTTPListener80] Enabled: True -Port: 8080 +Port: 80 Protocol: TCP Listener: HTTPListener UseSSL: No @@ -297,7 +314,7 @@ Hidden: False [HTTPListener443] Enabled: True -Port: 8443 +Port: 443 Protocol: TCP Listener: HTTPListener UseSSL: Yes @@ -307,7 +324,7 @@ DumpHTTPPostsFilePrefix: http Hidden: False [SMTPListener] -Enabled: False +Enabled: True Port: 25 Protocol: TCP Listener: SMTPListener @@ -316,7 +333,7 @@ Hidden: False [FTPListener21] Enabled: True -Port: 2121 +Port: 21 Protocol: TCP Listener: FTPListener UseSSL: No @@ -333,7 +350,7 @@ Protocol: TCP Hidden: False [IRCServer] -Enabled: False +Enabled: True Port: 6667 Protocol: TCP Listener: IRCListener @@ -344,7 +361,7 @@ Timeout: 30 Hidden: False [TFTPListener] -Enabled: False +Enabled: True Port: 69 Protocol: UDP Listener: TFTPListener @@ -353,7 +370,7 @@ Hidden: False TFTPFilePrefix: tftp [POPServer] -Enabled: False +Enabled: True Port: 110 Protocol: TCP Listener: POPListener diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py index 559afea..1e0e643 100644 --- a/fakenet/fakenet.py +++ b/fakenet/fakenet.py @@ -6,7 +6,6 @@ # Developed by Peter Kacherginsky import logging -from splunk_http_handler import SplunkHttpHandler import os import sys @@ -78,6 +77,8 @@ def parse_config(self, config_filename): config = ConfigParser() config.read(config_filename) + # For list of valid remote 
logger configs + self.remoteloggers = {} self.logger.info('Loaded configuration file: %s', config_filename) @@ -90,28 +91,11 @@ def parse_config(self, config_filename): elif section == 'Diverter': self.diverter_config = dict(config.items(section)) - elif section == 'RemoteLogger': - self.remotelogger_config = dict(config.items(section)) - try: - if self.remotelogger_config['logger_type'] == 'splunk' and config.getboolean(section, 'enableremotelogger'): - ListenerBase.add_splunk_logger( - self.remotelogger_config['logger_host'], - self.remotelogger_config['splunk_hectoken'], - self.logger, - self.remotelogger_config['logger_port'], - self.remotelogger_config['splunk_cert_verify'], - self.remotelogger_config['splunk_source'] - ) - elif self.remotelogger_config['logger_type'] == 'syslog' and config.getboolean(section, 'enableremotelogger'): - ListenerBase.add_remote_logger( - self.remotelogger_config['logger_host'], - self.logger, - int(self.remotelogger_config['logger_port']), - self.remotelogger_config['logger_protocol'] - ) - self.logger.info("%s handler configured successfully." % self.remotelogger_config['logger_type']) - except Exception as e: - self.logger.warning("Failed to set remote log handler. Exception: %s" % e) + elif section.startswith('RemoteLogger'): + remotelogger_config = dict(config.items(section)) + if ListenerBase.add_remote_logger(self.logger, remotelogger_config): + # for adding to listeners when we initialize them + self.remoteloggers[section] = remotelogger_config elif config.getboolean(section, 'enabled'): self.listeners_config[section] = dict(config.items(section)) @@ -198,7 +182,7 @@ def start(self): listener_config = self.listeners_config[listener_name] # Pass remote logger configs in case we want to enable it for listener - listener_config.update(self.remotelogger_config) + listener_config.update(self.remoteloggers) # Anonymous listener if not 'listener' in listener_config: diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index 25726f9..c5469d5 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -43,7 +43,57 @@ def abs_config_path(path): return None -def add_remote_logger(host, logger=logging.getLogger('FakeNet Listener'), port=514, proto='TCP'): +class JSONIncludeFilter(logging.Filter): + """ + Logging filter to filter out any non-json formatted events. + """ + def filter(self, record): + return record.getMessage().startswith('{') and record.getMessage().endswith('}') + +def add_remote_logger(logger, config=None): + """ + Process remote logger configuration + :param logger: existing logging instance + :param config: dictionary object containing remote logger parameters + :return: true, if remote log handler added successfully, else false. 
+ """ + logging_level = config['logger_level'] if config.has_key('logger_level') else logging.INFO + json_only = bool(int(config['json_only'])) if config.has_key('json_only') else True + + if config is None: + return False + elif config['logger_type'] == 'splunk': + ssl_verify = bool(int(config['splunk_cert_verify'])) if config.has_key('splunk_cert_verify') else False + splunk_source = config['splunk_source'] if config.has_key('splunk_source') else 'FakeNet' + splunk_sourcetype = config['splunk_sourcetype'] if config.has_key('splunk_sourcetype') else '_json' + port = int(config['logger_port']) if config.has_key('logger_port') else 8080 + + return add_splunk_logger( + config['logger_host'], + config['splunk_hectoken'], + logger, + logging_level, + port, + ssl_verify, + splunk_source, + splunk_sourcetype, + json_only + ) + else: + port = int(config['logger_port']) if config.has_key('logger_port') else 514 + proto = config['logger_protocol'] if config.has_key('logger_protocol') else 'TCP' + return add_syslog_logger( + config['logger_host'], + logger, + logging_level, + port, + proto, + json_only + ) + + +def add_syslog_logger(host, logger=logging.getLogger('FakeNet Listener'), logging_level=logging.INFO, + port=514, proto='TCP', json_only=False): """ Attach a remote syslog handler to existing logger @@ -51,19 +101,34 @@ def add_remote_logger(host, logger=logging.getLogger('FakeNet Listener'), port=5 :param logger: logging instance :param port: Network port to send logs to :param proto: Network protocol supported by remote logger - :return: Modified logger with remote handler attached + :param json_only: Set True to only emit json formatted logs + :return: True if handler was added, else false """ + socket_type = {'UDP': SOCK_DGRAM, 'TCP': SOCK_STREAM } - return logger.addHandler( - logging.handlers.SysLogHandler( - (host, port), - logging.handlers.SysLogHandler.LOG_DAEMON, - socket_type[proto.upper()] - ) - ) + try: + remote_handler = logging.handlers.SysLogHandler( + (host, int(port)), + logging.handlers.SysLogHandler.LOG_DAEMON, + socket_type[proto.upper()] + ) + try: + remote_handler.setLevel(logging.getLevelName(logging_level)) + except: + remote_handler.setLevel(logging.INFO) + + if json_only: + remote_handler.addFilter(JSONIncludeFilter()) + + logger.addHandler(remote_handler) + return True + except Exception as e: + logger.error("Failed to set Splunk log handler. Exception: %s" % e) + return False -def add_splunk_logger(host, hectoken, logger=logging.getLogger('FakeNet Listener'), port=8080, verify=True, source='FakeNet', sourcetype='_json'): +def add_splunk_logger(host, hectoken, logger=logging.getLogger('FakeNet Listener'), logging_level=logging.INFO, + port=8080, verify=True, source='FakeNet', sourcetype='_json', json_only=True): """ Attach a remote Splunk HTTP Event Collector handler to existing logger http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector @@ -75,14 +140,9 @@ def add_splunk_logger(host, hectoken, logger=logging.getLogger('FakeNet Listener :param verify: SSL verification :param source: Splunk event source :param sourcetype: Splunk event sourcetype. - :return: Modified logger with remote handler attached + :param json_only: Set True to only emit json formatted logs + :return: True if handler was added, else false """ - class JSONFilter(logging.Filter): - """ - Logging filter to filter out any non-json formatted events. 
- """ - def filter(self, record): - return record.getMessage().startswith('{') and record.getMessage().endswith('}') try: from splunk_http_handler import SplunkHttpHandler @@ -95,15 +155,23 @@ def filter(self, record): sourcetype=sourcetype, ssl_verify=bool(verify) ) - splunk_handler.addFilter(JSONFilter()) + try: + splunk_handler.setLevel(logging.getLevelName(logging_level)) + except: + splunk_handler.setLevel(logging.INFO) + + if json_only: + splunk_handler.addFilter(JSONIncludeFilter()) + logger.addHandler(splunk_handler) + return True except Exception as e: logger.error("Failed to set Splunk log handler. Exception: %s" % e) + return False except Exception as e: logger.error("Failed to import Splunk python module (splunk_http_handler), Try 'pip install splunk_http_handler'") logger.debug("Exception raised: %s" % e) - finally: - return logger + return False def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): @@ -124,28 +192,8 @@ def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): stream_handler.setFormatter(stream_formatter) logger.addHandler(stream_handler) - if config['enableremotelogger']: - try: - if config['logger_type'] == 'splunk': - add_splunk_logger( - config['logger_host'], - config['splunk_hectoken'], - logger, - config['logger_port'], - config['splunk_cert_verify'], - source=name - ) - elif config['logger_type'] == 'syslog': - add_remote_logger( - config['logger_host'], - logger, - int(config['logger_port']), - config['logger_protocol'] - ) - except Exception as e: - logger.warning("Failed to add %s log handler for %s" % (config['logger_type'], name)) - logger.debug("Exception raised: %s") % e - logger.debug("Config: \n%s") % config - + for k in config.iterkeys(): + if config[k].__class__ is dict and config[k].has_key('logger_host'): + add_remote_logger(logger, config[k]) return logger From dbdd1f2e1e51aa6806cf47804d9230971cf3c0e5 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Tue, 16 Jan 2018 15:59:06 -0600 Subject: [PATCH 04/13] Gave POP and DNS listeners the same logging treatment as other Listeners. --- fakenet/listeners/DNSListener.py | 25 ++++++++++++++++--------- fakenet/listeners/POPListener.py | 21 +++++++++++++-------- 2 files changed, 29 insertions(+), 17 deletions(-) diff --git a/fakenet/listeners/DNSListener.py b/fakenet/listeners/DNSListener.py index 1daee61..148b68a 100644 --- a/fakenet/listeners/DNSListener.py +++ b/fakenet/listeners/DNSListener.py @@ -9,6 +9,7 @@ import socket from . 
import * +import ListenerBase class DNSListener(): @@ -30,16 +31,14 @@ def __init__( logging_level=logging.INFO, ): - self.logger = logging.getLogger(name) - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.local_ip = '0.0.0.0' self.server = None self.name = 'DNS' self.port = self.config.get('port', 53) - self.logger.info('Starting...') + self.logger.info('Starting %s %s server on %s:%s' % (self.config['protocol'], self.name, self.local_ip, self.config.get('port'))) self.logger.debug('Initialized with config:') for key, value in config.iteritems(): @@ -49,12 +48,10 @@ def start(self): # Start UDP listener if self.config['protocol'].lower() == 'udp': - self.logger.debug('Starting UDP ...') self.server = ThreadedUDPServer((self.local_ip, int(self.config.get('port', 53))), self.config, self.logger, UDPHandler) # Start TCP listener elif self.config['protocol'].lower() == 'tcp': - self.logger.debug('Starting TCP ...') self.server = ThreadedTCPServer((self.local_ip, int(self.config.get('port', 53))), self.config, self.logger, TCPHandler) self.server.nxdomains = int(self.config.get('nxdomains', 0)) @@ -64,8 +61,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.debug('Stopping...') - + self.logger.info('Stopping %s %s server on %s:%s' % (self.config['protocol'], self.name, self.local_ip, self.config.get('port'))) + # Stop listener if self.server: self.server.shutdown() @@ -177,7 +174,17 @@ def parse(self,data): response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](fake_record))) response = response.pack() - + + logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1], + 'dest_port': self.server.config['port'], 'protocol': self.server.config['protocol'], + 'query_type': qtype, 'query': qname, 'listener': __name__}) + if qtype in ['A', 'MX', 'TXT']: + logmsg['answer'] = fake_record + else: + logmsg['answer'] = "Not Implemented" + + self.server.logger.info(logmsg) + return response class UDPHandler(DNSHandler, SocketServer.BaseRequestHandler): diff --git a/fakenet/listeners/POPListener.py b/fakenet/listeners/POPListener.py index a1801ec..c075cdb 100644 --- a/fakenet/listeners/POPListener.py +++ b/fakenet/listeners/POPListener.py @@ -10,6 +10,7 @@ import socket from . 
import * +import ListenerBase EMAIL = """From: "Bob Example" To: Alice Example @@ -48,9 +49,7 @@ def __init__(self, logging_level=logging.INFO, ): - self.logger = logging.getLogger(name) - self.logger.setLevel(logging_level) - + self.logger = ListenerBase.set_logger("%s:%s" % (self.__module__, name), config, logging_level) self.config = config self.name = name self.local_ip = '0.0.0.0' @@ -58,14 +57,13 @@ def __init__(self, self.name = 'POP' self.port = self.config.get('port', 110) - self.logger.info('Starting...') - self.logger.debug('Initialized with config:') for key, value in config.iteritems(): self.logger.debug(' %10s: %s', key, value) def start(self): - self.logger.debug('Starting...') + ssl_str = 'POPS' if self.config.get('usessl') == 'Yes' else 'POP' + self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) self.server = ThreadedTCPServer((self.local_ip, int(self.config['port'])), ThreadedTCPRequestHandler) @@ -94,7 +92,8 @@ def start(self): self.server_thread.start() def stop(self): - self.logger.info('Stopping...') + ssl_str = 'POPS' if self.config.get('usessl') == 'Yes' else 'POP' + self.logger.info('Starting %s server on %s:%s' % (ssl_str, self.local_ip, self.config.get('port'))) if self.server: self.server.shutdown() self.server.server_close() @@ -105,10 +104,12 @@ def handle(self): # Timeout connection to prevent hanging self.request.settimeout(int(self.server.config.get('timeout', 10))) + logmsg = dict({'src': self.client_address[0], 'src_port':self.client_address[1], + 'dest_port': self.server.server_address[1], 'listener': __name__}) try: - self.request.sendall("+OK FakeNet POP3 Server Ready\r\n") + self.request.sendall("+OK %s\r\n" % self.server.config.get('banner',"FakeNet POP3 Server Ready")) while True: data = self.request.recv(1024) @@ -130,6 +131,10 @@ def handle(self): handler = getattr(self, 'pop_%s' % (cmd.upper()), self.pop_DEFAULT) handler(cmd, params) + logmsg['pop_cmd'] = cmd.upper() + logmsg['pop_cmd_args'] = params + self.server.logger.info(logmsg) + except socket.timeout: self.server.logger.warning('Connection timeout') From db669ce38e000c2554fcf8323952b81ab00e09b2 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Tue, 16 Jan 2018 16:36:18 -0600 Subject: [PATCH 05/13] Minor fix to ssl configuration. Copied from SMTPListener. Tested. --- fakenet/listeners/POPListener.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fakenet/listeners/POPListener.py b/fakenet/listeners/POPListener.py index c075cdb..37f3315 100644 --- a/fakenet/listeners/POPListener.py +++ b/fakenet/listeners/POPListener.py @@ -82,7 +82,7 @@ def start(self): self.logger.error('Could not locate %s', certfile_path) sys.exit(1) - self.server.socket = ssl.wrap_socket(self.server.socket, keyfile='privkey.pem', certfile='server.pem', server_side=True, ciphers='RSA') + self.server.socket = ssl.wrap_socket(self.server.socket, keyfile=keyfile_path, certfile=certfile_path, server_side=True, ciphers='RSA') self.server.logger = self.logger self.server.config = self.config From c03866ee7d5a423b8ade964a4aba89e83f487f2a Mon Sep 17 00:00:00 2001 From: vavarachen Date: Wed, 17 Jan 2018 22:49:16 -0600 Subject: [PATCH 06/13] Added ability to turn off remote logging per listener using RemoteLogger = [1|0] Fixed type in ProxyListener. Renamed log_mesage to log_message. 
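
For reference, a rough sketch (not code from this patch) of the per-listener opt-out idea; the dict values are illustrative, and the option name is shown lowercased ('remotelogging') the way ConfigParser hands options to a listener config:

    def wants_remote_logging(listener_config):
        # Opt in by default; an explicit 0 in the listener's section disables
        # remote handlers for that listener only.
        if 'remotelogging' not in listener_config:
            return True
        return int(listener_config['remotelogging']) == 1

    # Illustrative listener configs as parsed from default.ini
    http_cfg = {'port': '80', 'protocol': 'TCP', 'remotelogging': '0'}
    dns_cfg = {'port': '53', 'protocol': 'UDP'}

    print(wants_remote_logging(http_cfg))   # False -> skip remote handlers
    print(wants_remote_logging(dns_cfg))    # True  -> attach remote handlers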
--- fakenet/listeners/ListenerBase.py | 7 ++++--- fakenet/listeners/ProxyListener.py | 8 ++++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index c5469d5..cd5179b 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -192,8 +192,9 @@ def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): stream_handler.setFormatter(stream_formatter) logger.addHandler(stream_handler) - for k in config.iterkeys(): - if config[k].__class__ is dict and config[k].has_key('logger_host'): - add_remote_logger(logger, config[k]) + if not config.has_key('remotelogging') or config['remotelogging'] == 1: + for k in config.iterkeys(): + if config[k].__class__ is dict and config[k].has_key('logger_host'): + add_remote_logger(logger, config[k]) return logger diff --git a/fakenet/listeners/ProxyListener.py b/fakenet/listeners/ProxyListener.py index ec6047b..feeb7ab 100644 --- a/fakenet/listeners/ProxyListener.py +++ b/fakenet/listeners/ProxyListener.py @@ -221,7 +221,7 @@ def get_top_listener(config, data, listeners, diverter, orig_src_ip, return top_listener class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): - def log_mesage(self, hexdump): + def log_message(self, hexdump): logmsg = dict({'protocol':'tcp', 'src': self.client_address[0], 'src_port': self.client_address[1], 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) @@ -262,7 +262,7 @@ def handle(self): self.server.logger.debug(line) self.server.logger.debug('%s', '-'*80,) # Log message in json format - self.log_mesage(hexdump_table(data)) + self.log_message(hexdump_table(data)) except Exception as e: self.server.logger.info('recv() error: %s' % e.message) @@ -329,7 +329,7 @@ def handle(self): remote_sock.send(data) class ThreadedUDPRequestHandler(SocketServer.BaseRequestHandler): - def log_mesage(self, hexdump): + def log_message(self, hexdump): logmsg = dict({'protocol':'udp', 'src': self.client_address[0], 'src_port': self.client_address[1], 'dest_port': self.server.server_address[1], 'hexdump': hexdump, 'listener': __name__}) @@ -352,7 +352,7 @@ def handle(self): self.server.logger.debug(line) self.server.logger.debug('%s', '-'*80,) # Log message in json format - self.log_mesage(hexdump_table(data)) + self.log_message(hexdump_table(data)) orig_src_ip = self.client_address[0] orig_src_port = self.client_address[1] From 02bfd49913a3bca661b0c2164120f8636c6156f7 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Thu, 18 Jan 2018 00:02:30 -0600 Subject: [PATCH 07/13] Added logging filter to main daemon logger. JSON messages are not logged if remote logger is not set. 
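
A minimal, self-contained sketch of the idea (the logger name and messages below are made up): a console handler with a JSON-exclude filter keeps the human-readable lines while dropping the JSON session records that are meant for remote handlers:

    import logging

    class JSONExcludeFilter(logging.Filter):
        """Drop records whose message looks like a JSON object."""
        def filter(self, record):
            msg = record.getMessage()
            return not (msg.startswith('{') and msg.endswith('}'))

    logger = logging.getLogger('demo')
    logger.setLevel(logging.INFO)

    console = logging.StreamHandler()
    console.addFilter(JSONExcludeFilter())
    logger.addHandler(console)

    logger.info('Starting HTTP server on 0.0.0.0:80')   # printed to console
    logger.info('{"src": "1.2.3.4", "dest_port": 80}')  # filtered from console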
--- fakenet/fakenet.py | 8 ++++++++ fakenet/listeners/ListenerBase.py | 13 ++++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py index 1e0e643..8507bf3 100644 --- a/fakenet/fakenet.py +++ b/fakenet/fakenet.py @@ -28,6 +28,7 @@ # Listener services import listeners from listeners import * +from listeners.ListenerBase import JSONExcludeFilter ############################################################################### # FakeNet @@ -42,6 +43,13 @@ def __init__(self, logging_level = logging.INFO): self.logging_level = logging_level + stream_handler = logging.StreamHandler() + stream_handler.setLevel(logging_level) + stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') + stream_handler.setFormatter(stream_formatter) + stream_handler.addFilter(JSONExcludeFilter()) + self.logger.addHandler(stream_handler) + # Diverter used to intercept and redirect traffic self.diverter = None diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index cd5179b..73828cf 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -50,6 +50,14 @@ class JSONIncludeFilter(logging.Filter): def filter(self, record): return record.getMessage().startswith('{') and record.getMessage().endswith('}') + +class JSONExcludeFilter(logging.Filter): + """ + Logging filter to filter out any non-json formatted events. + """ + def filter(self, record): + return not record.getMessage().startswith('{') and not record.getMessage().endswith('}') + def add_remote_logger(logger, config=None): """ Process remote logger configuration @@ -190,11 +198,14 @@ def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): stream_handler.setLevel(logging_level) stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') stream_handler.setFormatter(stream_formatter) - logger.addHandler(stream_handler) if not config.has_key('remotelogging') or config['remotelogging'] == 1: for k in config.iterkeys(): if config[k].__class__ is dict and config[k].has_key('logger_host'): add_remote_logger(logger, config[k]) + else: + stream_handler.addFilter(JSONExcludeFilter()) + logger.addHandler(stream_handler) + return logger From 054d3f549ee0b67ebb8e91ea199eaf876c04d27c Mon Sep 17 00:00:00 2001 From: vavarachen Date: Fri, 19 Jan 2018 15:57:14 -0600 Subject: [PATCH 08/13] setting PID log message to debug since it appears to cause heavy load and too much noise. --- fakenet/diverters/linux.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fakenet/diverters/linux.py b/fakenet/diverters/linux.py index 4a52edc..3545f87 100644 --- a/fakenet/diverters/linux.py +++ b/fakenet/diverters/linux.py @@ -157,7 +157,7 @@ def handle_pkt(self): if ((not (self.diverter.pdebug_level & DGENPKTV)) and pid and (pid != self.diverter.pid)): - self.logger.info(' pid: %d name: %s' % + self.logger.debug(' pid: %d name: %s' % (pid, comm if comm else 'Unknown')) hdr_latest = self.hdr From 37a2ed7b493fe6aefb0ad12a24552116d63ebf8b Mon Sep 17 00:00:00 2001 From: vavarachen Date: Mon, 22 Jan 2018 21:31:36 -0600 Subject: [PATCH 09/13] Dropping pid logging to a lower level. It was causing excessive CPU usage under high load. 
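
The effect, roughly (pid and name below are made up): once the record level drops below the logger's effective level, the record never reaches any handler, so the per-packet console write disappears; passing the format arguments lazily also skips the string formatting on the hot path:

    import logging

    logger = logging.getLogger('Diverter')
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(logging.INFO)                 # normal, non-verbose run

    pid, comm = 1234, 'curl'                      # illustrative packet owner
    logger.debug(' pid: %d name: %s', pid, comm)  # dropped early, no I/O

    logger.setLevel(logging.DEBUG)                # verbose run still sees it
    logger.debug(' pid: %d name: %s', pid, comm)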
--- fakenet/diverters/linux.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fakenet/diverters/linux.py b/fakenet/diverters/linux.py index 4a52edc..3545f87 100644 --- a/fakenet/diverters/linux.py +++ b/fakenet/diverters/linux.py @@ -157,7 +157,7 @@ def handle_pkt(self): if ((not (self.diverter.pdebug_level & DGENPKTV)) and pid and (pid != self.diverter.pid)): - self.logger.info(' pid: %d name: %s' % + self.logger.debug(' pid: %d name: %s' % (pid, comm if comm else 'Unknown')) hdr_latest = self.hdr From a9baef9628f180d21edd1e88f8a2608eeb4aeba1 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Fri, 26 Jan 2018 13:28:09 -0600 Subject: [PATCH 10/13] Implemented ability to send syslog output to /dev/log and option to filter out JSON. Additionally, addressed the issue of excluding json output in listener stream handler. --- fakenet/listeners/ListenerBase.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index cd5179b..34ba9f4 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -50,6 +50,14 @@ class JSONIncludeFilter(logging.Filter): def filter(self, record): return record.getMessage().startswith('{') and record.getMessage().endswith('}') + +class JSONExcludeFilter(logging.Filter): + """ + Logging filter to filter out any json formatted events. + """ + def filter(self, record): + return not record.getMessage().startswith('{') and not record.getMessage().endswith('}') + def add_remote_logger(logger, config=None): """ Process remote logger configuration @@ -107,7 +115,13 @@ def add_syslog_logger(host, logger=logging.getLogger('FakeNet Listener'), loggin socket_type = {'UDP': SOCK_DGRAM, 'TCP': SOCK_STREAM } try: - remote_handler = logging.handlers.SysLogHandler( + if str(host).startswith('/dev'): + remote_handler = logging.handlers.SysLogHandler( + host, + logging.handlers.SysLogHandler.LOG_DAEMON + ) + else: + remote_handler = logging.handlers.SysLogHandler( (host, int(port)), logging.handlers.SysLogHandler.LOG_DAEMON, socket_type[proto.upper()] @@ -190,9 +204,10 @@ def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): stream_handler.setLevel(logging_level) stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') stream_handler.setFormatter(stream_formatter) + stream_handler.addFilter(JSONExcludeFilter()) logger.addHandler(stream_handler) - if not config.has_key('remotelogging') or config['remotelogging'] == 1: + if (config is not None) and (not config.has_key('remotelogging') or config['remotelogging']) == 1: for k in config.iterkeys(): if config[k].__class__ is dict and config[k].has_key('logger_host'): add_remote_logger(logger, config[k]) From 79cb90f6e36b0ce8487ceb8daf31d353f50d205a Mon Sep 17 00:00:00 2001 From: vavarachen Date: Fri, 26 Jan 2018 14:42:55 -0600 Subject: [PATCH 11/13] Added option to specify facility for syslog. 
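
For reference, the two SysLogHandler forms this supports, sketched outside the project code (the hostname and facility below are examples; the /dev/log form assumes a Linux host):

    import logging
    import logging.handlers
    from socket import SOCK_DGRAM

    logger = logging.getLogger('FakeNet Listener')

    # Local syslog daemon over the Unix domain socket; no port or protocol
    local_handler = logging.handlers.SysLogHandler(
        address='/dev/log',
        facility=logging.handlers.SysLogHandler.LOG_LOCAL6)

    # Remote syslog server over UDP
    remote_handler = logging.handlers.SysLogHandler(
        address=('syslog.example.com', 514),
        facility=logging.handlers.SysLogHandler.LOG_LOCAL6,
        socktype=SOCK_DGRAM)

    for handler in (local_handler, remote_handler):
        handler.setLevel(logging.INFO)
        logger.addHandler(handler)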
--- fakenet/fakenet.py | 0 fakenet/listeners/ListenerBase.py | 7 ++++--- 2 files changed, 4 insertions(+), 3 deletions(-) mode change 100644 => 100755 fakenet/fakenet.py diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py old mode 100644 new mode 100755 diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index 34ba9f4..9f33d5d 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -101,7 +101,8 @@ def add_remote_logger(logger, config=None): def add_syslog_logger(host, logger=logging.getLogger('FakeNet Listener'), logging_level=logging.INFO, - port=514, proto='TCP', json_only=False): + port=514, proto='TCP', json_only=False, + facility=logging.handlers.SysLogHandler.LOG_LOCAL6): """ Attach a remote syslog handler to existing logger @@ -118,12 +119,12 @@ def add_syslog_logger(host, logger=logging.getLogger('FakeNet Listener'), loggin if str(host).startswith('/dev'): remote_handler = logging.handlers.SysLogHandler( host, - logging.handlers.SysLogHandler.LOG_DAEMON + facility ) else: remote_handler = logging.handlers.SysLogHandler( (host, int(port)), - logging.handlers.SysLogHandler.LOG_DAEMON, + facility, socket_type[proto.upper()] ) try: From 118d31045022b9a1eb376fff2192100877e85b74 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Mon, 19 Feb 2018 17:19:33 -0600 Subject: [PATCH 12/13] Minor tweaks to add ability to filter out json messages from the daemon root logger --- fakenet/fakenet.py | 8 -------- fakenet/listeners/ListenerBase.py | 6 ++++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/fakenet/fakenet.py b/fakenet/fakenet.py index 8507bf3..1e0e643 100755 --- a/fakenet/fakenet.py +++ b/fakenet/fakenet.py @@ -28,7 +28,6 @@ # Listener services import listeners from listeners import * -from listeners.ListenerBase import JSONExcludeFilter ############################################################################### # FakeNet @@ -43,13 +42,6 @@ def __init__(self, logging_level = logging.INFO): self.logging_level = logging_level - stream_handler = logging.StreamHandler() - stream_handler.setLevel(logging_level) - stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') - stream_handler.setFormatter(stream_formatter) - stream_handler.addFilter(JSONExcludeFilter()) - self.logger.addHandler(stream_handler) - # Diverter used to intercept and redirect traffic self.diverter = None diff --git a/fakenet/listeners/ListenerBase.py b/fakenet/listeners/ListenerBase.py index 9f33d5d..8371ef8 100644 --- a/fakenet/listeners/ListenerBase.py +++ b/fakenet/listeners/ListenerBase.py @@ -134,6 +134,8 @@ def add_syslog_logger(host, logger=logging.getLogger('FakeNet Listener'), loggin if json_only: remote_handler.addFilter(JSONIncludeFilter()) + else: + remote_handler.addFilter(JSONExcludeFilter()) logger.addHandler(remote_handler) return True @@ -189,7 +191,7 @@ def add_splunk_logger(host, hectoken, logger=logging.getLogger('FakeNet Listener return False -def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): +def set_logger(name="FakeNetListener", config=None, logging_level=logging.WARNING): """ Set default logger for listeners @@ -206,7 +208,7 @@ def set_logger(name="FakeNetListener", config=None, logging_level=logging.INFO): stream_formatter = logging.Formatter('%(asctime)s [%(name)18s] %(message)s', datefmt='%m/%d/%y %I:%M:%S %p') stream_handler.setFormatter(stream_formatter) stream_handler.addFilter(JSONExcludeFilter()) - 
logger.addHandler(stream_handler) + #logger.addHandler(stream_handler) if (config is not None) and (not config.has_key('remotelogging') or config['remotelogging']) == 1: for k in config.iterkeys(): From 48fdca465a6ef5a9759f0d717ef0805c6ef4c051 Mon Sep 17 00:00:00 2001 From: vavarachen Date: Mon, 19 Feb 2018 17:49:06 -0600 Subject: [PATCH 13/13] Added example showing /dev/log as a valid destination. Switched SSL specification to 1|0 from true|false. --- fakenet/configs/default.ini | 38 +++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/fakenet/configs/default.ini b/fakenet/configs/default.ini index 22542dd..3993bf2 100644 --- a/fakenet/configs/default.ini +++ b/fakenet/configs/default.ini @@ -118,22 +118,43 @@ BlackListPortsUDP: 67, 68, 137, 138, 443, 1900, 5355 # # [RemoteLogger:description] # Logger_Type - [syslog|splunk] (required) - -# logger_host - [ip,name,fqdn] (required) +# logger_host - [ip,name,fqdn,/dev/log] (required) # Logger_Port - 0-65535 (optional) # Logger_Protocol - [TCP|UDP] (optional) # https://docs.python.org/2/library/logging.html#logging-levels # Logger_Level - [DEBUG|INFO|WARNING|ERROR|CRITICAL] (optional) # +###### +# Syslog Example +# +# [RemoteLogger:syslogUDP] +# Logger_Type : syslog +# Logger_Host : logger.domain.tld +# Logger_Port : 514 +# Logger_Level: INFO +# Logger_Protocol : UDP +# JSON_Only : 0 +# +# [RemoteLogger:syslogSocket] +# Logger_Type : syslog +# Logger_Host : /dev/log +# Logger_Level: INFO +# JSON_Only : 0 + # Splunk specific configurations # Requires splunk_http_handler python module ('pip install splunk_http_handler') # If SSL capability is required, see https://github.com/vavarachen/splunk_http_handler # http://docs.splunk.com/Documentation/SplunkCloud/latest/Data/UsetheHTTPEventCollector # Splunk_HECToken - xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx (required) +# # Splunk_Source - Any valid splunk source name (optional) +# # Replace with custom sourcetype as needed # Splunk_Sourcetype - _json (optional) +# # Splunk_Cert_verify - [0|1] (optional) +# 0 - Don't very SSL cert +# 1 - Verify SSL cert # # JSON_Only - [0|1] # 0 - Log all events @@ -146,21 +167,10 @@ BlackListPortsUDP: 67, 68, 137, 138, 443, 1900, 5355 # Logger_Host : localhost # Logger_Port: 8080 # Logger_Level: INFO -# Splunk_Cert_verify : True +# Splunk_Cert_verify : 0 # Splunk_HECToken : xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx # Splunk_Source : FakeNet # JSON_Only : 1 -###### -# Syslog Example -# -# [RemoteLogger:syslogUDP] -# Logger_Type : syslog -# Logger_Host : localhost -# Logger_Port : 514 -# Logger_Level: INFO -# Logger_Protocol : UDP -# JSON_Only : 0 - ############################################################################### # Listener Configuration