diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..1b99ce0
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,6 @@
+[MASTER]
+extension-pkg-whitelist=GeoIP
+
+[FORMAT]
+indent-string='    '
+indent-after-paren=2
diff --git a/README.md b/README.md
index 1592c1c..26ba063 100644
--- a/README.md
+++ b/README.md
@@ -2,130 +2,143 @@
 alpha beta whatever
 partly works
 
-## roadmap
-refactor README and lib/plugin/plugins/*
-cleanup *
-
-probably Listener shoul be part of Core in order to supervise everything
-
-tasks don't store results currently. need to implement some validation of performed tasks (at least in Executor thread before spreading new tasks)
-http://python-rq.org/docs/results/
 
 ## configuration
 
 `data/config.yaml`
 ```
-# lists top-level services (like listeners, executors, data-manager) and pipelines enabled
+---
+dsl_version: 1
+
 core:
-  services: # should point to correct service
-    - data_manager
+  services:
+    - random_ip
     - rq_executor
+    - tg_feed
   pipelines:
     - ftp
+    - gopher
 
-# describes top-level services and their configurations
 services:
-  data_manager:
-    # REQUIRED package name, just like path to file with dots
-    package: lib.data.Manager
-    # REQUIRED class inherited from Service (lib.Service)
-    service: DataManager
-    # REQUIRED used to select one of storages
-    data:
-      id: pool
-    # there can be more service-specific configuration fields
-    # for now they can be found in code :)
-    sources:
-      - random_ip
-    feeds:
-      - test_telegram
-  rq_executor:
-    package: lib.exeq.Executor
-    service: RQExecutor
-    data:
-      id: pool
-    redis:
-      host: "127.0.0.1"
-
-# describes datasources for data_manager
-sources:
   random_ip:
     package: lib.plugin.base.lib.IP
     service: RandomIP
-    data:
-      id: random_ip
-
-# describes datafeeds for data_manager
-feeds:
-  test_telegram: # doesn't work yet, eh
+    storage: ip_source
+  rq_executor:
+    package: lib.exec.Executor
+    service: RQExecutor
+    storage: pool
+    redis:
+      host: "127.0.0.1"
+  tg_feed:
     package: lib.plugin.base.lib.Telegram
     service: TelegramFeed
-    data:
-      id: pool
-    token:
-    chats:
-      - id: good_evening
-        pipelines: [ftp, gopher]
-        filter:
-          clause: any-of
-          equal:
-            - ftp_list_files_status: success
-            - gopher_collect_status: success
+    storage: pool
+    token: "mocken"
+    chats:
+      - id: aiWeipeighah7vufoHa0ieToipooYe
+        if:
+          steps.ftp_apply_tpl: true
+          data.filter: false
+      - id: ohl7AeGah5uo8cho4nae9Eemaeyae3
+        if:
+          steps.gopher_apply_tpl: true
+          data.filter: false
 
-# describes various storages, e.g. data pool for pipelines or queues for datasources
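+# storages keep generated items and pipeline results; size: 0 means unbounded,
+# while a non-zero size makes producers block once the backing collection is full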
 storage:
   pool:
-    # REQUIRED
     package: lib.plugin.base.lib.Mongo
     service: MongoStorage
-    size: 40960
-    # service-specific
+    size: 0
     db: "medved"
     coll: 'pool'
-  random_ip:
+  ip_source:
     package: lib.plugin.base.lib.Mongo
     service: MongoStorage
-    size: 500
+    size: 800
     db: "medved"
-    coll: 'randomipsource'
+    coll: 'ip_source'
+
 
-# describes available pipelines
 pipelines:
   ftp:
-    # list of steps with dependencies
-    steps:
-    # will pass 10 items to lib.plugin.iscan.tasks.common.scan
-      - name: scan
-        package: lib.plugin.iscan.tasks.common
-        service: scan
-        multiple: 10 # default: False
-        requires: []
-    # will pass 1 item marked with ftp_scan to lib.plugin.iscan.tasks.ftp.connect
-      - name: connect
-        package: lib.plugin.iscan.tasks.ftp
-        service: connect
-        requires:
-          - ftp_scan
-      - name: list_files
-        package: lib.plugin.iscan.tasks.ftp
-        service: list_files
-        requires:
-          - ftp_connect
+    source: ip_source
+    steps:
+      - task: ftp_scan
+        priority: low
+        parallel: 100
+      - task: ftp_connect
+        priority: normal
+        if:
+          steps.ftp_scan: true
+      - task: ftp_list_files
+        priority: high
+        if:
+          steps.ftp_connect: true
+      - task: ftp_apply_tpl
+        priority: high
+        if:
+          steps.ftp_list_files: true
+  gopher:
+    source: ip_source
+    steps:
+      - task: gopher_scan
+        priority: normal
+        parallel: 100
+      - task: gopher_find
+        priority: high
+        if:
+          steps.gopher_scan: true
+      - task: gopher_apply_tpl
+        priority: high
+        if:
+          steps.gopher_find: true
+
+  http:
+    source: ip_source
+    steps:
+      - task: http_scan
+        priority: low
+        parallel: 25
 
-# various configurations for tasks
 tasks:
+  gopher_scan:
+    package: lib.plugin.iscan.tasks.common
+    service: MasScanTask
+    ports:
+      - 70
+  gopher_find:
+    package: lib.plugin.iscan.tasks.gopher
+    service: GopherFindTask
+  gopher_apply_tpl:
+    package: lib.plugin.base.tasks.text
+    service: Jinja2TemplateTask
+    path: lib/plugin/iscan/templates/gopher.tpl
+  ftp_scan:
+    package: lib.plugin.iscan.tasks.common
+    service: MasScanTask
     ports:
       - 21
-  ftp_connect:
+  ftp_connect:
+    package: lib.plugin.iscan.tasks.ftp
+    service: FTPConnectTask
     logins: data/ftp/logins.txt
     passwords: data/ftp/passwords.txt
     bruteforce: true
     timeout: 15
   ftp_list_files:
+    package: lib.plugin.iscan.tasks.ftp
+    service: FTPListFilesTask
+    filter: true
+  ftp_apply_tpl:
+    package: lib.plugin.base.tasks.text
+    service: Jinja2TemplateTask
+    path: lib/plugin/iscan/templates/ftp.tpl
 
 logging:
-  Storage: INFO
+  Storage: DEBUG
+  Loader: DEBUG
 ```
 
 probably it can be launched with docker, however I didn't test it yet
@@ -137,9 +150,10 @@ you'll need working redis and mongodb for default configuration
 
 ## top-level services
 
-### lib.data.Manager.DataManager
-Orchestrates datasources and datafeeds - starts and stops them, also checks pool size. If it is too low - takes data from DS.
-### lib.exeq.Executor.RQExecutor
+### sources ###
+### feeds ###
+
+### lib.exec.Executor.RQExecutor
 Should run pipelines described in configuration. Works via [RedisQueue](http://python-rq.org/), so needs some Redis up and running
 Basically takes data from pool and submits it to workers.
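+
+A minimal sketch of that submission path (based on `lib/exec/Executor.py`; naming the queue after the step's priority is an assumption):
+
+```
+from redis import Redis
+from rq import Queue
+
+items = [{'data': {'ip': '198.51.100.1'}, 'steps': {}}]  # items taken from the pool
+q = Queue('normal', connection=Redis(host='127.0.0.1'))
+# a worker imports lib.exec.Task and calls run('ftp_scan', items)
+job = q.enqueue('lib.exec.Task.run', 'ftp_scan', items)
+```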
 RQ workers should be launched separately (`rqworker worker` from code root)
diff --git a/data/config.yaml b/data/config.yaml
index 0a47f04..1234fc7 100644
--- a/data/config.yaml
+++ b/data/config.yaml
@@ -5,6 +5,7 @@ core:
   services:
     - random_ip
     - rq_executor
+    - GC
     - tg_feed
   pipelines:
     - ftp
@@ -16,21 +17,28 @@ services:
     service: RandomIP
     storage: ip_source
   rq_executor:
-    package: lib.exeq.Executor
+    package: lib.exec.Executor
     service: RQExecutor
     storage: pool
     redis:
       host: redis
+  GC:
+    package: lib.plugin.base.lib.GC
+    service: GarbageCollector
+    storage: pool
+    delay: 10
+    if:
+      steps.ftp_scan: false
+      steps.gopher_scan: false
   tg_feed:
     package: lib.plugin.base.lib.Telegram
     service: TelegramFeed
     storage: pool
-    token: "3"
+    token: "358947254:"
     chats:
       - id: aiWeipeighah7vufoHa0ieToipooYe
         if:
           steps.ftp_apply_tpl: true
-          data.filter: false
       - id: ohl7AeGah5uo8cho4nae9Eemaeyae3
         if:
           steps.gopher_apply_tpl: true
@@ -65,13 +73,19 @@ pipelines:
         if:
           steps.ftp_scan: true
       - task: ftp_list_files
-        priority: high
+        priority: normal
         if:
           steps.ftp_connect: true
+      - task: ftp_filter_files
+        priority: normal
+        parallel: 100
+        if:
+          steps.ftp_list_files: true
       - task: ftp_apply_tpl
         priority: high
         if:
-          steps.ftp_list_files: true
+          steps.ftp_filter_files: true
+          data.filter: false
   gopher:
     source: ip_source
     steps:
@@ -124,11 +138,15 @@ tasks:
   http_scan:
     package: lib.plugin.iscan.tasks.common
     service: MasScanTask
-    ports:
+    ports: &http_ports
       - 80
       - 81
       - 8080
       - 8081
+  http_find:
+    package: lib.plugin.iscan.tasks.http
+    service: HTTPFindTask
+
   ftp_scan:
     package: lib.plugin.iscan.tasks.common
@@ -138,19 +156,21 @@ tasks:
   ftp_connect:
     package: lib.plugin.iscan.tasks.ftp
     service: FTPConnectTask
-    logins: data/ftp/logins.txt
+    usernames: data/ftp/usernames.txt
     passwords: data/ftp/passwords.txt
     bruteforce: true
     timeout: 15
   ftp_list_files:
     package: lib.plugin.iscan.tasks.ftp
     service: FTPListFilesTask
-    filter: true
+  ftp_filter_files:
+    package: lib.plugin.iscan.tasks.ftp
+    service: FTPFilterFilesTask
   ftp_apply_tpl:
     package: lib.plugin.base.tasks.text
     service: Jinja2TemplateTask
     path: lib/plugin/iscan/templates/ftp.tpl
 
 logging:
-  Storage: INFO
+  Storage: DEBUG
   Loader: INFO
diff --git a/data/ftp/logins.txt b/data/ftp/usernames.txt
similarity index 100%
rename from data/ftp/logins.txt
rename to data/ftp/usernames.txt
diff --git a/lib/Service.py b/lib/Service.py
index 8ab2d76..12d5626 100644
--- a/lib/Service.py
+++ b/lib/Service.py
@@ -1,3 +1,8 @@
+"""
+Provides Service class
+"""
+
+
 from time import sleep
 from threading import Thread
 from lib import Logger, Loader, Loadable
@@ -21,8 +26,9 @@ class Service(Loadable):
 
     def _init(self):
         pass
-    
+
     def start(self):
+        """Executes pre_start, starts thread and executes post_start"""
         self._logger.debug('pre_start')
         self._pre_start()
 
@@ -38,6 +44,7 @@ class Service(Loadable):
         self._logger.info('start finished')
 
     def stop(self):
+        """Executes pre_stop, stops thread and executes post_stop"""
         self._logger.debug('pre_stop')
         self._pre_stop()
 
@@ -49,18 +56,18 @@ class Service(Loadable):
         self._post_stop()
         self._logger.info('stop finished')
-    
+
     def __run(self):
         while self._running:
             self._logger.debug('NOOP')
             sleep(1)
-    
+
     def _pre_stop(self):
         pass
 
     def _post_stop(self):
         pass
-    
+
     def _pre_start(self):
         pass
diff --git a/lib/__init__.py b/lib/__init__.py
index 7878017..5da849c 100644
--- a/lib/__init__.py
+++ b/lib/__init__.py
@@ -1,3 +1,3 @@
 import data
 from .util import Logger, Loader, Loadable
-from .Service import Service
\ No newline at end of file
+from .Service import Service
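`Service` drives every long-running component: `start()` runs `_pre_start`, spawns the worker thread, then `_post_start`; `stop()` runs `_pre_stop`, stops the thread, then `_post_stop`. A subclass only supplies the loop, as `GarbageCollector` further down does; a minimal hypothetical example:

```
from time import sleep

from lib import Service

class Heartbeat(Service):  # hypothetical example, not part of this change
    """Logs a line every second until stopped"""
    def __init__(self, id, root):
        super().__init__(self.__run, id, root)

    def __run(self):
        while self._running:
            self._logger.debug('alive')
            sleep(1)
```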
diff --git a/lib/data/Feed.py b/lib/data/Feed.py
index 17f3ebe..5512a40 100644
--- a/lib/data/Feed.py
+++ b/lib/data/Feed.py
@@ -1,20 +1,8 @@
-from queue import LifoQueue
-from time import sleep
-
-import itertools
-
-from lib.net import Remote
 from lib import Service
 
-
 class Feed(Service):
     """Base class for datafeeds"""
     def __init__(self, thread, id, root):
         super().__init__(thread, id, root)
         self._logger.add_field('service', 'Feed')
         self._logger.add_field('vname', self.__class__.__name__)
-
-    def get(self, plugin, count=1, timeout=3):
-        items = self._data.get(count)
-        self._logger.debug("get %s OF %s", len(items), count)
-        return items
diff --git a/lib/data/Source.py b/lib/data/Source.py
index 3672c04..edf5f07 100644
--- a/lib/data/Source.py
+++ b/lib/data/Source.py
@@ -1,20 +1,22 @@
+import copy
+
 from lib import Service
 
 class Source(Service):
     """Base class for datasources"""
     def __init__(self, thread, id, root):
         super().__init__(thread, id, root)
-        self._logger.add_field('service', 'Feed')
+        self._logger.add_field('service', 'Source')
         self._logger.add_field('vname', self.__class__.__name__)
-    
+
         self._item = {
             'source': self._id,
             'steps': {},
             'data': {}
         }
 
-    def next(self, count=10, block=False):
-        if self._running or not self._data.count() == 0:
-            return self._data.get(count=count, block=block)
-        elif self._data.count() == 0:
-            raise Exception("Storage is empty, generator is stopped")
+    def _create(self):
+        return copy.deepcopy(self._item)
+
+    def _prepare(self, item):
+        pass
diff --git a/lib/data/Storage.py b/lib/data/Storage.py
index 3bdaf10..954fee1 100644
--- a/lib/data/Storage.py
+++ b/lib/data/Storage.py
@@ -1,4 +1,4 @@
-from queue import LifoQueue, Empty, Full
+import inspect
 
 from lib import Loadable, Logger
 
@@ -10,16 +10,16 @@ class Storage(Loadable):
         self._size = self.lcnf.get("size", 0)
         self._logger = Logger("Storage")
         self._logger.add_field('vname', self.__class__.__name__)
-    
+
     def size(self):
         return self._size
-    
+
     def count(self):
         return 0
 
     def _get(self, block, filter):
         pass
-    
+
     def _get_many(self, count, block, filter):
         items = []
         for _ in range(count):
@@ -27,7 +27,9 @@ class Storage(Loadable):
         return items
 
     def get(self, count=1, block=True, filter=None):
-        self._logger.debug("get %s, %s", count, block)
+        """Returns items, removing them from storage"""
+        self._logger.debug("get|%s|%s|%s",
+                           count, block, inspect.stack()[1][0].f_locals["self"].__class__.__name__)
         items = []
         if count == 1:
             items.append(self._get(block, filter))
@@ -44,55 +46,37 @@ class Storage(Loadable):
             self._put(i, block)
 
     def put(self, items, block=True):
+        """Puts provided items"""
+        self._logger.debug("put|%s|%s|%s",
+                           len(items), block, inspect.stack()[1][0].f_locals["self"].__class__.__name__)
         if items:
             items = [i for i in items if i is not None]
-            self._logger.debug("put %s, %s", len(items), block)
             if len(items) == 1:
                 self._put(items[0], block)
             elif len(items) > 1:
                 self._put_many(items, block)
 
-    def _find(self):
+    def _find(self, filter):
         pass
 
-    def find(self):
-        self._logger.debug("find")
-        return self._find()
-    
+    def find(self, filter):
+        """Returns items without removing them from storage"""
+        return self._find(filter)
+
     def _update(self, items, update):
         pass
 
     def update(self, items, update=None):
+        """Updates provided items"""
+        self._logger.debug("update|%s|%s",
+                           len(items), inspect.stack()[1][0].f_locals["self"].__class__.__name__)
         if items:
             items = [i for i in items if i is not None]
-            self._logger.debug("update %s, %s", len(items), update)
             self._update(items, update)
 
     def _remove(self, items):
         pass
 
     def remove(self, items):
+        """Removes provided items"""
         self._remove(items)
-
-
-class LiFoStorage(Storage):
-    def __init__(self, id, root):
-        super().__init__(id, root)
-        self._data = LifoQueue()
-
-    def count(self):
-        return self._data.qsize()
-
-    def _get(self, block=False, filter=None):
-        try:
-            return self._data.get(block=block)
-        except Empty:
-            pass
-
-    def _put(self, item, block=True):
-        try:
-            self._data.put(item, block=block)
-        except Full:
-            pass
-
-
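After this refactoring `get()` removes items from storage while `find()` only peeks, and both log the calling service's class name via `inspect`. A hypothetical caller-side sketch (`Loader.by_id` instantiates whatever the `storage` section of the config names):

```
from lib import Loader

pool = Loader.by_id('storage', 'pool')   # MongoStorage in the default config
items = pool.get(count=10, block=False)  # removed from the pool
pool.update(items, update={'$set': {'steps.ftp_scan': None}})
stuck = pool.find(filter={'steps.ftp_scan': None})  # peek only, nothing removed
```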
diff --git a/lib/exeq/Executor.py b/lib/exec/Executor.py
similarity index 97%
rename from lib/exeq/Executor.py
rename to lib/exec/Executor.py
index 5c38c8d..3458fee 100644
--- a/lib/exeq/Executor.py
+++ b/lib/exec/Executor.py
@@ -57,7 +57,7 @@ class RQExecutor(Executor):
                 for i in items:
                     i['steps'][step['task']] = None
                 self._data.update(items)
-                job = q.enqueue("lib.exeq.Task.run", step['task'], items)
+                job = q.enqueue("lib.exec.Task.run", step['task'], items)
                 self._logger.info("%s|%s|%s|%s", job.id, step.get('priority', 'normal'), step['task'], len(items))
                 jobs.append(job.id)
             except Exception as e:
diff --git a/lib/exeq/Task.py b/lib/exec/Task.py
similarity index 81%
rename from lib/exeq/Task.py
rename to lib/exec/Task.py
index 50d2ef7..7137175 100644
--- a/lib/exeq/Task.py
+++ b/lib/exec/Task.py
@@ -16,7 +16,12 @@ class Task(Loadable):
         return result
 
     def _run(self, items):
+        for item in items:
+            item['steps'][self._id] = self._process(item)
         return items
+
+    def _process(self, item):
+        return True
 
 def run(task_name, items):
     result = Loader.by_id('tasks', task_name).run(items)
diff --git a/lib/exeq/__init__.py b/lib/exec/__init__.py
similarity index 100%
rename from lib/exeq/__init__.py
rename to lib/exec/__init__.py
diff --git a/lib/plugin/base/lib/GC.py b/lib/plugin/base/lib/GC.py
new file mode 100644
index 0000000..b70f072
--- /dev/null
+++ b/lib/plugin/base/lib/GC.py
@@ -0,0 +1,25 @@
+"""
+Provides garbage collector
+"""
+
+from time import sleep
+
+from lib import Service
+
+class GarbageCollector(Service):
+    """Simple GarbageCollector, removes items by filter periodically"""
+    def __init__(self, id, root):
+        super().__init__(self.__run, id, root)
+        self._logger.add_field('service', 'GC')
+        self._logger.add_field('vname', self.__class__.__name__)
+
+    def __run(self):
+        while self._running:
+            filter = dict(self.lcnf.get("if", {}))
+            if filter:
+                items = self._data.find(filter=filter)
+                self._logger.info("Removing %s items", items.count())
+                self._data.remove(items)
+            else:
+                self._logger.error("Filter is empty!")
+            sleep(self.lcnf.get('delay', 600))
diff --git a/lib/plugin/base/lib/IP.py b/lib/plugin/base/lib/IP.py
index 2fd5a9c..38d1179 100644
--- a/lib/plugin/base/lib/IP.py
+++ b/lib/plugin/base/lib/IP.py
@@ -1,36 +1,51 @@
-from lib.data import Source
-from lib import Loader
-
-import copy
-
 from time import sleep
-import os
-import netaddr
 import itertools
 import random
 import socket
 import struct
 
+import netaddr
+
+import GeoIP
+
+from lib.data import Source
 
 class IPSource(Source):
+    """Base source for IPs, appends data.ip and data.geo"""
     def __init__(self, thread, id, root):
         super().__init__(thread, id, root)
-
-        self._item.update ({
-            'source': self._id,
+        self._item.update({
             'data': {
-                'ip': None
+                'ip': None,
+                'geo': {
+                    'country': None,
+                    'city': None
+                }
             }
         })
 
+        self.geo_ip = GeoIP.open(self.lcnf.get("geoip_dat", "/usr/share/GeoIP/GeoIPCity.dat"),
+                                 GeoIP.GEOIP_INDEX_CACHE | GeoIP.GEOIP_CHECK_CACHE)
+
+    def _geoip(self, item):
+        geodata = self.geo_ip.record_by_name(item['data']['ip'])
+        if geodata:
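+            # the GeoIP record may lack 'country_code3' or 'city', so guard each field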
+            if 'country_code3' in geodata and geodata['country_code3']:
+                item['data']['geo']['country'] = geodata['country_code3']
+            if 'city' in geodata and geodata['city']:
+                item['data']['geo']['city'] = geodata['city']
+
+    def _prepare(self, item):
+        self._geoip(item)
 
 class IPRange(IPSource):
+    """Provides IPs from ranges specified in file"""
     def __init__(self, id, root):
         super().__init__(self.__run, id, root)
 
         self._iprange = []
         self.load_ip_range()
 
     def load_ip_range(self):
+        """Loads IP ranges from specified path"""
         ip_range = []
 
         with open(self.lcnf.get('path'), "r") as text:
             for line in text:
@@ -41,18 +56,19 @@ class IPRange(IPSource):
                     else:
                         ip_range.append(netaddr.IPNetwork(diap[0]))
                 except Exception as e:
-                    raise Exception("Error while adding range {}: {}".format(line, e))
+                    raise Exception("Error while adding range %s: %s" % (line, e))
 
         self._iprange = ip_range
 
     def __run(self):
-        npos = 0
-        apos = 0
+        npos = 0  # network cursor
+        apos = 0  # address cursor
         while self._running:
             try:
-                for _ in itertools.repeat(None, self.lcnf.get('oneshot', 100)):
+                for _ in itertools.repeat(None, self.lcnf.get('oneshot', 200)):
+                    item = self._create()
                     if self.lcnf.get('ordered', True):
                         # put currently selected element
-                        self._data.put(str(self._iprange[npos][apos]))
+                        item['data']['ip'] = str(self._iprange[npos][apos])
                         # rotate next element through networks and addresses
                         if apos + 1 < self._iprange[npos].size:
                             apos += 1
@@ -66,26 +82,30 @@ class IPRange(IPSource):
                         else:
                             self.stop()
                     else:
-                        self._data.put(str(random.choice(random.choice(self._iprange))))
+                        item['data']['ip'] = str(random.choice(random.choice(self._iprange)))
+                    self._prepare(item)
+                    self._data.put(item)
                 sleep(self.lcnf.get('delay', 0.5))
-            except Exception as e:
-                self._logger.warn(e)
+            except Exception as err:
+                self._logger.warn(err)
 
 class RandomIP(IPSource):
+    """Generates completely pseudorandom IPs"""
    def __init__(self, id, root):
         super().__init__(self.__run, id, root)
-    
+
     def __run(self):
         while self._running:
             try:
                 items = []
                 for _ in itertools.repeat(None, self.lcnf.get("oneshot", 200)):
-                    item = copy.deepcopy(self._item)
+                    item = self._create()
                     randomip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
                     item['data']['ip'] = str(randomip)
+                    self._prepare(item)
                     items.append(item)
                 self._data.put(items)
                 sleep(self.lcnf.get("delay", 0.2))
-            except Exception as e:
-                self._logger.warn(e)
+            except Exception as err:
+                self._logger.warn(err)
diff --git a/lib/plugin/base/lib/Mongo.py b/lib/plugin/base/lib/Mongo.py
index 8c67204..a32f6c1 100644
--- a/lib/plugin/base/lib/Mongo.py
+++ b/lib/plugin/base/lib/Mongo.py
@@ -1,5 +1,6 @@
-from pymongo import MongoClient
 from time import sleep
+from pymongo import MongoClient
+
 from lib.data import Storage
 
 class MongoStorage(Storage):
@@ -15,7 +16,7 @@ class MongoStorage(Storage):
 
     def count(self):
         return self._coll.count()
-    
+
     def _get(self, block, filter):
         if filter is None:
             filter = {}
@@ -26,12 +27,10 @@ class MongoStorage(Storage):
             item = self._coll.find_one(filter=filter)
             sleep(1)
         return item
-    
-    def _get_many(self, count, block, filter, update=None):
+
+    def _get_many(self, count, block, filter):
         if filter is None:
             filter = {}
-
-        self._logger.debug("%s, %s", filter, update)
         items = self._coll.find(filter=filter, limit=count)
         return items
 
@@ -41,7 +40,7 @@ class MongoStorage(Storage):
             self._logger.debug('Collection full: %s of %s', self.count(), self.size())
             sleep(1)
         self._coll.insert_one(item)
-    
+
     def _put_many(self, items, block):
         if block and self.size() is not 0:
             while self.count() + len(items) > self.size():
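+                # wait for the pool to drain below its cap before inserting the batch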
@@ -57,11 +56,10 @@ class MongoStorage(Storage):
     def _update(self, items, update):
         if update:
             filter = {'_id': {'$in': [item['_id'] for item in items]}}
-            self._logger.debug("%s, %s", filter, update)
             self._coll.update_many(filter, update, upsert=True)
         else:
             for item in items:
                 self._coll.replace_one({'_id': item['_id']}, item, upsert=True)
-    
+
     def _remove(self, items):
-        self._coll.delete_many({'_id': {'$in': [item['_id'] for item in items]}})
\ No newline at end of file
+        self._coll.delete_many({'_id': {'$in': [item['_id'] for item in items]}})
diff --git a/lib/plugin/base/lib/Telegram.py b/lib/plugin/base/lib/Telegram.py
index b684899..adf9830 100644
--- a/lib/plugin/base/lib/Telegram.py
+++ b/lib/plugin/base/lib/Telegram.py
@@ -1,8 +1,7 @@
-from lib.data import Feed, Filter
-
-import telebot
 from time import sleep
 
+import telebot
+from lib.data import Feed
 
 class TelegramFeed(Feed):
     """Send data to Telegram chat"""
@@ -28,5 +27,5 @@ class TelegramFeed(Feed):
                         self._logger.debug("@%s: %s", chat_id, i['data']['message'])
                         tbot.send_message("@" + chat_id, i['data']['message'])
                     sleep(delay)
-            except Exception as e:
-                self._logger.warn(e)
+            except Exception as err:
+                self._logger.warn(err)
diff --git a/lib/plugin/base/tasks/text.py b/lib/plugin/base/tasks/text.py
index 08eeffc..d489c40 100644
--- a/lib/plugin/base/tasks/text.py
+++ b/lib/plugin/base/tasks/text.py
@@ -1,4 +1,4 @@
-from lib.exeq import Task
+from lib.exec import Task
 
 from jinja2 import Environment, FileSystemLoader
diff --git a/lib/plugin/iscan/tasks/common.py b/lib/plugin/iscan/tasks/common.py
index 1241664..c257118 100644
--- a/lib/plugin/iscan/tasks/common.py
+++ b/lib/plugin/iscan/tasks/common.py
@@ -3,22 +3,19 @@
 import subprocess
 import json
 
 from jsoncomment import JsonComment
-from lib import Logger
-import GeoIP
 
-from Config import cnf
-from lib.exeq import Task
+from lib.exec import Task
 
-class MasScan:
-    def __init__(self, bin_path='/usr/bin/masscan', opts="-sS -Pn -n --wait 0 --max-rate 5000"):
-        self.bin_path = bin_path
-        self.opts_list = opts.split(' ')
-
-    def scan(self, ip_list, port_list):
+class MasScanTask(Task):
+    """Provides data.ports for each item scanned with masscan"""
+    def scan(self, ip_list, port_list, bin_path, opts="-sS -Pn -n --wait 0 --max-rate 5000"):
+        """Executes masscan on given IPs/ports"""
+        opts_list = opts.split(' ')
         port_list = ','.join([str(p) for p in port_list])
         ip_list = ','.join([str(ip) for ip in ip_list])
-        process_list = [self.bin_path]
-        process_list.extend(self.opts_list)
+        process_list = [bin_path]
+        process_list.extend(opts_list)
         process_list.extend(['-oJ', '-', '-p'])
         process_list.append(port_list)
         process_list.append(ip_list)
@@ -29,45 +26,27 @@ class MasScan:
         result = parser.loads(out)
         return result
 
-class MasScanTask(Task):
-    def __init__(self, id, root):
-        super().__init__(id, root)
-
     def _run(self, items):
-        result = []
-
-        gi = GeoIP.open(cnf.get("geoip_dat", "/usr/share/GeoIP/GeoIPCity.dat"), GeoIP.GEOIP_INDEX_CACHE | GeoIP.GEOIP_CHECK_CACHE)
         ip_list = [i['data']['ip'] for i in items]
         port_list = self.lcnf.get("ports")
-
-        self._logger.debug("Starting scan, ip_list=%s, port_list=%s", ip_list, port_list)
-
-        ms = MasScan(bin_path=self.lcnf.get('bin_path', "/usr/bin/masscan"))
-        hosts = ms.scan(ip_list=ip_list, port_list=port_list)
-
+
+        self._logger.debug("Starting scan, port_list=%s", port_list)
+
+        hosts = self.scan(ip_list=ip_list,
+                          port_list=port_list,
+                          bin_path=self.lcnf.get('bin_path', "/usr/bin/masscan"))
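+        # masscan -oJ yields a list of {'ip': ..., 'ports': [{'port': ...}, ...]} records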
"/usr/bin/masscan")) + self._logger.debug(hosts) hosts = {h['ip']: h for h in hosts} for item in items: - data = {} - result = False if hosts.get(item['data']['ip']): - data = { - 'ports': [p['port'] for p in hosts[item['data']['ip']]['ports']], - 'geo': { - 'country': None, - 'city': None - } - } - result = True - geodata = gi.record_by_name(item['data']['ip']) - if geodata: - if 'country_code3' in geodata and geodata['country_code3']: - data['geo']['country'] = geodata['country_code3'] - if 'city' in geodata and geodata['city']: - data['geo']['city'] = geodata['city'] - self._logger.debug(data) - item['data'].update(data) - item['steps'][self._id] = result - if result: - self._logger.debug("Found %s with open %s", item['data']['ip'], item['data']['ports']) + ports = [p['port'] for p in hosts[item['data']['ip']]['ports']] + if 'ports' in item['data']: + item['data']['ports'].extend(ports) + else: + item['data']['ports'] = ports + item['steps'][self._id] = True + self._logger.debug("Found %s with open ports %s", item['data']['ip'], ports) + else: + item['steps'][self._id] = False return items diff --git a/lib/plugin/iscan/tasks/ftp.py b/lib/plugin/iscan/tasks/ftp.py index 05e19da..e0d84cc 100644 --- a/lib/plugin/iscan/tasks/ftp.py +++ b/lib/plugin/iscan/tasks/ftp.py @@ -1,125 +1,91 @@ -# pylint: disable=E1101 +""" +Basic tasks for FTP services +""" import ftplib -import netaddr -from lib import Logger -from Config import cnf - -from lib.exeq import Task - -class FTPConnectTask(Task): - def __init__(self, id, root): - super().__init__(id, root) +from lib.exec import Task +class FTPConnectTask(Task): # pylint: disable=too-few-public-methods + """Tries to connect FTP service with various credentials""" def _process(self, item): - data = {} - result = False - - self.ftp = ftplib.FTP(host=item['data']['ip'], timeout=self.lcnf.get('timeout', 30)) + ftp = ftplib.FTP(host=item['data']['ip'], timeout=self.lcnf.get('timeout', 30)) try: self._logger.debug('Trying anonymous login') - self.ftp.login() - except ftplib.error_perm: - pass + ftp.login() + except ftplib.error_perm as err: + self._logger.debug('Failed (%s)', err) else: - self._logger.debug('Succeeded with anonymous') - data['username'] = 'anonymous' - data['password'] = '' - result = True - - self._logger.debug(data) - item['data'].update(data) - item['steps'][self._id] = result - return + self._logger.info('Succeeded with anonymous') + item['data']['username'] = 'anonymous' + item['data']['password'] = '' + return True if self.lcnf.get('bruteforce', False): - usernames = [] - passwords = [] + self._logger.debug('Bruteforce enabled, loading usernames and passwords') + usernames = [line.rstrip() for line in open(self.lcnf.get('usernames'), 'r')] + passwords = [line.rstrip() for line in open(self.lcnf.get('passwords'), 'r')] - with open(self.lcnf.get('logins'), 'r') as lfh: - for username in lfh: - usernames.append(username.rstrip()) - with open(self.lcnf.get('passwords'), 'r') as pfh: - for password in pfh: - passwords.append(password.rstrip()) for username in usernames: for password in passwords: + self._logger.debug('Checking %s', username + ':' + password) try: - self.ftp.voidcmd('NOOP') - except IOError: - self.ftp = ftplib.FTP(host=item['data']['ip'], timeout=self.lcnf.get('timeout', 30)) - self._logger.debug('Trying %s' % (username + ':' + password)) + self._logger.debug('Sending NOOP') + ftp.voidcmd('NOOP') + except IOError as err: + self._logger.debug('IOError occured (%s), attempting to open new connection', err) + ftp = 
                     try:
-                        self.ftp.login(username, password)
-                    except ftplib.error_perm:
+                        self._logger.debug('Trying to log in')
+                        ftp.login(username, password)
+                    except ftplib.error_perm as err:
+                        self._logger.debug('Failed (%s)', err)
                         continue
-                    except:
-                        raise
                     else:
-                        self._logger.debug('Succeeded with %s' %(username + ':' + password))
-                        data['username'] = username
-                        data['password'] = password
-                        result = True
-
-
-        self._logger.debug(data)
-        item['data'].update(data)
-        item['steps'][self._id] = result
-        return
-        self._logger.debug(data)
-        item['data'].update(data)
-        item['steps'][self._id] = result
-
-    def _run(self, items):
-        for item in items:
-            self._process(item)
-        return items
-
-class FTPListFilesTask(Task):
-    def __init__(self, id, root):
-        super().__init__(id, root)
+                        self._logger.info('Succeeded with %s', username + ':' + password)
+                        item['data']['username'] = username
+                        item['data']['password'] = password
+                        return True
+        self._logger.info('Could not connect')
+        return False
 
+class FTPListFilesTask(Task):  # pylint: disable=too-few-public-methods
+    """Executes NLST to list files on FTP"""
     def _process(self, item):
-        item['steps'][self._id] = False
-        self.ftp = ftplib.FTP(host=item['data']['ip'],
-                              user=item['data']['username'],
-                              passwd=item['data']['password'])
-        filelist = self.ftp.nlst()
+        ftp = ftplib.FTP(host=item['data']['ip'],
+                         user=item['data']['username'],
+                         passwd=item['data']['password'])
+        filelist = ftp.nlst()
         try:
-            self.ftp.quit()
-        except:
-            # that's weird, but we don't care
-            pass
-
-        try:
-            if len(filelist) == 0 or filelist[0] == "total 0":
-                item['data']['filter'] = "Empty server"
-        except IndexError:
+            ftp.quit()
+        except ftplib.Error:
             pass
 
         item['data']['files'] = []
-        for fileName in filelist:
-            item['data']['files'].append(fileName)
-        item['steps'][self._id] = True
+        for filename in filelist:
+            item['data']['files'].append(filename)
+        return True
 
+class FTPFilterFilesTask(Task):  # pylint: disable=too-few-public-methods
+    """Sets data.filter if FTP contains only junk"""
+    def _process(self, item):
+        junk_list = ['incoming', '..', '.ftpquota', '.', 'pub']
+        files = item['data']['files']
 
-    def _filter(self, item):
         item['data']['filter'] = False
-        if len(item['data']['files']) == 0:
-            item['data']['filter'] = "Empty"
-        elif len(item['data']['files']) < 6:
-            match = 0
-            for f in 'incoming', '..', '.ftpquota', '.', 'pub':
-                if f in item['data']['files']:
-                    match += 1
-            if match == len(item['data']['files']):
-                item['data']['filter'] = "EmptyWithSystemDirs"
-        if item['data']['filter'] == False:
-            item['steps'][self._id] = True
-
-    def _run(self, items):
-        for item in items:
-            self._process(item)
-            if self.lcnf.get('filter', False):
-                self._filter(item)
-        return items
+        if not files or files[0] == "total 0":
+            item['data']['filter'] = "Empty"
+
+        if 0 < len(files) <= len(junk_list):  # pylint: disable=C1801
+            match_count = 0
+            for filename in junk_list:
+                if filename in files:
+                    match_count += 1
+            if match_count == len(files):
+                item['data']['filter'] = "EmptyWithBloatDirs"
+
+        return True
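All FTP tasks above now follow the `_process` contract from `lib/exec/Task.py`: `_run` calls `_process(item)` for each item and stores the boolean result in `item['steps'][task_id]`, which is exactly what the `if:` clauses in pipeline configs match against. A hypothetical custom task reduces to:

```
from lib.exec import Task

class BannerGrabTask(Task):  # hypothetical example, not part of this change
    """Marks items whose banner is non-empty"""
    def _process(self, item):
        banner = item['data'].get('banner', '')
        return len(banner) > 0  # recorded as item['steps'][self._id]
```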
diff --git a/lib/plugin/iscan/tasks/gopher.py b/lib/plugin/iscan/tasks/gopher.py
index 3f17873..f1a575a 100644
--- a/lib/plugin/iscan/tasks/gopher.py
+++ b/lib/plugin/iscan/tasks/gopher.py
@@ -1,19 +1,15 @@
+"""
+Basic tasks for Gopher services
+"""
+
 import socket
 
-from Config import cnf
+from lib.exec import Task
 
-from lib.exeq import Task
-
-class GopherFindTask(Task):
-    def __init__(self, id, root):
-        super().__init__(id, root)
-
-    def _run(self, items):
-        for item in items:
-            self._process(item)
-        return items
-
-    def _recv(self, sck):
+class GopherFindTask(Task):  # pylint: disable=too-few-public-methods
+    """Tries to connect to a Gopher service"""
+    @staticmethod
+    def _recv(sck):
         total_data = []
         while True:
             data = sck.recv(2048)
@@ -23,7 +19,6 @@ class GopherFindTask(Task):
         return ''.join(total_data)
 
     def _process(self, item):
-        item['steps'][self._id] = False
         sock = socket.socket()
         sock.settimeout(self.lcnf.get('timeout', 20))
         sock.connect((item['data']['ip'], int(70)))
diff --git a/lib/plugin/iscan/tasks/http.py b/lib/plugin/iscan/tasks/http.py
new file mode 100644
index 0000000..7792cb6
--- /dev/null
+++ b/lib/plugin/iscan/tasks/http.py
@@ -0,0 +1,44 @@
+"""
+Basic tasks for HTTP services
+"""
+
+import bs4
+import requests
+import urllib3
+
+from lib.exec import Task
+
+class HTTPFindTask(Task):  # pylint: disable=too-few-public-methods
+    """Fetches the index page, stores response metadata and the page title"""
+    def _process(self, item):
+        urllib3.disable_warnings()
+        port = item['data'].get('port', 80)  # fall back to plain HTTP when no port was recorded
+        response = requests.get(url='http://%s:%s/' % (item['data']['ip'], port),
+                                timeout=self.lcnf.get('timeout', 30),
+                                verify=False)
+
+        if response.status_code in [400, 401, 403, 500]:
+            return False  # error page, mark the step as failed
+
+        item['data']['response'] = {
+            'code': response.status_code,
+            'text': response.text,
+            'content': response.content,
+            'encoding': response.encoding,
+            'headers': response.headers
+        }
+
+        encoding = response.encoding if 'charset' in response.headers.get('content-type', '').lower() else None
+        soup = bs4.BeautifulSoup(response.content, "html.parser", from_encoding=encoding)
+        if soup.original_encoding != 'utf-8':
+            meta = soup.select_one('meta[charset], meta[http-equiv="Content-Type"]')
+            if meta:
+                if 'charset' in meta.attrs:
+                    meta['charset'] = 'utf-8'
+                else:
+                    meta['content'] = 'text/html; charset=utf-8'
+            item['data']['response']['text'] = soup.prettify()  # encodes to UTF-8 by default
+
+        title = soup.select_one('title')
+        item['data']['title'] = title.string if title and title.string else ""
+        return True
diff --git a/lib/plugin/iscan/tasks/vnc.py b/lib/plugin/iscan/tasks/vnc.py
new file mode 100644
index 0000000..0769bc8
--- /dev/null
+++ b/lib/plugin/iscan/tasks/vnc.py
@@ -0,0 +1,9 @@
+import tempfile
+
+from lib.exec import Task
+
+class VNCFindTask(Task):  # pylint: disable=too-few-public-methods
+    """Tries to connect to a VNC service (stub)"""
+    def _process(self, item):
+        fd, temp_path = tempfile.mkstemp()
+        print(fd, temp_path)
\ No newline at end of file
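`Jinja2TemplateTask` (lib/plugin/base/tasks/text.py, internals not shown in this diff) renders the configured `path` against the item. A rough sketch of that flow, assuming the context variable is named `data` as the template below expects:

```
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('.'))
tpl = env.get_template('lib/plugin/iscan/templates/http.tpl')
item['data']['message'] = tpl.render(data=item['data'])  # tg_feed sends data.message
```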
diff --git a/lib/plugin/iscan/templates/http.tpl b/lib/plugin/iscan/templates/http.tpl
new file mode 100644
index 0000000..4b1cc14
--- /dev/null
+++ b/lib/plugin/iscan/templates/http.tpl
@@ -0,0 +1,7 @@
+#code{{ data['response']['code'] }}
+{% if data['response']['headers'].get('Server') %} Server: #{{ data['response']['headers']['Server'] }}{% endif %}
+{% if data['title'] %}Title: {{ data['title'] }}{% endif %}
+Geo: {{ data['geo']['country'] }}/{{ data['geo']['city'] }}
+http://{{ data['ip'] }}:{{ data['port'] }}/
+{# original intent was "#http_" + str(int(netaddr.IPAddress(ip))), but plain Jinja2 has no netaddr; underscores keep the tag usable #}
+#http_{{ data['ip'] | replace('.', '_') }}
diff --git a/lib/util.py b/lib/util.py
index 0ff606f..6d19863 100644
--- a/lib/util.py
+++ b/lib/util.py
@@ -1,10 +1,10 @@
 import logging
-from Config import cnf as config
-
 import importlib
 import sys, os
 
+from Config import cnf as config
+
 class Logger(logging.Logger):
     """Logger. standard logging logger with some shitcode on the top"""
     def __init__(self, name):
@@ -75,7 +75,7 @@ class Loadable:
     """parent for loadable from configuration"""
     def __init__(self, id, root=config):
         self.cnf = config # global config
-        self.lcnf = root[id] # local config
+        self.lcnf = root[id]  # local config
 
         self._id = id
 
@@ -93,10 +93,11 @@ class Loader:
         self._logger.debug('load %s', name)
         result = importlib.import_module(self._path)
         return getattr(result, name)
-    
+
     @classmethod
-    def by_id(cls, section, id):
+    def by_id(cls, section, id) -> Loadable:
+        """Returns instantiated object of class provided in configuration"""
         # prepares Loader for certain package
-        l = cls(config.get(section).get(id).get('package'))
+        loader = cls(config.get(section).get(id).get('package'))
         # loads class from this package and returns instantiated object of this class
-        return l.get(config.get(section).get(id).get('service'))(id=id, root=config.get(section))
+        return loader.get(config.get(section).get(id).get('service'))(id=id, root=config.get(section))
diff --git a/medved.py b/medved.py
index 0894c27..3a59e02 100755
--- a/medved.py
+++ b/medved.py
@@ -1,5 +1,4 @@
 #!/usr/bin/python3
-# -*- coding: utf-8 -*-
 
 import time
 
@@ -20,22 +19,24 @@ class Core:
             self._services.append(service)
 
     def start(self):
+        """Starts all loaded services"""
         self.logger.info("Starting")
         for service in self._services:
             service.start()
         self.logger.info("Started")
 
     def stop(self):
+        """Stops all loaded services"""
         self.logger.info("Stopping Core")
         for service in self._services:
             service.stop()
         self.logger.info("Stopped")
 
 if __name__ == '__main__':
-    core = Core()
-    core.start()
+    CORE = Core()
+    CORE.start()
     try:
         while True:
             time.sleep(1)
     except KeyboardInterrupt:
-        core.stop()
+        CORE.stop()