FTP pipeline works

This commit is contained in:
Good Evening 2018-08-02 20:42:09 +03:00
parent 8015a0da1d
commit d0ff66993d
25 changed files with 281 additions and 260 deletions

View File

@ -3,6 +3,13 @@
import yaml
cnf = {}
with open('data/config.yaml') as config_file:
cnf = yaml.load(config_file)
class Config(object):
def __init__(self):
with open('data/config.yaml') as config_file:
self.config = yaml.load(config_file)
def get(self, key, defval=None):
return self.config.get(key, defval)
cnf = Config()
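A minimal usage sketch of the Config wrapper introduced above; the key names just mirror data/config.yaml from the next diff:

# usage sketch for the Config wrapper above; key names mirror data/config.yaml
from Config import cnf

pipelines = cnf.get('pipelines', {})   # dict of pipeline definitions
log_levels = cnf.get('logging', {})    # falls back to the default when the key is missing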

View File

@ -5,20 +5,32 @@ core:
services:
- random_ip
- rq_executor
- tg_feed
pipelines:
- ftp
- gopher
services:
random_ip:
package: lib.plugin.base.lib.IP
service: RandomIP
storage: ip_source
rq_executor:
package: lib.exeq.Executor
service: RQExecutor
storage: pool
redis:
host: "127.0.0.1"
random_ip:
package: lib.plugin.base.lib.IP
service: RandomIP
storage: ftp_source
tg_feed:
package: lib.plugin.base.lib.Telegram
service: TelegramFeed
storage: pool
token: "358947514:"
chats:
- id: aiWeipeighah7vufoHa0ieToipooYe
if:
steps.ftp_apply_tpl: true
data.filter: false
storage:
pool:
@ -27,21 +39,21 @@ storage:
size: 0
db: "medved"
coll: 'pool'
ftp_source:
ip_source:
package: lib.plugin.base.lib.Mongo
service: MongoStorage
size: 500
size: 800
db: "medved"
coll: 'ftp_source'
coll: 'ip_source'
pipelines:
ftp:
source: ftp_source
source: ip_source
steps:
- task: ftp_scan
priority: low
multiple: 100
parallel: 100
- task: ftp_connect
priority: normal
if:
@ -50,8 +62,68 @@ pipelines:
priority: high
if:
steps.ftp_connect: true
- task: ftp_apply_tpl
priority: high
if:
steps.ftp_list_files: true
gopher:
source: ip_source
steps:
- task: gopher_scan
priority: normal
parallel: 100
- task: gopher_find
priority: high
if:
steps.gopher_scan: true
- task: gopher_apply_tpl
priority: high
if:
steps.gopher_find: true
http:
source: ip_source
steps:
- task: http_scan
priority: low
parallel: 25
tasks:
gopher_scan:
package: lib.plugin.iscan.tasks.common
service: MasScanTask
ports:
- 70
gopher_find:
package: lib.plugin.iscan.tasks.gopher
service: GopherFindTask
gopher_apply_tpl:
package: lib.plugin.base.tasks.text
service: Jinja2TemplateTask
path: lib/plugin/iscan/templates/gopher.tpl
vnc_scan:
package: lib.plugin.iscan.tasks.common
service: MasScanTask
ports:
- 5900
- 5901
vnc_connect:
package: lib.plugin.iscan.tasks.vnc
service: VNCConnectTask
ports:
- 5900
- 5901
http_scan:
package: lib.plugin.iscan.tasks.common
service: MasScanTask
ports:
- 80
- 81
- 8080
- 8081
ftp_scan:
package: lib.plugin.iscan.tasks.common
service: MasScanTask
@ -67,6 +139,12 @@ tasks:
ftp_list_files:
package: lib.plugin.iscan.tasks.ftp
service: FTPListFilesTask
filter: true
ftp_apply_tpl:
package: lib.plugin.base.tasks.text
service: Jinja2TemplateTask
path: lib/plugin/iscan/templates/ftp.tpl
logging:
Storage: INFO
Storage: DEBUG
Loader: DEBUG
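Note: the "if:" blocks above are consumed by the executor, which folds them into a MongoDB filter (see the RQExecutor diff below). A condensed sketch of that mapping, using the ftp_list_files step as an example:

# sketch: how a step's "if" block becomes a Mongo filter (mirrors RQExecutor below)
step = {'task': 'ftp_list_files', 'if': {'steps.ftp_connect': True}}
filter = {"steps.%s" % step['task']: {'$exists': False}}   # item not yet processed by this task
filter.update(step.get('if', {}))                          # plus the configured conditions
# -> {'steps.ftp_list_files': {'$exists': False}, 'steps.ftp_connect': True}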

lib/data/Item.py Normal file
View File

@ -0,0 +1,17 @@
class Item(object):
"""Base class for item"""
def __init__(self, source):
self._item = {
'source': source,
'steps': {},
'data': {}
}
def set(self, key, value):
    # walk the dotted key, creating nested dicts as needed, then set the leaf
    elem = self._item['data']
    parts = key.split(".")
    for x in parts[:-1]:
        elem = elem.setdefault(x, {})
    elem[parts[-1]] = value
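A short, hypothetical usage example of Item.set with a dotted key:

# hypothetical usage of Item.set
item = Item('ip_source')
item.set('geo.country', 'NL')
# item._item['data'] is now {'geo': {'country': 'NL'}}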

View File

@ -59,6 +59,21 @@ class Storage(Loadable):
self._logger.debug("find")
return self._find()
def _update(self, items, update):
pass
def update(self, items, update=None):
if items:
items = [i for i in items if i is not None]
self._logger.debug("update %s, %s", len(items), update)
self._update(items, update)
def _remove(self, items):
pass
def remove(self, items):
self._remove(items)
class LiFoStorage(Storage):
def __init__(self, id, root):

View File

@ -1,7 +1,6 @@
from .Storage import Storage
from .Source import Source
from .Feed import Feed
from .Item import Item
from .Manager import DataManager
__all__ = ['Storage', 'Source', 'DataManager']
__all__ = ['Storage', 'Source', 'Feed', 'Item']

View File

@ -1,7 +1,5 @@
from lib import Service, Loader, Loadable
from lib.tasks.worker import worker
from time import sleep
from rq import Queue
@ -17,43 +15,48 @@ class Executor(Service):
class RQExecutor(Executor):
"""rq (redis queue) executor - lightweight; workers placed on different nodes"""
"""rq (redis queue) executor"""
def __init__(self, id, root):
super().__init__(self.__run, id, root)
def __run(self):
redis_conn = Redis(host=self.lcnf.get('redis').get('host'))
jobs = []
while self._running:
sleep(self.lcnf.get('delay', 0.07))
sleep(self.lcnf.get('delay', 0.2))
try:
for job in [j for j in jobs if j.result is not None]:
self._logger.debug('Publishing finished job result')
self._data.put(job.result)
job.cleanup()
jobs.remove(job)
for pn, pipeline in self.cnf.get("pipelines").items():
self._logger.debug("pipeline: %s", pn)
if pn not in self.cnf.get('core').get('pipelines'):
continue
source = Loader.by_id('storage', pipeline.get('source'))
for step in pipeline['steps']:
self._logger.debug("task name: %s", step['task'])
q = Queue(step.get('priority', 'normal'), connection=redis_conn)
if q.count + 1 > self.lcnf.get('qsize', 100):
for job_id in jobs:
job = q.fetch_job(job_id)
if job:
if job.result is not None:
self._logger.debug("%s|%s", job_id, job._status)
self._data.update(job.result)
job.cleanup()
jobs.remove(job_id)
if len(jobs) + 1 > self.lcnf.get('qsize', 200):
continue
filter = {"steps.%s" % step['task']: {'$exists': False}}
filter.update({key: value for key, value in step.get("if", {}).items()})
count = step.get('multiple') if step.get('multiple', False) else 1
count = step.get('parallel', 1)
# get as much as possible from own pool
items = self._data.get(block=False, count=count, filter=filter)
# obtain everything else from source
if len(items) < count:
items.extend(source.get(block=False, count=(count - len(items)), filter=filter))
new_items = source.get(block=False, count=(count - len(items)), filter=filter)
items.extend(new_items)
source.remove(new_items)
if items:
for i in items:
i['steps'][step['task']] = None
self._logger.debug("enqueueing task '%s' (count: %s)", step['task'], len(items))
self._data.update(items, {'$set': {'steps.%s' % step['task']: None}})
job = q.enqueue("lib.exeq.Task.run", step['task'], items)
jobs.append(job)
self._logger.info("%s|%s|%s|%s", job.id, step.get('priority', 'normal'), step['task'], len(items))
jobs.append(job.id)
except Exception as e:
self._logger.error("Error in executor main thread: %s", e)

View File

@ -1,9 +0,0 @@
import importlib
class Manager:
def __init__(self):
pass
@staticmethod
def get_plugin(name):
return importlib.import_module("lib.plugin.plugins." + name)

View File

@ -1,6 +0,0 @@
THIS PART IS LIKE UNDER CONSTRUCTION
get out
for now
come back later

View File

@ -1 +0,0 @@
from .Manager import Manager

View File

@ -80,12 +80,12 @@ class RandomIP(IPSource):
while self._running:
try:
items = []
for _ in itertools.repeat(None, self.lcnf.get("oneshot", 100)):
for _ in itertools.repeat(None, self.lcnf.get("oneshot", 200)):
item = copy.deepcopy(self._item)
randomip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
item['data']['ip'] = str(randomip)
items.append(item)
self._data.put(items)
sleep(self.lcnf.get("delay", 0.5))
sleep(self.lcnf.get("delay", 0.2))
except Exception as e:
self._logger.warn(e)
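The random-IP line above also works standalone:

# standalone version of the random IP generation above
import random, socket, struct
randomip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
print(randomip)   # e.g. '203.0.113.7'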

View File

@ -19,25 +19,21 @@ class MongoStorage(Storage):
def _get(self, block, filter):
if filter is None:
filter = {}
else:
self._logger.debug(filter)
item = self._coll.find_one_and_delete(filter=filter)
item = self._coll.find_one(filter=filter)
if block:
while not item:
item = self._coll.find_one_and_delete(filter=filter)
item = self._coll.find_one(filter=filter)
sleep(1)
return item
def _get_many(self, count, block, filter):
def _get_many(self, count, block, filter, update=None):
if filter is None:
filter = {}
else:
self._logger.debug(filter)
self._logger.debug("%s, %s", filter, update)
items = self._coll.find(filter=filter, limit=count)
result = []
for i in items:
self._coll.delete_one({'_id': i['_id']})
result.append(i)
return result
return items
def _put(self, item, block):
if block and self.size() != 0:
@ -57,3 +53,15 @@ class MongoStorage(Storage):
if filter is None:
filter = {}
return self._coll.find(filter)
def _update(self, items, update):
if update:
filter = {'_id': {'$in': [item['_id'] for item in items]}}
self._logger.debug("%s, %s", filter, update)
self._coll.update_many(filter, update, upsert=True)
else:
for item in items:
self._coll.replace_one({'_id': item['_id']}, item, upsert=True)
def _remove(self, items):
self._coll.delete_many({'_id': {'$in': [item['_id'] for item in items]}})
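A hypothetical usage sketch of the new update/remove paths, assuming the storage config from the diff above is loaded:

# hypothetical usage of the update/remove paths above
from lib import Loader
pool = Loader.by_id('storage', 'pool')                     # the MongoStorage from the config diff
items = pool.get(block=False, count=10, filter={})
pool.update(items, {'$set': {'steps.ftp_scan': None}})     # one update_many over all item _ids
pool.update(items)                                         # no update document: replace each item wholesale
pool.remove(items)                                         # delete_many by _id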

View File

@ -1,5 +1,4 @@
from lib.data import Feed, Filter
from lib.plugin import Manager
import telebot
from time import sleep
@ -16,24 +15,18 @@ class TelegramFeed(Feed):
while self._running:
try:
for chat in self.lcnf.get("chats"):
chat_id = chat['id']
sleep(delay)
continue
# plugins -> pipelines
# it is in progress
#TODO
msg = Manager.get_plugin(plugin).Plugin.TelegramMessage(host)
msg.run()
if msg.data['txt']:
chat_id = chat.get('id')
self._logger.debug(chat_id)
filter = {"feed.%s" % self._id: {'$exists': False}}
filter.update({key: value for key, value in chat.get("if", {}).items()})
items = self._data.get(block=False, count=10, filter=filter)
self._logger.debug(items)
if items:
self._data.update(items, {'$set': {'feed.%s' % self._id: True}})
tbot = telebot.TeleBot(self.lcnf.get('token'), threaded=False)
if msg.data['img']:
self._logger.debug("Send IP with img %s:%s to %s" % (host['ip'], host['port'], chat_id))
tbot.send_photo("@" + chat_id, msg.data['img'], caption=msg.data['txt'])
else:
self._logger.debug("Send IP %s:%s to %s" % (host['ip'], host['port'], chat_id))
tbot.send_message("@" + chat_id, msg.data['txt'])
else:
self._logger.error('Empty text!')
for i in items:
self._logger.debug("@%s: %s", chat_id, i['data']['message'])
tbot.send_message("@" + chat_id, i['data']['message'])
sleep(delay)
except Exception as e:
self._logger.warn(e)
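A minimal standalone sketch of the send path used above (pyTelegramBotAPI); the token, channel and message text are placeholders:

# minimal sketch of the Telegram send path above; token, channel and message are placeholders
import telebot
tbot = telebot.TeleBot('TOKEN', threaded=False)
message = 'ftp://anonymous:guest@192.0.2.1/\n + pub\nGeo: NL/Amsterdam'   # what ftp_apply_tpl produces
tbot.send_message('@' + 'aiWeipeighah7vufoHa0ieToipooYe', message)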

View File

@ -0,0 +1,17 @@
from lib.exeq import Task
from jinja2 import Environment, FileSystemLoader
class Jinja2TemplateTask(Task):
def __init__(self, id, root):
super().__init__(id, root)
def _process(self, item):
template = Environment(loader=FileSystemLoader('.')).get_template(self.lcnf.get('path'))
item['data']['message'] = template.render(data = item['data'])
item['steps'][self._id] = True
def _run(self, items):
for item in items:
self._process(item)
return items
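A standalone sketch of the rendering step, using the ftp.tpl added later in this commit and made-up item data:

# standalone sketch of the Jinja2 rendering step; the item data below is made up
from jinja2 import Environment, FileSystemLoader

template = Environment(loader=FileSystemLoader('.')).get_template('lib/plugin/iscan/templates/ftp.tpl')
message = template.render(data={'username': 'anonymous', 'password': 'guest', 'ip': '192.0.2.1',
                                'files': ['pub', 'incoming'],
                                'geo': {'country': 'NL', 'city': 'Amsterdam'}})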

View File

@ -1,62 +0,0 @@
plugin:
name: iscan
version: 0.1
pipelines:
FTP:
actions:
- scan
- connect
- metadata
- filetree
df:
Telegram:
action: metadata
chats:
- xai7poozengee2Aen3poMookohthaZ
- aiWeipeighah7vufoHa0ieToipooYe
HTTP:
actions:
- scan
- connect
- metadata
- screenshot
df:
Telegram:
action: screenshot
chats:
- xai7poozengee2Aen3poMookohthaZ
- gohquooFee3duaNaeNuthushoh8di2
Gopher:
actions:
- connect
- collect
df:
Telegram:
action: collect
chats:
- xai7poozengee2Aen3poMookohthaZ
- ohl7AeGah5uo8cho4nae9Eemaeyae3
df:
Telegram:
token: TOKEN
ds:
IPRange:
file: file
Remote:
docker:
services:
selenium:
image: selenium/standalone-chrome:latest
volumes:
- /dev/shm:/dev/shm
environment:
- JAVA_OPTS=-Dselenium.LOGGER.level=WARNING
worker_env:
- SELENIUM_IP=selenium
required_by:
- HTTP

View File

@ -70,6 +70,4 @@ class MasScanTask(Task):
item['steps'][self._id] = result
if result:
self._logger.debug("Found %s with open %s", item['data']['ip'], item['data']['ports'])
self._logger.debug(items)
return items

View File

@ -81,6 +81,7 @@ class FTPListFilesTask(Task):
super().__init__(id, root)
def _process(self, item):
item['steps'][self._id] = False
self.ftp = ftplib.FTP(host=item['data']['ip'],
user=item['data']['username'],
passwd=item['data']['password'])
@ -100,6 +101,7 @@ class FTPListFilesTask(Task):
item['data']['files'] = []
for fileName in filelist:
item['data']['files'].append(fileName)
item['steps'][self._id] = True
def _filter(self, item):
item['data']['filter'] = False
@ -112,6 +114,8 @@ class FTPListFilesTask(Task):
match += 1
if match == len(item['data']['files']):
item['data']['filter'] = "EmptyWithSystemDirs"
if item['data']['filter'] == False:
item['steps'][self._id] = True
def _run(self, items):
for item in items:

View File

@ -0,0 +1,50 @@
import socket
from Config import cnf
from lib.exeq import Task
class GopherFindTask(Task):
def __init__(self, id, root):
super().__init__(id, root)
def _run(self, items):
for item in items:
self._process(item)
return items
def _recv(self, sck):
total_data = []
while True:
data = sck.recv(2048)
if not data:
break
total_data.append(data.decode('utf-8'))
return ''.join(total_data)
def _process(self, item):
sock = socket.socket()
sock.settimeout(self.lcnf.get('timeout', 20))
sock.connect((item['data']['ip'], int(70)))
sock.sendall(b'\n\n')
response = self._recv(sock)
sock.close()
self._logger.debug("Parsing result")
item['data']['files'] = []
item['data']['filter'] = False
for s in [s for s in response.split("\r\n") if s]:
node = {}
fields = s.split("\t")
self._logger.debug(fields)
node['type'] = fields[0][0]
if len(fields) == 4:
node['name'] = fields[0][1:]
node['path'] = fields[1]
node['serv'] = f"{fields[2]}:{fields[3]}"
item['data']['files'].append(node)
if not item['data']['files']:
item['steps'][self._id] = False
raise Exception("Empty server (not Gopher?)")

View File

@ -0,0 +1,6 @@
ftp://{{data['username']}}:{{data['password']}}@{{data['ip']}}
{% for filename in data['files'] -%}
+ {{ filename }}
{% endfor -%}
Geo: {{data['geo']['country']}}/{{data['geo']['city']}}

View File

@ -0,0 +1,11 @@
gopher://{{data['ip']}}/
Dirs:
{% for dir in data['files'] | selectattr('type', 'equalto', '1') -%}
+ {{dir['path']}}
{% endfor -%}
Other nodes:
{% for file in data['files'] | rejectattr('type', 'equalto', '1') | rejectattr('type', 'equalto', 'i') -%}
+ {{file['path']}}
{{file['name']}}
{% endfor -%}
Geo: {{data['geo']['country']}}/{{data['geo']['city']}}

View File

@ -1,18 +0,0 @@
import ftplib
import netaddr
from Config import cnf
class Plugin():
class TelegramMessage():
def _init(self):
self._name = "FTP"
def _generate(self):
self.data['txt'] = "ftp://%s:%s@%s/\n" % \
(self._host['data']['username'], self._host['data']['password'], self._host['ip'])
for filename in self._host['data']['files']:
self.data['txt'] += " + %s\n" % filename
self.data['txt'] += "Geo: %s/%s\n" % (self._host['data']['geo']['country'], self._host['data']['geo']['city'])
self.data['txt'] += "#ftp_" + str(int(netaddr.IPAddress(self._host['ip'])))

View File

@ -1,4 +1,3 @@
import socket
import netaddr
from Config import cnf
@ -11,15 +10,6 @@ class Plugin(BasePlugin):
self._name = "Gopher"
def _generate(self):
self.data['txt'] = "gopher://%s/\n" % self._host['ip']
self.data['txt'] += "Dirs:\n"
for dir in [f for f in self._host['data']['files'] if f['type'] == '1']:
self.data['txt'] += " + %s\n" % dir['path']
self.data['txt'] += "Other nodes:\n"
for file in [f for f in self._host['data']['files'] if f['type'] != '1' and f['type'] != 'i']:
self.data['txt'] += " + %s\n %s\n" % (file['path'], file['name'])
self.data['txt'] += "Geo: %s/%s\n" % (self._host['data']['geo']['country'], self._host['data']['geo']['city'])
self.data['txt'] += "#gopher_" + str(int(netaddr.IPAddress(self._host['ip'])))
class Pipeline(BasePlugin.Pipeline):
def _init(self):
@ -33,38 +23,3 @@ class Plugin(BasePlugin):
self._logger.debug("Error occured: %s (%s)", e, self._host['ip'])
else:
self._logger.info("Succeeded for %s" % self._host['ip'])
def _recv(self, sck):
total_data = []
while True:
data = sck.recv(2048)
if not data:
break
total_data.append(data.decode('utf-8'))
return ''.join(total_data)
def _find(self):
sock = socket.socket()
sock.settimeout(cnf.stalker.Gopher.timeout)
sock.connect((self._host['ip'], int(self._host['port'])))
sock.sendall(b'\n\n')
response = self._recv(sock)
sock.close()
self._logger.debug("Parsing result")
self._host['data']['files'] = []
self._host['data']['filter'] = False
for s in [s for s in response.split("\r\n") if s]:
node = {}
fields = s.split("\t")
self._logger.debug(fields)
node['type'] = fields[0][0]
if len(fields) == 4:
node['name'] = fields[0][1:]
node['path'] = fields[1]
node['serv'] = f"{fields[2]}:{fields[3]}"
self._host['data']['files'].append(node)
if not self._host['data']['files']:
raise self.PipelineError("Empty server (not Gopher?)")

View File

@ -1 +0,0 @@
plugins formed in old plugin format

View File

@ -1,45 +0,0 @@
from threading import Thread
from time import sleep
class A: # NOOP
def __init__(self, thread = None):
if thread:
self.__thread = Thread(target=thread)
self._running = False
self._init()
def _init(self):
pass
def start(self):
self._running = True
self.__thread.daemon = True
self.__thread.start()
def stop(self):
self._running = False
self.__thread.join()
def __run(self):
while(self._running):
print('NOOP')
sleep(1)
class B(A): # NOOP
def __init__(self):
super().__init__(self.__run)
def __run(self):
while(self._running):
print('OP')
sleep(1)
class C(A): # NOOP
def __run(self):
while(self._running):
print('OP')
sleep(1)
def _init(self):
self.__thread = Thread(target=self.__run)

View File

@ -96,5 +96,7 @@ class Loader:
@classmethod
def by_id(cls, section, id):
l = cls(config[section][id].get('package'))
return l.get(config[section][id].get('service'))(id=id, root=config[section])
# prepares a Loader for the given package
l = cls(config.get(section).get(id).get('package'))
# loads the service class from that package and returns an instance of it, configured from that section
return l.get(config.get(section).get(id).get('service'))(id=id, root=config.get(section))
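A condensed sketch of what by_id boils down to; the importlib mechanics are an assumption based on the Manager helper removed earlier in this commit (config here is the loaded config.yaml, as in the Loader module):

# rough sketch of Loader.by_id('storage', 'pool'); the importlib mechanics are an assumption
import importlib
entry = config['storage']['pool']                      # {'package': 'lib.plugin.base.lib.Mongo', 'service': 'MongoStorage', ...}
module = importlib.import_module(entry['package'])     # import the configured package
service_cls = getattr(module, entry['service'])        # pick the service class by name
instance = service_cls(id='pool', root=config['storage'])   # instantiate it with its config subtree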