Merge branch 'zecora-z'

Zloooy 2019-11-13 22:18:35 +03:00
commit 371d64d005
8 changed files with 241 additions and 27 deletions

View File

@@ -2,18 +2,22 @@
 config = {
     "parser" :
     {
-        "name":"GDocsHashParser",
+        "name":"Parser",
         "init_args":{}
     },
     "address_generator" :
     {
-        "name":"GDocsAddressGenerator",
+        "name":"IpGenerator",
         "init_args":{}
     },
     "scanner" :
     {
-        "name":"GDocsScanner",
-        "init_args":{}
+        "name":"FTPScanner",
+        "init_args":{
+            "credentials": (
+                ("admin", "admin"),
+            )
+        }
     },
     "storage" :
     {
@@ -21,17 +25,18 @@ config = {
         "init_args":
         {
             "path":"results.json",
-            "json_scheme":{
-                "status":
-                {
-                    "gdoc_prefix":
-                    [
-                        {
-                            "@hash": "gdoc_hash",
-                            "@title": "gdoc_title"
-                        }
-                    ]
-                }
+            "json_scheme":
+            {
+                "ftp_status":
+                [
+                    {
+                        "@ip":"ipv4_str",
+                        "@port":"port",
+                        "@login":"login",
+                        "@password":"password",
+                        "@ftp_version":"ftp_version",
+                    }
+                ]
             }
         }
     }
@@ -54,3 +59,15 @@ config = {
     }
 }
 }'''
+'''scheme for gdocs scanner
+"status":
+{
+    "gdoc_prefix":
+    [
+        {
+            "@hash": "gdoc_hash",
+            "@title": "gdoc_title"
+        }
+    ]
+}
+'''
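
For orientation (an editor's hedged reading, not part of the commit): the "@"-prefixed keys in the new json_scheme appear to name fields of each stored record, their values name the annotations they are filled from, and results seem to be grouped under their "ftp_status" value, which matches the updated results.json at the end of this commit. A minimal sketch of that mapping:

# Illustrative only; the real mapping is done by the storage module, not this code.
scheme_entry = {"@ip": "ipv4_str", "@port": "port", "@login": "login",
                "@password": "password", "@ftp_version": "ftp_version"}
scan_result = {"ipv4_str": "122.3.42.1", "port": 21, "ftp_status": "timed out",
               "login": None, "password": None, "ftp_version": None}
record = {key.lstrip("@"): scan_result[annotation]
          for key, annotation in scheme_entry.items()}
stored = {scan_result["ftp_status"]: [record]}
# stored == {"timed out": [{"ip": "122.3.42.1", "port": 21, "login": None,
#                           "password": None, "ftp_version": None}]}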

View File

@@ -72,6 +72,7 @@ class MainPresenter:
             get_return_annotations(IpGenerator.get_next_address).union(get_return_annotations(CoreModel.scan_address)),
             get_argument_annotations(self.storage.put_responce)
         )
+        input()
         self.exit_lock = RLock()

     def startScan(self, ipRanges, portsStr, threadNumber, timeout):
@@ -174,6 +175,7 @@ class ScanWorker(QObject):
                 )
                 print(scan_result)
                 scan_address.update(scan_result)
+                print(scan_address)
                 self.previous_address = scan_address
                 self.storage.put_responce(
                     *convert_for_storage(scan_address)
@@ -181,9 +183,9 @@ class ScanWorker(QObject):
                 string_scan_address = " ".join(key + ":" + str(scan_address[key]) for
                     key in scan_address.keys())
                 if scan_result == 0:
-                    self.log_signal.emit('%s is open' % string_scan_address)
+                    self.log_signal.emit(string_scan_address)
                 else:
-                    self.log_signal.emit('%s is closed' % string_scan_address)
+                    self.log_signal.emit(string_scan_address)
         self.stop()

     def stop(self):

View File

@@ -30,6 +30,7 @@ class ConvertTable():
                 function.__annotations__.items() if
                 key!='return')
             if input_args.issubset(from_keys) and to_key.issubset(function.__annotations__['return']):
+                print("found converter for %s!!!" % to_key)
                 return input_args, function
         raise Exception("There is no converter for %s to %s" % (from_keys,
             to_key))
@@ -38,23 +39,31 @@ class ConvertTable():

     def get_metaconverter(self, from_keys, to_keys):
         '''This function constructs and returns new function used to provide fast
         conversion from from_keys to to_keys'''
+        print("from_keys",from_keys)
+        print("to_keys",to_keys)
         converters_args = []
         converters = []
         for key in to_keys:
             keys_to_convert, converter = None, None
             if key in from_keys:
+                print("%s is in from_keys" % key)
                 keys_to_convert = [key]
                 converter = lambda x : {key: x}
             else:
+                print("getting converter for %s." % key)
                 keys_to_convert, converter = self.get_converter(from_keys, key)
+                print("needed keys: %s" % " ".join(keys_to_convert))
             converters_args.append(keys_to_convert)
             converters.append(converter)

         def metaconverter(args_dict):
             if args_dict == None:
                 return [None] * len(converters)
             res = []
+            print(converters)
+            print(converters_args)
             for i,conv in enumerate(converters):
+                print(converters_args[i])
+                print(args_dict)
                 args = [args_dict[arg] for arg in converters_args[i]]
                 res.append(*[value for key, value in conv(*args).items()])
             return res
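
The docstring above says get_metaconverter builds a fast conversion from from_keys to to_keys: it resolves a (needed keys, converter) pair per target key once, so each later call is just dictionary lookups. ConvertTable's converter registration is not shown in this diff, so the snippet below is an editor's standalone toy (build_metaconverter and ipv4_to_str are hypothetical names) using the same annotation-driven idea:

# Toy illustration of annotation-driven converter lookup; not project code.
import ipaddress

def ipv4_to_str(ipv4_object: 'ipv4_object') -> {'ipv4_str'}:
    return {'ipv4_str': str(ipv4_object)}

def build_metaconverter(from_keys, to_keys, converters):
    plan = []
    for key in to_keys:
        if key in from_keys:
            # bind key now to dodge the late-binding lambda pitfall
            plan.append(([key], lambda x, key=key: {key: x}))
        else:
            conv = next(c for c in converters
                        if key in c.__annotations__['return'])
            needed = [k for k in conv.__annotations__ if k != 'return']
            plan.append((needed, conv))

    def metaconverter(args_dict):
        res = []
        for needed, conv in plan:
            res.extend(conv(*[args_dict[k] for k in needed]).values())
        return res
    return metaconverter

meta = build_metaconverter({'ipv4_object', 'port'}, ['ipv4_str', 'port'], [ipv4_to_str])
print(meta({'ipv4_object': ipaddress.ip_address('10.0.0.1'), 'port': 21}))
# ['10.0.0.1', 21]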

View File

@@ -1,17 +1,15 @@
 from core.prototypes.AbstractAddressGenerator import AbstractAddressGenerator
-from core.prototypes.AbstractModuleClass import internal
 from threading import RLock
 import ipaddress
 from types import GeneratorType


 class IpGenerator(AbstractAddressGenerator):

-    def set_parsed_fields(self, ips : 'ipv4_ranges', ports : 'ports') -> None:
+    def set_parsed_fields(self, ips : 'ipv4_objects', ports : 'ports') -> None:
         self.ips = ips
         self.ports = ports
         self.lock = RLock()

-    @internal
     def get_next_port_number(self, previous_port):
         return (self.ports.index(previous_port) + 1) % len(self.ports)

View File

@@ -1,6 +1,5 @@
 import ipaddress
 from core.prototypes.AbstractParser import AbstractParser
-from core.prototypes.AbstractModuleClass import internal


 class Parser(AbstractParser):
@@ -9,10 +8,9 @@ class Parser(AbstractParser):
             'ports'}:
         result = dict()
         result['ports'] = self.parse_port_field(ports)
-        result['ipv4_ranges'] = self.get_all_addresses(ips)
+        result['ipv4_objects'] = self.get_all_addresses(ips)
         return result

-    @internal
     def parse_port_field(self, ports):
         """
         Parses ports from string, returns them as integers in the list.
@@ -40,7 +38,6 @@ class Parser(AbstractParser):
             # Change to default ports from constant
             return [21, 22, 23, 25, 80, 443, 110, 111, 135, 139, 445, 8080, 8443, 53, 143, 989, 990, 3306, 1080, 5554, 6667, 2222, 4444, 666, 6666, 1337, 2020, 31337]

-    @internal
     def parse_address_field(self, ips):
         """
         Parses ip input string, returns the generator over them.
@@ -62,7 +59,6 @@ class Parser(AbstractParser):
             for host in ip_obj:
                 yield host

-    @internal
     def get_all_addresses(self, ips):
         ip_objects = set()
         inputs = [ip.strip() for ip in ips.split(',')]

View File

@@ -0,0 +1,98 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from core.prototypes.AbstractScanner import AbstractScanner
import ftplib
from ftplib import FTP
MAX_ERRORS = 3
class FTPScanner(AbstractScanner):

    def __init__(self, timeout:"timeout", credentials:"credentials"):
        self.__timeout__ = timeout
        self.__credentials__ = credentials

    def scan_address(self, host: 'ipv4_str', port: 'port') -> {'ftp_version', 'ftp_status', 'login', 'password'}:
        result = self.ftp_anonymous_login(host, port, self.__timeout__)
        if result['ftp_status'] == 'ok':
            # TODO: do something with the errors
            return result
        if not result['ftp_status'].startswith('530'):
            return result
        return self.ftp_bruteforce(
            host, port, self.__credentials__, self.__timeout__)
    @staticmethod
    def ftp_anonymous_login(host, port, timeout):
        '''Get version and check if anonymous login is enabled'''
        result = {
            key:None for key in ['ftp_version', 'ftp_status', 'login',
                'password']
        }
        ftp_connection = FTP(timeout=timeout)
        try:
            version = ftp_connection.connect(host=host, port=port)
            # Get something like "220 Twisted 16.6.0 FTP Server"
            result['ftp_version'] = version.lstrip('220 ')
            # Try to login as anonymous user
            ftp_connection.login()
            result['ftp_status'] = 'ok'
        except ftplib.error_perm as e:
            if str(e).startswith("530"):
                # Anonymous login rejected; keep the 530 reply so scan_address
                # can fall back to brute force instead of reporting success
                result['ftp_status'] = str(e)
                result['anonymous_login'] = False
        except ftplib.all_errors as e:
            # status - error
            result['ftp_status'] = str(e)
            return result
        finally:
            ftp_connection.close()
        return result
    @staticmethod
    def ftp_bruteforce(host, port, creds, timeout):
        '''Attempt to brute force login/password pair'''
        # We want to maintain the connection to speed up brute forcing,
        # but we also want to reconnect if necessary.
        # That is why a cred iterator is used to pick a new login/pass only
        # when we need to.
        result = {
            key:None for key in ['ftp_version', 'ftp_status', 'login',
                'password']
        }
        result['ftp_status'] = "error"
        error_count = 0
        it = iter(creds)
        cred = next(it, "")
        ftp_connection = FTP(timeout=timeout)
        while error_count < MAX_ERRORS:
            try:
                # Connecting to server
                ftp_connection.connect(host=host, port=port)
                while cred and error_count < MAX_ERRORS:
                    user, password = cred
                    # Trying to log in
                    try:
                        ftp_connection.login(user, password)
                        ftp_connection.close()
                        result['ftp_status'] = 'ok'
                        result['login'] = user
                        result['password'] = password
                        return result
                    except ftplib.error_perm as e:
                        # Password was wrong, checking another
                        cred = next(it, "")
                        continue
                    except ftplib.all_errors as e:
                        error_count += 1
                        # Connection was dropped or another network error
                        # happened. We must reconnect; error_count keeps us
                        # from hanging forever on a mumbling host
                        break
                if not cred:
                    # All credentials have been tried, give up
                    break
            except ftplib.all_errors as e:
                # Cannot reconnect, give up
                break
            finally:
                ftp_connection.close()
        return result
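
A minimal, hedged usage sketch of the new scanner on its own (host, port and credentials here are arbitrary examples; the tests below exercise the same interface against a local pyftpdlib server):

# Standalone call outside the framework; credentials is an iterable of
# (login, password) pairs, and the result carries ftp_version, ftp_status,
# login and password.
scanner = FTPScanner(timeout=10, credentials=(("admin", "admin"), ("user", "password")))
print(scanner.scan_address("192.0.2.10", 21))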

View File

@@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from modules.network_scan.FTPScanner import FTPScanner
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
import unittest
from tempfile import mkdtemp
import multiprocessing
from time import sleep
import http.server
import socketserver
import os
TEST_CREDS = (("admin", "admin"), ("1", "1"), ('user', 'password'))
PORT = 2121
def run_anonymous_ftp(temp_dir):
    authorizer = DummyAuthorizer()
    authorizer.add_anonymous(temp_dir)
    handler = FTPHandler
    handler.authorizer = authorizer
    server = FTPServer(("127.0.0.1", PORT), handler)
    server.serve_forever()
def run_bruteforce_ftp(temp_dir):
    authorizer = DummyAuthorizer()
    user, password = TEST_CREDS[-1]
    authorizer.add_user(user, password, temp_dir, perm="elradfmw")
    handler = FTPHandler
    handler.authorizer = authorizer
    handler.max_login_attempts = 2  # Drop the connection after every 2 incorrect attempts
    server = FTPServer(("127.0.0.1", PORT), handler)
    server.serve_forever()
def run_mumble():
    handler = http.server.SimpleHTTPRequestHandler
    httpd = socketserver.TCPServer(("127.0.0.1", PORT), handler)
    httpd.serve_forever()
class TestFTPScanner(unittest.TestCase):

    def test_closed_port(self):
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', 31337)
        print(result)
        self.assertTrue("Connection refused" in result['ftp_status'], "Connection refused")

    def test_mumble(self):
        p = multiprocessing.Process(target=run_mumble)
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        self.assertNotEqual(result['ftp_status'], 'ok', "Should be an error")
        self.assertTrue("timed out" in result['ftp_status'], "Should time out")
        p.terminate()

    def test_anonymous_login(self):
        temp_dir = mkdtemp()
        p = multiprocessing.Process(target=run_anonymous_ftp, args=(temp_dir,))
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        self.assertEqual(result['login'], None, "Anonymous login should not need a login")
        self.assertEqual(result['password'], None, "Anonymous login should not need a password")
        p.terminate()
        os.rmdir(temp_dir)

    def test_bruteforce(self):
        temp_dir = mkdtemp()
        p = multiprocessing.Process(target=run_bruteforce_ftp, args=(temp_dir,))
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        self.assertEqual(result['login'], TEST_CREDS[-1][0], "Should find the valid login")
        self.assertEqual(result['password'], TEST_CREDS[-1][1], "Should find the valid password")
        p.terminate()
        os.rmdir(temp_dir)
if __name__ == '__main__':
    unittest.main()

View File

@@ -1 +1 @@
-{"200": {"https://docs.google.com/document/d/": [{"hash": "11Sz_PyqL268V9xmcEjYqEhufFGleT5TowdKEu5cTFak", "title": null}, {"hash": "11Sz_PyqL268V9xmcEjYqEhufFGleT5TowdKEu5cTFal", "title": null}, {"hash": "11Sz_PyqL268V9xmcEjYqEhufFGleT5TowdKEu5cTFam", "title": null}]}}
+{"timed out": [{"ip": "122.3.42.1", "port": 20, "login": null, "password": null, "ftp_version": null}, {"ip": "122.3.42.1", "port": 21, "login": null, "password": null, "ftp_version": null}]}