Sync codebase with upstream

This commit is contained in:
ChronosX88 2019-11-15 00:54:17 +04:00 committed by GitHub
commit ca808bca5f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 657 additions and 95 deletions

View File

@ -12,8 +12,8 @@ INSTALLATION
------------ ------------
Just run Just run
```bash ```bash
git clone http://github.com/ChronosX88/PySca.git git clone http://github.com/Zloooy/PySca.git
cd PySca cd PyNesca
pip install -r requirements.txt pip install -r requirements.txt
``` ```

View File

@ -10,8 +10,8 @@ PySca - сетевой сканер, переписанный на Python
------------ ------------
Введите в терминале Введите в терминале
```bash ```bash
git clone http://github.com/ChronosX88/PySca.git git clone http://github.com/Zloooy/PySca.git
cd PySca cd PyNesca
pip install -r requirements.txt pip install -r requirements.txt
``` ```
@ -22,3 +22,13 @@ pip install -r requirements.txt
python main.py python main.py
``` ```
Находясь в корневой папке PySca Находясь в корневой папке PySca
НАСТРОЙКА
------------
Тут [описание синтаксиса config.py](./docs/config.ru.md)
ХОЧУ СДЕЛАТЬ СВОЙ МОДУЛЬ ДЛЯ PySca
-------------
Тут [общие требования к оформлению модуля](./docs/developing_custom_module.md)
Тут [описание функций конкретных модулей](./docs/classes.ru.md)

View File

@ -1,7 +1,73 @@
#modules selection #modules and selection and init args setup
config = { config = {
"parser" : "GDocsHashParser", "parser" :
"address_generator" : "GDocsAddressGenerator", {
"scanner" : "URLScanner", "name":"Parser",
"storage" : "GDocsStorage" "init_args":{}
},
"address_generator" :
{
"name":"IpGenerator",
"init_args":{}
},
"scanner" :
{
"name":"FTPScanner",
"init_args":{
"credentials": (
("admin", "admin")
)
}
},
"storage" :
{
"name":"JSONStorage",
"init_args":
{
"path":"results.json",
"json_scheme":
{
"ftp_status":
[
{
"@ip":"ipv4_str",
"@port":"port",
"@login":"login",
"@password":"password",
"@ftp_version":"ftp_version",
}
]
}
}
}
} }
'''scheme for url scanner
{
"status":
{
"url"
}
}'''
'''scheme for port scanner
{
"ipv4_str":
{
"port_status_str":
{
"port"
}
}
}'''
'''scheme for gdocs scanner
"status":
{
"gdoc_prefix":
[
{
"@hash": "gdoc_hash",
"@title": "gdoc_title"
}
]
}
'''

View File

@ -6,19 +6,19 @@ from PyQt5.Qt import QThread, pyqtSignal
from PyQt5.QtCore import QObject, pyqtSlot from PyQt5.QtCore import QObject, pyqtSlot
from config import config from config import config
from inspect import isfunction from inspect import isfunction
from communication.communication_utils import complitable_functions, get_converted_arguments from communication.communication_utils import complitable_functions, get_converted_arguments, get_argument_annotations, get_return_annotations
CoreModel = import_utils.import_class("modules/network_scan/%s.py" % CoreModel = import_utils.import_class("modules/network_scan/%s.py" %
config["scanner"]) config["scanner"]["name"])
Parser = import_utils.import_class("modules/address_generation/%s.py" % Parser = import_utils.import_class("modules/address_generation/%s.py" %
config["parser"] config["parser"]["name"]
) )
IpGenerator = import_utils.import_class( IpGenerator = import_utils.import_class(
"modules/address_generation/%s.py" % "modules/address_generation/%s.py" %
config["address_generator"] config["address_generator"]["name"]
) )
JSONStorage = import_utils.import_class("modules/storage/%s.py" % JSONStorage = import_utils.import_class("modules/storage/%s.py" %
config["storage"]) config["storage"]["name"])
convert_table = ConvertTable() convert_table = ConvertTable()
for func in import_utils.import_matching( for func in import_utils.import_matching(
"modules/convert_functions/", "modules/convert_functions/",
@ -36,15 +36,43 @@ for function in [
msg = "%s is not complitable with %s" msg = "%s is not complitable with %s"
print(msg % (function, previous)) print(msg % (function, previous))
previous = function previous = function
convert_for_parser = convert_table.get_metaconverter(
{'address_field','port_field'},
get_argument_annotations(Parser.parse_fields)
)
convert_for_address_generator = convert_table.get_metaconverter(
get_return_annotations(Parser.parse_fields),
get_argument_annotations(IpGenerator.set_parsed_fields)
)
convert_for_scanner = convert_table.get_metaconverter(
get_return_annotations(IpGenerator.get_next_address),
get_argument_annotations(CoreModel.scan_address)
)
convert_for_address_generator_reverse = convert_table.get_metaconverter(
get_return_annotations(IpGenerator.get_next_address).union(get_return_annotations(CoreModel.scan_address)),
get_argument_annotations(IpGenerator.get_next_address)
)
convert_for_storage = None
class MainPresenter: class MainPresenter:
def __init__(self, ui): def __init__(self, ui):
self.ui = ui self.ui = ui
self.threads = [] self.threads = []
self.isScanEnabled = False self.isScanEnabled = False
self.parser = Parser() self.parser = Parser(*get_converted_arguments(Parser.__init__,
config['parser']['init_args'], convert_table))
#needed config to specify path #needed config to specify path
self.storage = JSONStorage("results.json") print(*get_converted_arguments(JSONStorage.__init__,
config["storage"]["init_args"], convert_table))
self.storage = JSONStorage(*get_converted_arguments(JSONStorage.__init__,
config["storage"]["init_args"], convert_table))
print(get_argument_annotations(self.storage.put_responce))
global convert_for_storage
convert_for_storage = convert_table.get_metaconverter(
get_return_annotations(IpGenerator.get_next_address).union(get_return_annotations(CoreModel.scan_address)),
get_argument_annotations(self.storage.put_responce)
)
input()
self.exit_lock = RLock() self.exit_lock = RLock()
def startScan(self, ipRanges, portsStr, threadNumber, timeout): def startScan(self, ipRanges, portsStr, threadNumber, timeout):
@ -52,25 +80,21 @@ class MainPresenter:
addresses = None addresses = None
parser_args = {'port_field':portsStr, 'address_field':ipRanges} parser_args = {'port_field':portsStr, 'address_field':ipRanges}
fields = self.parser.parse_fields( fields = self.parser.parse_fields(
*get_converted_arguments( *convert_for_parser(parser_args)
self.parser.parse_fields,
parser_args,
convert_table
)
) )
self.scanner = CoreModel(timeout) config["scanner"]["init_args"]["timeout"] = timeout
self.scanner = CoreModel(*get_converted_arguments(CoreModel.__init__,
config["scanner"]["init_args"], convert_table))
if CoreModel.INDEPENDENT_THREAD_MANAGEMENT: if CoreModel.INDEPENDENT_THREAD_MANAGEMENT:
addresses = self.parser.get_all_addresses(ipRanges) addresses = self.parser.get_all_addresses(ipRanges)
self.ip_generator = PlugAddressGenerator(addresses, ports) self.ip_generator = PlugAddressGenerator(addresses, ports)
threadNumber = 1 threadNumber = 1
else: else:
self.ip_generator = IpGenerator() self.ip_generator = IpGenerator(
*get_converted_arguments(IpGenerator.__init__,
config["address_generator"]["init_args"], convert_table))
self.ip_generator.set_parsed_fields( self.ip_generator.set_parsed_fields(
*get_converted_arguments( *convert_for_address_generator(fields)
self.ip_generator.set_parsed_fields,
fields,
convert_table
)
) )
threadNumber = int(threadNumber) threadNumber = int(threadNumber)
print("thread %i number set" % threadNumber) print("thread %i number set" % threadNumber)
@ -142,37 +166,26 @@ class ScanWorker(QObject):
while self.isRunning: while self.isRunning:
print("worker start") print("worker start")
scan_address = self.ip_generator.get_next_address( scan_address = self.ip_generator.get_next_address(
*get_converted_arguments( *convert_for_address_generator_reverse(self.previous_address)
self.ip_generator.get_next_address,
self.previous_address,
convert_table
)
) )
if not scan_address: if not scan_address:
break break
scan_result = self.scanner.scan_address( scan_result = self.scanner.scan_address(
*get_converted_arguments( *convert_for_scanner(scan_address)
self.scanner.scan_address,
scan_address,
convert_table
)
) )
print(scan_result) print(scan_result)
scan_address.update(scan_result) scan_address.update(scan_result)
print(scan_address)
self.previous_address = scan_address self.previous_address = scan_address
self.storage.put_responce( self.storage.put_responce(
*get_converted_arguments( *convert_for_storage(scan_address)
self.storage.put_responce,
scan_address,
convert_table
)
) )
string_scan_address = " ".join(key + ":" + str(scan_address[key]) for string_scan_address = " ".join(key + ":" + str(scan_address[key]) for
key in scan_address.keys()) key in scan_address.keys())
if scan_result == 0: if scan_result == 0:
self.log_signal.emit('%s is open' % string_scan_address) self.log_signal.emit(string_scan_address)
else: else:
self.log_signal.emit('%s is closed' % string_scan_address) self.log_signal.emit(string_scan_address)
self.stop() self.stop()
def stop(self): def stop(self):

View File

@ -30,7 +30,41 @@ class ConvertTable():
function.__annotations__.items() if function.__annotations__.items() if
key!='return') key!='return')
if input_args.issubset(from_keys) and to_key.issubset(function.__annotations__['return']): if input_args.issubset(from_keys) and to_key.issubset(function.__annotations__['return']):
print("found converter for %s!!!" % to_key)
return input_args, function return input_args, function
raise Exception("There is no converter for %s to %s" % (from_keys, raise Exception("There is no converter for %s to %s" % (from_keys,
to_key)) to_key))
return None, None return None, None
def get_metaconverter(self, from_keys, to_keys):
'''This function constructs and returns new function used to provide fast
conversion from from_keys to to_keys'''
print("from_keys",from_keys)
print("to_keys",to_keys)
converters_args = []
converters = []
for key in to_keys:
keys_to_convert, converter = None, None
if key in from_keys:
print("%s is in from_keys" % key)
keys_to_convert = [key]
converter = lambda x : {key: x}
else:
print("getting converter for %s." % key)
keys_to_convert, converter = self.get_converter(from_keys, key)
print("needed keys: %s" % " ".join(keys_to_convert))
converters_args.append(keys_to_convert)
converters.append(converter)
def metaconverter(args_dict):
if args_dict == None:
return [None] * len(converters)
res = []
print(converters)
print(converters_args)
for i,conv in enumerate(converters):
print(converters_args[i])
print(args_dict)
args = [args_dict[arg] for arg in converters_args[i]]
res.append(*[value for key, value in conv(*args).items()])
return res
return metaconverter

View File

@ -1,7 +1,13 @@
def get_argument_annotations(func):
return list(value for key, value in func.__annotations__.items() if key !=
'return')
def get_return_annotations(func):
return func.__annotations__['return']
def complitable_functions(output_function, input_function, convert_table): def complitable_functions(output_function, input_function, convert_table):
input_keys = set(value for key, value in input_keys = set(get_argument_annotations(input_function))
input_function.__annotations__.items() if key != 'return') return_keys = set(get_return_annotations(output_function))
return_keys = set(output_function.__annotations__["return"])
all_possible_return_keys = return_keys.union( all_possible_return_keys = return_keys.union(
convert_table.all_possible_conversions(return_keys) convert_table.all_possible_conversions(return_keys)
) )
@ -10,9 +16,13 @@ def complitable_functions(output_function, input_function, convert_table):
return input_keys.issubset(all_possible_return_keys) return input_keys.issubset(all_possible_return_keys)
def get_converted_arguments(function, simple_arg_dict, convert_table): def get_converted_arguments(function, simple_arg_dict, convert_table):
#This function returns list of arguments needed by function made from
#simple_arg_dict by convert_table
if simple_arg_dict == None: if simple_arg_dict == None:
return [None for key in function.__annotations__.keys() if key != 'return'] return [None for key in function.__annotations__.keys() if key != 'return']
result = [] result = []
if not hasattr(function, "__annotations__"):
return result
for key, value in function.__annotations__.items(): for key, value in function.__annotations__.items():
if key != 'return': if key != 'return':
converted_arg = None converted_arg = None
@ -29,4 +39,3 @@ def get_converted_arguments(function, simple_arg_dict, convert_table):
)[value] )[value]
result.append(converted_arg) result.append(converted_arg)
return result return result

View File

@ -3,6 +3,8 @@ from core.prototypes.AbstractModuleClass import AbstractModuleClass
class AbstractAddressGenerator(AbstractModuleClass): class AbstractAddressGenerator(AbstractModuleClass):
'''The class describes addess generation mechanism.''' '''The class describes addess generation mechanism.'''
INPUT_FUNCTIONS = {"set_parsed_fields", "get_next_address"}
OUTPUT_FUNCTIONS = {"get_next_address", "get_all_addresses"}
@abstractmethod @abstractmethod
def set_parsed_fields(self): def set_parsed_fields(self):
'''This method is called after generator initialization. It is used to '''This method is called after generator initialization. It is used to

View File

@ -1,17 +1,30 @@
def internal(func):
func.is_internal = True
return func
class AbstractModuleClassType(type): class AbstractModuleClassType(type):
def __new__(self, name, bases, attrs): def __new__(self, name, bases, attrs):
print("creating class", name) print("creating class", name)
base_class = None
if len(bases) != 0:
base_class = bases[0]
input_function_names = None
output_function_names = None
if base_class:
input_function_names = getattr(base_class, "INPUT_FUNCTIONS")
output_function_names = getattr(base_class, "OUTPUT_FUNCTIONS")
else:
input_function_names = attrs["INPUT_FUNCTIONS"]
output_function_names = attrs["OUTPUT_FUNCTIONS"]
if not name.startswith("Abstract"): if not name.startswith("Abstract"):
for attrname, attrvalue in attrs.items(): for attrname, attrvalue in attrs.items():
if type(attrvalue).__name__ == 'function': if type(attrvalue).__name__ == 'function':
if attrvalue.__name__ not in ["__init__", "save"] and not (hasattr(attrvalue, "is_internal") and attrvalue.is_internal if attrvalue.__name__ in input_function_names:
): if len(list(filter(lambda x: x!= "return",
if not name.endswith("Storage"): attrvalue.__annotations__.keys()))) == 0:
raise Exception(
"%s.%s:no input annotations." %
(name, attrname)
)
if attrvalue.__name__ in output_function_names:
try: try:
attrvalue.__annotations__["return"] attrvalue.__annotations__["return"]
except KeyError: except KeyError:
@ -19,14 +32,10 @@ class AbstractModuleClassType(type):
"%s.%s: return type is not defined!" % "%s.%s: return type is not defined!" %
(name, attrname) (name, attrname)
) )
if not name.endswith("Parser"):
if not attrvalue.__annotations__:
raise Exception(
"%s.%s: arguments missing annotations!" %
(name, attrname)
)
return super().__new__(self, name, bases, attrs) return super().__new__(self, name, bases, attrs)
class AbstractModuleClass(metaclass = AbstractModuleClassType): class AbstractModuleClass(metaclass = AbstractModuleClassType):
REQUIED_INPUT_KEYS = None REQUIED_INPUT_KEYS = None
OUTPUT_KEYS = [] OUTPUT_KEYS = []
INPUT_FUNCTIONS = {}
OUTPUT_FUNCTIONS = {}

View File

@ -4,7 +4,8 @@ from core.prototypes.AbstractModuleClass import AbstractModuleClass
class AbstractParser(AbstractModuleClass): class AbstractParser(AbstractModuleClass):
'''The class describes fields parsing mechanisms''' '''The class describes fields parsing mechanisms'''
INPUT_FUNCTIONS = {}
OUTPUT_FUNCTIONS = {"parse_fields"}
@abstractmethod @abstractmethod
def parse_fields(self, args): def parse_fields(self, args):
'''In address field can be plased any text, describing address of '''In address field can be plased any text, describing address of

View File

@ -6,6 +6,8 @@ class AbstractScanner(AbstractModuleClass):
If it can manage many threads by itself set INDEPENDENT_THREAD_MANAGEMENT If it can manage many threads by itself set INDEPENDENT_THREAD_MANAGEMENT
to "True"''' to "True"'''
INDEPENDENT_THREAD_MANAGEMENT = False INDEPENDENT_THREAD_MANAGEMENT = False
INPUT_FUNCTIONS = {"scan_address"}
OUTPUT_FUNCTIONS = {"scan_address"}
@abstractmethod @abstractmethod
def scan_address(self, address): def scan_address(self, address):

43
docs/classes.ru.md Normal file
View File

@ -0,0 +1,43 @@
# Классы-прототипы модулей PySca
Процесс сканирования в PySca разделён на этапы. За каждый этап сканирования отвечает отдельный класс - модуль, реализующий все необходимые для данного этапа функции, декларированнынные в соответствующем классе-прототипе.
Каждая функция модуля PySca - элемент конвеера. На вход она получает результаты выполнения предидущей функции (явно - в виде словаря или же как значения аргументов согласно аннотациям к аргументам) другого модуля и возвращет значение в следующую в цепочке функцию. Связи между функциями можно представить в виде таблицы:
Название функции в абстрактном классе | Функция-источник аргументов | Функция - приёмник результатов
:------------------------------------: | :---------------------------: | :------------------------------:
AbstractParser.parse_fields | вводится пользователем | AbstractAddresGenerator.set_parsed_fields
AbstractAddressGenerator.set_parsed_fields | AbstractParser.parse_fields | нет
AbstractAddressGenerator.get_next_address | AbstractScanner.scan_address + AbstractAddressGenerator.get_next_address или None при первом запросе адреса | AbstractScanner.scan_address и AbstractStorage.put_responce
AbstractScanner.scan_address | AbstractAddressGenerator.get_next_address | AbstractAddressGenerator.get_next_address и AbstractStorage.put_responce
AbstractStorage.put_responce | AbstractAddressGenerator.get_next_address + AbstractScanner.scan_address | нет
AbstractStorage.save | нет | нет
## Описание классов модулей по отдельности
### AbstractParser
Задача этого класса - обработка пользовательского ввода, преобразование строк в именованные python-объекты.
#### Методы AbstractParser
* __init__()
В аргументы передаются запрошенные параметры из config.py
* parse_fields()
В аргументы функции передаётся содержимое текстовых полей, введённое пользователем.
### AbstractAddressGenerator
Задача модулей-наследников этого класса - обработка вывода парсера и генерация адресов - задач для сканирования на основе не только данных парсера, но и результатов, полученных от сканирования предидущих адресов.
#### Методы AbstractAddressGenerator
* __init__()
В аргументы передаются запрошенные параметры из config.py
* set_parsed_fields()
В аргументы получает разультаты AbstractParser.parse_fields. Если нужно обрабатывает их и сохраняет как поля класса.
* get_next_address()
В аргументы получает либо None как значение всех аргументов - для получения первого адреса, либо результаты работы AbstractScanner.scan_address + результаты собственной работы (тот адрес, который сканировал экземпляр AbstractScanner). На основе полученных данных/внутренних полей возвращает адрес для последующего сканирования либо None, если адресов больше нет.
ВАЖНО: Так как обращения к функции класса возможны в асинхронном виде, рекомндуется либо оборачивать код функции в lock класса Threading, либо использоват потокобезопасные структуры как поля класса (Queue и т. п.).
### AbstractScanner
Модули этого класса отвечают за сам процесс сканирования. На данный момент доступно сканирование через функцию только одного адреса, своя реализация параллелизма пока невозможна.
#### Методы AbstractScanner
* __init__()
В аргументы передаются запрошенные параметры из config.py
* scan_address()
На вход метод получает адрес, сгенерированный AbstractAddresGenerator'ом, возвращает результаты сканирования. В процессе сканирования не рекомендуется менять поля класса, а если менять, то только потокобезопасно.
#### Методы AbstractStorage
* __init__()
В аргументы передаются запрошенные параметры из config.py
* put_responce()
На вход получает сумму результатов выполнения AbstractAddressGenerator.get_next_address и AbstractScammer.scan_address и сохраняет их себе в поля/ в реальном времени записывает их в файл.
* save()
Метод вызывается в конце сканирования, когда все потоки скаенра остановлены. Сохраняет информацию в файл.

15
docs/config.ru.md Normal file
View File

@ -0,0 +1,15 @@
# Что такое config.py
config.py - инструмент для выбора модулей цепочки и их предварительной конфигурации.
Синтаксис [config.py](../config.py) выглядит так:
```
config = {
"parser":
{
"name":[ИМЯ_ПАРСЕРА]
"init_args":[словарь с аргументами инициализации]
}
...
}
```
Здесь ИМЯ_ПАРСЕРА - это название файла модуля без расширения и класса в нём, который будет отвечать за парсинг пользовательского ввода.
словарь с аргументами инициализации - все аргументы(кроме self), которые могут понадобиться при инициализации(__init__) модуля этой направленности (не только выбранного, но и любого другого. Может сохраняться при смене модуля, если другой имеет все требуемые аргументы в этом словаре.

View File

@ -0,0 +1,17 @@
# Создание собственного модуля
Каждый модуль PySca - класс, отвечающий за конкретные задачи в цепочке сканирования. Чтобы ваш код стал модулем сканера, необходимо соблюдать следующие условия:
* Модуль должен представлять из себя отдельный файл, содержащий в себе единственный класс, в методах которого и реализована логика модуля. Имя файла модуля должно совпадать с именем содержащегося в нём класса.
* Наследование класса модуля от одного из абстрактных классов AbstractParser, AbstractAddressGenerator, AbstractScanner, AbstractStorage в соответствии с функционалом.
* Расположение файла модуля в общей системе папок PySca зависит от класса-родителя модуля и определяется по этой таблице:
Класс-родитель | Адрес[^pysca_folder_system]
:--------------:|:-----:
AbstractParser | modules/address_generation
AbstractAddressGenerator | modules/address_generation
AbstractScanner | modules/network_scan
AbstractStorage | modules/storage
[^pysca_folder_system]: Все адреса приведены относительно корневого каталога PySca.
* Реализация всех методов класса-родителя с соблюдением условий:
* Аннотация всех аргументов методов (если они необходимы)[^self_argument] ключами требуемых значений.
* Аннотация возвращаемых методами значений в виде множества/списка возвращаемых ключей (если метод должен что-то возвращать).
* Соответствие возвращаемых методом значений аннотации - возвращение словаря с парами ключ-значение, где ключ - элемент из списка заявленных ключей, а значение - соответствующие ему данные, либо возвращение None.
[^self_argument]: Разумеется, self в аннотации не нуждается, более того, аннотация self может сломать сканер.

View File

@ -1,5 +1,4 @@
from core.prototypes.AbstractAddressGenerator import AbstractAddressGenerator from core.prototypes.AbstractAddressGenerator import AbstractAddressGenerator
from core.prototypes.AbstractModuleClass import internal
class GDocsAddressGenerator(AbstractAddressGenerator): class GDocsAddressGenerator(AbstractAddressGenerator):
def set_parsed_fields(self, prefix:"gdocs_prefix", def set_parsed_fields(self, prefix:"gdocs_prefix",
@ -13,7 +12,6 @@ class GDocsAddressGenerator(AbstractAddressGenerator):
self.hashlen = len(ranges[0][0]) self.hashlen = len(ranges[0][0])
self.currange = self.ranges.pop(0) self.currange = self.ranges.pop(0)
@internal
def hash2int(self, gdhash): def hash2int(self, gdhash):
alen = len(self.alphabet) alen = len(self.alphabet)
res = 0 res = 0
@ -22,7 +20,6 @@ class GDocsAddressGenerator(AbstractAddressGenerator):
res += self.revsymbols[symb] res += self.revsymbols[symb]
return res return res
@internal
def int2hash(self, hint): def int2hash(self, hint):
alen = len(self.alphabet) alen = len(self.alphabet)
reshash = [self.alphabet[0]]*self.hashlen reshash = [self.alphabet[0]]*self.hashlen
@ -31,15 +28,15 @@ class GDocsAddressGenerator(AbstractAddressGenerator):
reshash[i] = self.alphabet[rest] reshash[i] = self.alphabet[rest]
return "".join(reshash) return "".join(reshash)
def get_next_address(self, prev_url:'url') -> {"url"}: def get_next_address(self, prev_hash:'gdoc_hash') -> {"gdoc_prefix", "gdoc_hash"}:
if not prev_url: if not prev_hash:
return {'url':self.prefix + self.currange[0]} return {'gdoc_prefix':self.prefix, "gdoc_hash":self.currange[0]}
prev_hash = prev_url[prev_url.rfind('/') + 1:] #prev_hash = prev_url[prev_url.rfind('/') + 1:]
if self.hash2int(self.currange[1]) <= self.hash2int(prev_hash): if self.hash2int(self.currange[1]) <= self.hash2int(prev_hash):
if not self.ranges: return None if not self.ranges: return None
self.currange = self.ranges.pop(0) self.currange = self.ranges.pop(0)
return {'url' : self.prefix + self.currange[0]} return {'gdoc_prefix' : self.prefix, 'gdoc_hash':self.currange[0]}
return {'url' : self.prefix + self.int2hash(self.hash2int(prev_hash) + return {'gdoc_prefix' : self.prefix, 'gdoc_hash':self.int2hash(self.hash2int(prev_hash) +
1)} 1)}
def get_all_addresses(self) -> {'gdocs_prefix', 'gdocs_hash_ranges'}: def get_all_addresses(self) -> {'gdocs_prefix', 'gdocs_hash_ranges'}:

View File

@ -1,17 +1,15 @@
from core.prototypes.AbstractAddressGenerator import AbstractAddressGenerator from core.prototypes.AbstractAddressGenerator import AbstractAddressGenerator
from core.prototypes.AbstractModuleClass import internal
from threading import RLock from threading import RLock
import ipaddress import ipaddress
from types import GeneratorType from types import GeneratorType
class IpGenerator(AbstractAddressGenerator): class IpGenerator(AbstractAddressGenerator):
def set_parsed_fields(self, ips : 'ipv4_ranges', ports : 'ports') -> None: def set_parsed_fields(self, ips : 'ipv4_objects', ports : 'ports') -> None:
self.ips = ips self.ips = ips
self.ports = ports self.ports = ports
self.lock = RLock() self.lock = RLock()
@internal
def get_next_port_number(self, previous_port): def get_next_port_number(self, previous_port):
return (self.ports.index(previous_port) + 1) % len(self.ports) return (self.ports.index(previous_port) + 1) % len(self.ports)

View File

@ -1,6 +1,5 @@
import ipaddress import ipaddress
from core.prototypes.AbstractParser import AbstractParser from core.prototypes.AbstractParser import AbstractParser
from core.prototypes.AbstractModuleClass import internal
class Parser(AbstractParser): class Parser(AbstractParser):
@ -9,10 +8,9 @@ class Parser(AbstractParser):
'ports'}: 'ports'}:
result = dict() result = dict()
result['ports'] = self.parse_port_field(ports) result['ports'] = self.parse_port_field(ports)
result['ipv4_ranges'] = self.get_all_addresses(ips) result['ipv4_objects'] = self.get_all_addresses(ips)
return result return result
@internal
def parse_port_field(self, ports): def parse_port_field(self, ports):
""" """
Parses ports from string, returns them as integers in the list. Parses ports from string, returns them as integers in the list.
@ -40,7 +38,6 @@ class Parser(AbstractParser):
# Change to default ports from constant # Change to default ports from constant
return [21, 22, 23, 25, 80, 443, 110, 111, 135, 139, 445, 8080, 8443, 53, 143, 989, 990, 3306, 1080, 5554, 6667, 2222, 4444, 666, 6666, 1337, 2020, 31337] return [21, 22, 23, 25, 80, 443, 110, 111, 135, 139, 445, 8080, 8443, 53, 143, 989, 990, 3306, 1080, 5554, 6667, 2222, 4444, 666, 6666, 1337, 2020, 31337]
@internal
def parse_address_field(self, ips): def parse_address_field(self, ips):
""" """
Parses ip input string, returns the generator over them. Parses ip input string, returns the generator over them.
@ -62,7 +59,6 @@ class Parser(AbstractParser):
for host in ip_obj: for host in ip_obj:
yield host yield host
@internal
def get_all_addresses(self, ips): def get_all_addresses(self, ips):
ip_objects = set() ip_objects = set()
inputs = [ip.strip() for ip in ips.split(',')] inputs = [ip.strip() for ip in ips.split(',')]

View File

@ -0,0 +1,2 @@
def gdocs_prefix_hash2Url(prefix:'gdoc_prefix', ghash:'gdoc_hash') -> {'url'}:
return {'url':prefix + ghash}

View File

@ -0,0 +1,2 @@
def response2text(response:'response') -> {'text'}:
return {'text': response.text}

View File

@ -2,7 +2,7 @@ import socket
from core.prototypes.AbstractScanner import AbstractScanner from core.prototypes.AbstractScanner import AbstractScanner
class CoreModel(AbstractScanner): class CoreModel(AbstractScanner):
def __init__(self, timeout): def __init__(self, timeout:"timeout"):
self.defSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.defSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.defSocket.settimeout(int(timeout)) self.defSocket.settimeout(int(timeout))

View File

@ -0,0 +1,98 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from core.prototypes.AbstractScanner import AbstractScanner
import ftplib
from ftplib import FTP
MAX_ERRORS = 3
class FTPScanner(AbstractScanner):
def __init__(self, timeout:"timeout", credentials:"credentials"):
self.__timeout__ = timeout
self.__credantials__ = credentials
def scan_address(self, host: 'ipv4_str', port: 'port') -> {'ftp_version', 'ftp_status', 'login', 'password'}:
result = self.ftp_anonymous_login(host, port, self.__timeout__)
if result['ftp_status'] == 'ok':
#Что-то делать с ошибками
return result
if not result['ftp_status'].startswith('530'):
return result
return self.ftp_bruteforce(
host, port, self.__credentials__, self.__timeout__)
@staticmethod
def ftp_anonymous_login(host, port, timeout):
'''Get version and check if anonympous login is enabled'''
result = {
key:None for key in ['ftp_version', 'ftp_status', 'login',
'password']
}
ftp_connection = FTP(timeout=timeout)
try:
version = ftp_connection.connect(host=host, port=port)
# Get something like "220 Twisted 16.6.0 FTP Server"
result['ftp_version'] = version.lstrip('220 ')
# Try to login as anonymous user
ftp_connection.login()
result['ftp_status'] = 'ok'
except ftplib.error_perm as e:
if str(e).startswith("530"):
result['ftp_status'] = 'ok'
result['anonymous_login'] = False
except ftplib.all_errors as e:
#status - error
result['ftp_status'] = str(e)
return result
finally:
ftp_connection.close()
return result
@staticmethod
def ftp_bruteforce(host, port, creds, timeout):
'''Attempt to brute force login/password pair'''
# We want maintain connection to speed up bruteforce
# but we also want to reconnect if necessary.
# That is why I use cred iterator to pick up new login/pass only when
# we need to.
result = {
key:None for key in ['ftp_version', 'ftp_status', 'login',
'password']
}
result['ftp_status'] = "error"
error_count = 0
it = iter(creds)
cred = next(it, "")
ftp_connection = FTP(timeout=timeout)
while error_count < MAX_ERRORS:
try:
# Connecting to server
ftp_connection.connect(host=host, port=port)
while cred and error_count < MAX_ERRORS:
user, password = cred
# Trying to log in
try:
ftp_connection.login(user, password)
ftp_connection.close()
result['ftp_status'] = 'ok'
result['login'] = user
result['password'] = password
return result
except ftplib.error_perm as e:
# Password was wrong, checking another
cred = next(it, "")
continue
except ftplib.all_errors as e:
error_count += 1
# Connection was dropped or another network error happened
# We must connection, error_count would help us to
# avoid deadlock on mumbling host
break
except ftplib.all_errors as e:
# Cannot reconnect, give up
break
finally:
ftp_connection.close()
return result

View File

@ -0,0 +1,36 @@
import json
import requests
from urllib.parse import urlencode, urljoin
import lxml.html
from core.prototypes.AbstractScanner import AbstractScanner
class GDocsScanner(AbstractScanner):
    """Fetch a Google Docs page by hash and scrape its title and the
    "docdetails" JSON blob embedded in the page."""
    def __init__(self, timeout:"timeout"):
        # Keep the timeout instead of discarding it (original bug: the
        # argument was accepted but never used, so HTTP requests could
        # hang indefinitely).
        self.timeout = timeout
    def scan_address(self, prefix:"gdoc_prefix", ghash:"gdoc_hash") -> {"response",
            "gdoc_info", "gdoc_title"}:
        """Scan one document.

        :param prefix: base URL of the docs service
        :param ghash: document hash appended to the prefix
        :returns: dict with ``response`` (the requests.Response),
            ``gdoc_info`` (parsed docdetails JSON, or None when it cannot
            be retrieved) and ``gdoc_title`` (og:title content, or None)
        """
        print("Scanning", prefix, ghash)
        response = requests.get(prefix + ghash, timeout=self.timeout)
        if response.status_code != 200:
            return {"response": response, "gdoc_info": None, "gdoc_title": None}
        print(response.status_code)
        response_tree = lxml.html.fromstring(response.text)
        try:
            # Exactly one og:title meta tag and one token-bearing script
            # are expected; anything else means the page layout changed.
            (title,) = response_tree.xpath(
                "//meta[@property='og:title']/@content")
            (token_container,) = response_tree.xpath(
                '//script[contains(text(),"token")]')
        except ValueError:
            return {"response": response, "gdoc_info": None, "gdoc_title": None}
        token_container = token_container.text
        # Cut the outermost {...} JSON literal out of the inline script.
        token_container = token_container[
            token_container.find("{"):token_container.rfind("}") + 1]
        try:
            info_params = json.loads(token_container)["info_params"]
        except (json.JSONDecodeError, KeyError):
            # Original bug: the already-extracted title was discarded here.
            return {"response": response, "gdoc_info": None,
                    "gdoc_title": title}
        info = None
        if "token" in info_params.keys():
            info_params.update({"id": ghash})
            info_url = urljoin(prefix,
                ghash + "/docdetails/read?" + urlencode(info_params))
            print(info_url)
            info_text = requests.get(info_url, timeout=self.timeout).text
            # The endpoint prefixes its JSON with an anti-XSSI line;
            # strip everything up to the first newline before parsing.
            info = json.loads(info_text[info_text.find("\n") + 1:])
            print(info)
        return {"response": response, "gdoc_info": info,
                "gdoc_title": title}

View File

@ -0,0 +1,31 @@
from core.prototypes import AbstractScanner
from urllib.parse import urlencode
import requests
import re
STATS_SEARCHPATTERN = r'<div id="resultStats>([^>]+)'
LINK_SEARCHPATTERN = r'<div class="r"><a href="([^"]+)"'
RESULT_REGEXP = re.compile(LINK_SEARCHPATTERN)
class GoogleSearcher(AbstractScanner):
    """Page through Google search results for a query and collect every
    result link matched by RESULT_REGEXP."""
    def __init__(self):
        # Fixed syntax error in the original ("def __init__(self:").
        pass
    def scan_address(self, query:'google_search_query')->{"search_result_list"}:
        """Collect result URLs for *query*.

        Requests 100 results per page and keeps paging while a full page
        (100 links) comes back; stops on a short page or a non-200 reply.
        Returns {"search_result_list": <set of URLs>}.
        """
        search_url = "http://google.com/search?%s"
        num_loaded_results = 100
        start = 0
        search_result_list = set()
        while num_loaded_results == 100:
            query_params = {
                "num":100,
                "q":query,
                "start":start,
                "filter":0
            }
            page = requests.get(search_url % urlencode(query_params))
            if page.status_code != 200:
                break
            start += 100
            # Original bug: referenced an undefined name "page_text".
            result_page = set(RESULT_REGEXP.findall(page.text))
            num_loaded_results = len(result_page)
            # Original bug: each page's links were computed and dropped.
            search_result_list |= result_page
        # Original bug: the collected links were never returned.
        return {"search_result_list": search_result_list}

View File

@ -3,7 +3,7 @@ import requests
class URLScanner(AbstractScanner): class URLScanner(AbstractScanner):
def __init__(self, timeout): def __init__(self):
pass pass
def scan_address(self, url:"url") -> {"response"}: def scan_address(self, url:"url") -> {"response"}:

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from modules.network_scan.FTPScanner import FTPScanner
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
import unittest
from tempfile import mkdtemp
import multiprocessing
from time import sleep
import http.server
import socketserver
import os
TEST_CREDS = (("admin", "admin"), ("1", "1"), ('user', 'password'))
PORT = 2121
def run_anonymous_ftp(temp_dir):
    """Serve *temp_dir* over FTP on 127.0.0.1:PORT with anonymous access only."""
    auth = DummyAuthorizer()
    auth.add_anonymous(temp_dir)
    ftp_handler = FTPHandler
    ftp_handler.authorizer = auth
    FTPServer(("127.0.0.1", PORT), ftp_handler).serve_forever()
def run_bruteforce_ftp(temp_dir):
    """Serve *temp_dir* over FTP on 127.0.0.1:PORT, accepting only the last
    credential pair from TEST_CREDS and dropping the connection after every
    2 failed login attempts (to exercise the scanner's reconnect logic)."""
    auth = DummyAuthorizer()
    valid_user, valid_password = TEST_CREDS[-1]
    auth.add_user(valid_user, valid_password, temp_dir, perm="elradfmw")
    ftp_handler = FTPHandler
    ftp_handler.authorizer = auth
    ftp_handler.max_login_attempts = 2 # Drop connection on each 2 incorrect attempts
    FTPServer(("127.0.0.1", PORT), ftp_handler).serve_forever()
def run_mumble():
    """Serve HTTP on the FTP port: a live TCP endpoint that speaks no FTP."""
    server = socketserver.TCPServer(("127.0.0.1", PORT),
                                    http.server.SimpleHTTPRequestHandler)
    server.serve_forever()
class TestFTPScanner(unittest.TestCase):
    """Integration tests running FTPScanner against local throwaway servers."""
    def test_closed_port(self):
        """Scanning a port with no listener reports a refused connection."""
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', 31337)
        print(result)
        self.assertTrue("Connection refused" in result['ftp_status'],
                        "ftp_status should mention the refused connection")
    def test_mumble(self):
        """Scanning a non-FTP (HTTP) service on the port times out cleanly."""
        p = multiprocessing.Process(target=run_mumble)
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        # Bug fix: the scan result dict only exposes 'ftp_status'; the
        # original asserted on non-existent 'status' and 'error_type'
        # keys, so this test could only ever die with a KeyError.
        self.assertTrue("timed out" in result['ftp_status'],
                        "ftp_status should mention the timeout")
        p.terminate()
        p.join()
    def test_anonymous_login(self):
        """Against an anonymous-only server no credential pair succeeds."""
        temp_dir = mkdtemp()
        p = multiprocessing.Process(target=run_anonymous_ftp, args=(temp_dir,))
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        self.assertEqual(result['login'], None,
                         "no login should be found on an anonymous-only server")
        self.assertEqual(result['password'], None,
                         "no password should be found on an anonymous-only server")
        p.terminate()
        p.join()  # make sure the server is gone before removing its root
        os.rmdir(temp_dir)
    def test_bruteforce(self):
        """The scanner finds the valid pair even across dropped connections."""
        temp_dir = mkdtemp()
        p = multiprocessing.Process(target=run_bruteforce_ftp, args=(temp_dir,))
        p.start()
        sleep(5)
        scanner = FTPScanner(timeout=10, credentials=TEST_CREDS)
        result = scanner.scan_address('127.0.0.1', PORT)
        print(result)
        self.assertEqual(result['login'], TEST_CREDS[-1][0],
                         "should recover the valid login")
        self.assertEqual(result['password'], TEST_CREDS[-1][1],
                         "should recover the valid password")
        p.terminate()
        p.join()  # make sure the server is gone before removing its root
        os.rmdir(temp_dir)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()

View File

@ -1,14 +1,19 @@
from core.prototypes.AbstractStorage import AbstractStorage from core.prototypes.AbstractStorage import AbstractStorage
import json import json
class GDocsStorage(AbstractStorage): class GDocsStorage(AbstractStorage):
def __init__(self, path): def __init__(self, path:"path"):
self.path = path self.path = path
self.urls = dict() self.urls = dict()
def put_responce(self, url:'url', status:'status'): def put_responce(self, url:'url', status:'status', title:'gdoc_title',
info:'gdoc_info'):
if str(status) not in self.urls.keys(): if str(status) not in self.urls.keys():
self.urls[str(status)] = [] self.urls[str(status)] = dict()
self.urls[str(status)].append(url) print(int(status))
url_object = dict()
if status == 200:
url_object = info
url_object["title"] = title
self.urls[str(status)][url] = url_object
def save(self): def save(self):
print("saving") print("saving")
with open(self.path, "w") as f: with open(self.path, "w") as f:

View File

@ -4,19 +4,100 @@ from threading import RLock
class JSONStorage(AbstractStorage): class JSONStorage(AbstractStorage):
def __init__(self, path): def __init__(self, path:"path", scheme:"json_scheme"):
self.path = path self.path = path
self.respdict = dict() self.scheme = scheme
self.needed_keys = set()
if type(scheme) == dict:
left_nodes = []
self.needed_keys = self.needed_keys.union(set(
filter(JSONStorage.is_needed_key,scheme.keys())))
left_nodes.extend(scheme.values())
for node in left_nodes:
if type(node) == str and JSONStorage.is_needed_key(node):
self.needed_keys.add(node)
elif type(node) == set or type(node) == list:
left_nodes.extend(list(node))
#self.needed_keys = self.needed_keys.union(set(
#filter(JSONStorage.is_needed_key, node)))
elif type(node) == dict:
self.needed_keys = self.needed_keys.union(
set(filter(JSONStorage.is_needed_key,
node.keys())))
left_nodes.extend(node.values())
elif type(scheme) == set or type(scheme) == list:
self.needed_keys = set(
filter(JSONStorage.is_needed_key, scheme.copy()))
elif type(scheme) == str and JSONStorage.is_needed_key(scheme):
self.needed_keys.add(scheme)
self.respdict = type(scheme)()
self.needed_keys = list(self.needed_keys)
setattr(self.put_responce.__func__, "__annotations__", {str(i):arg for
i, arg in enumerate(self.needed_keys)})
self.lock = RLock() self.lock = RLock()
def put_responce(self, ip:'ipv4_str', port:'port', scan_result:'scan_result'): '''def put_responce(self, ip:'ipv4_str', port:'port', scan_result:'scan_result'):
if ip not in self.respdict.keys(): if ip not in self.respdict.keys():
self.respdict[ip] = {"open": [], "close": []} self.respdict[ip] = {"open": [], "close": []}
self.respdict[ip]["open" if scan_result == 0 self.respdict[ip]["open" if scan_result == 0
else "close"].append(port) else "close"].append(port)'''
#Все ключи, начинающиеся с "@", считаются значениями и не декодируются.
@staticmethod
def is_needed_key(string):
return not string.startswith("@")
@staticmethod
def get_element_name(key, named_args):
return key[1:] if not JSONStorage.is_needed_key(key) else named_args[key]
@staticmethod
def get_node_adder(node, key = None):
adder = None
if key == None:
adder = node.append if type(node) == list else node.add
elif type(key) == str:
def result(x):
node[key] = x
adder = result
return adder
@staticmethod
def process_scheme(scheme, current_level, named_args):
print("processing scheme", scheme)
if type(scheme) == str:
JSONStorage.get_node_adder(current_level)(JSONStorage.get_element_name(scheme, named_args))
elif type(scheme) == set or type(scheme) == list:
for el in scheme:
if type(el) == str:
JSONStorage.process_scheme(el, current_level, named_args)
elif type(el) == dict:
current_level.append(dict())
JSONStorage.process_scheme(el, current_level[-1], named_args)
elif type(scheme) == dict:
for key, value in scheme.items():
reversed_key = JSONStorage.get_element_name(key, named_args)
if type(value) == str:
print(value, named_args)
JSONStorage.get_node_adder(current_level,
reversed_key)(JSONStorage.get_element_name(value,
named_args))
else:
if reversed_key not in current_level:
current_level[reversed_key] = type(value)()
JSONStorage.process_scheme(
value,
current_level[reversed_key],
named_args
)
def put_responce(self, *args) -> {"A"}:
named_args = {self.needed_keys[i]:arg for i, arg in
enumerate(list(args))}
with self.lock:
JSONStorage.process_scheme(self.scheme, self.respdict, named_args)
def save(self): def save(self):
print("saving") with self.lock:
with open(self.path, "w") as f: print("saving")
json.dump(self.respdict, f) print(self.respdict)
with open(self.path, "w") as f:
json.dump(self.respdict, f, default = lambda o: o if not
isinstance(o, set) else list(o))
self.respdict = {} self.respdict = {}

1
results.json Normal file
View File

@ -0,0 +1 @@
{"timed out": [{"ip": "122.3.42.1", "port": 20, "login": null, "password": null, "ftp_version": null}, {"ip": "122.3.42.1", "port": 21, "login": null, "password": null, "ftp_version": null}]}