Version 1.1.4 (Improvements)
Added functionality for loading and saving URLs and forms
Added a way to set the blind injection delay and response logging without the config
Added an option to set a delay between requests
Random user agent now also works during crawling
A new random user agent is now chosen for every request
Added custom plugins to .gitignore
Fixed a bug with default False values
Fixed some bugs in the crawler

A minor update with simple improvements and fixes
vladko312 committed Jun 1, 2023
1 parent 84578dd commit 1619ed2
Showing 9 changed files with 397 additions and 154 deletions.
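
All of the new behaviour flows through the shared args dictionary that the changed code below reads (args['delay'], args.get('random_agent'), args['load_urls'] and friends). As a rough sketch of how these settings might look: the key names are taken from this commit's diffs, while every value shown is an illustrative assumption.

# Illustrative only: key names appear in this commit's diffs below,
# but the values and the target URL are made up.
args = {
    'url': 'http://target.local/page?name=test',
    'delay': 2,                  # new: seconds to wait before each request
    'random_agent': True,        # pick a fresh User-Agent for every request
    'user_agent': 'SSTImap',     # fallback when random_agent is off
    'crawl_depth': 2,            # follow links up to this depth
    'forms': True,               # also detect and scan HTML forms
    'load_urls': 'urls.txt',     # new: preload target URLs from a file
    'save_urls': '.',            # new: saved as <dir>/sstimap_urls.txt
    'load_forms': 'forms.json',  # new: preload forms from a JSON file
    'save_forms': '.',           # new: saved as <dir>/sstimap_forms.json
}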
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,3 +1,6 @@
# SSTImap custom plugins
plugins/custom/*

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
1 change: 1 addition & 0 deletions README.md
@@ -262,6 +262,7 @@ If you plan to contribute something big from this list, inform me to avoid worki…

- [ ] Make template and base language evaluation functionality more uniform
- [ ] Add more payloads for different engines
- [ ] Parse raw HTTP request from file
- [ ] Variable dumping functionality
- [ ] Blind value extraction
- [ ] Better documentation (or at least any documentation)
16 changes: 10 additions & 6 deletions core/channel.py
@@ -1,3 +1,5 @@
import time

import requests
import urllib3
from utils.loggers import log
@@ -97,12 +99,6 @@ def _parse_header(self, all_injectable=False):
                    self.injs.append({'field': 'Header', 'part': 'param', 'param': param})
                if self.tag in value or all_injectable:
                    self.injs.append({'field': 'Header', 'part': 'value', 'value': value, 'param': param})
        if self.args.get('random_agent'):
            user_agent = get_agent()
        else:
            user_agent = self.args.get('user_agent')
        if 'user-agent' not in [p.lower() for p in self.header_params.keys()]:
            self.header_params['User-Agent'] = user_agent

    def _parse_post(self, all_injectable=False):
        if self.args.get('data'):
@@ -203,6 +199,14 @@ def req(self, injection):
            log.debug(f'[HEDR] {header_params}')
        if len(cookie_params) > 1:
            log.debug(f'[COOK] {cookie_params}')
        if self.args.get('random_agent'):
            user_agent = get_agent()
        else:
            user_agent = self.args.get('user_agent')
        if 'user-agent' not in [p.lower() for p in header_params.keys()]:
            header_params['User-Agent'] = user_agent
        if self.args['delay']:
            time.sleep(self.args['delay'])
        try:
            result = requests.request(method=self.http_method, url=url_params, params=get_params, data=post_params,
                                      headers=header_params, cookies=cookie_params, proxies=self.proxies,
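
Net effect of the req() changes above: the User-Agent header is now resolved on every request rather than once at parse time, and an optional delay is applied before each request. A minimal standalone sketch of the same pattern using only the requests library; the agent list and the get_agent() stand-in are invented for the example:

import random
import time

import requests

AGENTS = [  # invented examples; SSTImap ships its own list
    'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
]

def get_agent():
    # Stand-in for SSTImap's helper: return a random User-Agent string
    return random.choice(AGENTS)

def send(url, args, headers=None):
    headers = dict(headers or {})
    # Resolve the agent per call, so consecutive requests can differ
    user_agent = get_agent() if args.get('random_agent') else args.get('user_agent')
    if 'user-agent' not in [h.lower() for h in headers]:
        headers['User-Agent'] = user_agent
    if args.get('delay'):          # optional delay between requests
        time.sleep(args['delay'])
    return requests.get(url, headers=headers)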
102 changes: 99 additions & 3 deletions core/checks.py
@@ -1,9 +1,14 @@
from utils.loggers import log
from core.clis import Shell, MultilineShell
from core.tcpserver import TcpServer
import json
import os
import telnetlib
import urllib
from urllib import parse
import socket
from utils.loggers import log
from core.clis import Shell, MultilineShell
from core.tcpserver import TcpServer
from utils.crawler import crawl, find_forms
from core.channel import Channel


def plugins(legacy=False):
@@ -211,3 +216,94 @@ def check_template_injection(channel):
    else:
        log.log(22, 'No reverse TCP shell capabilities have been detected on the target')
    return current_plugin


def scan_website(args):
    urls = set()
    forms = set()
    single_url = args.get('url', None)
    if single_url:
        urls.add(single_url)
    preloaded_urls = args.get('loaded_urls', None)
    if preloaded_urls:
        urls.update(preloaded_urls)
    preloaded_forms = args.get('loaded_forms', None)
    if preloaded_forms:
        forms.update(preloaded_forms)
    if args['load_forms']:
        if os.path.isdir(args['load_forms']):
            args['load_forms'] = f"{args['load_forms']}/forms.json"
        if os.path.exists(args['load_forms']):
            try:
                with open(args['load_forms'], 'r') as stream:
                    loaded_forms = set([tuple(x) for x in json.load(stream)])
                    forms.update(loaded_forms)
                    log.log(21, f"Loaded {len(loaded_forms)} forms from file: {args['load_forms']}")
            except Exception as e:
                log.log(22, f"Error occurred while loading forms from file:\n{repr(e)}")
    if not forms or args['forms']:
        if args['load_urls']:
            if os.path.isdir(args['load_urls']):
                args['load_urls'] = f"{args['load_urls']}/urls.txt"
            if os.path.exists(args['load_urls']):
                try:
                    with open(args['load_urls'], 'r') as stream:
                        loaded_urls = set([x.strip() for x in stream.readlines()])
                        urls.update(loaded_urls)
                        log.log(21, f"Loaded {len(loaded_urls)} URL(s) from file: {args['load_urls']}")
                except Exception as e:
                    log.log(22, f"Error occurred while loading URLs from file:\n{repr(e)}")
        if args['crawl_depth']:
            crawled_urls = crawl(urls, args)
            urls.update(crawled_urls)
            args['crawled_urls'] = crawled_urls
            if args['save_urls']:
                if os.path.isdir(args['save_urls']):
                    args['save_urls'] = f"{args['save_urls']}/sstimap_urls.txt"
                try:
                    with open(args['save_urls'], 'w') as stream:
                        stream.write("\n".join(crawled_urls))
                    log.log(21, f"Saved URLs to file: {args['save_urls']}")
                except Exception as e:
                    log.log(22, f"Error occurred while saving URLs to file:\n{repr(e)}")
    else:
        log.log(25, "Skipping URL loading and crawling as forms are already supplied")
    args['target_urls'] = urls
    if args['forms']:
        crawled_forms = find_forms(urls, args)
        forms.update(crawled_forms)
        args['crawled_forms'] = crawled_forms
        if args['save_forms'] and crawled_forms:
            if os.path.isdir(args['save_forms']):
                args['save_forms'] = f"{args['save_forms']}/sstimap_forms.json"
            try:
                with open(args['save_forms'], 'w') as stream:
                    json.dump([x for x in crawled_forms], stream, indent=4)
                log.log(21, f"Saved forms to file: {args['save_forms']}")
            except Exception as e:
                log.log(22, f"Error occurred while saving forms to file:\n{repr(e)}")
    args['target_forms'] = forms
    if not urls and not forms:
        log.log(22, 'No targets found')
        return
    elif not forms:
        for url in urls:
            log.log(27, f'Scanning url: {url}')
            url_args = args.copy()
            url_args['url'] = url
            channel = Channel(url_args)
            result = check_template_injection(channel)
            if channel.data.get('engine'):
                return result  # TODO: save vulnerabilities
    else:
        for form in forms:
            log.log(27, f'Scanning form with url: {form[0]}')
            url_args = args.copy()
            url_args['url'] = form[0]
            url_args['method'] = form[1]
            url_args['data'] = urllib.parse.parse_qs(form[2], keep_blank_values=True)
            channel = Channel(url_args)
            result = check_template_injection(channel)
            if channel.data.get('engine'):
                return result  # TODO: save vulnerabilities
    return
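
scan_website() treats every form as a (url, method, data) triple: save_forms dumps them as a JSON array of arrays, load_forms reads them back via tuple(x) for x in json.load(stream), and form[2] is later split with urllib.parse.parse_qs. A sketch of a compatible forms.json, with invented targets and fields:

import json

# Each entry is [url, method, body]; the targets are made up.
forms = [
    ("http://target.local/search", "POST", "query=test&page=1"),
    ("http://target.local/login", "POST", "username=&password="),
]
with open("forms.json", "w") as stream:
    json.dump([list(f) for f in forms], stream, indent=4)
# Written file: [["http://target.local/search", "POST", "query=test&page=1"], ...]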