Skip to content

Commit 7684889

Browse files
authored
Multiple updates -> 3.1.3
- Removed browser engine emulation (closes #220, closes #217, closes #200 ) - Fixed a few bugs - Added a plugin to scan for outdated JS libraries - Improved crawling and DOM scanning
2 parents e66cfdd + 3d7fbca commit 7684889

15 files changed

+1835
-96
lines changed

.travis.yml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,5 @@
11
language: python
22
cache: pip
3-
addons:
4-
firefox: "45.4.0esr"
53
os:
64
- linux
75
python:
@@ -10,11 +8,6 @@ install:
108
- pip install -r requirements.txt
119
- pip install flake8
1210
before_script:
13-
# download and extract geckodrive to /usr/local/bin
14-
- wget https://github.com/mozilla/geckodriver/releases/download/v0.23.0/geckodriver-v0.23.0-linux64.tar.gz
15-
- mkdir geckodriver
16-
- tar -xzf geckodriver-v0.23.0-linux64.tar.gz -C geckodriver
17-
- export PATH=$PATH:$PWD/geckodriver # stop the build if there are Python syntax errors or undefined names
1811
- flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
1912
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
2013
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
### 3.1.3
2+
- Removed browser engine emulation
3+
- Fixed a few bugs
4+
- Added a plugin to scan for outdated JS libraries
5+
- Improved crawling and DOM scanning
6+
17
### 3.1.2
28
- Fixed POST data handling
39
- Support for JSON POST data

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ Apart from that, XSStrike has crawling, fuzzing, parameter discovery, WAF detect
4949
- Context analysis
5050
- Configurable core
5151
- WAF detection & evasion
52-
- Browser engine integration for zero false positive rate
52+
- Outdated JS lib scanning
5353
- Intelligent payload generator
5454
- Handmade HTML & JavaScript parser
5555
- Powerful fuzzing engine
@@ -65,7 +65,6 @@ Apart from that, XSStrike has crawling, fuzzing, parameter discovery, WAF detect
6565
- [Compatibility & Dependencies](https://github.com/s0md3v/XSStrike/wiki/Compatibility-&-Dependencies)
6666

6767
### FAQ
68-
- [There's some error related to `geckodriver`.](https://github.com/s0md3v/XSStrike/wiki/FAQ#theres-some-error-related-to-geckodriver)
6968
- [It says fuzzywuzzy isn't installed but it is.](https://github.com/s0md3v/XSStrike/wiki/FAQ#it-says-fuzzywuzzy-is-not-installed-but-its)
7069
- [What's up with Blind XSS?](https://github.com/s0md3v/XSStrike/wiki/FAQ#whats-up-with-blind-xss)
7170
- [Why XSStrike boasts that it is the most advanced XSS detection suite?](https://github.com/s0md3v/XSStrike/wiki/FAQ#why-xsstrike-boasts-that-it-is-the-most-advanced-xss-detection-suite)
@@ -103,4 +102,5 @@ Ways to contribute
103102

104103
Licensed under the GNU GPLv3, see [LICENSE](LICENSE) for more information.
105104

106-
The WAF signatures in `/db/wafSignatures.json` are taken & modified from [sqlmap](https://github.com/sqlmapproject/sqlmap). I extracted them from sqlmap's waf detection modules which can found [here](https://github.com/sqlmapproject/sqlmap/blob/master/waf/) and converted them to JSON.
105+
The WAF signatures in `/db/wafSignatures.json` are taken & modified from [sqlmap](https://github.com/sqlmapproject/sqlmap). I extracted them from sqlmap's waf detection modules which can be found [here](https://github.com/sqlmapproject/sqlmap/blob/master/waf/) and converted them to JSON.\
106+
`/plugins/retireJS.py` is a modified version of [retirejslib](https://github.com/FallibleInc/retirejslib/).

core/browserEngine.py

Lines changed: 0 additions & 28 deletions
This file was deleted.

core/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
changes = '''better dom xss scanning;add headers from command line;many bug fixes'''
1+
changes = '''Removed browser engine emulation;Fixed a few bugs;Added a plugin to scan for outdated JS libraries;Improved crawling and DOM scanning'''
22
globalVariables = {} # it holds variables during runtime for collaboration across modules
33

44
defaultEditor = 'nano'

core/dom.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ def dom(response):
77
highlighted = []
88
sources = r'''document\.(URL|documentURI|URLUnencoded|baseURI|cookie|referrer)|location\.(href|search|hash|pathname)|window\.name|history\.(pushState|replaceState)(local|session)Storage'''
99
sinks = r'''eval|evaluate|execCommand|assign|navigate|getResponseHeaderopen|showModalDialog|Function|set(Timeout|Interval|Immediate)|execScript|crypto.generateCRMFRequest|ScriptElement\.(src|text|textContent|innerText)|.*?\.onEventName|document\.(write|writeln)|.*?\.innerHTML|Range\.createContextualFragment|(document|window)\.location'''
10-
scripts = re.findall(r'(?i)(?s)<scrip[^>]*(.*?)</script>', response)
10+
scripts = re.findall(r'(?i)(?s)<script[^>]*>(.*?)</script>', response)
1111
for script in scripts:
1212
script = script.split('\n')
1313
num = 1

core/photon.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from urllib.parse import urlparse
44

55

6+
from plugins.retireJs import retireJs
67
from core.utils import getUrl, getParams
78
from core.requester import requester
89
from core.zetanize import zetanize
@@ -36,6 +37,7 @@ def rec(target):
3637
inps.append({'name': name, 'value': value})
3738
forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
3839
response = requester(url, params, headers, True, delay, timeout).text
40+
retireJs(url, response)
3941
forms.append(zetanize(response))
4042
matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
4143
for link in matches: # iterate over the matches
@@ -53,9 +55,11 @@ def rec(target):
5355
storage.add(main_url + '/' + link)
5456
for x in range(level):
5557
urls = storage - processed # urls to crawl = all urls - urls that have been crawled
58+
# for url in urls:
59+
# rec(url)
5660
threadpool = concurrent.futures.ThreadPoolExecutor(
5761
max_workers=threadCount)
5862
futures = (threadpool.submit(rec, url) for url in urls)
59-
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
63+
for i in concurrent.futures.as_completed(futures):
6064
pass
6165
return [forms, processed]

core/requester.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,7 @@
55
import warnings
66

77
import core.config
8-
from core.config import globalVariables
9-
from core.utils import converter
8+
from core.utils import converter, getVar
109
from core.log import setup_logger
1110

1211
logger = setup_logger(__name__)
@@ -15,9 +14,9 @@
1514

1615

1716
def requester(url, data, headers, GET, delay, timeout):
18-
if core.config.globalVariables['jsonData']:
17+
if getVar('jsonData'):
1918
data = converter(data)
20-
elif core.config.globalVariables['path']:
19+
elif getVar('path'):
2120
url = converter(data, url)
2221
data = []
2322
GET, POST = True, False
@@ -37,7 +36,7 @@ def requester(url, data, headers, GET, delay, timeout):
3736
if GET:
3837
response = requests.get(url, params=data, headers=headers,
3938
timeout=timeout, verify=False, proxies=core.config.proxies)
40-
elif core.config.globalVariables['jsonData']:
39+
elif getVar('jsonData'):
4140
response = requests.get(url, json=data, headers=headers,
4241
timeout=timeout, verify=False, proxies=core.config.proxies)
4342
else:

core/utils.py

Lines changed: 47 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -163,7 +163,7 @@ def getParams(url, data, GET):
163163
if data[:1] == '?':
164164
data = data[1:]
165165
elif data:
166-
if core.config.globalVariables['jsonData'] or core.config.globalVariables['path']:
166+
if getVar('jsonData') or getVar('path'):
167167
params = data
168168
else:
169169
try:
@@ -197,6 +197,51 @@ def writer(obj, path):
197197

198198
def reader(path):
199199
with open(path, 'r') as f:
200-
result = [line.strip(
200+
result = [line.rstrip(
201201
'\n').encode('utf-8').decode('utf-8') for line in f]
202202
return result
203+
204+
def js_extractor(response):
    """Extract external JavaScript file URLs from an HTML response body.

    Returns a list of the values of the ``src`` attribute of every
    ``<script>`` tag found in ``response``, with surrounding quote
    characters (single, double or backtick) removed.
    """
    scripts = []
    # (?i) makes the match case-insensitive, so mixed-case tags such as
    # <Script Src=...> are found as well (the old alternation matched only
    # all-lower or all-upper spellings). [^>]*? keeps the match inside a
    # single tag instead of letting .*? leak past '>' into a later tag.
    matches = re.findall(r'(?i)<script[^>]*?src=([^\s>]+)', response)
    for match in matches:
        # Strip any quoting style around the URL before recording it.
        match = match.replace('\'', '').replace('"', '').replace('`', '')
        scripts.append(match)
    return scripts
212+
213+
214+
def handle_anchor(parent_url, url):
    """Resolve a (possibly relative) anchor href against the page it was found on.

    Absolute URLs pass through untouched, scheme-relative links (``//host``)
    inherit the parent's scheme, and path links are joined onto the parent
    URL with its last path segment dropped.
    """
    base = parent_url
    # Drop the parent's final path segment (e.g. .../a/b -> .../a), unless
    # the URL already ends in a bare '/'. count('/') > 2 skips bare hosts
    # like http://example.com, whose two slashes belong to the scheme.
    if base.count('/') > 2:
        tail = re.search(r'/[^/]*?$', base).group()
        if tail != '/':
            base = base.replace(tail, '')
    scheme = urlparse(base).scheme
    if url.startswith('http'):
        return url
    if url.startswith('//'):
        # Scheme-relative: reuse the parent's scheme.
        return scheme + ':' + url
    if url.startswith('/'):
        return base + url
    if base.endswith('/'):
        return base + url
    return base + '/' + url
231+
232+
233+
def deJSON(data):
    """Collapse JSON-style escaped backslashes (two backslashes -> one)."""
    escaped_backslash = '\\\\'
    return data.replace(escaped_backslash, '\\')
235+
236+
237+
def getVar(name):
    """Look up a runtime global shared across modules (see core.config)."""
    store = core.config.globalVariables
    return store[name]
239+
240+
def updateVar(name, data, mode=None):
    """Write to the shared runtime globals in core.config.

    With no ``mode`` (or a falsy one) the value is overwritten;
    ``mode='append'`` appends to a list entry and ``mode='add'`` adds to a
    set entry. Any other truthy ``mode`` is silently ignored, matching the
    module's established behaviour.
    """
    store = core.config.globalVariables
    if not mode:
        store[name] = data
    elif mode == 'append':
        store[name].append(data)
    elif mode == 'add':
        store[name].add(data)

0 commit comments

Comments
 (0)