mirror of
https://github.com/xmendez/wfuzz.git
synced 2024-10-03 22:07:09 +03:00
commit
05c8a6f3ae
@ -1,2 +1,3 @@
|
||||
include README.md
|
||||
include LICENSE
|
||||
include docs/*
|
||||
|
2
Makefile
2
Makefile
@ -4,7 +4,7 @@ test:
|
||||
tox --recreate
|
||||
flake8:
|
||||
pip install flake8
|
||||
flake8 --ignore=E501,E402,F401 src tests
|
||||
flake8 --ignore=E501,E402,F401,W504 src tests
|
||||
publish:
|
||||
pip install 'twine>=1.5.0'
|
||||
python setup.py sdist
|
||||
|
@ -2,6 +2,7 @@
|
||||
|
||||
<a href="https://pypi.python.org/pypi/wfuzz"><img src="https://img.shields.io/pypi/v/wfuzz.svg"></a>
|
||||
<a href="https://pypi.python.org/pypi/wfuzz"><img src="https://img.shields.io/pypi/pyversions/wfuzz.svg"></a>
|
||||
[![Build Status](https://travis-ci.org/xmendez/wfuzz.svg?branch=master)](https://travis-ci.org/xmendez/wfuzz)
|
||||
<a href="https://codecov.io/github/xmendez/wfuzz"><img src="https://codecov.io/github/xmendez/wfuzz/coverage.svg?branch=master"></a>
|
||||
|
||||
Wfuzz has been created to facilitate the task in web applications assessments and it is based on a simple concept: it replaces any reference to the FUZZ keyword by the value of a given payload.
|
||||
|
@ -7,7 +7,7 @@ All options that are available within the Wfuzz command line interface are avail
|
||||
CLI Option Library Option
|
||||
======================== =====================================================================================
|
||||
<URL> url="url"
|
||||
--recipe <filename> recipe="filename"
|
||||
--recipe <filename> recipe=["filename"]
|
||||
--oF <filename> save="filename"
|
||||
-f filename,printer printer=("filename", "printer")
|
||||
--dry-run dryrun=True
|
||||
@ -19,6 +19,7 @@ CLI Option Library Option
|
||||
-Z scanmode=True
|
||||
--req-delay N req_delay=0
|
||||
--conn-delay N conn_delay=0.0
|
||||
--no-cache no_cache=True
|
||||
--script=<plugins> script="plugins"
|
||||
--script-args n1=v1,... script_args={n1: v1}
|
||||
-m iterator iterator="iterator"
|
||||
@ -45,7 +46,7 @@ Fuzzing a URL with wfuzz library is very simple. Firstly, import the wfuzz modul
|
||||
|
||||
>>> import wfuzz
|
||||
|
||||
Now, let's try to fuzz a webpage to look for hidden content, such as directories. For this example, let's use Acunetix's testphp (http://testphp.vulnweb.com/)::
|
||||
Now, let's try to fuzz a web page to look for hidden content, such as directories. For this example, let's use Acunetix's testphp (http://testphp.vulnweb.com/)::
|
||||
|
||||
>>> import wfuzz
|
||||
>>> for r in wfuzz.fuzz(url="http://testphp.vulnweb.com/FUZZ", hc=[404], payloads=[("file",dict(fn="wordlist/general/common.txt"))]):
|
||||
@ -67,14 +68,13 @@ A FuzzSession object has all the methods of the main wfuzz API.
|
||||
The FuzzSession object allows you to persist certain parameters across fuzzing sessions::
|
||||
|
||||
>>> import wfuzz
|
||||
>>> s=wfuzz.FuzzSession(url="http://testphp.vulnweb.com/FUZZ")
|
||||
>>> s = wfuzz.FuzzSession(url="http://testphp.vulnweb.com/FUZZ")
|
||||
>>> for r in s.fuzz(hc=[404], payloads=[("file",dict(fn="wordlist/general/common.txt"))]):
|
||||
... print r
|
||||
...
|
||||
00060: C=301 7 L 12 W 184 Ch "admin"
|
||||
00183: C=403 10 L 29 W 263 Ch "cgi-bin"
|
||||
...
|
||||
>>> s.close()
|
||||
|
||||
FuzzSession can also be used as context manager::
|
||||
|
||||
@ -89,12 +89,13 @@ FuzzSession can also be used as context manager::
|
||||
Get payload
|
||||
===========
|
||||
|
||||
The get_payload function generates a Wfuzz payload from a Python iterable. It is a quick and flexible way of getting a payload programatically without using Wfuzz payloads plugins.
|
||||
The get_payload function generates a Wfuzz payload from a Python iterable. It is a quick and flexible way of getting a payload programmatically without using Wfuzz payloads plugins.
|
||||
|
||||
Generating a new payload and start fuzzing is really simple::
|
||||
|
||||
>>> import wfuzz
|
||||
>>> for r in wfuzz.get_payload(range(5)).fuzz(url="http://testphp.vulnweb.com/FUZZ"):
|
||||
>>> s = wfuzz.get_payload(range(5))
|
||||
>>> for r in s.fuzz(url="http://testphp.vulnweb.com/FUZZ"):
|
||||
... print r
|
||||
...
|
||||
00012: C=404 7 L 12 W 168 Ch "0"
|
||||
@ -102,12 +103,12 @@ Generating a new payload and start fuzzing is really simple::
|
||||
00014: C=404 7 L 12 W 168 Ch "2"
|
||||
00015: C=404 7 L 12 W 168 Ch "3"
|
||||
00016: C=404 7 L 12 W 168 Ch "4"
|
||||
>>>
|
||||
|
||||
The get_payloads method can be used when various payloads are needed::
|
||||
|
||||
>>> import wfuzz
|
||||
>>> for r in wfuzz.get_payloads([range(5), ["a","b"]]).fuzz(url="http://testphp.vulnweb.com/FUZZ/FUZ2Z"):
|
||||
>>> s = wfuzz.get_payloads([range(5), ["a","b"]])
|
||||
>>> for r in s.fuzz(url="http://testphp.vulnweb.com/FUZZ/FUZ2Z"):
|
||||
... print r
|
||||
...
|
||||
00028: C=404 7 L 12 W 168 Ch "4 - b"
|
||||
@ -120,16 +121,16 @@ The get_payloads method can be used when various payloads are needed::
|
||||
00020: C=404 7 L 12 W 168 Ch "0 - b"
|
||||
00023: C=404 7 L 12 W 168 Ch "2 - a"
|
||||
00019: C=404 7 L 12 W 168 Ch "0 - a"
|
||||
>>>
|
||||
|
||||
Get session
|
||||
===========
|
||||
|
||||
The get_session function generates a Wfuzz session object from the specified command line. It is a quick way of getting a payload programatically from a string representing CLI options::
|
||||
The get_session function generates a Wfuzz session object from the specified command line. It is a quick way of getting a payload programmatically from a string representing CLI options::
|
||||
|
||||
$ python
|
||||
>>> import wfuzz
|
||||
>>> for r in wfuzz.get_session("-z range,0-10 http://testphp.vulnweb.com/FUZZ").fuzz():
|
||||
>>> s = wfuzz.get_session("-z range,0-10 http://testphp.vulnweb.com/FUZZ")
|
||||
>>> for r in s.fuzz():
|
||||
... print r
|
||||
...
|
||||
00002: C=404 7 L 12 W 168 Ch "1"
|
||||
@ -144,3 +145,29 @@ The get_session function generates a Wfuzz session object from the specified com
|
||||
00009: C=404 7 L 12 W 168 Ch "8"
|
||||
00010: C=404 7 L 12 W 168 Ch "9"
|
||||
|
||||
Interacting with the results
|
||||
============================
|
||||
|
||||
Once a Wfuzz result is available the grammar defined in the filter language can be used to work with the results' values. For example::
|
||||
|
||||
$ python
|
||||
>>> import wfuzz
|
||||
|
||||
>>> with wfuzz.get_session("-z list --zD test -u http://testphp.vulnweb.com/userinfo.php -d uname=FUZZ&pass=FUZZ") as s:
|
||||
... for r in s.fuzz():
|
||||
... print(r.history.cookies.response)
|
||||
... print(r.history.params.all)
|
||||
... print(r.history.params.post)
|
||||
... print(r.history.params.post.uname)
|
||||
... print(r.history.params.post['pass'])
|
||||
{'login': 'test%2Ftest'}
|
||||
{'uname': 'test', 'pass': 'test'}
|
||||
{'uname': 'test', 'pass': 'test'}
|
||||
test
|
||||
test
|
||||
>>>
|
||||
|
||||
The result object has also a method to evaluate a language expression::
|
||||
|
||||
>>> print(r.eval("r.cookies.response"))
|
||||
login=test%2Ftest
|
||||
|
@ -4,7 +4,7 @@ Advanced Usage
|
||||
Wfuzz global options
|
||||
--------------------
|
||||
|
||||
Wfuzz global options can be tweaked by modifying the "wfuzz.ini" at the user's home direcory::
|
||||
Wfuzz global options can be tweaked by modifying the "wfuzz.ini" at the user's home directory::
|
||||
|
||||
~/.wfuzz$ cat wfuzz.ini
|
||||
|
||||
@ -108,6 +108,10 @@ Encoders are specified as a payload parameter. There are two equivalent ways of
|
||||
00004: C=404 7 L 12 W 168 Ch "a2ef406e2c2351e0b9e80029c909242d"
|
||||
...
|
||||
|
||||
* The not so long way using the zE command line switch::
|
||||
|
||||
$ wfuzz -z file --zD wordlist/general/common.txt --zE md5 http://testphp.vulnweb.com/FUZZ
|
||||
|
||||
* The not so long way::
|
||||
|
||||
$ wfuzz -z file,wordlist/general/common.txt,md5 http://testphp.vulnweb.com/FUZZ
|
||||
@ -192,9 +196,9 @@ Wfuzz's web application vulnerability scanner is supported by plugins. A list of
|
||||
|
||||
Scripts are grouped in categories. A script could belong to several categories at the same time.
|
||||
|
||||
Thre are two general categories:
|
||||
There are two general categories:
|
||||
|
||||
* passive: Passive scripts analyze existing requests and responses without performing new requests.
|
||||
* passive: Passive scripts analyse existing requests and responses without performing new requests.
|
||||
* active: Active scripts perform new requests to the application to probe it for vulnerabilities.
|
||||
|
||||
Additional categories are:
|
||||
@ -237,6 +241,31 @@ An example, parsing a "robots.txt" file is shown below::
|
||||
Filtered Requests: 0
|
||||
Requests/sec.: 0
|
||||
|
||||
In order not to scan the same requests (with the same parameters) over and over again, there is a cache; the cache can be disabled with the --no-cache flag.
|
||||
|
||||
For example, if we target a web server with the same URL but different parameter values, we get::
|
||||
|
||||
$ wfuzz -z range --zD 0-3 -z list --zD "'" -u http://testphp.vulnweb.com/artists.php?artist=FUZZFUZ2Z -A
|
||||
|
||||
000000004: 0.195s 200 101 L 287 W 3986 Ch nginx/1.4.1 "3 - '"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
000000001: 0.198s 200 101 L 287 W 3986 Ch nginx/1.4.1 "0 - '"
|
||||
000000002: 0.198s 200 101 L 287 W 3986 Ch nginx/1.4.1 "1 - '"
|
||||
000000003: 0.198s 200 101 L 287 W 3986 Ch nginx/1.4.1 "2 - '"
|
||||
|
||||
But, if we do the same but disabling the cache::
|
||||
|
||||
$ wfuzz -z range --zD 0-3 -z list --zD "'" -u http://testphp.vulnweb.com/artists.php?artist=FUZZFUZ2Z -A --no-cache
|
||||
|
||||
000000004: 1.170s 200 101 L 287 W 3986 Ch nginx/1.4.1 "3 - '"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
000000002: 1.173s 200 101 L 287 W 3986 Ch nginx/1.4.1 "1 - '"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
000000001: 1.174s 200 101 L 287 W 3986 Ch nginx/1.4.1 "0 - '"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
000000003: 1.173s 200 101 L 287 W 3986 Ch nginx/1.4.1 "2 - '"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
|
||||
Custom scripts
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
@ -282,12 +311,26 @@ You can combine a recipe with additional command line options, for example::
|
||||
|
||||
$ wfuzz --recipe /tmp/recipe -b cookie1=value
|
||||
|
||||
In case of repeated options, command line options have precedence over options included in the recipe.
|
||||
Several recipes can also be combined::
|
||||
|
||||
$ wfuzz --recipe /tmp/recipe --recipe /tmp/recipe2
|
||||
|
||||
In case of repeated options, command line options have precedence over options included in the recipe. Last recipe has precedence.
|
||||
|
||||
Connect to a specific host
|
||||
---------------------------------------
|
||||
|
||||
The --ip option can be used to connect to a specific host and port instead of the URL's host and port::
|
||||
|
||||
$ wfuzz -z range,1-1 --ip 127.0.0.1 http://www.google.com/anything/FUZZ
|
||||
|
||||
This is useful, for example, to test if a reverse proxy can be manipulated into misrouting requests to a destination of our choice.
|
||||
|
||||
|
||||
Scan Mode: Ignore Errors and Exceptions
|
||||
---------------------------------------
|
||||
|
||||
In the event of a network problem (e.g. DNS failure, refused connection, etc), Wfuzz will raise an exception and stop execution as shown below::
|
||||
In the event of a network problem (e.g. DNS failure, refused connection, etc.), Wfuzz will raise an exception and stop execution as shown below::
|
||||
|
||||
$ wfuzz -z list,support-web-none http://FUZZ.google.com/
|
||||
********************************************************
|
||||
@ -357,12 +400,16 @@ Timeouts
|
||||
|
||||
You can tell Wfuzz to stop waiting for the server to respond to a connection request after a given number of seconds using --conn-delay, and also to limit the maximum number of seconds that the response is allowed to take using the --req-delay parameter.
|
||||
|
||||
These timeouts are really handy when you are using Wfuzz to bruteforce resources behind a proxy, ports, hostnames, virtual hosts, etc.
|
||||
These timeouts are really handy when you are using Wfuzz to brute force resources behind a proxy, ports, hostnames, virtual hosts, etc.
|
||||
|
||||
Filter Language
|
||||
---------------
|
||||
|
||||
Wfuzz's filter language grammar is build using `pyparsing <http://pyparsing.wikispaces.com/>`_, therefore it must be installed before using the command line parameters "--filter, --prefilter, --slice".
|
||||
Wfuzz's filter language grammar is built using `pyparsing <http://pyparsing.wikispaces.com/>`_, therefore it must be installed before using the command line parameters "--filter, --prefilter, --slice, --field and --efield".
|
||||
|
||||
The information about the filter language can be also obtained executing::
|
||||
|
||||
wfuzz --filter-help
|
||||
|
||||
A filter expression must be built using the following symbols and operators:
|
||||
|
||||
@ -372,7 +419,7 @@ A filter expression must be built using the following symbols and operators:
|
||||
|
||||
* Expression Operators
|
||||
|
||||
Expressions operators such as "= != < > >= <=" could be used to check values. Additionally, the following for matching text are available:
|
||||
Expressions operators such as "= != < > >= <=" could be used to check values. Additionally, the following operators for matching text are available:
|
||||
|
||||
============ ====================================================================
|
||||
Operator Description
|
||||
@ -382,6 +429,16 @@ Operator Description
|
||||
!~ Equivalent to Python's "str2" not in "str1" (case insensitive)
|
||||
============ ====================================================================
|
||||
|
||||
Also, assignment operators:
|
||||
|
||||
============ ====================================================================
|
||||
Operator Description
|
||||
============ ====================================================================
|
||||
:= Assigns a value
|
||||
=+ Concatenates value at the left
|
||||
=- Concatenates value at the right
|
||||
============ ====================================================================
|
||||
|
||||
Where values could be:
|
||||
|
||||
* Basic primitives:
|
||||
@ -401,13 +458,14 @@ BBB Baseline
|
||||
Name Short version Description
|
||||
================================ ======================= =============================================
|
||||
value|unquote() value|un() Unquotes the value
|
||||
value|lower() value|l() lowercase of the value
|
||||
value|upper() uppercase of the value
|
||||
value|lower() value|l() lower-case of the value
|
||||
value|upper() upper-case of the value
|
||||
value|encode('encoder', 'value') value|e('enc', 'val') Returns encoder.encode(value)
|
||||
value|decode('decoder', 'value') value|d('dec', 'val') Returns encoder.decode(value)
|
||||
value|replace('what', 'with') value|r('what', 'with') Returns value replacing what for with
|
||||
value|unique(value) value|u(value) Returns True if a value is unique.
|
||||
value|startswith('value') value|sw('param') Returns true if the value string starts with param
|
||||
value|unique() value|u() Returns True if a value is unique.
|
||||
value|startswith('value') value|sw('value') Returns true if the value string starts with param
|
||||
value|gregex('expression') value|gre('exp') Returns first regex group that matches in value
|
||||
================================ ======================= =============================================
|
||||
|
||||
* When a FuzzResult is available, you could perform runtime introspection of the objects using the following symbols
|
||||
@ -415,16 +473,19 @@ value|startswith('value') value|sw('param') Returns true if the val
|
||||
============ ============== =============================================
|
||||
Name Short version Description
|
||||
============ ============== =============================================
|
||||
url Wfuzz's result HTTP request url
|
||||
description Wfuzz's result description
|
||||
nres Wfuzz's result identifier
|
||||
code c HTTP response's code
|
||||
code c Wfuzz's result HTTP response's code
|
||||
chars h Wfuzz's result HTTP response chars
|
||||
lines l Wfuzz's result HTTP response lines
|
||||
words w Wfuzz's result HTTP response words
|
||||
md5 Wfuzz's result HTTP response md5 hash
|
||||
history r Wfuzz's result associated FuzzRequest object
|
||||
plugins Wfuzz's results associated plugins result in the form of {'plugin id': ['result']}
|
||||
============ ============== =============================================
|
||||
|
||||
Or FuzzRequest object's attribute such as:
|
||||
FuzzRequest object's attribute (you need to use the r. prefix) such as:
|
||||
|
||||
============================ =============================================
|
||||
Name Description
|
||||
@ -435,44 +496,49 @@ scheme HTTP request's scheme
|
||||
host HTTP request's host
|
||||
content HTTP response's content
|
||||
raw_content HTTP response's content including headers
|
||||
cookies.request HTTP request cookie
|
||||
cookies.response HTTP response cookie
|
||||
cookies.request.<<name>> HTTP request cookie
|
||||
cookies.response.<<name>> HTTP response cookie
|
||||
headers.request All HTTP request headers
|
||||
headers.response All HTTP response headers
|
||||
headers.request.<<name>> HTTP request given header
|
||||
headers.response.<<name>> HTTP response given header
|
||||
params All HTTP request GET and POST parameters
|
||||
cookies.all All HTTP request and response cookies
|
||||
cookies.request HTTP requests cookieS
|
||||
cookies.response HTTP response cookies
|
||||
cookies.request.<<name>> Specified HTTP request cookie
|
||||
cookies.response.<<name>> Specified HTTP response cookie
|
||||
headers.all All HTTP request and response headers
|
||||
headers.request HTTP request headers
|
||||
headers.response HTTP response headers
|
||||
headers.request.<<name>> Specified HTTP request given header
|
||||
headers.response.<<name>> Specified HTTP response given header
|
||||
params.all All HTTP request GET and POST parameters
|
||||
params.get All HTTP request GET parameters
|
||||
params.post All HTTP request POST parameters
|
||||
params.get/post.<<name>> A given HTTP request GET/POST parameter
|
||||
params.get.<<name>>          Specified HTTP request GET parameter
|
||||
params.post.<<name>>         Specified HTTP request POST parameter
|
||||
pstrip Returns a signature of the HTTP request using the parameter's names without values (useful for unique operations)
|
||||
is_path Returns true when the HTTP request path refers to a directory.
|
||||
reqtime Returns the total time that HTTP request took to be retrieved
|
||||
============================ =============================================
|
||||
|
||||
URL field is broken in smaller parts using the urlparse Python's module, which parses a URL into: scheme://netloc/path;parameters?query#fragment.
|
||||
It is worth noting that Wfuzz will try to parse the POST parameters according to the specified content type header. Currently, application/x-www-form-urlencoded, multipart/form-data and application/json are supported.
|
||||
|
||||
For example, for the "http://www.google.com/dir/test.php?id=1" URL you can get the following values:
|
||||
FuzzRequest URL field is broken in smaller (read only) parts using the urlparse Python's module in the urlp attribute.
|
||||
|
||||
Urlparse parses a URL into: scheme://netloc/path;parameters?query#fragment. For example, for the "http://www.google.com/dir/test.php?id=1" URL you can get the following values:
|
||||
|
||||
=================== =============================================
|
||||
Name Value
|
||||
=================== =============================================
|
||||
url.scheme http
|
||||
url.netloc www.google.com
|
||||
url.path /dir/test.php
|
||||
url.params
|
||||
url.query id=1
|
||||
url.fragment
|
||||
url.domain google.com
|
||||
url.ffname test.php
|
||||
url.fext .php
|
||||
url.fname test
|
||||
url.pstrip Returns a hash of the request using the parameter's names without values (useful for unique operations)
|
||||
url.hasquery Returns true when the URL contains a query string.
|
||||
url.ispath Returns true when the URL path refers to a directory.
|
||||
url.isbllist Returns true when the URL file extension is included in the configuration discovery's blacklist
|
||||
urlp.scheme http
|
||||
urlp.netloc www.google.com
|
||||
urlp.path /dir/test.php
|
||||
urlp.params
|
||||
urlp.query id=1
|
||||
urlp.fragment
|
||||
urlp.ffname test.php
|
||||
urlp.fext .php
|
||||
urlp.fname test
|
||||
urlp.hasquery Returns true when the URL contains a query string.
|
||||
urlp.isbllist Returns true when the URL file extension is included in the configuration discovery's blacklist
|
||||
=================== =============================================
|
||||
|
||||
Payload instrospection can also be performed by using the keyword FUZZ:
|
||||
Payload introspection can also be performed by using the keyword FUZZ:
|
||||
|
||||
============ ==============================================
|
||||
Name Description
|
||||
@ -486,7 +552,7 @@ Where field is one of the described above.
|
||||
Filtering results
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
The --filter command line parameter in conjuntion with the described filter language allows you to peform more complex result triage than the standard filter switches such as "--hc/hl/hw/hh", "--sc/sl/sw/sh" and "-ss/hs".
|
||||
The --filter command line parameter in conjunction with the described filter language allows you to perform more complex result triage than the standard filter switches such as "--hc/hl/hw/hh", "--sc/sl/sw/sh" and "-ss/hs".
|
||||
|
||||
An example below::
|
||||
|
||||
@ -510,7 +576,7 @@ An example below::
|
||||
Filtered Requests: 9
|
||||
Requests/sec.: 7.572076
|
||||
|
||||
Using result and payload instrospection to look for specific content returned in the response::
|
||||
Using result and payload introspection to look for specific content returned in the response::
|
||||
|
||||
$ wfuzz -z list,echoedback -d searchFor=FUZZ --filter "content~FUZZ" http://testphp.vulnweb.com/search.php?test=query
|
||||
|
||||
@ -522,14 +588,22 @@ A more interesting variation of the above examples could be::
|
||||
|
||||
$ wfuzz -w fuzzdb/attack/xss/xss-rsnake.txt -d searchFor=FUZZ --filter "content~FUZZ" http://testphp.vulnweb.com/search.php?test=query
|
||||
|
||||
You can use the fields as boolean values as well. For example, this filter will show only the requests with parameters::
|
||||
|
||||
$ wfuzz -z range --zD 0-1 -u http://testphp.vulnweb.com/artists.php?artist=FUZZ --filter 'r.params.all'
|
||||
|
||||
Results with plugin issues can be filtered as well::
|
||||
|
||||
$ wfuzz -z list --zD index -u http://testphp.vulnweb.com/FUZZ.php --script headers --filter "plugins~'nginx'"
|
||||
|
||||
Filtering a payload
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Slice
|
||||
"""""""
|
||||
|
||||
The --slice command line parameter in conjuntion with the described filter language allows you to filter a payload.
|
||||
The payload to filter, specified by the -z switch must preceed --slice comamand line parameter.
|
||||
The --slice command line parameter in conjunction with the described filter language allows you to filter a payload.
|
||||
The payload to filter, specified by the -z switch must precede --slice command line parameter.
|
||||
|
||||
An example is shown below::
|
||||
|
||||
@ -568,7 +642,7 @@ In this context you are filtering a FuzzResult object, which is the result of co
|
||||
Reutilising previous results
|
||||
--------------------------------------
|
||||
|
||||
Previously performed HTTP requests/responses contain a treasure trove of data. Wfuzz payloads and object instrospection (explained in the filter grammar section) exposes a Python object interface to requests/responses recorded by Wfuzz or other tools.
|
||||
Previously performed HTTP requests/responses contain a treasure trove of data. Wfuzz payloads and object introspection (explained in the filter grammar section) exposes a Python object interface to requests/responses recorded by Wfuzz or other tools.
|
||||
|
||||
This allows you to perform manual and semi-automatic tests with full context and understanding of your actions, without relying on a web application scanner underlying implementation.
|
||||
|
||||
@ -591,7 +665,7 @@ $ wfuzz --oF /tmp/session -z range,0-10 http://www.google.com/dir/test.php?id=FU
|
||||
|
||||
Wfuzz can read burp's (TM) log or saved states. This allows to filter or reutilise burp proxy requests and responses.
|
||||
|
||||
Then, you can reutilise those results by using the denoted payloads. To repeat a request exactly how it was stored, you must use the FUZZ keywork on the command line::
|
||||
Then, you can reutilise those results by using the denoted payloads. To repeat a request exactly how it was stored, you must use the FUZZ keyword on the command line::
|
||||
|
||||
$ wfuzz -z burpstate,a_burp_state.burp FUZZ
|
||||
|
||||
@ -617,7 +691,7 @@ Previous requests can also be modified by using the usual command line switches.
|
||||
|__ C=200 114 L 373 W 5347 Ch "http://testphp.vulnweb.com/userinfo.php"
|
||||
|
||||
|
||||
* Same request against another url::
|
||||
* Same request against another URL::
|
||||
|
||||
$ wfuzz -z burpstate,a_burp_state.burp -H "addme: header" -u http://www.otherhost.com FUZZ
|
||||
|
||||
@ -627,7 +701,7 @@ If you do not want to use the full saved request:
|
||||
|
||||
$ wfuzz -z wfuzzp,/tmp/session --zP attr=url FUZZ
|
||||
|
||||
* Or by specyfing the FUZZ keyword and a field name in the form of FUZZ[field]::
|
||||
* Or by specifying the FUZZ keyword and a field name in the form of FUZZ[field]::
|
||||
|
||||
$ wfuzz -z wfuzzp,/tmp/session FUZZ[url]
|
||||
|
||||
@ -649,6 +723,18 @@ The above command will generate HTTP requests such as the following::
|
||||
|
||||
You can filter the payload using the filter grammar as described before.
|
||||
|
||||
The assignment operators can be used to modify previous requests easily, for example, let's add a quote to every parameter looking for SQL injection issues::
|
||||
|
||||
$ wfuzz -z range,1-5 --oF /tmp/session http://testphp.vulnweb.com/artists.php?artist=FUZZ
|
||||
000003: C=200 118 L 455 W 5326 Ch "3"
|
||||
...
|
||||
000004: C=200 99 L 272 W 3868 Ch "4"
|
||||
|
||||
$ wfuzz -z wfuzzp,/tmp/session --prefilter "r.params.get=+'\''" -A FUZZ
|
||||
00010: 0.161s C=200 101 L 287 W 3986 Ch nginx/1.4.1 "http://testphp.vulnweb.com/artists.php?artist=1'"
|
||||
|_ Error identified: Warning: mysql_fetch_array()
|
||||
...
|
||||
|
||||
wfpayload
|
||||
^^^^^^^^^
|
||||
|
||||
@ -658,4 +744,16 @@ For example, the following will return a unique list of HTTP requests including
|
||||
|
||||
$ wfpayload -z burplog,a_burp_log.log --slice "params.get~'authtoken' and url.pstrip|u()"
|
||||
|
||||
Authtoken is the parameter used by BEA WebLogic Commerce Servers (TM) as a CSRF token, and thefore the above will find all the requests exposing the CSRF token in the URL.
|
||||
Authtoken is the parameter used by BEA WebLogic Commerce Servers (TM) as a CSRF token, and therefore the above will find all the requests exposing the CSRF token in the URL.
|
||||
|
||||
You can also select the field to show, for example::
|
||||
|
||||
$ wfpayload -z wfuzzp --zD /tmp/session --field r.params.get
|
||||
artist=5
|
||||
...
|
||||
|
||||
Or::
|
||||
|
||||
$ wfpayload -z wfuzzp --zD /tmp/session --efield r.params.get
|
||||
000000006: 200 99 L 272 W 3868 Ch "5 | artist=5"
|
||||
...
|
||||
|
@ -116,7 +116,7 @@ Headers can also be fuzzed::
|
||||
Fuzzing HTTP Verbs
|
||||
------------------
|
||||
|
||||
HTTP verbs fuzzing can be specified using the -X swith::
|
||||
HTTP verbs fuzzing can be specified using the -X switch::
|
||||
|
||||
$ wfuzz -z list,GET-HEAD-POST-TRACE-OPTIONS -X FUZZ http://testphp.vulnweb.com/
|
||||
********************************************************
|
||||
@ -193,7 +193,7 @@ If you want to fuzz a resource from a protected website you can also use "--basi
|
||||
Recursion
|
||||
---------
|
||||
|
||||
The -R swith can be used to specify a payload recursion's depth. For example, if you want to search for existing directories and then fuzz within these directories again using the same payload you can use the following command::
|
||||
The -R switch can be used to specify a payload recursion's depth. For example, if you want to search for existing directories and then fuzz within these directories again using the same payload you can use the following command::
|
||||
|
||||
$ wfuzz -z list,"admin-CVS-cgi\-bin" -R1 http://testphp.vulnweb.com/FUZZ
|
||||
********************************************************
|
||||
@ -236,7 +236,7 @@ Wfuzz supports writing the results to a file in a different format. This is perf
|
||||
|
||||
$ wfuzz -e printers
|
||||
|
||||
For example, to write results to an output file in json format use the following command::
|
||||
For example, to write results to an output file in JSON format use the following command::
|
||||
|
||||
$ wfuzz -f /tmp/outfile,json -w wordlist/general/common.txt http://testphp.vulnweb.com/FUZZ
|
||||
|
||||
@ -248,8 +248,19 @@ Wfuzz supports showing the results in various formats. This is performed by plug
|
||||
|
||||
$ wfuzz -e printers
|
||||
|
||||
For example, to show results in json format use the following command::
|
||||
For example, to show results in JSON format use the following command::
|
||||
|
||||
$ wfuzz -o json -w wordlist/general/common.txt http://testphp.vulnweb.com/FUZZ
|
||||
|
||||
When using the default output you can also select an additional FuzzResult's field to show together with the payload description::
|
||||
|
||||
$ wfuzz -z range --zD 0-1 -u http://testphp.vulnweb.com/artists.php?artist=FUZZ --field r
|
||||
...
|
||||
000000001: 200 99 L 272 W 3868 Ch 0 | GET /artists.php?artist=0 HTTP/1.1
|
||||
Content-Type: application/x-www-form-urlencoded
|
||||
User-Agent: Wfuzz/2.4
|
||||
Host: testphp.vulnweb.com
|
||||
...
|
||||
|
||||
|
||||
The above is useful, for example, to debug what exact HTTP request Wfuzz sent to the remote Web server. Check the filter language section in the advanced usage document for the available fields.
|
||||
|
@ -31,7 +31,7 @@ The obtained output is shown below::
|
||||
Filtered Requests: 0
|
||||
Requests/sec.: 225.4143
|
||||
|
||||
Wfuzz output allows to analyze the web server responses and filter the desired results based on the HTTP response message obtained, for example, response codes, response length, etc.
|
||||
Wfuzz output allows to analyse the web server responses and filter the desired results based on the HTTP response message obtained, for example, response codes, response length, etc.
|
||||
|
||||
Each line provides the following information:
|
||||
|
||||
@ -100,7 +100,11 @@ Each FUZZ keyword must have its corresponding payload. There are several equival
|
||||
|
||||
* The long way explicitly defining the payload's parameter name through the command line::
|
||||
|
||||
$ wfuzz -z file --zP fn=wordlist/general/common.txt http://testphp.vulnweb.com/FUZZ
|
||||
$ wfuzz -z file --zP fn=wordlist/general/common.txt http://testphp.vulnweb.com/FUZZ
|
||||
|
||||
* The not so long way explicitly defining the payload's default parameter through the --zD command line option::
|
||||
|
||||
$ wfuzz -z file --zD wordlist/general/common.txt http://testphp.vulnweb.com/FUZZ
|
||||
|
||||
* The not so long way defining only the value of the payload's default parameter::
|
||||
|
||||
@ -232,7 +236,7 @@ Here the {} defines the value of the FUZZ word for this first HTTP request, and
|
||||
Regex filters
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
The command line parameters "--ss" and "--hs" allow to filter the responses using a regular expression against the returned content. For example, the following allows to find web servers vulnerables to "shellshock" (see http://edge-security.blogspot.co.uk/2014/10/scan-for-shellshock-with-wfuzz.html for more information)::
|
||||
The command line parameters "--ss" and "--hs" allow to filter the responses using a regular expression against the returned content. For example, the following allows to find web servers vulnerable to "shellshock" (see http://edge-security.blogspot.co.uk/2014/10/scan-for-shellshock-with-wfuzz.html for more information)::
|
||||
|
||||
$ wfuzz -H "User-Agent: () { :;}; echo; echo vulnerable" --ss vulnerable -w cgis.txt http://localhost:8000/FUZZ
|
||||
|
||||
|
2
setup.py
2
setup.py
@ -33,6 +33,8 @@ if sys.platform.startswith("win"):
|
||||
|
||||
setup(
|
||||
name="wfuzz",
|
||||
include_package_data=True,
|
||||
data_files=[('docs/user', ['docs/user/advanced.rst'])],
|
||||
packages=find_packages(where='src'),
|
||||
package_dir={'wfuzz': 'src/wfuzz'},
|
||||
entry_points={
|
||||
|
@ -1,5 +1,5 @@
|
||||
__title__ = 'wfuzz'
|
||||
__version__ = "2.3.4"
|
||||
__version__ = "2.4"
|
||||
__build__ = 0x023000
|
||||
__author__ = 'Xavier Mendez'
|
||||
__license__ = 'GPL 2.0'
|
||||
@ -23,6 +23,9 @@ try:
|
||||
if "openssl".lower() not in pycurl.version.lower():
|
||||
print("\nWarning: Pycurl is not compiled against Openssl. Wfuzz might not work correctly when fuzzing SSL sites. Check Wfuzz's documentation for more information.\n")
|
||||
|
||||
if not hasattr(pycurl, "CONNECT_TO"):
|
||||
print("\nWarning: Pycurl and/or libcurl version is old. CONNECT_TO option is missing. Wfuzz --ip option will not be available.\n")
|
||||
|
||||
if not hasattr(pycurl, "PATH_AS_IS"):
|
||||
print("\nWarning: Pycurl and/or libcurl version is old. PATH_AS_IS option is missing. Wfuzz might not correctly fuzz URLS with '..'.\n")
|
||||
|
||||
|
@ -7,7 +7,7 @@ from .fuzzobjects import FuzzResultFactory, FuzzStats
|
||||
from .facade import Facade
|
||||
from .exception import FuzzExceptBadOptions, FuzzExceptNoPluginError
|
||||
|
||||
from .filter import FuzzResFilter
|
||||
from .filter import FuzzResFilterSlice
|
||||
|
||||
import re
|
||||
|
||||
@ -24,7 +24,7 @@ from builtins import object
|
||||
|
||||
class sliceit(object):
|
||||
def __init__(self, payload, slicestr):
|
||||
self.ffilter = FuzzResFilter(filter_string=slicestr)
|
||||
self.ffilter = FuzzResFilterSlice(filter_string=slicestr)
|
||||
self.payload = payload
|
||||
|
||||
def __iter__(self):
|
||||
@ -56,164 +56,177 @@ class tupleit(object):
|
||||
|
||||
|
||||
class dictionary(object):
|
||||
def __init__(self, payload, encoders_list):
|
||||
self.__payload = payload
|
||||
self.__encoders = encoders_list
|
||||
self.__generator = self._gen() if self.__encoders else None
|
||||
def __init__(self, payload, encoders_list):
|
||||
self.__payload = payload
|
||||
self.__encoders = encoders_list
|
||||
self.__generator = self._gen() if self.__encoders else None
|
||||
|
||||
def count(self):
|
||||
return (self.__payload.count() * len(self.__encoders)) if self.__encoders else self.__payload.count()
|
||||
def count(self):
|
||||
return (self.__payload.count() * len(self.__encoders)) if self.__encoders else self.__payload.count()
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def _gen(self):
|
||||
while 1:
|
||||
try:
|
||||
payload_list = next(self.__payload)
|
||||
except StopIteration:
|
||||
return
|
||||
def _gen(self):
|
||||
while 1:
|
||||
try:
|
||||
payload_list = next(self.__payload)
|
||||
except StopIteration:
|
||||
return
|
||||
|
||||
for name in self.__encoders:
|
||||
if name.find('@') > 0:
|
||||
string = payload_list
|
||||
for i in reversed(name.split("@")):
|
||||
string = Facade().encoders.get_plugin(i)().encode(string)
|
||||
yield string
|
||||
else:
|
||||
plugin_list = Facade().encoders.get_plugins(name)
|
||||
if not plugin_list:
|
||||
raise FuzzExceptNoPluginError(name + " encoder does not exists (-e encodings for a list of available encoders)")
|
||||
for name in self.__encoders:
|
||||
if name.find('@') > 0:
|
||||
string = payload_list
|
||||
for i in reversed(name.split("@")):
|
||||
string = Facade().encoders.get_plugin(i)().encode(string)
|
||||
yield string
|
||||
else:
|
||||
plugin_list = Facade().encoders.get_plugins(name)
|
||||
if not plugin_list:
|
||||
raise FuzzExceptNoPluginError(name + " encoder does not exists (-e encodings for a list of available encoders)")
|
||||
|
||||
for e in plugin_list:
|
||||
yield e().encode(payload_list)
|
||||
for e in plugin_list:
|
||||
yield e().encode(payload_list)
|
||||
|
||||
def __next__(self):
|
||||
return next(self.__generator) if self.__encoders else next(self.__payload)
|
||||
def __next__(self):
|
||||
return next(self.__generator) if self.__encoders else next(self.__payload)
|
||||
|
||||
|
||||
class requestGenerator(object):
|
||||
def __init__(self, options):
|
||||
self.options = options
|
||||
self.seed = FuzzResultFactory.from_options(options)
|
||||
self.baseline = FuzzResultFactory.from_baseline(self.seed, options)
|
||||
self.dictio = self.get_dictio()
|
||||
def __init__(self, options):
|
||||
self.options = options
|
||||
self.seed = FuzzResultFactory.from_options(options)
|
||||
self.baseline = FuzzResultFactory.from_baseline(self.seed, options)
|
||||
self._payload_list = []
|
||||
self.dictio = self.get_dictio()
|
||||
|
||||
self.stats = FuzzStats.from_requestGenerator(self)
|
||||
self.stats = FuzzStats.from_requestGenerator(self)
|
||||
|
||||
self._allvar_gen = None
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
self._allvar_gen = self.__allvars_gen(self.dictio)
|
||||
self._allvar_gen = None
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
self._allvar_gen = self.__allvars_gen(self.dictio)
|
||||
|
||||
def stop(self):
|
||||
self.stats.cancelled = True
|
||||
def stop(self):
|
||||
self.stats.cancelled = True
|
||||
self.close()
|
||||
|
||||
def restart(self, seed):
|
||||
self.seed = seed
|
||||
self.dictio = self.get_dictio()
|
||||
def restart(self, seed):
|
||||
self.seed = seed
|
||||
self.dictio = self.get_dictio()
|
||||
|
||||
def _check_dictio_len(self, element):
|
||||
marker_regex = re.compile(r"FUZ\d*Z", re.MULTILINE | re.DOTALL)
|
||||
fuzz_words = marker_regex.findall(str(self.seed.history))
|
||||
method, userpass = self.seed.history.auth
|
||||
def _check_dictio_len(self, element):
|
||||
fuzz_words = self.options["compiled_filter"].get_fuzz_words() + self.options["compiled_prefilter"].get_fuzz_words() + self.get_fuzz_words()
|
||||
|
||||
fuzz_words += marker_regex.findall(self.seed.history.scheme)
|
||||
if len(element) != len(set(fuzz_words)):
|
||||
raise FuzzExceptBadOptions("FUZZ words and number of payloads do not match!")
|
||||
|
||||
if method:
|
||||
fuzz_words += marker_regex.findall(userpass)
|
||||
def get_fuzz_words(self):
|
||||
marker_regex = re.compile(r"FUZ\d*Z", re.MULTILINE | re.DOTALL)
|
||||
fuzz_words = marker_regex.findall(str(self.seed.history))
|
||||
method, userpass = self.seed.history.auth
|
||||
|
||||
if self.options["seed_payload"]:
|
||||
fuzz_words += ["FUZZ"]
|
||||
fuzz_words += marker_regex.findall(self.seed.history.scheme)
|
||||
|
||||
if len(element) != len(set(fuzz_words)):
|
||||
raise FuzzExceptBadOptions("FUZZ words and number of payloads do not match!")
|
||||
if method:
|
||||
fuzz_words += marker_regex.findall(userpass)
|
||||
|
||||
def count(self):
|
||||
v = self.dictio.count()
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
v *= len(self.seed.history.wf_allvars_set)
|
||||
if self.options["seed_payload"]:
|
||||
fuzz_words += ["FUZZ"]
|
||||
|
||||
if self.baseline:
|
||||
v += 1
|
||||
return fuzz_words
|
||||
|
||||
return v
|
||||
def count(self):
|
||||
v = self.dictio.count()
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
v *= len(self.seed.history.wf_allvars_set)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
if self.baseline:
|
||||
v += 1
|
||||
|
||||
def __allvars_gen(self, dic):
|
||||
# no FUZZ keyword allowed
|
||||
marker_regex = re.compile(r"FUZ\d*Z", re.MULTILINE | re.DOTALL)
|
||||
if len(marker_regex.findall(str(self.seed.history))) > 0:
|
||||
raise FuzzExceptBadOptions("FUZZ words not allowed when using all parameters brute forcing.")
|
||||
return v
|
||||
|
||||
if len(self.seed.history.wf_allvars_set) == 0:
|
||||
raise FuzzExceptBadOptions("No variables on specified variable set: " + self.seed.history.wf_allvars)
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
for payload in dic:
|
||||
for r in FuzzResultFactory.from_all_fuzz_request(self.seed, payload):
|
||||
yield r
|
||||
def __allvars_gen(self, dic):
|
||||
# no FUZZ keyword allowed
|
||||
marker_regex = re.compile(r"FUZ\d*Z", re.MULTILINE | re.DOTALL)
|
||||
if len(marker_regex.findall(str(self.seed.history))) > 0:
|
||||
raise FuzzExceptBadOptions("FUZZ words not allowed when using all parameters brute forcing.")
|
||||
|
||||
def __next__(self):
|
||||
if self.stats.cancelled:
|
||||
raise StopIteration
|
||||
if len(self.seed.history.wf_allvars_set) == 0:
|
||||
raise FuzzExceptBadOptions("No variables on specified variable set: " + self.seed.history.wf_allvars)
|
||||
|
||||
if self.baseline and self.stats.processed() == 0 and self.stats.pending_seeds() <= 1:
|
||||
return self.baseline
|
||||
for payload in dic:
|
||||
for r in FuzzResultFactory.from_all_fuzz_request(self.seed, payload):
|
||||
yield r
|
||||
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
return next(self._allvar_gen)
|
||||
else:
|
||||
n = next(self.dictio)
|
||||
if self.stats.processed() == 0 or (self.baseline and self.stats.processed() == 1):
|
||||
self._check_dictio_len(n)
|
||||
def __next__(self):
|
||||
if self.stats.cancelled:
|
||||
raise StopIteration
|
||||
|
||||
return FuzzResultFactory.from_seed(self.seed, n, self.options)
|
||||
if self.baseline and self.stats.processed() == 0 and self.stats.pending_seeds() <= 1:
|
||||
return self.baseline
|
||||
|
||||
def get_dictio(self):
|
||||
class wrapper(object):
|
||||
def __init__(self, iterator):
|
||||
self._it = iter(iterator)
|
||||
if self.seed.history.wf_allvars is not None:
|
||||
return next(self._allvar_gen)
|
||||
else:
|
||||
n = next(self.dictio)
|
||||
if self.stats.processed() == 0 or (self.baseline and self.stats.processed() == 1):
|
||||
self._check_dictio_len(n)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
return FuzzResultFactory.from_seed(self.seed, n, self.options)
|
||||
|
||||
def count(self):
|
||||
return -1
|
||||
def close(self):
|
||||
for payload in self._payload_list:
|
||||
payload.close()
|
||||
|
||||
def __next__(self):
|
||||
return str(next(self._it))
|
||||
def get_dictio(self):
|
||||
class wrapper(object):
|
||||
def __init__(self, iterator):
|
||||
self._it = iter(iterator)
|
||||
|
||||
selected_dic = []
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
if self.options["dictio"]:
|
||||
for d in [wrapper(x) for x in self.options["dictio"]]:
|
||||
selected_dic.append(d)
|
||||
else:
|
||||
for payload in self.options["payloads"]:
|
||||
try:
|
||||
name, params, slicestr = [x[0] for x in zip_longest(payload, (None, None, None))]
|
||||
except ValueError:
|
||||
raise FuzzExceptBadOptions("You must supply a list of payloads in the form of [(name, {params}), ... ]")
|
||||
def count(self):
|
||||
return -1
|
||||
|
||||
if not params:
|
||||
raise FuzzExceptBadOptions("You must supply a list of payloads in the form of [(name, {params}), ... ]")
|
||||
def __next__(self):
|
||||
return str(next(self._it))
|
||||
|
||||
p = Facade().payloads.get_plugin(name)(params)
|
||||
pp = dictionary(p, params["encoder"]) if "encoder" in params else p
|
||||
selected_dic.append(sliceit(pp, slicestr) if slicestr else pp)
|
||||
selected_dic = []
|
||||
self._payload_list = []
|
||||
|
||||
if not selected_dic:
|
||||
raise FuzzExceptBadOptions("Empty dictionary! Check payload and filter")
|
||||
if self.options["dictio"]:
|
||||
for d in [wrapper(x) for x in self.options["dictio"]]:
|
||||
selected_dic.append(d)
|
||||
else:
|
||||
for payload in self.options["payloads"]:
|
||||
try:
|
||||
name, params, slicestr = [x[0] for x in zip_longest(payload, (None, None, None))]
|
||||
except ValueError:
|
||||
raise FuzzExceptBadOptions("You must supply a list of payloads in the form of [(name, {params}), ... ]")
|
||||
|
||||
if len(selected_dic) == 1:
|
||||
if self.options["iterator"]:
|
||||
raise FuzzExceptBadOptions("Several dictionaries must be used when specifying an iterator")
|
||||
return tupleit(selected_dic[0])
|
||||
elif self.options["iterator"]:
|
||||
return Facade().iterators.get_plugin(self.options["iterator"])(*selected_dic)
|
||||
else:
|
||||
return Facade().iterators.get_plugin("product")(*selected_dic)
|
||||
if not params:
|
||||
raise FuzzExceptBadOptions("You must supply a list of payloads in the form of [(name, {params}), ... ]")
|
||||
|
||||
p = Facade().payloads.get_plugin(name)(params)
|
||||
self._payload_list.append(p)
|
||||
pp = dictionary(p, params["encoder"]) if "encoder" in params else p
|
||||
selected_dic.append(sliceit(pp, slicestr) if slicestr else pp)
|
||||
|
||||
if not selected_dic:
|
||||
raise FuzzExceptBadOptions("Empty dictionary! Check payload and filter")
|
||||
|
||||
if len(selected_dic) == 1:
|
||||
if self.options["iterator"]:
|
||||
raise FuzzExceptBadOptions("Several dictionaries must be used when specifying an iterator")
|
||||
return tupleit(selected_dic[0])
|
||||
elif self.options["iterator"]:
|
||||
return Facade().iterators.get_plugin(self.options["iterator"])(*selected_dic)
|
||||
else:
|
||||
return Facade().iterators.get_plugin("product")(*selected_dic)
|
||||
|
||||
|
||||
class Fuzzer(object):
|
||||
|
708
src/wfuzz/externals/reqresp/Request.py
vendored
708
src/wfuzz/externals/reqresp/Request.py
vendored
@ -30,427 +30,433 @@ if not hasattr(pycurl, "PATH_AS_IS"):
|
||||
|
||||
|
||||
class Request:
|
||||
def __init__(self):
|
||||
self.__host = None # www.google.com:80
|
||||
self.__path = None # /index.php
|
||||
self.__params = None # Mierdaza de index.php;lskjflkasjflkasjfdlkasdf?
|
||||
self.schema = "http" # http
|
||||
def __init__(self):
|
||||
self.__host = None # www.google.com:80
|
||||
self.__path = None # /index.php
|
||||
self.__params = None # Mierdaza de index.php;lskjflkasjflkasjfdlkasdf?
|
||||
self.schema = "http" # http
|
||||
|
||||
# #### Variables calculadas por getters NO SE PUEDEN MODIFICAR
|
||||
# self.urlWithoutPath # http://www.google.es
|
||||
# self.pathWithVariables # /index.php?a=b&c=d
|
||||
# self.urlWithoutVariables=None # http://www.google.es/index.php
|
||||
# self.completeUrl="" # http://www.google.es/index.php?a=b
|
||||
# self.finalUrl="" # Url despues de hacer el FollowLocation
|
||||
# self.redirectUrl="" # Url redirected
|
||||
# self.postdata="" # Datos por POST, toto el string
|
||||
# ###############
|
||||
# #### Variables calculadas por getters NO SE PUEDEN MODIFICAR
|
||||
# self.urlWithoutPath # http://www.google.es
|
||||
# self.pathWithVariables # /index.php?a=b&c=d
|
||||
# self.urlWithoutVariables=None # http://www.google.es/index.php
|
||||
# self.completeUrl="" # http://www.google.es/index.php?a=b
|
||||
# self.finalUrl="" # Url despues de hacer el FollowLocation
|
||||
# self.redirectUrl="" # Url redirected
|
||||
# self.postdata="" # Datos por POST, toto el string
|
||||
# ###############
|
||||
|
||||
self.ContentType = "application/x-www-form-urlencoded" # None es normal encoding
|
||||
self.multiPOSThead = {}
|
||||
self.ContentType = "application/x-www-form-urlencoded" # None es normal encoding
|
||||
self.multiPOSThead = {}
|
||||
|
||||
self.__variablesGET = VariablesSet()
|
||||
self.__variablesPOST = VariablesSet()
|
||||
self.__variablesGET = VariablesSet()
|
||||
self.__variablesPOST = VariablesSet()
|
||||
self._non_parsed_post = None
|
||||
|
||||
# diccionario, por ejemplo headers["Cookie"]
|
||||
self._headers = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
"User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1)"
|
||||
}
|
||||
# diccionario, por ejemplo headers["Cookie"]
|
||||
self._headers = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
"User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1)"
|
||||
}
|
||||
|
||||
self.response = None # Apunta a la response que produce dicha request
|
||||
self.response = None # Apunta a la response que produce dicha request
|
||||
|
||||
# ################## lo de debajo no se deberia acceder directamente
|
||||
# ################## lo de debajo no se deberia acceder directamente
|
||||
|
||||
self.time = None # 23:00:00
|
||||
self.ip = None # 192.168.1.1
|
||||
self._method = None
|
||||
self.protocol = "HTTP/1.1" # HTTP/1.1
|
||||
self.__performHead = ""
|
||||
self.__performBody = ""
|
||||
self.time = None # 23:00:00
|
||||
self.ip = None # 192.168.1.1
|
||||
self._method = None
|
||||
self.protocol = "HTTP/1.1" # HTTP/1.1
|
||||
self.__performHead = ""
|
||||
self.__performBody = ""
|
||||
|
||||
self.__authMethod = None
|
||||
self.__userpass = ""
|
||||
self.__authMethod = None
|
||||
self.__userpass = ""
|
||||
|
||||
self.description = "" # For temporally store imformation
|
||||
self.description = "" # For temporally store imformation
|
||||
|
||||
self.__proxy = None
|
||||
self.proxytype = None
|
||||
self.__timeout = None
|
||||
self.__totaltimeout = None
|
||||
self.__finalurl = ""
|
||||
self.__proxy = None
|
||||
self.proxytype = None
|
||||
self.__timeout = None
|
||||
self.__totaltimeout = None
|
||||
self.__finalurl = ""
|
||||
|
||||
self.followLocation = False
|
||||
self.__userpass = ""
|
||||
self.followLocation = False
|
||||
self.__userpass = ""
|
||||
|
||||
self.totaltime = None
|
||||
self.totaltime = None
|
||||
|
||||
@property
|
||||
def method(self):
|
||||
if self._method is None:
|
||||
return "POST" if self.getPOSTVars() else "GET"
|
||||
@property
|
||||
def method(self):
|
||||
if self._method is None:
|
||||
return "POST" if (self.getPOSTVars() or self._non_parsed_post is not None) else "GET"
|
||||
|
||||
return self._method
|
||||
return self._method
|
||||
|
||||
@method.setter
|
||||
def method(self, value):
|
||||
if value == "None":
|
||||
value = None
|
||||
@method.setter
|
||||
def method(self, value):
|
||||
if value == "None":
|
||||
value = None
|
||||
|
||||
self._method = value
|
||||
self._method = value
|
||||
|
||||
def setFinalUrl(self, fu):
|
||||
self.__finalurl = fu
|
||||
def setFinalUrl(self, fu):
|
||||
self.__finalurl = fu
|
||||
|
||||
def __str__(self):
|
||||
str = "[ URL: %s" % (self.completeUrl)
|
||||
if self.postdata:
|
||||
str += " - {}: \"{}\"".format(self.method, self.postdata)
|
||||
if "Cookie" in self._headers:
|
||||
str += " - COOKIE: \"%s\"" % self._headers["Cookie"]
|
||||
str += " ]"
|
||||
return str
|
||||
def __str__(self):
|
||||
str = "[ URL: %s" % (self.completeUrl)
|
||||
if self.postdata:
|
||||
str += " - {}: \"{}\"".format(self.method, self.postdata)
|
||||
if "Cookie" in self._headers:
|
||||
str += " - COOKIE: \"%s\"" % self._headers["Cookie"]
|
||||
str += " ]"
|
||||
return str
|
||||
|
||||
def getHost(self):
|
||||
return self.__host
|
||||
def getHost(self):
|
||||
return self.__host
|
||||
|
||||
def getXML(self, obj):
|
||||
r = obj.createElement("request")
|
||||
r.setAttribute("method", self.method)
|
||||
url = obj.createElement("URL")
|
||||
url.appendChild(obj.createTextNode(self.completeUrl))
|
||||
r.appendChild(url)
|
||||
if self.postdata:
|
||||
pd = obj.createElement("PostData")
|
||||
pd.appendChild(obj.createTextNode(self.postdata))
|
||||
r.appendChild(pd)
|
||||
if "Cookie" in self._headers:
|
||||
ck = obj.createElement("Cookie")
|
||||
ck.appendChild(obj.createTextNode(self._headers["Cookie"]))
|
||||
r.appendChild(ck)
|
||||
def getXML(self, obj):
|
||||
r = obj.createElement("request")
|
||||
r.setAttribute("method", self.method)
|
||||
url = obj.createElement("URL")
|
||||
url.appendChild(obj.createTextNode(self.completeUrl))
|
||||
r.appendChild(url)
|
||||
if self.postdata:
|
||||
pd = obj.createElement("PostData")
|
||||
pd.appendChild(obj.createTextNode(self.postdata))
|
||||
r.appendChild(pd)
|
||||
if "Cookie" in self._headers:
|
||||
ck = obj.createElement("Cookie")
|
||||
ck.appendChild(obj.createTextNode(self._headers["Cookie"]))
|
||||
r.appendChild(ck)
|
||||
|
||||
return r
|
||||
return r
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == "urlWithoutVariables":
|
||||
return urlunparse((self.schema, self.__host, self.__path, '', '', ''))
|
||||
elif name == "pathWithVariables":
|
||||
return urlunparse(('', '', self.__path, '', self.__variablesGET.urlEncoded(), ''))
|
||||
elif name == "completeUrl":
|
||||
return urlunparse((self.schema, self.__host, self.__path, self.__params, self.__variablesGET.urlEncoded(), ''))
|
||||
elif name == "finalUrl":
|
||||
if self.__finalurl:
|
||||
return self.__finalurl
|
||||
return self.completeUrl
|
||||
elif name == "urlWithoutPath":
|
||||
return "%s://%s" % (self.schema, self._headers["Host"])
|
||||
elif name == "path":
|
||||
return self.__path
|
||||
elif name == "postdata":
|
||||
if self.ContentType == "application/x-www-form-urlencoded":
|
||||
return self.__variablesPOST.urlEncoded()
|
||||
elif self.ContentType == "multipart/form-data":
|
||||
return self.__variablesPOST.multipartEncoded()
|
||||
else:
|
||||
return self.__uknPostData
|
||||
else:
|
||||
raise AttributeError
|
||||
def __getattr__(self, name):
|
||||
if name == "urlWithoutVariables":
|
||||
return urlunparse((self.schema, self.__host, self.__path, '', '', ''))
|
||||
elif name == "pathWithVariables":
|
||||
return urlunparse(('', '', self.__path, '', self.__variablesGET.urlEncoded(), ''))
|
||||
elif name == "completeUrl":
|
||||
return urlunparse((self.schema, self.__host, self.__path, self.__params, self.__variablesGET.urlEncoded(), ''))
|
||||
elif name == "finalUrl":
|
||||
if self.__finalurl:
|
||||
return self.__finalurl
|
||||
return self.completeUrl
|
||||
elif name == "urlWithoutPath":
|
||||
return "%s://%s" % (self.schema, self._headers["Host"])
|
||||
elif name == "path":
|
||||
return self.__path
|
||||
elif name == "postdata":
|
||||
if self._non_parsed_post is not None:
|
||||
return self._non_parsed_post
|
||||
|
||||
def setUrl(self, urltmp):
|
||||
self.__variablesGET = VariablesSet()
|
||||
self.schema, self.__host, self.__path, self.__params, variables, f = urlparse(urltmp)
|
||||
if "Host" not in self._headers or (not self._headers["Host"]):
|
||||
self._headers["Host"] = self.__host
|
||||
if self.ContentType == "application/x-www-form-urlencoded":
|
||||
return self.__variablesPOST.urlEncoded()
|
||||
elif self.ContentType == "multipart/form-data":
|
||||
return self.__variablesPOST.multipartEncoded()
|
||||
elif self.ContentType == 'application/json':
|
||||
return self.__variablesPOST.json_encoded()
|
||||
else:
|
||||
return self.__variablesPOST.urlEncoded()
|
||||
else:
|
||||
raise AttributeError
|
||||
|
||||
if variables:
|
||||
self.__variablesGET.parseUrlEncoded(variables)
|
||||
def setUrl(self, urltmp):
|
||||
self.__variablesGET = VariablesSet()
|
||||
self.schema, self.__host, self.__path, self.__params, variables, f = urlparse(urltmp)
|
||||
if "Host" not in self._headers or (not self._headers["Host"]):
|
||||
self._headers["Host"] = self.__host
|
||||
|
||||
if variables:
|
||||
self.__variablesGET.parseUrlEncoded(variables)
|
||||
|
||||
# ############## PROXY ##################################
|
||||
def getProxy(self):
|
||||
return self.__proxy
|
||||
def getProxy(self):
|
||||
return self.__proxy
|
||||
|
||||
def setProxy(self, prox, ptype):
|
||||
self.__proxy = prox
|
||||
self.proxytype = ptype
|
||||
def setProxy(self, prox, ptype):
|
||||
self.__proxy = prox
|
||||
self.proxytype = ptype
|
||||
|
||||
# ############## FOLLOW LOCATION ########################
|
||||
def setFollowLocation(self, value):
|
||||
self.followLocation = value
|
||||
def setFollowLocation(self, value):
|
||||
self.followLocation = value
|
||||
|
||||
# ############# TIMEOUTS ################################
|
||||
def setConnTimeout(self, time):
|
||||
self.__timeout = time
|
||||
def setConnTimeout(self, time):
|
||||
self.__timeout = time
|
||||
|
||||
def getConnTimeout(self):
|
||||
return self.__timeout
|
||||
def getConnTimeout(self):
|
||||
return self.__timeout
|
||||
|
||||
def setTotalTimeout(self, time):
|
||||
self.__totaltimeout = time
|
||||
def setTotalTimeout(self, time):
|
||||
self.__totaltimeout = time
|
||||
|
||||
def getTotalTimeout(self):
|
||||
return self.__totaltimeout
|
||||
def getTotalTimeout(self):
|
||||
return self.__totaltimeout
|
||||
|
||||
# ############# Autenticacion ###########################
|
||||
def setAuth(self, method, string):
|
||||
self.__authMethod = method
|
||||
self.__userpass = string
|
||||
def setAuth(self, method, string):
|
||||
self.__authMethod = method
|
||||
self.__userpass = string
|
||||
|
||||
def getAuth(self):
|
||||
return self.__authMethod, self.__userpass
|
||||
def getAuth(self):
|
||||
return self.__authMethod, self.__userpass
|
||||
|
||||
# ############# TRATAMIENTO VARIABLES GET & POST #########################
|
||||
|
||||
def existsGETVar(self, key):
|
||||
return self.__variablesGET.existsVar(key)
|
||||
def existsGETVar(self, key):
|
||||
return self.__variablesGET.existsVar(key)
|
||||
|
||||
def existPOSTVar(self, key):
|
||||
return self.__variablesPOST.existsVar(key)
|
||||
def existPOSTVar(self, key):
|
||||
return self.__variablesPOST.existsVar(key)
|
||||
|
||||
def setVariablePOST(self, key, value):
|
||||
v = self.__variablesPOST.getVariable(key)
|
||||
v.update(value)
|
||||
# self._headers["Content-Length"] = str(len(self.postdata))
|
||||
def setVariablePOST(self, key, value):
|
||||
v = self.__variablesPOST.getVariable(key)
|
||||
v.update(value)
|
||||
# self._headers["Content-Length"] = str(len(self.postdata))
|
||||
|
||||
def setVariableGET(self, key, value):
|
||||
v = self.__variablesGET.getVariable(key)
|
||||
v.update(value)
|
||||
def setVariableGET(self, key, value):
|
||||
v = self.__variablesGET.getVariable(key)
|
||||
v.update(value)
|
||||
|
||||
def getGETVars(self):
|
||||
return self.__variablesGET.variables
|
||||
def getGETVars(self):
|
||||
return self.__variablesGET.variables
|
||||
|
||||
def getPOSTVars(self):
|
||||
return self.__variablesPOST.variables
|
||||
def getPOSTVars(self):
|
||||
return self.__variablesPOST.variables
|
||||
|
||||
def setPostData(self, pd, boundary=None):
|
||||
self.__variablesPOST = VariablesSet()
|
||||
if self.ContentType == "application/x-www-form-urlencoded":
|
||||
self.__variablesPOST.parseUrlEncoded(pd)
|
||||
elif self.ContentType == "multipart/form-data":
|
||||
self.__variablesPOST.parseMultipart(pd, boundary)
|
||||
else:
|
||||
self.__uknPostData = pd
|
||||
def setPostData(self, pd, boundary=None):
|
||||
try:
|
||||
self.__variablesPOST = VariablesSet()
|
||||
if self.ContentType == "application/x-www-form-urlencoded":
|
||||
self.__variablesPOST.parseUrlEncoded(pd)
|
||||
elif self.ContentType == "multipart/form-data":
|
||||
self.__variablesPOST.parseMultipart(pd, boundary)
|
||||
elif self.ContentType == 'application/json':
|
||||
self.__variablesPOST.parse_json_encoded(pd)
|
||||
else:
|
||||
self.__variablesPOST.parseUrlEncoded(pd)
|
||||
except Exception:
|
||||
self._non_parsed_post = pd
|
||||
|
||||
############################################################################
|
||||
|
||||
def addHeader(self, key, value):
|
||||
k = string.capwords(key, "-")
|
||||
self._headers[k] = value
|
||||
def addHeader(self, key, value):
|
||||
k = string.capwords(key, "-")
|
||||
self._headers[k] = value
|
||||
|
||||
def delHeader(self, key):
|
||||
k = string.capwords(key, "-")
|
||||
if k in self._headers:
|
||||
del self._headers[k]
|
||||
def delHeader(self, key):
|
||||
k = string.capwords(key, "-")
|
||||
if k in self._headers:
|
||||
del self._headers[k]
|
||||
|
||||
def __getitem__(self, key):
|
||||
k = string.capwords(key, "-")
|
||||
if k in self._headers:
|
||||
return self._headers[k]
|
||||
else:
|
||||
return ""
|
||||
def __getitem__(self, key):
|
||||
k = string.capwords(key, "-")
|
||||
if k in self._headers:
|
||||
return self._headers[k]
|
||||
else:
|
||||
return ""
|
||||
|
||||
def getHeaders(self):
|
||||
header_list = []
|
||||
for i, j in self._headers.items():
|
||||
header_list += ["%s: %s" % (i, j)]
|
||||
return header_list
|
||||
def getHeaders(self):
|
||||
header_list = []
|
||||
for i, j in self._headers.items():
|
||||
header_list += ["%s: %s" % (i, j)]
|
||||
return header_list
|
||||
|
||||
def head(self):
|
||||
conn = pycurl.Curl()
|
||||
conn.setopt(pycurl.SSL_VERIFYPEER, False)
|
||||
conn.setopt(pycurl.SSL_VERIFYHOST, 0)
|
||||
conn.setopt(pycurl.URL, self.completeUrl)
|
||||
def head(self):
|
||||
conn = pycurl.Curl()
|
||||
conn.setopt(pycurl.SSL_VERIFYPEER, False)
|
||||
conn.setopt(pycurl.SSL_VERIFYHOST, 0)
|
||||
conn.setopt(pycurl.URL, self.completeUrl)
|
||||
|
||||
conn.setopt(pycurl.NOBODY, True) # para hacer un pedido HEAD
|
||||
conn.setopt(pycurl.NOBODY, True) # para hacer un pedido HEAD
|
||||
|
||||
conn.setopt(pycurl.WRITEFUNCTION, self.header_callback)
|
||||
conn.perform()
|
||||
conn.setopt(pycurl.WRITEFUNCTION, self.header_callback)
|
||||
conn.perform()
|
||||
|
||||
rp = Response()
|
||||
rp.parseResponse(self.__performHead)
|
||||
self.response = rp
|
||||
rp = Response()
|
||||
rp.parseResponse(self.__performHead)
|
||||
self.response = rp
|
||||
|
||||
def createPath(self, newpath):
|
||||
'''Creates new url from a location header || Hecho para el followLocation=true'''
|
||||
if "http" in newpath[:4].lower():
|
||||
return newpath
|
||||
def createPath(self, newpath):
|
||||
'''Creates new url from a location header || Hecho para el followLocation=true'''
|
||||
if "http" in newpath[:4].lower():
|
||||
return newpath
|
||||
|
||||
parts = urlparse(self.completeUrl)
|
||||
if "/" != newpath[0]:
|
||||
newpath = "/".join(parts[2].split("/")[:-1]) + "/" + newpath
|
||||
parts = urlparse(self.completeUrl)
|
||||
if "/" != newpath[0]:
|
||||
newpath = "/".join(parts[2].split("/")[:-1]) + "/" + newpath
|
||||
|
||||
return urlunparse([parts[0], parts[1], newpath, '', '', ''])
|
||||
return urlunparse([parts[0], parts[1], newpath, '', '', ''])
|
||||
|
||||
# pycurl - reqresp conversions
|
||||
@staticmethod
|
||||
def to_pycurl_object(c, req):
|
||||
# pycurl - reqresp conversions
|
||||
@staticmethod
|
||||
def to_pycurl_object(c, req):
|
||||
|
||||
c.setopt(pycurl.MAXREDIRS, 5)
|
||||
c.setopt(pycurl.MAXREDIRS, 5)
|
||||
|
||||
c.setopt(pycurl.WRITEFUNCTION, req.body_callback)
|
||||
c.setopt(pycurl.HEADERFUNCTION, req.header_callback)
|
||||
c.setopt(pycurl.WRITEFUNCTION, req.body_callback)
|
||||
c.setopt(pycurl.HEADERFUNCTION, req.header_callback)
|
||||
|
||||
c.setopt(pycurl.NOSIGNAL, 1)
|
||||
c.setopt(pycurl.SSL_VERIFYPEER, False)
|
||||
c.setopt(pycurl.SSL_VERIFYHOST, 0)
|
||||
c.setopt(pycurl.NOSIGNAL, 1)
|
||||
c.setopt(pycurl.SSL_VERIFYPEER, False)
|
||||
c.setopt(pycurl.SSL_VERIFYHOST, 0)
|
||||
|
||||
if PYCURL_PATH_AS_IS:
|
||||
c.setopt(pycurl.PATH_AS_IS, 1)
|
||||
if PYCURL_PATH_AS_IS:
|
||||
c.setopt(pycurl.PATH_AS_IS, 1)
|
||||
|
||||
c.setopt(pycurl.URL, python2_3_convert_to_unicode(req.completeUrl))
|
||||
c.setopt(pycurl.URL, python2_3_convert_to_unicode(req.completeUrl))
|
||||
|
||||
if req.getConnTimeout():
|
||||
c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout())
|
||||
if req.getConnTimeout():
|
||||
c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout())
|
||||
|
||||
if req.getTotalTimeout():
|
||||
c.setopt(pycurl.TIMEOUT, req.getTotalTimeout())
|
||||
if req.getTotalTimeout():
|
||||
c.setopt(pycurl.TIMEOUT, req.getTotalTimeout())
|
||||
|
||||
authMethod, userpass = req.getAuth()
|
||||
if authMethod or userpass:
|
||||
if authMethod == "basic":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
|
||||
elif authMethod == "ntlm":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
|
||||
elif authMethod == "digest":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
|
||||
c.setopt(pycurl.USERPWD, python2_3_convert_to_unicode(userpass))
|
||||
authMethod, userpass = req.getAuth()
|
||||
if authMethod or userpass:
|
||||
if authMethod == "basic":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
|
||||
elif authMethod == "ntlm":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
|
||||
elif authMethod == "digest":
|
||||
c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
|
||||
c.setopt(pycurl.USERPWD, python2_3_convert_to_unicode(userpass))
|
||||
else:
|
||||
c.unsetopt(pycurl.USERPWD)
|
||||
|
||||
c.setopt(pycurl.HTTPHEADER, python2_3_convert_to_unicode(req.getHeaders()))
|
||||
|
||||
curl_options = {
|
||||
"GET": pycurl.HTTPGET,
|
||||
"POST": pycurl.POST,
|
||||
"PATCH": pycurl.UPLOAD,
|
||||
"HEAD": pycurl.NOBODY,
|
||||
}
|
||||
|
||||
for o in curl_options.values():
|
||||
c.setopt(o, False)
|
||||
|
||||
if req.method in curl_options:
|
||||
c.unsetopt(pycurl.CUSTOMREQUEST)
|
||||
c.setopt(curl_options[req.method], True)
|
||||
else:
|
||||
c.setopt(pycurl.CUSTOMREQUEST, req.method)
|
||||
|
||||
if req.getPOSTVars() or req._non_parsed_post is not None:
|
||||
c.setopt(pycurl.POSTFIELDS, python2_3_convert_to_unicode(req.postdata))
|
||||
|
||||
c.setopt(pycurl.FOLLOWLOCATION, 1 if req.followLocation else 0)
|
||||
|
||||
proxy = req.getProxy()
|
||||
if proxy is not None:
|
||||
c.setopt(pycurl.PROXY, python2_3_convert_to_unicode(proxy))
|
||||
if req.proxytype == "SOCKS5":
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
|
||||
elif req.proxytype == "SOCKS4":
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
|
||||
req.delHeader("Proxy-Connection")
|
||||
else:
|
||||
c.setopt(pycurl.PROXY, "")
|
||||
|
||||
return c
|
||||
|
||||
def response_from_conn_object(self, conn, header, body):
|
||||
# followlocation
|
||||
if conn.getinfo(pycurl.EFFECTIVE_URL) != self.completeUrl:
|
||||
self.setFinalUrl(conn.getinfo(pycurl.EFFECTIVE_URL))
|
||||
|
||||
self.totaltime = conn.getinfo(pycurl.TOTAL_TIME)
|
||||
|
||||
self.response = Response()
|
||||
self.response.parseResponse(header, rawbody=body)
|
||||
|
||||
return self.response
|
||||
|
||||
def perform(self):
|
||||
self.__performHead = ""
|
||||
self.__performBody = ""
|
||||
self.__headersSent = ""
|
||||
|
||||
try:
|
||||
conn = Request.to_pycurl_object(pycurl.Curl(), self)
|
||||
conn.perform()
|
||||
self.response_from_conn_object(conn, self.__performHead, self.__performBody)
|
||||
except pycurl.error as error:
|
||||
errno, errstr = error
|
||||
raise ReqRespException(ReqRespException.FATAL, errstr)
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
# ######## ESTE conjunto de funciones no es necesario para el uso habitual de la clase
|
||||
|
||||
def getAll(self):
|
||||
pd = self.postdata
|
||||
string = str(self.method) + " " + str(self.pathWithVariables) + " " + str(self.protocol) + "\n"
|
||||
for i, j in self._headers.items():
|
||||
string += i + ": " + j + "\n"
|
||||
string += "\n" + pd
|
||||
|
||||
return string
|
||||
|
||||
# #########################################################################
|
||||
|
||||
def header_callback(self, data):
|
||||
self.__performHead += data
|
||||
|
||||
def body_callback(self, data):
|
||||
self.__performBody += data
|
||||
|
||||
def Substitute(self, src, dst):
|
||||
a = self.getAll()
|
||||
rx = re.compile(src)
|
||||
b = rx.sub(dst, a)
|
||||
del rx
|
||||
self.parseRequest(b, self.schema)
|
||||
|
||||
def parseRequest(self, rawRequest, prot="http"):
|
||||
''' Aun esta en fase BETA y por probar'''
|
||||
tp = TextParser()
|
||||
tp.setSource("string", rawRequest)
|
||||
|
||||
self.__variablesPOST = VariablesSet()
|
||||
self._headers = {} # diccionario, por ejemplo headers["Cookie"]
|
||||
|
||||
tp.readLine()
|
||||
try:
|
||||
tp.search(r"^(\w+) (.*) (HTTP\S*)$")
|
||||
self.method = tp[0][0]
|
||||
self.protocol = tp[0][2]
|
||||
except Exception as a:
|
||||
print(rawRequest)
|
||||
raise a
|
||||
|
||||
pathTMP = tp[0][1].replace(" ", "%20")
|
||||
pathTMP = ('', '') + urlparse(pathTMP)[2:]
|
||||
pathTMP = urlunparse(pathTMP)
|
||||
|
||||
while True:
|
||||
tp.readLine()
|
||||
if (tp.search("^([^:]+): (.*)$")):
|
||||
self.addHeader(tp[0][0], tp[0][1])
|
||||
else:
|
||||
c.unsetopt(pycurl.USERPWD)
|
||||
break
|
||||
|
||||
c.setopt(pycurl.HTTPHEADER, python2_3_convert_to_unicode(req.getHeaders()))
|
||||
self.setUrl(prot + "://" + self._headers["Host"] + pathTMP)
|
||||
|
||||
curl_options = {
|
||||
"GET": pycurl.HTTPGET,
|
||||
"POST": pycurl.POST,
|
||||
"PATCH": pycurl.UPLOAD,
|
||||
"HEAD": pycurl.NOBODY,
|
||||
}
|
||||
pd = ""
|
||||
# TODO: hacky, might need to change tp.readline returning read bytes instead
|
||||
while tp.readLine():
|
||||
pd += tp.lastline
|
||||
|
||||
for o in curl_options.values():
|
||||
c.setopt(o, False)
|
||||
if pd:
|
||||
boundary = None
|
||||
if "Content-Type" in self._headers:
|
||||
values = self._headers["Content-Type"].split(";")
|
||||
self.ContentType = values[0].strip().lower()
|
||||
if self.ContentType == "multipart/form-data":
|
||||
boundary = values[1].split("=")[1].strip()
|
||||
|
||||
if req.method in curl_options:
|
||||
c.unsetopt(pycurl.CUSTOMREQUEST)
|
||||
c.setopt(curl_options[req.method], True)
|
||||
else:
|
||||
c.setopt(pycurl.CUSTOMREQUEST, req.method)
|
||||
|
||||
if req.getPOSTVars():
|
||||
c.setopt(pycurl.POSTFIELDS, python2_3_convert_to_unicode(req.postdata))
|
||||
|
||||
c.setopt(pycurl.FOLLOWLOCATION, 1 if req.followLocation else 0)
|
||||
|
||||
proxy = req.getProxy()
|
||||
if proxy is not None:
|
||||
c.setopt(pycurl.PROXY, python2_3_convert_to_unicode(proxy))
|
||||
if req.proxytype == "SOCKS5":
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
|
||||
elif req.proxytype == "SOCKS4":
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
|
||||
req.delHeader("Proxy-Connection")
|
||||
else:
|
||||
c.setopt(pycurl.PROXY, "")
|
||||
|
||||
return c
|
||||
|
||||
def response_from_conn_object(self, conn, header, body):
|
||||
# followlocation
|
||||
if conn.getinfo(pycurl.EFFECTIVE_URL) != self.completeUrl:
|
||||
self.setFinalUrl(conn.getinfo(pycurl.EFFECTIVE_URL))
|
||||
|
||||
self.totaltime = conn.getinfo(pycurl.TOTAL_TIME)
|
||||
|
||||
rp = Response()
|
||||
rp.parseResponse(header, rawbody=body)
|
||||
|
||||
if self.schema == "https" and self.__proxy:
|
||||
self.response = Response()
|
||||
self.response.parseResponse(rp.getContent())
|
||||
else:
|
||||
self.response = rp
|
||||
|
||||
return rp
|
||||
|
||||
def perform(self):
|
||||
self.__performHead = ""
|
||||
self.__performBody = ""
|
||||
self.__headersSent = ""
|
||||
|
||||
try:
|
||||
conn = Request.to_pycurl_object(pycurl.Curl(), self)
|
||||
conn.perform()
|
||||
self.response_from_conn_object(conn, self.__performHead, self.__performBody)
|
||||
except pycurl.error as error:
|
||||
errno, errstr = error
|
||||
raise ReqRespException(ReqRespException.FATAL, errstr)
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
# ######## ESTE conjunto de funciones no es necesario para el uso habitual de la clase
|
||||
|
||||
def getAll(self):
|
||||
pd = self.postdata
|
||||
string = str(self.method) + " " + str(self.pathWithVariables) + " " + str(self.protocol) + "\n"
|
||||
for i, j in self._headers.items():
|
||||
string += i + ": " + j + "\n"
|
||||
string += "\n" + pd
|
||||
|
||||
return string
|
||||
|
||||
# #########################################################################
|
||||
|
||||
def header_callback(self, data):
|
||||
self.__performHead += data
|
||||
|
||||
def body_callback(self, data):
|
||||
self.__performBody += data
|
||||
|
||||
def Substitute(self, src, dst):
|
||||
a = self.getAll()
|
||||
rx = re.compile(src)
|
||||
b = rx.sub(dst, a)
|
||||
del rx
|
||||
self.parseRequest(b, self.schema)
|
||||
|
||||
def parseRequest(self, rawRequest, prot="http"):
|
||||
''' Aun esta en fase BETA y por probar'''
|
||||
tp = TextParser()
|
||||
tp.setSource("string", rawRequest)
|
||||
|
||||
self.__variablesPOST = VariablesSet()
|
||||
self._headers = {} # diccionario, por ejemplo headers["Cookie"]
|
||||
|
||||
tp.readLine()
|
||||
try:
|
||||
tp.search(r"^(\w+) (.*) (HTTP\S*)$")
|
||||
self.method = tp[0][0]
|
||||
self.protocol = tp[0][2]
|
||||
except Exception as a:
|
||||
print(rawRequest)
|
||||
raise a
|
||||
|
||||
pathTMP = tp[0][1].replace(" ", "%20")
|
||||
pathTMP = ('', '') + urlparse(pathTMP)[2:]
|
||||
pathTMP = urlunparse(pathTMP)
|
||||
|
||||
while True:
|
||||
tp.readLine()
|
||||
if (tp.search("^([^:]+): (.*)$")):
|
||||
self.addHeader(tp[0][0], tp[0][1])
|
||||
else:
|
||||
break
|
||||
|
||||
self.setUrl(prot + "://" + self._headers["Host"] + pathTMP)
|
||||
|
||||
pd = ""
|
||||
while tp.readLine():
|
||||
pd += tp.lastFull_line
|
||||
|
||||
if pd:
|
||||
boundary = None
|
||||
if "Content-Type" in self._headers:
|
||||
values = self._headers["Content-Type"].split(";")
|
||||
self.ContentType = values[0].strip().lower()
|
||||
if self.ContentType == "multipart/form-data":
|
||||
boundary = values[1].split("=")[1].strip()
|
||||
|
||||
self.setPostData(pd, boundary)
|
||||
self.setPostData(pd, boundary)
|
||||
|
292
src/wfuzz/externals/reqresp/Response.py
vendored
292
src/wfuzz/externals/reqresp/Response.py
vendored
@ -53,174 +53,176 @@ def get_encodings_from_content(content):
|
||||
|
||||
|
||||
class Response:
|
||||
def __init__(self, protocol="", code="", message=""):
|
||||
self.protocol = protocol # HTTP/1.1
|
||||
self.code = code # 200
|
||||
self.message = message # OK
|
||||
self._headers = [] # bueno pues las cabeceras igual que en la request
|
||||
self.__content = "" # contenido de la response (si i solo si Content-Length existe)
|
||||
self.md5 = "" # hash de los contenidos del resultado
|
||||
self.charlen = "" # Cantidad de caracteres de la respuesta
|
||||
def __init__(self, protocol="", code="", message=""):
|
||||
self.protocol = protocol # HTTP/1.1
|
||||
self.code = code # 200
|
||||
self.message = message # OK
|
||||
self._headers = [] # bueno pues las cabeceras igual que en la request
|
||||
self.__content = "" # contenido de la response (si i solo si Content-Length existe)
|
||||
self.md5 = "" # hash de los contenidos del resultado
|
||||
self.charlen = "" # Cantidad de caracteres de la respuesta
|
||||
|
||||
def addHeader(self, key, value):
|
||||
k = string.capwords(key, "-")
|
||||
self._headers += [(k, value)]
|
||||
def addHeader(self, key, value):
|
||||
k = string.capwords(key, "-")
|
||||
self._headers += [(k, value)]
|
||||
|
||||
def delHeader(self, key):
|
||||
for i in self._headers:
|
||||
if i[0].lower() == key.lower():
|
||||
self._headers.remove(i)
|
||||
def delHeader(self, key):
|
||||
for i in self._headers:
|
||||
if i[0].lower() == key.lower():
|
||||
self._headers.remove(i)
|
||||
|
||||
def addContent(self, text):
|
||||
self.__content = self.__content + text
|
||||
def addContent(self, text):
|
||||
self.__content = self.__content + text
|
||||
|
||||
def __getitem__(self, key):
|
||||
for i, j in self._headers:
|
||||
if key == i:
|
||||
return j
|
||||
print("Error al obtener header!!!")
|
||||
def __getitem__(self, key):
|
||||
for i, j in self._headers:
|
||||
if key == i:
|
||||
return j
|
||||
print("Error al obtener header!!!")
|
||||
|
||||
def getCookie(self):
|
||||
str = []
|
||||
for i, j in self._headers:
|
||||
if i.lower() == "set-cookie":
|
||||
str.append(j.split(";")[0])
|
||||
return "; ".join(str)
|
||||
def getCookie(self):
|
||||
str = []
|
||||
for i, j in self._headers:
|
||||
if i.lower() == "set-cookie":
|
||||
str.append(j.split(";")[0])
|
||||
return "; ".join(str)
|
||||
|
||||
def has_header(self, key):
|
||||
for i, j in self._headers:
|
||||
if i.lower() == key.lower():
|
||||
return True
|
||||
return False
|
||||
def has_header(self, key):
|
||||
for i, j in self._headers:
|
||||
if i.lower() == key.lower():
|
||||
return True
|
||||
return False
|
||||
|
||||
def getLocation(self):
|
||||
for i, j in self._headers:
|
||||
if i.lower() == "location":
|
||||
return j
|
||||
return None
|
||||
def getLocation(self):
|
||||
for i, j in self._headers:
|
||||
if i.lower() == "location":
|
||||
return j
|
||||
return None
|
||||
|
||||
def header_equal(self, header, value):
|
||||
for i, j in self._headers:
|
||||
if i == header and j.lower() == value.lower():
|
||||
return True
|
||||
return False
|
||||
def header_equal(self, header, value):
|
||||
for i, j in self._headers:
|
||||
if i == header and j.lower() == value.lower():
|
||||
return True
|
||||
return False
|
||||
|
||||
def getHeaders(self):
|
||||
return self._headers
|
||||
def getHeaders(self):
|
||||
return self._headers
|
||||
|
||||
def getContent(self):
|
||||
return self.__content
|
||||
def getContent(self):
|
||||
return self.__content
|
||||
|
||||
def getTextHeaders(self):
|
||||
string = str(self.protocol) + " " + str(self.code) + " " + str(self.message) + "\r\n"
|
||||
for i, j in self._headers:
|
||||
string += i + ": " + j + "\r\n"
|
||||
def getTextHeaders(self):
|
||||
string = str(self.protocol) + " " + str(self.code) + " " + str(self.message) + "\r\n"
|
||||
for i, j in self._headers:
|
||||
string += i + ": " + j + "\r\n"
|
||||
|
||||
return string
|
||||
return string
|
||||
|
||||
def getAll(self):
|
||||
string = self.getTextHeaders() + "\r\n" + self.getContent()
|
||||
return string
|
||||
def getAll(self):
|
||||
string = self.getTextHeaders() + "\r\n" + self.getContent()
|
||||
return string
|
||||
|
||||
def Substitute(self, src, dst):
|
||||
a = self.getAll()
|
||||
b = a.replace(src, dst)
|
||||
self.parseResponse(b)
|
||||
def Substitute(self, src, dst):
|
||||
a = self.getAll()
|
||||
b = a.replace(src, dst)
|
||||
self.parseResponse(b)
|
||||
|
||||
def getAll_wpost(self):
|
||||
string = str(self.protocol) + " " + str(self.code) + " " + str(self.message) + "\r\n"
|
||||
for i, j in self._headers:
|
||||
string += i + ": " + j + "\r\n"
|
||||
return string
|
||||
def getAll_wpost(self):
|
||||
string = str(self.protocol) + " " + str(self.code) + " " + str(self.message) + "\r\n"
|
||||
for i, j in self._headers:
|
||||
string += i + ": " + j + "\r\n"
|
||||
return string
|
||||
|
||||
def parseResponse(self, rawheader, rawbody=None, type="curl"):
|
||||
self.__content = ""
|
||||
def parseResponse(self, rawheader, rawbody=None, type="curl"):
|
||||
self.__content = ""
|
||||
self._headers = []
|
||||
|
||||
tp = TextParser()
|
||||
tp.setSource("string", rawheader)
|
||||
|
||||
tp.readUntil(r"(HTTP\S*) ([0-9]+)")
|
||||
while True:
|
||||
while True:
|
||||
try:
|
||||
self.protocol = tp[0][0]
|
||||
except Exception:
|
||||
self.protocol = "unknown"
|
||||
|
||||
try:
|
||||
self.code = tp[0][1]
|
||||
except Exception:
|
||||
self.code = "0"
|
||||
|
||||
if self.code != "100":
|
||||
break
|
||||
else:
|
||||
tp.readUntil(r"(HTTP\S*) ([0-9]+)")
|
||||
|
||||
self.code = int(self.code)
|
||||
|
||||
while True:
|
||||
tp.readLine()
|
||||
if (tp.search("^([^:]+): ?(.*)$")):
|
||||
self.addHeader(tp[0][0], tp[0][1])
|
||||
else:
|
||||
break
|
||||
|
||||
# curl sometimes sends two headers when using follow, 302 and the final header
|
||||
# also when using proxies
|
||||
tp.readLine()
|
||||
if not tp.search(r"(HTTP\S*) ([0-9]+)"):
|
||||
break
|
||||
else:
|
||||
self._headers = []
|
||||
|
||||
tp = TextParser()
|
||||
rawheader = python2_3_convert_from_unicode(rawheader.decode("utf-8", errors='replace'))
|
||||
tp.setSource("string", rawheader)
|
||||
# TODO: this might add to rawbody not directly to __content
|
||||
while tp.skip(1):
|
||||
self.addContent(tp.lastFull_line)
|
||||
|
||||
tp.readUntil(r"(HTTP\S*) ([0-9]+)")
|
||||
while True:
|
||||
while True:
|
||||
try:
|
||||
self.protocol = tp[0][0]
|
||||
except Exception:
|
||||
self.protocol = "unknown"
|
||||
if type == 'curl':
|
||||
self.delHeader("Transfer-Encoding")
|
||||
|
||||
try:
|
||||
self.code = tp[0][1]
|
||||
except Exception:
|
||||
self.code = "0"
|
||||
if self.header_equal("Transfer-Encoding", "chunked"):
|
||||
result = ""
|
||||
content = BytesIO(rawbody)
|
||||
hexa = content.readline()
|
||||
nchunk = int(hexa.strip(), 16)
|
||||
|
||||
if self.code != "100":
|
||||
break
|
||||
else:
|
||||
tp.readUntil(r"(HTTP\S*) ([0-9]+)")
|
||||
while nchunk:
|
||||
result += content.read(nchunk)
|
||||
content.readline()
|
||||
hexa = content.readline()
|
||||
nchunk = int(hexa.strip(), 16)
|
||||
|
||||
self.code = int(self.code)
|
||||
rawbody = result
|
||||
|
||||
while True:
|
||||
tp.readLine()
|
||||
if (tp.search("^([^:]+): ?(.*)$")):
|
||||
self.addHeader(tp[0][0], tp[0][1])
|
||||
else:
|
||||
break
|
||||
if self.header_equal("Content-Encoding", "gzip"):
|
||||
compressedstream = BytesIO(rawbody)
|
||||
gzipper = gzip.GzipFile(fileobj=compressedstream)
|
||||
rawbody = gzipper.read()
|
||||
self.delHeader("Content-Encoding")
|
||||
elif self.header_equal("Content-Encoding", "deflate"):
|
||||
deflated_data = None
|
||||
try:
|
||||
deflater = zlib.decompressobj()
|
||||
deflated_data = deflater.decompress(rawbody)
|
||||
deflated_data += deflater.flush()
|
||||
except zlib.error:
|
||||
try:
|
||||
deflater = zlib.decompressobj(-zlib.MAX_WBITS)
|
||||
deflated_data = deflater.decompress(rawbody)
|
||||
deflated_data += deflater.flush()
|
||||
except zlib.error:
|
||||
deflated_data = ''
|
||||
rawbody = deflated_data
|
||||
self.delHeader("Content-Encoding")
|
||||
|
||||
# curl sometimes sends two headers when using follow, 302 and the final header
|
||||
tp.readLine()
|
||||
if not tp.search(r"(HTTP\S*) ([0-9]+)"):
|
||||
break
|
||||
else:
|
||||
self._headers = []
|
||||
if rawbody is not None:
|
||||
# Try to get charset encoding from headers
|
||||
content_encoding = get_encoding_from_headers(dict(self.getHeaders()))
|
||||
|
||||
while tp.skip(1):
|
||||
self.addContent(tp.lastFull_line)
|
||||
# fallback to default encoding
|
||||
if content_encoding is None:
|
||||
content_encoding = "utf-8"
|
||||
|
||||
if type == 'curl':
|
||||
self.delHeader("Transfer-Encoding")
|
||||
|
||||
if self.header_equal("Transfer-Encoding", "chunked"):
|
||||
result = ""
|
||||
content = BytesIO(rawbody)
|
||||
hexa = content.readline()
|
||||
nchunk = int(hexa.strip(), 16)
|
||||
|
||||
while nchunk:
|
||||
result += content.read(nchunk)
|
||||
content.readline()
|
||||
hexa = content.readline()
|
||||
nchunk = int(hexa.strip(), 16)
|
||||
|
||||
rawbody = result
|
||||
|
||||
if self.header_equal("Content-Encoding", "gzip"):
|
||||
compressedstream = BytesIO(rawbody)
|
||||
gzipper = gzip.GzipFile(fileobj=compressedstream)
|
||||
rawbody = gzipper.read()
|
||||
self.delHeader("Content-Encoding")
|
||||
elif self.header_equal("Content-Encoding", "deflate"):
|
||||
deflated_data = None
|
||||
try:
|
||||
deflater = zlib.decompressobj()
|
||||
deflated_data = deflater.decompress(rawbody)
|
||||
deflated_data += deflater.flush()
|
||||
except zlib.error:
|
||||
try:
|
||||
deflater = zlib.decompressobj(-zlib.MAX_WBITS)
|
||||
deflated_data = deflater.decompress(rawbody)
|
||||
deflated_data += deflater.flush()
|
||||
except zlib.error:
|
||||
deflated_data = ''
|
||||
rawbody = deflated_data
|
||||
self.delHeader("Content-Encoding")
|
||||
|
||||
# Try to get charset encoding from headers
|
||||
content_encoding = get_encoding_from_headers(dict(self.getHeaders()))
|
||||
|
||||
# fallback to default encoding
|
||||
if content_encoding is None:
|
||||
content_encoding = "utf-8"
|
||||
|
||||
self.__content = python2_3_convert_from_unicode(rawbody.decode(content_encoding, errors='replace'))
|
||||
self.__content = python2_3_convert_from_unicode(rawbody.decode(content_encoding, errors='replace'))
|
||||
|
234
src/wfuzz/externals/reqresp/TextParser.py
vendored
234
src/wfuzz/externals/reqresp/TextParser.py
vendored
@ -9,143 +9,143 @@ from builtins import object
|
||||
|
||||
|
||||
class TextParser(object):
|
||||
def __init__(self):
|
||||
self.string = ""
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
self.type = ""
|
||||
self.lastFull_line = None
|
||||
self.lastline = None
|
||||
def __init__(self):
|
||||
self.string = ""
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
self.type = ""
|
||||
self.lastFull_line = None
|
||||
self.lastline = None
|
||||
|
||||
self.actualIndex = 0
|
||||
self.actualIndex = 0
|
||||
|
||||
def __del__(self):
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
def __del__(self):
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
|
||||
def __str__(self):
|
||||
return str(self.matches)
|
||||
def __str__(self):
|
||||
return str(self.matches)
|
||||
|
||||
def __iter__(self):
|
||||
self.actualIndex = 0
|
||||
return self
|
||||
def __iter__(self):
|
||||
self.actualIndex = 0
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
try:
|
||||
value = self.matches[self.actualIndex]
|
||||
self.actualIndex += 1
|
||||
return value
|
||||
except Exception:
|
||||
raise StopIteration
|
||||
def __next__(self):
|
||||
try:
|
||||
value = self.matches[self.actualIndex]
|
||||
self.actualIndex += 1
|
||||
return value
|
||||
except Exception:
|
||||
raise StopIteration
|
||||
|
||||
def setSource(self, t, *args):
|
||||
'''Se especifica el tipo de entrada. Puede ser fichero o entrada estandard
|
||||
def setSource(self, t, *args):
|
||||
'''Se especifica el tipo de entrada. Puede ser fichero o entrada estandard
|
||||
|
||||
Ejemplos: setSource("file","/tmp/file")
|
||||
setSource("stdin")\n'''
|
||||
Ejemplos: setSource("file","/tmp/file")
|
||||
setSource("stdin")\n'''
|
||||
|
||||
if t == "file":
|
||||
self.type = t
|
||||
self.fd = open(args[0], "r")
|
||||
elif t == "stdin":
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
self.type = t
|
||||
elif t == "string":
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
self.type = t
|
||||
self.string = args[0]
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
else:
|
||||
print("Bad argument -- TextParser.setSource()\n")
|
||||
sys.exit(-1)
|
||||
if t == "file":
|
||||
self.type = t
|
||||
self.fd = open(args[0], "r")
|
||||
elif t == "stdin":
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
self.type = t
|
||||
elif t == "string":
|
||||
if self.type == "file":
|
||||
self.fd.close()
|
||||
self.type = t
|
||||
self.string = args[0]
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
else:
|
||||
print("Bad argument -- TextParser.setSource()\n")
|
||||
sys.exit(-1)
|
||||
|
||||
def seekinit(self):
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
def seekinit(self):
|
||||
self.oldindex = 0
|
||||
self.newindex = 0
|
||||
|
||||
def readUntil(self, pattern, caseSens=True):
|
||||
"Lee lineas hasta que el patron (pattern) conincide en alguna linea"
|
||||
def readUntil(self, pattern, caseSens=True):
|
||||
"Lee lineas hasta que el patron (pattern) conincide en alguna linea"
|
||||
|
||||
while True:
|
||||
if (self.readLine() == 0):
|
||||
return False
|
||||
if (self.search(pattern, caseSens) is True):
|
||||
break
|
||||
while True:
|
||||
if (self.readLine() == 0):
|
||||
return False
|
||||
if (self.search(pattern, caseSens) is True):
|
||||
break
|
||||
|
||||
return True
|
||||
return True
|
||||
|
||||
def search(self, pattern, caseSens=True, debug=0):
|
||||
"Intenta hacer Matching entre el pattern pasado por parametro y la ultima linea leida"
|
||||
def search(self, pattern, caseSens=True, debug=0):
|
||||
"Intenta hacer Matching entre el pattern pasado por parametro y la ultima linea leida"
|
||||
|
||||
if not caseSens:
|
||||
self.regexp = re.compile(pattern, re.IGNORECASE)
|
||||
else:
|
||||
self.regexp = re.compile(pattern)
|
||||
self.matches = self.regexp.findall(self.lastline)
|
||||
j = 0
|
||||
for i in self.matches:
|
||||
if not isinstance(i, tuple):
|
||||
self.matches[j] = tuple([self.matches[j]])
|
||||
j += 1
|
||||
if not caseSens:
|
||||
self.regexp = re.compile(pattern, re.IGNORECASE)
|
||||
else:
|
||||
self.regexp = re.compile(pattern)
|
||||
self.matches = self.regexp.findall(self.lastline)
|
||||
j = 0
|
||||
for i in self.matches:
|
||||
if not isinstance(i, tuple):
|
||||
self.matches[j] = tuple([self.matches[j]])
|
||||
j += 1
|
||||
|
||||
# DEBUG PARA MATCHING
|
||||
if (debug == 1):
|
||||
print(("[", self.lastline, "-", pattern, "]"))
|
||||
print((len(self.matches)))
|
||||
print((self.matches))
|
||||
if (debug == 1):
|
||||
print(("[", self.lastline, "-", pattern, "]"))
|
||||
print((len(self.matches)))
|
||||
print((self.matches))
|
||||
|
||||
if len(self.matches) == 0:
|
||||
return False
|
||||
if len(self.matches) == 0:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def __getitem__(self, key):
|
||||
"Para acceder a cada uno de los patrones que coinciden, esta preparado paragrupos de patrones, no para solo un patron"
|
||||
|
||||
return self.matches[key]
|
||||
|
||||
def skip(self, lines):
|
||||
"Salta las lines que se indiquen en el parametro"
|
||||
|
||||
for i in range(lines):
|
||||
if (self.readLine() == 0):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def readLine(self):
|
||||
"Lee la siguiente linea eliminando retornos de carro"
|
||||
|
||||
if self.type == "file":
|
||||
self.lastFull_line = self.fd.readline()
|
||||
elif self.type == "stdin":
|
||||
self.lastFull_line = input()
|
||||
elif self.type == "string":
|
||||
if self.newindex == -1:
|
||||
return 0
|
||||
|
||||
if self.oldindex >= 0:
|
||||
self.newindex = self.string.find("\n", self.oldindex, len(self.string))
|
||||
if self.newindex == -1:
|
||||
self.lastFull_line = self.string[self.oldindex:len(self.string)]
|
||||
else:
|
||||
return True
|
||||
self.lastFull_line = self.string[self.oldindex:self.newindex + 1]
|
||||
|
||||
def __getitem__(self, key):
|
||||
"Para acceder a cada uno de los patrones que coinciden, esta preparado paragrupos de patrones, no para solo un patron"
|
||||
self.oldindex = self.newindex + 1
|
||||
else:
|
||||
self.lastFull_line = ''
|
||||
|
||||
return self.matches[key]
|
||||
bytes_read = len(self.lastFull_line)
|
||||
|
||||
def skip(self, lines):
|
||||
"Salta las lines que se indiquen en el parametro"
|
||||
s = self.lastFull_line
|
||||
self.lastline = s
|
||||
|
||||
for i in range(lines):
|
||||
if (self.readLine() == 0):
|
||||
return False
|
||||
if s[-2:] == '\r\n':
|
||||
self.lastline = s[:-2]
|
||||
elif s[-1:] == '\r' or s[-1:] == '\n':
|
||||
self.lastline = s[:-1]
|
||||
|
||||
return True
|
||||
|
||||
def readLine(self):
|
||||
"Lee la siguiente linea eliminando retornos de carro"
|
||||
|
||||
if self.type == "file":
|
||||
self.lastFull_line = self.fd.readline()
|
||||
elif self.type == "stdin":
|
||||
self.lastFull_line = input()
|
||||
elif self.type == "string":
|
||||
if self.newindex == -1:
|
||||
return 0
|
||||
|
||||
if self.oldindex >= 0:
|
||||
self.newindex = self.string.find("\n", self.oldindex, len(self.string))
|
||||
if self.newindex == -1:
|
||||
self.lastFull_line = self.string[self.oldindex:len(self.string)]
|
||||
else:
|
||||
self.lastFull_line = self.string[self.oldindex:self.newindex + 1]
|
||||
|
||||
self.oldindex = self.newindex + 1
|
||||
else:
|
||||
self.lastFull_line = ''
|
||||
|
||||
bytes_read = len(self.lastFull_line)
|
||||
|
||||
s = self.lastFull_line
|
||||
self.lastline = s
|
||||
|
||||
if s[-2:] == '\r\n':
|
||||
self.lastline = s[:-2]
|
||||
elif s[-1:] == '\r' or s[-1:] == '\n':
|
||||
self.lastline = s[:-1]
|
||||
|
||||
return bytes_read
|
||||
return bytes_read
|
||||
|
194
src/wfuzz/externals/reqresp/Variables.py
vendored
194
src/wfuzz/externals/reqresp/Variables.py
vendored
@ -1,122 +1,136 @@
|
||||
from .TextParser import TextParser
|
||||
import json
|
||||
|
||||
|
||||
class Variable:
|
||||
def __init__(self, name, value="", extraInfo=""):
|
||||
self.name = name
|
||||
self.value = value
|
||||
self.initValue = value
|
||||
self.extraInfo = extraInfo
|
||||
def __init__(self, name, value="", extraInfo=""):
|
||||
self.name = name
|
||||
self.value = value
|
||||
self.initValue = value
|
||||
self.extraInfo = extraInfo
|
||||
|
||||
def restore(self):
|
||||
self.value = self.initValue
|
||||
def restore(self):
|
||||
self.value = self.initValue
|
||||
|
||||
def change(self, newval):
|
||||
self.initValue = self.value = newval
|
||||
def change(self, newval):
|
||||
self.initValue = self.value = newval
|
||||
|
||||
def update(self, val):
|
||||
self.value = val
|
||||
def update(self, val):
|
||||
self.value = val
|
||||
|
||||
def append(self, val):
|
||||
self.value += val
|
||||
def append(self, val):
|
||||
self.value += val
|
||||
|
||||
def __str__(self):
|
||||
return "[ %s : %s ]" % (self.name, self.value)
|
||||
def __str__(self):
|
||||
return "[ %s : %s ]" % (self.name, self.value)
|
||||
|
||||
|
||||
class VariablesSet:
|
||||
def __init__(self):
|
||||
self.variables = []
|
||||
self.boundary = None
|
||||
def __init__(self):
|
||||
self.variables = []
|
||||
self.boundary = None
|
||||
|
||||
def names(self):
|
||||
dicc = []
|
||||
for i in self.variables:
|
||||
dicc.append(i.name)
|
||||
def names(self):
|
||||
dicc = []
|
||||
for i in self.variables:
|
||||
dicc.append(i.name)
|
||||
|
||||
return dicc
|
||||
return dicc
|
||||
|
||||
def existsVar(self, name):
|
||||
return name in self.names()
|
||||
def existsVar(self, name):
|
||||
return name in self.names()
|
||||
|
||||
def addVariable(self, name, value="", extraInfo=""):
|
||||
self.variables.append(Variable(name, value, extraInfo))
|
||||
def addVariable(self, name, value="", extraInfo=""):
|
||||
self.variables.append(Variable(name, value, extraInfo))
|
||||
|
||||
def getVariable(self, name):
|
||||
dicc = []
|
||||
for i in self.variables:
|
||||
if i.name == name:
|
||||
dicc.append(i)
|
||||
def getVariable(self, name):
|
||||
dicc = []
|
||||
for i in self.variables:
|
||||
if i.name == name:
|
||||
dicc.append(i)
|
||||
|
||||
if len(dicc) > 1:
|
||||
raise Exception("Variable exists more than one time!!! :D" % (name))
|
||||
if len(dicc) > 1:
|
||||
raise Exception("Variable exists more than one time!!! :D" % (name))
|
||||
|
||||
if not dicc:
|
||||
var = Variable(name)
|
||||
self.variables.append(var)
|
||||
return var
|
||||
if not dicc:
|
||||
var = Variable(name)
|
||||
self.variables.append(var)
|
||||
return var
|
||||
|
||||
return dicc[0]
|
||||
return dicc[0]
|
||||
|
||||
def urlEncoded(self):
|
||||
return "&".join(["=".join([i.name, i.value]) if i.value is not None else i.name for i in self.variables])
|
||||
def urlEncoded(self):
|
||||
return "&".join(["=".join([i.name, i.value]) if i.value is not None else i.name for i in self.variables])
|
||||
|
||||
def parseUrlEncoded(self, cad):
|
||||
dicc = []
|
||||
def json_encoded(self):
|
||||
dicc = {i.name: i.value for i in self.variables}
|
||||
|
||||
if cad == '':
|
||||
dicc.append(Variable('', None))
|
||||
return json.dumps(dicc)
|
||||
|
||||
for i in cad.split("&"):
|
||||
if i:
|
||||
list = i.split("=", 1)
|
||||
if len(list) == 1:
|
||||
dicc.append(Variable(list[0], None))
|
||||
elif len(list) == 2:
|
||||
dicc.append(Variable(list[0], list[1]))
|
||||
def parse_json_encoded(self, cad):
|
||||
dicc = []
|
||||
|
||||
self.variables = dicc
|
||||
for key, value in json.loads(cad).items():
|
||||
dicc.append(Variable(key, value))
|
||||
|
||||
def multipartEncoded(self):
|
||||
if not self.boundary:
|
||||
self.boundary = "---------------------------D33PB1T0R3QR3SP0B0UND4RY2203"
|
||||
pd = ""
|
||||
for i in self.variables:
|
||||
pd += "--" + self.boundary + "\r\n"
|
||||
pd += "%s\r\n\r\n%s\r\n" % ("\r\n".join(i.extraInfo), i.value)
|
||||
pd += "--" + self.boundary + "--\r\n"
|
||||
return pd
|
||||
self.variables = dicc
|
||||
|
||||
def parseMultipart(self, cad, boundary):
|
||||
self.boundary = boundary
|
||||
dicc = []
|
||||
tp = TextParser()
|
||||
tp.setSource("string", cad)
|
||||
def parseUrlEncoded(self, cad):
|
||||
dicc = []
|
||||
|
||||
while True:
|
||||
headers = []
|
||||
if not tp.readUntil("name=\"([^\"]+)\""):
|
||||
break
|
||||
var = tp[0][0]
|
||||
headers.append(tp.lastFull_line.strip())
|
||||
while True:
|
||||
tp.readLine()
|
||||
if tp.search("^([^:]+): (.*)$"):
|
||||
headers.append(tp.lastFull_line.strip())
|
||||
else:
|
||||
break
|
||||
if cad == '':
|
||||
dicc.append(Variable('', None))
|
||||
|
||||
value = ""
|
||||
while True:
|
||||
tp.readLine()
|
||||
if not tp.search(boundary):
|
||||
value += tp.lastFull_line
|
||||
else:
|
||||
break
|
||||
for i in cad.split("&"):
|
||||
if i:
|
||||
list = i.split("=", 1)
|
||||
if len(list) == 1:
|
||||
dicc.append(Variable(list[0], None))
|
||||
elif len(list) == 2:
|
||||
dicc.append(Variable(list[0], list[1]))
|
||||
|
||||
if value[-2:] == "\r\n":
|
||||
value = value[:-2]
|
||||
self.variables = dicc
|
||||
|
||||
dicc.append(Variable(var, value, headers))
|
||||
def multipartEncoded(self):
|
||||
if not self.boundary:
|
||||
self.boundary = "---------------------------D33PB1T0R3QR3SP0B0UND4RY2203"
|
||||
pd = ""
|
||||
for i in self.variables:
|
||||
pd += "--" + self.boundary + "\r\n"
|
||||
pd += "%s\r\n\r\n%s\r\n" % ("\r\n".join(i.extraInfo), i.value)
|
||||
pd += "--" + self.boundary + "--\r\n"
|
||||
return pd
|
||||
|
||||
self.variables = dicc
|
||||
def parseMultipart(self, cad, boundary):
|
||||
self.boundary = boundary
|
||||
dicc = []
|
||||
tp = TextParser()
|
||||
tp.setSource("string", cad)
|
||||
|
||||
while True:
|
||||
headers = []
|
||||
if not tp.readUntil("name=\"([^\"]+)\""):
|
||||
break
|
||||
var = tp[0][0]
|
||||
headers.append(tp.lastFull_line.strip())
|
||||
while True:
|
||||
tp.readLine()
|
||||
if tp.search("^([^:]+): (.*)$"):
|
||||
headers.append(tp.lastFull_line.strip())
|
||||
else:
|
||||
break
|
||||
|
||||
value = ""
|
||||
while True:
|
||||
tp.readLine()
|
||||
if not tp.search(boundary):
|
||||
value += tp.lastFull_line
|
||||
else:
|
||||
break
|
||||
|
||||
if value[-2:] == "\r\n":
|
||||
value = value[:-2]
|
||||
|
||||
dicc.append(Variable(var, value, headers))
|
||||
|
||||
self.variables = dicc
|
||||
|
@ -10,6 +10,9 @@ import os
|
||||
# python2 and 3: metaclass
|
||||
from future.utils import with_metaclass
|
||||
|
||||
ERROR_CODE = -1
|
||||
BASELINE_CODE = -2
|
||||
|
||||
|
||||
class Settings(SettingsBase):
|
||||
def get_config_file(self):
|
||||
@ -17,7 +20,10 @@ class Settings(SettingsBase):
|
||||
|
||||
def set_defaults(self):
|
||||
return dict(
|
||||
plugins=[("bing_apikey", '')],
|
||||
plugins=[
|
||||
("bing_apikey", ''),
|
||||
("shodan_apikey", '')
|
||||
],
|
||||
kbase=[("discovery.blacklist", '.svg-.css-.js-.jpg-.gif-.png-.jpeg-.mov-.avi-.flv-.ico')],
|
||||
connection=[
|
||||
("concurrent", '10'),
|
||||
@ -76,6 +82,9 @@ class Facade(with_metaclass(utils.Singleton, object)):
|
||||
def proxy(self, which):
|
||||
return self._load(which)
|
||||
|
||||
def get_registrants(self):
|
||||
return self.__plugins.keys()
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in ["printers", "payloads", "iterators", "encoders", "scripts"]:
|
||||
return self._load(name)
|
||||
|
@ -1,8 +1,9 @@
|
||||
from .exception import FuzzExceptIncorrectFilter, FuzzExceptBadOptions, FuzzExceptInternalError, FuzzException
|
||||
from .fuzzobjects import FuzzResult
|
||||
from .utils import rgetattr, rsetattr, value_in_any_list_item
|
||||
|
||||
import re
|
||||
import collections
|
||||
import operator
|
||||
|
||||
# Python 2 and 3: alternative 4
|
||||
try:
|
||||
@ -10,7 +11,7 @@ try:
|
||||
except ImportError:
|
||||
from urllib import unquote
|
||||
|
||||
from .facade import Facade
|
||||
from .facade import Facade, ERROR_CODE, BASELINE_CODE
|
||||
|
||||
|
||||
PYPARSING = True
|
||||
@ -25,33 +26,34 @@ class FuzzResFilter:
|
||||
def __init__(self, ffilter=None, filter_string=None):
|
||||
if PYPARSING:
|
||||
quoted_str_value = QuotedString('\'', unquoteResults=True, escChar='\\')
|
||||
int_values = Word("0123456789")
|
||||
int_values = Word("0123456789").setParseAction(lambda s, l, t: [int(t[0])])
|
||||
error_value = Literal("XXX").setParseAction(self.__compute_xxx_value)
|
||||
bbb_value = Literal("BBB").setParseAction(self.__compute_bbb_value)
|
||||
field_value = Word(alphas + "." + "_" + "-")
|
||||
reserverd_words = oneOf("BBB XXX")
|
||||
|
||||
basic_primitives = int_values | quoted_str_value
|
||||
|
||||
operator_names = oneOf("m d e un u r l sw unique startswith decode encode unquote replace lower upper").setParseAction(lambda s, l, t: [(l, t[0])])
|
||||
operator_names = oneOf("m d e un u r l sw gre gregex unique startswith decode encode unquote replace lower upper").setParseAction(lambda s, l, t: [(l, t[0])])
|
||||
|
||||
fuzz_symbol = (Suppress("FUZ") + Optional(Word("23456789"), 1).setParseAction(lambda s, l, t: [int(t[0]) - 1]) + Suppress("Z")).setParseAction(self.__compute_fuzz_symbol)
|
||||
fuzz_symbol = (Suppress("FUZ") + Optional(Word("23456789"), 1).setParseAction(lambda s, l, t: [int(t[0]) - 1]) + Suppress("Z")).setParseAction(self._compute_fuzz_symbol)
|
||||
operator_call = Group(Suppress("|") + operator_names + Suppress("(") + Optional(basic_primitives, None) + Optional(Suppress(",") + basic_primitives, None) + Suppress(")"))
|
||||
|
||||
fuzz_value = (fuzz_symbol + Optional(Suppress("[") + field_value + Suppress("]"), None)).setParseAction(self.__compute_fuzz_value)
|
||||
fuzz_value_op = ((fuzz_symbol + Suppress("[") + Optional(field_value)).setParseAction(self.__compute_fuzz_value) + operator_call + Suppress("]")).setParseAction(self.__compute_perl_value)
|
||||
fuzz_value_op2 = ((fuzz_symbol + operator_call).setParseAction(self.__compute_perl_value))
|
||||
|
||||
res_value_op = (Word(alphas + "." + "_" + "-").setParseAction(self.__compute_res_value) + Optional(operator_call, None)).setParseAction(self.__compute_perl_value)
|
||||
res_value_op = (~reserverd_words + Word("0123456789" + alphas + "." + "_" + "-").setParseAction(self.__compute_res_value) + Optional(operator_call, None)).setParseAction(self.__compute_perl_value)
|
||||
basic_primitives_op = (basic_primitives + Optional(operator_call, None)).setParseAction(self.__compute_perl_value)
|
||||
|
||||
fuzz_statement = fuzz_value ^ fuzz_value_op ^ fuzz_value_op2 ^ res_value_op ^ basic_primitives_op
|
||||
fuzz_statement = basic_primitives_op ^ fuzz_value ^ fuzz_value_op ^ fuzz_value_op2 ^ res_value_op
|
||||
|
||||
operator = oneOf("and or")
|
||||
not_operator = Optional(oneOf("not"), "notpresent")
|
||||
|
||||
symbol_expr = Group(fuzz_statement + oneOf("= != < > >= <= =~ !~ ~") + (bbb_value ^ error_value ^ fuzz_statement ^ basic_primitives)).setParseAction(self.__compute_expr)
|
||||
symbol_expr = Group(fuzz_statement + oneOf("= == != < > >= <= =~ !~ ~ := =+ =-") + (bbb_value ^ error_value ^ basic_primitives ^ fuzz_statement)).setParseAction(self.__compute_expr)
|
||||
|
||||
definition = fuzz_statement ^ symbol_expr
|
||||
definition = symbol_expr ^ fuzz_statement
|
||||
definition_not = not_operator + definition
|
||||
definition_expr = definition_not + ZeroOrMore(operator + definition_not)
|
||||
|
||||
@ -87,54 +89,52 @@ class FuzzResFilter:
|
||||
self.hideparams['filter_string'] = filter_string
|
||||
|
||||
self.baseline = None
|
||||
self.stack = {}
|
||||
self.stack = []
|
||||
|
||||
self._cache = collections.defaultdict(set)
|
||||
|
||||
def set_baseline(self, res):
|
||||
if FuzzResult.BASELINE_CODE in self.hideparams['lines']:
|
||||
if BASELINE_CODE in self.hideparams['lines']:
|
||||
self.hideparams['lines'].append(res.lines)
|
||||
if FuzzResult.BASELINE_CODE in self.hideparams['codes']:
|
||||
if BASELINE_CODE in self.hideparams['codes']:
|
||||
self.hideparams['codes'].append(res.code)
|
||||
if FuzzResult.BASELINE_CODE in self.hideparams['words']:
|
||||
if BASELINE_CODE in self.hideparams['words']:
|
||||
self.hideparams['words'].append(res.words)
|
||||
if FuzzResult.BASELINE_CODE in self.hideparams['chars']:
|
||||
if BASELINE_CODE in self.hideparams['chars']:
|
||||
self.hideparams['chars'].append(res.chars)
|
||||
|
||||
self.baseline = res
|
||||
|
||||
def __compute_res_value(self, tokens):
|
||||
self.stack["field"] = tokens[0]
|
||||
self.stack.append(tokens[0])
|
||||
|
||||
return self.res.get_field(self.stack["field"])
|
||||
try:
|
||||
return rgetattr(self.res, tokens[0])
|
||||
except AttributeError:
|
||||
raise FuzzExceptIncorrectFilter("Non-existing introspection field or HTTP parameter \"{}\"!".format(tokens[0]))
|
||||
|
||||
def __compute_fuzz_symbol(self, tokens):
|
||||
def _compute_fuzz_symbol(self, tokens):
|
||||
i = tokens[0]
|
||||
|
||||
try:
|
||||
return self.res.payload[i]
|
||||
return self.res.payload[i].content
|
||||
except IndexError:
|
||||
raise FuzzExceptIncorrectFilter("Non existent FUZZ payload! Use a correct index.")
|
||||
except AttributeError:
|
||||
if i == 0:
|
||||
return self.res
|
||||
else:
|
||||
raise FuzzExceptIncorrectFilter("Non existent FUZZ payload! Use a correct index.")
|
||||
|
||||
def __compute_fuzz_value(self, tokens):
|
||||
fuzz_val, field = tokens
|
||||
|
||||
self.stack["field"] = field
|
||||
self.stack.append(field)
|
||||
|
||||
try:
|
||||
return fuzz_val.get_field(field) if field else fuzz_val
|
||||
return rgetattr(fuzz_val, field) if field else fuzz_val
|
||||
except IndexError:
|
||||
raise FuzzExceptIncorrectFilter("Non existent FUZZ payload! Use a correct index.")
|
||||
except AttributeError as e:
|
||||
raise FuzzExceptIncorrectFilter("A field expression must be used with a fuzzresult payload not a string. %s" % str(e))
|
||||
|
||||
def __compute_bbb_value(self, tokens):
|
||||
element = self.stack["field"]
|
||||
element = self.stack[0] if self.stack else None
|
||||
|
||||
if self.baseline is None:
|
||||
raise FuzzExceptBadOptions("FilterQ: specify a baseline value when using BBB")
|
||||
@ -154,6 +154,7 @@ class FuzzResFilter:
|
||||
|
||||
def __compute_perl_value(self, tokens):
|
||||
leftvalue, exp = tokens
|
||||
# import pdb; pdb.set_trace()
|
||||
|
||||
if exp:
|
||||
loc_op, middlevalue, rightvalue = exp
|
||||
@ -173,6 +174,17 @@ class FuzzResFilter:
|
||||
return leftvalue.upper()
|
||||
elif op == "lower" or op == "l":
|
||||
return leftvalue.lower()
|
||||
elif op == 'gregex' or op == "gre":
|
||||
search_res = None
|
||||
try:
|
||||
regex = re.compile(middlevalue)
|
||||
search_res = regex.search(leftvalue)
|
||||
except re.error as e:
|
||||
raise FuzzExceptBadOptions("Invalid regex expression used in expression: %s" % str(e))
|
||||
|
||||
if search_res is None:
|
||||
return ''
|
||||
return search_res.group(1)
|
||||
elif op == 'startswith' or op == "sw":
|
||||
return leftvalue.strip().startswith(middlevalue)
|
||||
elif op == 'unique' or op == "u":
|
||||
@ -187,35 +199,56 @@ class FuzzResFilter:
|
||||
return ret
|
||||
|
||||
def __compute_xxx_value(self, tokens):
|
||||
return FuzzResult.ERROR_CODE
|
||||
return ERROR_CODE
|
||||
|
||||
def __compute_expr(self, tokens):
|
||||
leftvalue, operator, rightvalue = tokens[0]
|
||||
leftvalue, exp_operator, rightvalue = tokens[0]
|
||||
|
||||
field_to_set = self.stack[0] if self.stack else None
|
||||
|
||||
try:
|
||||
if operator == "=":
|
||||
return leftvalue == rightvalue
|
||||
elif operator == "<=":
|
||||
if exp_operator in ["=", '==']:
|
||||
return str(leftvalue) == str(rightvalue)
|
||||
elif exp_operator == "<=":
|
||||
return leftvalue <= rightvalue
|
||||
elif operator == ">=":
|
||||
elif exp_operator == ">=":
|
||||
return leftvalue >= rightvalue
|
||||
elif operator == "<":
|
||||
elif exp_operator == "<":
|
||||
return leftvalue < rightvalue
|
||||
elif operator == ">":
|
||||
elif exp_operator == ">":
|
||||
return leftvalue > rightvalue
|
||||
elif operator == "!=":
|
||||
elif exp_operator == "!=":
|
||||
return leftvalue != rightvalue
|
||||
elif operator == "=~":
|
||||
elif exp_operator == "=~":
|
||||
regex = re.compile(rightvalue, re.MULTILINE | re.DOTALL)
|
||||
return regex.search(leftvalue) is not None
|
||||
elif operator == "!~":
|
||||
return rightvalue.lower() not in leftvalue.lower()
|
||||
elif operator == "~":
|
||||
return rightvalue.lower() in leftvalue.lower()
|
||||
elif exp_operator in ["!~", "~"]:
|
||||
ret = True
|
||||
|
||||
if isinstance(leftvalue, str):
|
||||
ret = rightvalue.lower() in leftvalue.lower()
|
||||
elif isinstance(leftvalue, list):
|
||||
ret = value_in_any_list_item(rightvalue, leftvalue)
|
||||
elif isinstance(leftvalue, dict):
|
||||
return len({k: v for (k, v) in leftvalue.items() if rightvalue.lower() in k.lower() or value_in_any_list_item(rightvalue, v)}) > 0
|
||||
else:
|
||||
raise FuzzExceptBadOptions("Invalid operand type {}".format(rightvalue))
|
||||
|
||||
return ret if exp_operator == "~" else not ret
|
||||
elif exp_operator == ":=":
|
||||
rsetattr(self.res, field_to_set, rightvalue, None)
|
||||
elif exp_operator == "=+":
|
||||
rsetattr(self.res, field_to_set, rightvalue, operator.add)
|
||||
elif exp_operator == "=-":
|
||||
rsetattr(self.res, field_to_set, rightvalue, lambda x, y: y + x)
|
||||
except re.error as e:
|
||||
raise FuzzExceptBadOptions("Invalid regex expression used in expression: %s" % str(e))
|
||||
except TypeError as e:
|
||||
raise FuzzExceptBadOptions("Invalid regex expression used in filter: %s" % str(e))
|
||||
raise FuzzExceptBadOptions("Invalid operand types used in expression: %s" % str(e))
|
||||
except ParseException as e:
|
||||
raise FuzzExceptBadOptions("Invalid regex expression used in filter: %s" % str(e))
|
||||
raise FuzzExceptBadOptions("Invalid filter: %s" % str(e))
|
||||
|
||||
return True
|
||||
|
||||
def __myreduce(self, elements):
|
||||
first = elements[0]
|
||||
@ -225,6 +258,7 @@ class FuzzResFilter:
|
||||
elif elements[i] == "or":
|
||||
first = (first or elements[i + 1])
|
||||
|
||||
self.stack = []
|
||||
return first
|
||||
|
||||
def __compute_not_operator(self, tokens):
|
||||
@ -268,7 +302,7 @@ class FuzzResFilter:
|
||||
|
||||
if res.code in self.hideparams['codes'] or res.lines in self.hideparams['lines'] \
|
||||
or res.words in self.hideparams['words'] or res.chars in self.hideparams['chars']:
|
||||
cond1 = self.hideparams['codes_show']
|
||||
cond1 = self.hideparams['codes_show']
|
||||
|
||||
if self.hideparams['regex']:
|
||||
if self.hideparams['regex'].search(res.history.content):
|
||||
@ -307,3 +341,19 @@ class FuzzResFilter:
|
||||
ffilter.hideparams['chars'] = filter_options["hh"]
|
||||
|
||||
return ffilter
|
||||
|
||||
def get_fuzz_words(self):
|
||||
marker_regex = re.compile(r"FUZ\d*Z", re.MULTILINE | re.DOTALL)
|
||||
fuzz_words = marker_regex.findall(self.hideparams["filter_string"])
|
||||
|
||||
return fuzz_words
|
||||
|
||||
|
||||
class FuzzResFilterSlice(FuzzResFilter):
|
||||
def _compute_fuzz_symbol(self, tokens):
|
||||
i = tokens[0]
|
||||
|
||||
if i != 0:
|
||||
raise FuzzExceptIncorrectFilter("Non existent FUZZ payload! Use a correct index.")
|
||||
|
||||
return self.res
|
||||
|
@ -3,6 +3,7 @@ import hashlib
|
||||
import re
|
||||
import itertools
|
||||
import operator
|
||||
import pycurl
|
||||
|
||||
# Python 2 and 3
|
||||
import sys
|
||||
@ -13,63 +14,54 @@ else:
|
||||
|
||||
from threading import Lock
|
||||
from collections import namedtuple
|
||||
from collections import OrderedDict
|
||||
from collections import defaultdict
|
||||
|
||||
from .filter import FuzzResFilter
|
||||
from .externals.reqresp import Request, Response
|
||||
from .exception import FuzzExceptBadAPI, FuzzExceptBadOptions, FuzzExceptInternalError
|
||||
from .facade import Facade
|
||||
from .facade import Facade, ERROR_CODE
|
||||
from .mixins import FuzzRequestUrlMixing, FuzzRequestSoupMixing
|
||||
|
||||
from .utils import python2_3_convert_to_unicode
|
||||
from .utils import python2_3_convert_to_unicode, python2_3_convert_from_unicode
|
||||
from .utils import MyCounter
|
||||
from .utils import rgetattr
|
||||
from .utils import DotDict
|
||||
|
||||
auth_header = namedtuple("auth_header", "method credentials")
|
||||
|
||||
|
||||
class headers:
|
||||
class headers(object):
|
||||
class header(DotDict):
|
||||
def __str__(self):
|
||||
return "\n".join(["{}: {}".format(k, v) for k, v in self.items()])
|
||||
|
||||
def __init__(self, req):
|
||||
self._req = req
|
||||
|
||||
@property
|
||||
def response(self):
|
||||
return OrderedDict(self._req.response.getHeaders()) if self._req.response else {}
|
||||
return headers.header(self._req.response.getHeaders()) if self._req.response else {}
|
||||
|
||||
@property
|
||||
def request(self):
|
||||
return OrderedDict([x.split(": ", 1) for x in self._req.getHeaders()])
|
||||
return headers.header([x.split(": ", 1) for x in self._req.getHeaders()])
|
||||
|
||||
def add(self, dd):
|
||||
for k, v in list(dd.items()):
|
||||
self._req._headers[k] = v
|
||||
@request.setter
|
||||
def request(self, values_dict):
|
||||
self._req._headers.update(values_dict)
|
||||
if "Content-Type" in values_dict:
|
||||
self._req.ContentType = values_dict['Content-Type']
|
||||
|
||||
def get_field(self, field):
|
||||
attr = field.split(".")
|
||||
num_fields = len(attr)
|
||||
|
||||
if num_fields == 2:
|
||||
if attr[1] == "request":
|
||||
return ", ".join(["%s:%s" % (x[0], x[1]) for x in list(self.request.items())])
|
||||
elif attr[1] == "response":
|
||||
return ", ".join(["%s:%s" % (x[0], x[1]) for x in list(self.response.items())])
|
||||
else:
|
||||
raise FuzzExceptBadAPI("headers must be specified in the form of headers.[request|response].<header name>")
|
||||
elif num_fields != 3:
|
||||
raise FuzzExceptBadAPI("headers must be specified in the form of headers.[request|response].<header name>")
|
||||
|
||||
ret = ""
|
||||
try:
|
||||
if attr[1] == "request":
|
||||
ret = self.request[attr[2]]
|
||||
elif attr[1] == "response":
|
||||
ret = self.response[attr[2]]
|
||||
else:
|
||||
raise FuzzExceptBadAPI("headers must be specified in the form of headers.[request|response].<header name>")
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return ret.strip()
|
||||
@property
|
||||
def all(self):
|
||||
return headers.header(self.request + self.response)
|
||||
|
||||
|
||||
class cookies:
|
||||
class cookies(object):
|
||||
class cookie(DotDict):
|
||||
def __str__(self):
|
||||
return "\n".join(["{}={}".format(k, v) for k, v in self.items()])
|
||||
|
||||
def __init__(self, req):
|
||||
self._req = req
|
||||
|
||||
@ -78,106 +70,71 @@ class cookies:
|
||||
if self._req.response:
|
||||
c = self._req.response.getCookie().split("; ")
|
||||
if c[0]:
|
||||
return OrderedDict([[x[0], x[2]] for x in [x.partition("=") for x in c]])
|
||||
return cookies.cookie([[x[0], x[2]] for x in [x.partition("=") for x in c]])
|
||||
|
||||
return {}
|
||||
return cookies.cookie({})
|
||||
|
||||
@property
|
||||
def request(self):
|
||||
if 'Cookie' in self._req._headers:
|
||||
c = self._req._headers['Cookie'].split("; ")
|
||||
if c[0]:
|
||||
return OrderedDict([[x[0], x[2]] for x in [x.partition("=") for x in c]])
|
||||
return cookies.cookie([[x[0], x[2]] for x in [x.partition("=") for x in c]])
|
||||
|
||||
return {}
|
||||
return cookies.cookie({})
|
||||
|
||||
def get_field(self, field):
|
||||
attr = field.split(".")
|
||||
num_fields = len(attr)
|
||||
@request.setter
|
||||
def request(self, values):
|
||||
self._req._headers["Cookie"] = "; ".join(values)
|
||||
|
||||
if num_fields == 2:
|
||||
|
||||
if attr[1] == "response":
|
||||
if self._req.response:
|
||||
return self._req.response.getCookie()
|
||||
elif attr[1] == "request":
|
||||
return self._req['COOKIE']
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Cookie must be specified in the form of cookies.[request|response]")
|
||||
elif num_fields == 3:
|
||||
try:
|
||||
if attr[1] == "request":
|
||||
return self.request[attr[2]]
|
||||
elif attr[1] == "response":
|
||||
return self.response[attr[2]]
|
||||
else:
|
||||
raise FuzzExceptBadAPI("headers must be specified in the form of headers.[request|response].<header name>")
|
||||
except KeyError:
|
||||
return ""
|
||||
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Cookie must be specified in the form of cookies.[request|response].<<name>>")
|
||||
|
||||
return ""
|
||||
@property
|
||||
def all(self):
|
||||
return cookies.cookie(self.request + self.response)
|
||||
|
||||
|
||||
class params(object):
|
||||
class param(DotDict):
|
||||
def __str__(self):
|
||||
return "\n".join(["{}={}".format(k, v) for k, v in self.items()])
|
||||
|
||||
def __init__(self, req):
|
||||
self._req = req
|
||||
|
||||
@property
|
||||
def get(self):
|
||||
return OrderedDict([(x.name, x.value) for x in self._req.getGETVars()])
|
||||
return params.param([(x.name, x.value) for x in self._req.getGETVars()])
|
||||
|
||||
@get.setter
|
||||
def get(self, values):
|
||||
if isinstance(values, dict):
|
||||
for key, value in values.items():
|
||||
self._req.setVariableGET(key, value)
|
||||
self._req.setVariableGET(key, str(value))
|
||||
else:
|
||||
raise FuzzExceptBadAPI("GET Parameters must be specified as a dictionary")
|
||||
|
||||
@property
|
||||
def post(self):
|
||||
return OrderedDict([(x.name, x.value) for x in self._req.getPOSTVars()])
|
||||
if self._req._non_parsed_post is None:
|
||||
return params.param([(x.name, x.value) for x in self._req.getPOSTVars()])
|
||||
else:
|
||||
return self._req.postdata
|
||||
|
||||
@post.setter
|
||||
def post(self, pp):
|
||||
if isinstance(pp, dict):
|
||||
for key, value in pp.items():
|
||||
self._req.setVariablePOST(key, str(value))
|
||||
self._req.setVariablePOST(key, str(value) if value is not None else value)
|
||||
elif isinstance(pp, str):
|
||||
self._req.setPostData(pp)
|
||||
|
||||
def get_field(self, field):
|
||||
attr = field.split(".")
|
||||
num_fields = len(attr)
|
||||
@property
|
||||
def all(self):
|
||||
return params.param(self.get + self.post)
|
||||
|
||||
if num_fields == 1 and attr[0] == "params":
|
||||
pp = ", ".join(["%s:%s" % (x[0], x[1]) for x in list(dict(list(self.get.items()) + list(self.post.items())).items())])
|
||||
return "" if not pp else pp
|
||||
elif num_fields == 2:
|
||||
if attr[1] == "get":
|
||||
return ", ".join(["%s=%s" % (x[0], x[1]) for x in list(self.get.items())])
|
||||
elif attr[1] == "post":
|
||||
return ", ".join(["%s=%s" % (x[0], x[1]) for x in list(self.post.items())])
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Parameters must be specified as params.[get/post].<name>")
|
||||
elif num_fields == 3:
|
||||
ret = ""
|
||||
try:
|
||||
if attr[1] == "get":
|
||||
ret = self.get[attr[2]]
|
||||
elif attr[1] == "post":
|
||||
ret = self.post[attr[2]]
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Parameters must be specified as params.[get/post].<name>")
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return ret
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Parameters must be specified as params.[get/post].<name>")
|
||||
@all.setter
|
||||
def all(self, values):
|
||||
self.get = values
|
||||
self.post = values
|
||||
|
||||
|
||||
class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
@ -188,8 +145,9 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
self._allvars = None
|
||||
self.wf_fuzz_methods = None
|
||||
self.wf_retries = 0
|
||||
self.wf_ip = None
|
||||
|
||||
self.headers.add({"User-Agent": Facade().sett.get("connection", "user-agent")})
|
||||
self.headers.request = {"User-Agent": Facade().sett.get("connection", "user-agent")}
|
||||
|
||||
# methods for accessing HTTP requests information consistenly accross the codebase
|
||||
|
||||
@ -249,7 +207,8 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
|
||||
@url.setter
|
||||
def url(self, u):
|
||||
if not u.startswith("FUZ") and urlparse(u).scheme == "":
|
||||
# urlparse goes wrong with IP:port without scheme (https://bugs.python.org/issue754016)
|
||||
if not u.startswith("FUZ") and (urlparse(u).netloc == "" or urlparse(u).scheme == ""):
|
||||
u = "http://" + u
|
||||
|
||||
if urlparse(u).path == "":
|
||||
@ -273,7 +232,7 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
|
||||
@code.setter
|
||||
def code(self, c):
|
||||
self._request.response.code = c
|
||||
self._request.response.code = int(c)
|
||||
|
||||
@property
|
||||
def auth(self):
|
||||
@ -301,46 +260,6 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
def reqtime(self, t):
|
||||
self._request.totaltime = t
|
||||
|
||||
def set_field(self, field, value):
|
||||
if field in ["url"]:
|
||||
self.url = value
|
||||
|
||||
def get_field(self, field):
|
||||
alias = dict([('c', 'code')])
|
||||
|
||||
if field in alias:
|
||||
field = alias[field]
|
||||
|
||||
if field in ["url", "method", "scheme", "host", "content", "raw_content", "code"]:
|
||||
return getattr(self, field)
|
||||
elif field in ["code"]:
|
||||
return str(getattr(self, field))
|
||||
elif field.startswith("cookies"):
|
||||
return self.cookies.get_field(field).strip()
|
||||
elif field.startswith("headers"):
|
||||
return self.headers.get_field(field)
|
||||
elif field.startswith("params"):
|
||||
return self.params.get_field(field)
|
||||
elif field.startswith("url."):
|
||||
attr = field.split(".")
|
||||
allowed_attr = ["scheme", "netloc", "path", "params", "query", "fragment", "ffname", "fext", "fname", "isbllist", "hasquery"]
|
||||
|
||||
if len(attr) != 2:
|
||||
raise FuzzExceptBadAPI("Url must be specified as url.<field>")
|
||||
|
||||
if attr[1] in allowed_attr:
|
||||
return getattr(self.urlparse, attr[1])
|
||||
elif attr[1] == "pstrip":
|
||||
return self.to_cache_key()
|
||||
elif attr[1] == "ispath":
|
||||
return self.is_path
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Unknown url attribute. It must be one of %s" % ",".join(allowed_attr))
|
||||
|
||||
return ""
|
||||
else:
|
||||
raise FuzzExceptBadAPI("Unknown FuzzResult attribute: %s." % (field,))
|
||||
|
||||
# Info extra that wfuzz needs within an HTTP request
|
||||
@property
|
||||
def wf_allvars_set(self):
|
||||
@ -361,7 +280,7 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
elif self.wf_allvars == "allpost":
|
||||
self.params.post = varset
|
||||
elif self.wf_allvars == "allheaders":
|
||||
self._request.headers.add(varset)
|
||||
self._request.headers.request = varset
|
||||
else:
|
||||
raise FuzzExceptBadOptions("Unknown variable set: " + self.wf_allvars)
|
||||
except TypeError:
|
||||
@ -396,17 +315,28 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
return Facade().http_pool.perform(res)
|
||||
|
||||
def to_http_object(self, c):
|
||||
return Request.to_pycurl_object(c, self._request)
|
||||
pycurl_c = Request.to_pycurl_object(c, self._request)
|
||||
|
||||
if self.wf_ip:
|
||||
pycurl_c.setopt(pycurl.CONNECT_TO, ["::{}:{}".format(self.wf_ip['ip'], self.wf_ip['port'])])
|
||||
|
||||
return pycurl_c
|
||||
|
||||
def from_http_object(self, c, bh, bb):
|
||||
return self._request.response_from_conn_object(c, bh, bb)
|
||||
raw_header = python2_3_convert_from_unicode(bh.decode("utf-8", errors='surrogateescape'))
|
||||
return self._request.response_from_conn_object(c, raw_header, bb)
|
||||
|
||||
def update_from_raw_http(self, raw, scheme, raw_response=None):
|
||||
def update_from_raw_http(self, raw, scheme, raw_response=None, raw_content=None):
|
||||
self._request.parseRequest(raw, scheme)
|
||||
|
||||
# Parse request sets postdata = '' when there's POST request without data
|
||||
if self.method == "POST" and not self.params.post:
|
||||
self.params.post = {'': None}
|
||||
|
||||
if raw_response:
|
||||
rp = Response()
|
||||
rp.parseResponse(raw_response)
|
||||
raw_response = python2_3_convert_from_unicode(raw_response.decode("utf-8", errors='surrogateescape'))
|
||||
rp.parseResponse(raw_response, raw_content)
|
||||
self._request.response = rp
|
||||
|
||||
return self._request
|
||||
@ -430,6 +360,9 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
if options["url"] != "FUZZ":
|
||||
self.url = options["url"]
|
||||
|
||||
# headers must be parsed first as they might affect how reqresp parases other params
|
||||
self.headers.request = dict(options['headers'])
|
||||
|
||||
if options['auth'][0] is not None:
|
||||
self.auth = (options['auth'][0], options['auth'][1])
|
||||
|
||||
@ -439,14 +372,15 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
if options['postdata'] is not None:
|
||||
self.params.post = options['postdata']
|
||||
|
||||
if options['connect_to_ip']:
|
||||
self.wf_ip = options['connect_to_ip']
|
||||
|
||||
if options['method']:
|
||||
self.method = options['method']
|
||||
self.wf_fuzz_methods = options['method']
|
||||
|
||||
if options['cookie']:
|
||||
self.headers.add({"Cookie": "; ".join(options['cookie'])})
|
||||
|
||||
self.headers.add(dict(options['headers']))
|
||||
self.cookies.request = options['cookie']
|
||||
|
||||
if options['allvars']:
|
||||
self.wf_allvars = options['allvars']
|
||||
@ -457,8 +391,9 @@ class FuzzRequest(FuzzRequestUrlMixing, FuzzRequestSoupMixing):
|
||||
newreq.wf_proxy = self.wf_proxy
|
||||
newreq.wf_allvars = self.wf_allvars
|
||||
newreq.wf_fuzz_methods = self.wf_fuzz_methods
|
||||
newreq.wf_ip = self.wf_ip
|
||||
|
||||
newreq.headers.add(self.headers.request)
|
||||
newreq.headers.request = self.headers.request
|
||||
newreq.params.post = self.params.post
|
||||
|
||||
newreq.follow = self.follow
|
||||
@ -476,29 +411,29 @@ class FuzzResultFactory:
|
||||
def replace_fuzz_word(text, fuzz_word, payload):
|
||||
marker_regex = re.compile(r"(%s)(?:\[(.*?)\])?" % (fuzz_word,), re.MULTILINE | re.DOTALL)
|
||||
|
||||
for fw, field in marker_regex.findall(text):
|
||||
for fuzz_word, field in marker_regex.findall(text):
|
||||
if field:
|
||||
marker_regex = re.compile(r"(%s)(?:\[(.*?)\])?" % (fuzz_word,), re.MULTILINE | re.DOTALL)
|
||||
subs_array = []
|
||||
fields_array = []
|
||||
|
||||
for fw, field in marker_regex.findall(text):
|
||||
if not field:
|
||||
raise FuzzExceptBadOptions("You must specify a field when using a payload containing a full fuzz request, ie. FUZZ[url], or use FUZZ only to repeat the same request.")
|
||||
for fuzz_word, field in marker_regex.findall(text):
|
||||
if not field:
|
||||
raise FuzzExceptBadOptions("You must specify a field when using a payload containing a full fuzz request, ie. FUZZ[url], or use FUZZ only to repeat the same request.")
|
||||
|
||||
try:
|
||||
subs = payload.get_field(field)
|
||||
except AttributeError:
|
||||
raise FuzzExceptBadOptions("A FUZZ[field] expression must be used with a fuzzresult payload not a string.")
|
||||
try:
|
||||
subs = str(rgetattr(payload, field))
|
||||
except AttributeError:
|
||||
raise FuzzExceptBadOptions("A FUZZ[field] expression must be used with a fuzzresult payload not a string.")
|
||||
|
||||
text = text.replace("%s[%s]" % (fw, field), subs)
|
||||
subs_array.append(subs)
|
||||
text = text.replace("%s[%s]" % (fuzz_word, field), subs)
|
||||
fields_array.append(field)
|
||||
|
||||
return (text, subs_array)
|
||||
return (text, fields_array)
|
||||
else:
|
||||
try:
|
||||
return (text.replace(fuzz_word, payload), [payload])
|
||||
return (text.replace(fuzz_word, payload), [None])
|
||||
except TypeError:
|
||||
raise FuzzExceptBadOptions("Tried to replace FUZZ with a whole fuzzresult payload.")
|
||||
raise FuzzExceptBadOptions("Tried to replace {} with a whole fuzzresult payload.".format(fuzz_word))
|
||||
|
||||
@staticmethod
|
||||
def from_seed(seed, payload, seed_options):
|
||||
@ -509,29 +444,27 @@ class FuzzResultFactory:
|
||||
scheme = newres.history.scheme
|
||||
auth_method, userpass = newres.history.auth
|
||||
|
||||
descr_array = []
|
||||
|
||||
for payload_pos, payload_content in enumerate(payload, start=1):
|
||||
fuzz_word = "FUZ" + str(payload_pos) + "Z" if payload_pos > 1 else "FUZZ"
|
||||
|
||||
newres.payload.append(payload_content)
|
||||
fuzz_values_array = []
|
||||
|
||||
# substitute entire seed when using a request payload generator without specifying field
|
||||
if fuzz_word == "FUZZ" and seed_options["seed_payload"] and isinstance(payload_content, FuzzResult):
|
||||
# new seed
|
||||
newres = payload_content.from_soft_copy()
|
||||
newres.payload = []
|
||||
|
||||
descr_array.append(newres.history.redirect_url)
|
||||
fuzz_values_array.append(None)
|
||||
|
||||
newres.payload = [payload_content]
|
||||
newres.history.update_from_options(seed_options)
|
||||
newres._description = ""
|
||||
newres.update_from_options(seed_options)
|
||||
rawReq = str(newres.history)
|
||||
rawUrl = newres.history.redirect_url
|
||||
scheme = newres.history.scheme
|
||||
auth_method, userpass = newres.history.auth
|
||||
|
||||
desc = None
|
||||
desc = []
|
||||
|
||||
if auth_method and (userpass.count(fuzz_word)):
|
||||
userpass, desc = FuzzResultFactory.replace_fuzz_word(userpass, fuzz_word, payload_content)
|
||||
@ -544,20 +477,15 @@ class FuzzResultFactory:
|
||||
scheme, desc = FuzzResultFactory.replace_fuzz_word(scheme, fuzz_word, payload_content)
|
||||
|
||||
if desc:
|
||||
descr_array += desc
|
||||
fuzz_values_array += desc
|
||||
|
||||
if len(descr_array) == 0:
|
||||
raise FuzzExceptBadOptions("No %s word!" % fuzz_word)
|
||||
newres.payload.append(FuzzPayload(payload_content, fuzz_values_array))
|
||||
|
||||
newres.history.update_from_raw_http(rawReq, scheme)
|
||||
newres.history.url = rawUrl
|
||||
if auth_method != 'None':
|
||||
newres.history.auth = (auth_method, userpass)
|
||||
|
||||
if newres._description:
|
||||
newres._description += " - "
|
||||
|
||||
newres._description += " - ".join(descr_array)
|
||||
newres.type = FuzzResult.result
|
||||
|
||||
return newres
|
||||
@ -614,9 +542,7 @@ class FuzzResultFactory:
|
||||
baseline_res.history.update_from_raw_http(rawReq, scheme)
|
||||
|
||||
baseline_res = FuzzResultFactory.from_seed(baseline_res, baseline_payload, options)
|
||||
|
||||
baseline_res.is_baseline = True
|
||||
baseline_res.payload = baseline_payload
|
||||
|
||||
return baseline_res
|
||||
|
||||
@ -629,8 +555,7 @@ class FuzzResultFactory:
|
||||
for var_name in seed.history.wf_allvars_set.keys():
|
||||
payload_content = payload[0]
|
||||
fuzzres = seed.from_soft_copy()
|
||||
fuzzres._description = var_name + "=" + payload_content
|
||||
fuzzres.payload.append(payload_content)
|
||||
fuzzres.payload.append(FuzzPayload(payload_content, [None]))
|
||||
|
||||
fuzzres.history.wf_allvars_set = {var_name: payload_content}
|
||||
|
||||
@ -644,27 +569,10 @@ class FuzzResultFactory:
|
||||
fr.wf_fuzz_methods = options['method']
|
||||
fr.update_from_options(options)
|
||||
|
||||
return FuzzResult(fr)
|
||||
fuzz_res = FuzzResult(fr)
|
||||
fuzz_res.update_from_options(options)
|
||||
|
||||
|
||||
class MyCounter:
|
||||
def __init__(self, count=0):
|
||||
self._count = count
|
||||
self._mutex = Lock()
|
||||
|
||||
def inc(self):
|
||||
self._operation(1)
|
||||
|
||||
def dec(self):
|
||||
self._operation(-1)
|
||||
|
||||
def _operation(self, dec):
|
||||
with self._mutex:
|
||||
self._count += dec
|
||||
|
||||
def __call__(self):
|
||||
with self._mutex:
|
||||
return self._count
|
||||
return fuzz_res
|
||||
|
||||
|
||||
class FuzzStats:
|
||||
@ -755,18 +663,38 @@ class FuzzStats:
|
||||
self.pending_seeds._operation(fuzzstats2.pending_seeds())
|
||||
|
||||
|
||||
class FuzzPayload():
|
||||
def __init__(self, content, fields):
|
||||
self.content = content
|
||||
self.fields = fields
|
||||
|
||||
def description(self, default):
|
||||
ret_str_values = []
|
||||
for fuzz_value in self.fields:
|
||||
if fuzz_value is None and isinstance(self.content, FuzzResult):
|
||||
ret_str_values.append(default)
|
||||
elif fuzz_value is not None and isinstance(self.content, FuzzResult):
|
||||
ret_str_values.append(str(rgetattr(self.content, fuzz_value)))
|
||||
elif fuzz_value is None:
|
||||
ret_str_values.append(self.content)
|
||||
else:
|
||||
ret_str_values.append(fuzz_value)
|
||||
|
||||
return " - ".join(ret_str_values)
|
||||
|
||||
def __str__(self):
|
||||
return "content: {} fields: {}".format(self.content, self.fields)
|
||||
|
||||
|
||||
class FuzzResult:
|
||||
seed, backfeed, result, error, startseed, endseed, cancel, discarded = list(range(8))
|
||||
newid = itertools.count(0)
|
||||
ERROR_CODE = -1
|
||||
BASELINE_CODE = -2
|
||||
|
||||
def __init__(self, history=None, exception=None, track_id=True):
|
||||
self.history = history
|
||||
|
||||
self.type = None
|
||||
self.exception = exception
|
||||
self._description = ""
|
||||
self.is_baseline = False
|
||||
self.rlevel = 1
|
||||
self.nres = next(FuzzResult.newid) if track_id else 0
|
||||
@ -783,12 +711,23 @@ class FuzzResult:
|
||||
|
||||
self.payload = []
|
||||
|
||||
self._description = None
|
||||
self._show_field = False
|
||||
|
||||
@property
|
||||
def plugins(self):
|
||||
dic = defaultdict(list)
|
||||
|
||||
for pl in self.plugins_res:
|
||||
dic[pl.source].append(pl.issue)
|
||||
|
||||
return dic
|
||||
|
||||
def update(self, exception=None):
|
||||
self.type = FuzzResult.result
|
||||
|
||||
if exception:
|
||||
self.exception = exception
|
||||
self._description = self._description + "! " + str(self.exception)
|
||||
|
||||
if self.history and self.history.content:
|
||||
m = hashlib.md5()
|
||||
@ -801,20 +740,6 @@ class FuzzResult:
|
||||
|
||||
return self
|
||||
|
||||
def set_field(self, field, value):
|
||||
return self.history.set_field(field, value)
|
||||
|
||||
def get_field(self, field):
|
||||
alias = dict([('l', 'lines'), ('h', 'chars'), ('w', 'words'), ('c', 'code')])
|
||||
|
||||
if field in alias:
|
||||
field = alias[field]
|
||||
|
||||
if field in ["code", "description", "nres", "chars", "lines", "words", "md5"]:
|
||||
return str(getattr(self, field))
|
||||
else:
|
||||
return self.history.get_field(field)
|
||||
|
||||
def __str__(self):
|
||||
if self.type == FuzzResult.result:
|
||||
res = "%05d: C=%03d %4d L\t %5d W\t %5d Ch\t \"%s\"" % (self.nres, self.code, self.lines, self.words, self.chars, self.description)
|
||||
@ -825,11 +750,39 @@ class FuzzResult:
|
||||
else:
|
||||
return "Control result, type: %s" % ("seed", "backfeed", "result", "error", "startseed", "endseed", "cancel", "discarded")[self.type]
|
||||
|
||||
def _payload_description(self):
|
||||
if not self.payload:
|
||||
return self.url
|
||||
|
||||
payl_descriptions = [payload.description(self.url) for payload in self.payload]
|
||||
ret_str = ' - '.join([p_des for p_des in payl_descriptions if p_des])
|
||||
|
||||
return ret_str
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
return self._description
|
||||
ret_str = ""
|
||||
|
||||
if self._show_field is True:
|
||||
ret_str = self.eval(self._description)
|
||||
elif self._show_field is False and self._description is not None:
|
||||
ret_str = "{} | {}".format(self._payload_description(), self.eval(self._description))
|
||||
else:
|
||||
ret_str = self._payload_description()
|
||||
|
||||
if self.exception:
|
||||
return ret_str + "! " + str(self.exception)
|
||||
|
||||
return ret_str
|
||||
|
||||
def eval(self, expr):
|
||||
return FuzzResFilter(filter_string=expr).is_visible(self)
|
||||
|
||||
# parameters in common with fuzzrequest
|
||||
@property
|
||||
def content(self):
|
||||
return self.history.content if self.history else ""
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return self.history.url if self.history else ""
|
||||
@ -841,7 +794,7 @@ class FuzzResult:
|
||||
# elif not self.history.code:
|
||||
# return 0
|
||||
else:
|
||||
return FuzzResult.ERROR_CODE
|
||||
return ERROR_CODE
|
||||
|
||||
@property
|
||||
def timer(self):
|
||||
@ -865,14 +818,19 @@ class FuzzResult:
|
||||
fr = FuzzResult(self.history.from_copy(), track_id=track_id)
|
||||
|
||||
fr.exception = self.exception
|
||||
fr._description = self._description
|
||||
fr.is_baseline = self.is_baseline
|
||||
fr.type = self.type
|
||||
fr.rlevel = self.rlevel
|
||||
fr.payload = list(self.payload)
|
||||
fr._description = self._description
|
||||
fr._show_field = self._show_field
|
||||
|
||||
return fr
|
||||
|
||||
def update_from_options(self, options):
|
||||
self._description = options['description']
|
||||
self._show_field = options['show_field']
|
||||
|
||||
@staticmethod
|
||||
def to_new_exception(exception):
|
||||
fr = FuzzResult(exception=exception, track_id=False)
|
||||
@ -890,7 +848,6 @@ class FuzzResult:
|
||||
def to_new_url(self, url):
|
||||
fr = self.from_soft_copy()
|
||||
fr.history.url = str(url)
|
||||
fr._description = fr.history.path
|
||||
fr.rlevel = self.rlevel + 1
|
||||
fr.type = FuzzResult.backfeed
|
||||
fr.is_baseline = False
|
||||
@ -942,5 +899,6 @@ class PluginRequest(PluginItem):
|
||||
plreq = PluginRequest()
|
||||
plreq.source = source
|
||||
plreq.fuzzitem = res.to_new_url(url)
|
||||
plreq.fuzzitem.payload = [FuzzPayload(url, [None])]
|
||||
|
||||
return plreq
|
||||
|
@ -188,7 +188,7 @@ class JobMan(FuzzQueue):
|
||||
def process(self, res):
|
||||
# process request through plugins
|
||||
if not res.exception:
|
||||
if self.cache.update_cache(res.history, "processed"):
|
||||
if self.options['no_cache'] or self.cache.update_cache(res.history, "processed"):
|
||||
|
||||
plugins_res_queue = Queue()
|
||||
|
||||
@ -212,7 +212,7 @@ class JobMan(FuzzQueue):
|
||||
self._throw(FuzzExceptPluginError(item.issue))
|
||||
res.plugins_res.append(item)
|
||||
elif item.plugintype == PluginItem.backfeed:
|
||||
if self.cache.update_cache(item.fuzzitem.history, "backfeed"):
|
||||
if self.options['no_cache'] or self.cache.update_cache(item.fuzzitem.history, "backfeed"):
|
||||
res.plugins_backfeed.append(item)
|
||||
else:
|
||||
raise FuzzExceptInternalError("Jobman: Unknown pluginitem type in queue!")
|
||||
|
@ -27,6 +27,14 @@ class FuzzRequestUrlMixing(object):
|
||||
def urlparse(self):
|
||||
return parse_url(self.url)
|
||||
|
||||
@property
|
||||
def urlp(self):
|
||||
return parse_url(self.url)
|
||||
|
||||
@property
|
||||
def pstrip(self):
|
||||
return self.to_cache_key()
|
||||
|
||||
@property
|
||||
def is_path(self):
|
||||
if self.code == 200 and self.url[-1] == '/':
|
||||
|
@ -152,10 +152,10 @@ class HttpPool:
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
|
||||
elif ptype == "SOCKS4":
|
||||
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
|
||||
elif ptype == "HTML":
|
||||
elif ptype == "HTTP":
|
||||
pass
|
||||
else:
|
||||
raise FuzzExceptBadOptions("Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.")
|
||||
raise FuzzExceptBadOptions("Bad proxy type specified, correct values are HTTP, SOCKS4 or SOCKS5.")
|
||||
else:
|
||||
c.setopt(pycurl.PROXY, "")
|
||||
|
||||
|
@ -1,12 +1,12 @@
|
||||
from .exception import FuzzExceptBadRecipe, FuzzExceptBadOptions, FuzzExceptBadFile
|
||||
from .facade import Facade
|
||||
from .facade import Facade, ERROR_CODE, BASELINE_CODE
|
||||
|
||||
from .fuzzobjects import FuzzResult, FuzzStats
|
||||
from .fuzzobjects import FuzzStats
|
||||
from .filter import FuzzResFilter
|
||||
from .core import requestGenerator
|
||||
from .utils import (
|
||||
json_minify,
|
||||
python2_3_convert_from_unicode
|
||||
python2_3_convert_from_unicode,
|
||||
)
|
||||
|
||||
from .core import Fuzzer
|
||||
@ -28,23 +28,22 @@ import json
|
||||
class FuzzSession(UserDict):
|
||||
def __init__(self, **kwargs):
|
||||
self.data = self._defaults()
|
||||
self.keys_not_to_dump = ["interactive", "recipe", "seed_payload", "send_discarded", "compiled_genreq", "compiled_filter", "compiled_prefilter", "compiled_printer"]
|
||||
self.keys_not_to_dump = ["interactive", "recipe", "seed_payload", "send_discarded", "compiled_genreq", "compiled_filter", "compiled_prefilter", "compiled_printer", "description", "show_field"]
|
||||
|
||||
# recipe must be superseded by options
|
||||
if "recipe" in kwargs and kwargs["recipe"]:
|
||||
self.import_from_file(kwargs["recipe"])
|
||||
for recipe in kwargs["recipe"]:
|
||||
self.import_from_file(recipe)
|
||||
|
||||
self.update(kwargs)
|
||||
|
||||
self.cache = HttpCache()
|
||||
self.http_pool = None
|
||||
|
||||
self.fz = None
|
||||
self.stats = FuzzStats()
|
||||
|
||||
def _defaults(self):
|
||||
return dict(
|
||||
seed_payload=False,
|
||||
send_discarded=False,
|
||||
console_printer="",
|
||||
hs=None,
|
||||
@ -65,7 +64,7 @@ class FuzzSession(UserDict):
|
||||
verbose=False,
|
||||
interactive=False,
|
||||
dryrun=False,
|
||||
recipe="",
|
||||
recipe=[],
|
||||
save="",
|
||||
proxies=None,
|
||||
conn_delay=int(Facade().sett.get('connection', 'conn_delay')),
|
||||
@ -85,11 +84,16 @@ class FuzzSession(UserDict):
|
||||
allvars=None,
|
||||
script="",
|
||||
script_args={},
|
||||
connect_to_ip=None,
|
||||
description=None,
|
||||
no_cache=False,
|
||||
show_field=None,
|
||||
|
||||
# this is equivalent to payloads but in a different format
|
||||
dictio=None,
|
||||
|
||||
# these will be compiled
|
||||
seed_payload=False,
|
||||
filter="",
|
||||
prefilter="",
|
||||
compiled_genreq=None,
|
||||
@ -102,46 +106,53 @@ class FuzzSession(UserDict):
|
||||
self.data.update(options)
|
||||
|
||||
def validate(self):
|
||||
error_list = []
|
||||
|
||||
if self.data['dictio'] and self.data['payloads']:
|
||||
return "Bad usage: Dictio and payloads options are mutually exclusive. Only one could be specified."
|
||||
raise FuzzExceptBadOptions("Bad usage: Dictio and payloads options are mutually exclusive. Only one could be specified.")
|
||||
|
||||
if self.data['rlevel'] > 0 and self.data['dryrun']:
|
||||
return "Bad usage: Recursion cannot work without making any HTTP request."
|
||||
error_list.append("Bad usage: Recursion cannot work without making any HTTP request.")
|
||||
|
||||
if self.data['script'] and self.data['dryrun']:
|
||||
return "Bad usage: Plugins cannot work without making any HTTP request."
|
||||
error_list.append("Bad usage: Plugins cannot work without making any HTTP request.")
|
||||
|
||||
if self.data['no_cache'] not in [True, False]:
|
||||
raise FuzzExceptBadOptions("Bad usage: No-cache is a boolean value")
|
||||
|
||||
if not self.data['url']:
|
||||
return "Bad usage: You must specify an URL."
|
||||
error_list.append("Bad usage: You must specify an URL.")
|
||||
|
||||
if not self.data['payloads'] and not self.data["dictio"]:
|
||||
return "Bad usage: You must specify a payload."
|
||||
error_list.append("Bad usage: You must specify a payload.")
|
||||
|
||||
if self.data["hs"] and self.data["ss"]:
|
||||
return "Bad usage: Hide and show regex filters flags are mutually exclusive. Only one could be specified."
|
||||
raise FuzzExceptBadOptions("Bad usage: Hide and show regex filters flags are mutually exclusive. Only one could be specified.")
|
||||
|
||||
if self.data["rlevel"] < 0:
|
||||
return "Bad usage: Recursion level must be a positive int."
|
||||
raise FuzzExceptBadOptions("Bad usage: Recursion level must be a positive int.")
|
||||
|
||||
if self.data['allvars'] not in [None, 'allvars', 'allpost', 'allheaders']:
|
||||
raise FuzzExceptBadOptions("Bad options: Incorrect all parameters brute forcing type specified, correct values are allvars,allpost or allheaders.")
|
||||
|
||||
if self.data['proxies']:
|
||||
for ip, port, ttype in self.data['proxies']:
|
||||
if ttype not in ("SOCKS5", "SOCKS4", "HTML"):
|
||||
raise FuzzExceptBadOptions("Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.")
|
||||
if ttype not in ("SOCKS5", "SOCKS4", "HTTP"):
|
||||
raise FuzzExceptBadOptions("Bad proxy type specified, correct values are HTTP, SOCKS4 or SOCKS5.")
|
||||
|
||||
try:
|
||||
if [x for x in ["sc", "sw", "sh", "sl"] if len(self.data[x]) > 0] and \
|
||||
[x for x in ["hc", "hw", "hh", "hl"] if len(self.data[x]) > 0]:
|
||||
return "Bad usage: Hide and show filters flags are mutually exclusive. Only one group could be specified."
|
||||
raise FuzzExceptBadOptions("Bad usage: Hide and show filters flags are mutually exclusive. Only one group could be specified.")
|
||||
|
||||
if ([x for x in ["sc", "sw", "sh", "sl"] if len(self.data[x]) > 0] or
|
||||
[x for x in ["hc", "hw", "hh", "hl"] if len(self.data[x]) > 0]) and \
|
||||
self.data['filter']:
|
||||
return "Bad usage: Advanced and filter flags are mutually exclusive. Only one could be specified."
|
||||
raise FuzzExceptBadOptions("Bad usage: Advanced and filter flags are mutually exclusive. Only one could be specified.")
|
||||
except TypeError:
|
||||
return "Bad options: Filter must be specified in the form of [int, ... , int]."
|
||||
raise FuzzExceptBadOptions("Bad options: Filter must be specified in the form of [int, ... , int].")
|
||||
|
||||
return error_list
|
||||
|
||||
def export_to_file(self, filename):
|
||||
try:
|
||||
@ -187,22 +198,32 @@ class FuzzSession(UserDict):
|
||||
return json.dumps(tmp, sort_keys=True, indent=4, separators=(',', ': '))
|
||||
|
||||
def payload(self, **kwargs):
|
||||
self.data.update(kwargs)
|
||||
return requestGenerator(self).get_dictio()
|
||||
try:
|
||||
self.data.update(kwargs)
|
||||
self.data['compiled_genreq'] = requestGenerator(self)
|
||||
for r in self.data['compiled_genreq'].get_dictio():
|
||||
yield r
|
||||
finally:
|
||||
self.data['compiled_genreq'].close()
|
||||
|
||||
def fuzz(self, **kwargs):
|
||||
self.data.update(kwargs)
|
||||
|
||||
fz = None
|
||||
try:
|
||||
self.fz = Fuzzer(self.compile())
|
||||
fz = Fuzzer(self.compile())
|
||||
|
||||
for f in self.fz:
|
||||
for f in fz:
|
||||
yield f
|
||||
|
||||
finally:
|
||||
if self.fz:
|
||||
self.fz.cancel_job()
|
||||
self.stats.update(self.fz.genReq.stats)
|
||||
if fz:
|
||||
fz.cancel_job()
|
||||
self.stats.update(fz.genReq.stats)
|
||||
|
||||
if self.http_pool:
|
||||
self.http_pool.deregister()
|
||||
self.http_pool = None
|
||||
|
||||
def get_payloads(self, iterator):
|
||||
self.data["dictio"] = iterator
|
||||
@ -224,7 +245,9 @@ class FuzzSession(UserDict):
|
||||
# Validate options
|
||||
error = self.validate()
|
||||
if error:
|
||||
raise FuzzExceptBadOptions(error)
|
||||
raise FuzzExceptBadOptions(error[0])
|
||||
|
||||
self.data["seed_payload"] = True if self.data["url"] == "FUZZ" else False
|
||||
|
||||
# printer
|
||||
try:
|
||||
@ -238,15 +261,15 @@ class FuzzSession(UserDict):
|
||||
self.data["compiled_printer"] = Facade().printers.get_plugin(printer)(filename)
|
||||
|
||||
try:
|
||||
self.data['hc'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['hc']]
|
||||
self.data['hw'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['hw']]
|
||||
self.data['hl'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['hl']]
|
||||
self.data['hh'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['hh']]
|
||||
self.data['hc'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['hc']]
|
||||
self.data['hw'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['hw']]
|
||||
self.data['hl'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['hl']]
|
||||
self.data['hh'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['hh']]
|
||||
|
||||
self.data['sc'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['sc']]
|
||||
self.data['sw'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['sw']]
|
||||
self.data['sl'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['sl']]
|
||||
self.data['sh'] = [FuzzResult.BASELINE_CODE if i == "BBB" else FuzzResult.ERROR_CODE if i == "XXX" else int(i) for i in self.data['sh']]
|
||||
self.data['sc'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['sc']]
|
||||
self.data['sw'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['sw']]
|
||||
self.data['sl'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['sl']]
|
||||
self.data['sh'] = [BASELINE_CODE if i == "BBB" else ERROR_CODE if i == "XXX" else int(i) for i in self.data['sh']]
|
||||
except ValueError:
|
||||
raise FuzzExceptBadOptions("Bad options: Filter must be specified in the form of [int, ... , int, BBB, XXX].")
|
||||
|
||||
@ -257,10 +280,16 @@ class FuzzSession(UserDict):
|
||||
# seed
|
||||
self.data["compiled_genreq"] = requestGenerator(self)
|
||||
|
||||
if self.data["compiled_genreq"].baseline is None and (FuzzResult.BASELINE_CODE in self.data['hc'] or
|
||||
FuzzResult.BASELINE_CODE in self.data['hl'] or FuzzResult.BASELINE_CODE in self.data['hw'] or
|
||||
FuzzResult.BASELINE_CODE in self.data['hh']):
|
||||
raise FuzzExceptBadOptions("Bad options: specify a baseline value when using BBB")
|
||||
# Check payload num
|
||||
fuzz_words = self.data["compiled_filter"].get_fuzz_words() + self.data["compiled_prefilter"].get_fuzz_words() + self.data["compiled_genreq"].get_fuzz_words()
|
||||
|
||||
if self.data['allvars'] is None and len(set(fuzz_words)) == 0:
|
||||
raise FuzzExceptBadOptions("You must specify at least a FUZZ word!")
|
||||
|
||||
if self.data["compiled_genreq"].baseline is None and (BASELINE_CODE in self.data['hc'] or
|
||||
BASELINE_CODE in self.data['hl'] or BASELINE_CODE in self.data['hw'] or
|
||||
BASELINE_CODE in self.data['hh']):
|
||||
raise FuzzExceptBadOptions("Bad options: specify a baseline value when using BBB")
|
||||
|
||||
if self.data["script"]:
|
||||
Facade().scripts.kbase.update(self.data["script_args"])
|
||||
@ -276,6 +305,6 @@ class FuzzSession(UserDict):
|
||||
return self
|
||||
|
||||
def close(self):
|
||||
self.http_pool.deregister()
|
||||
if self.fz:
|
||||
self.fz.cancel_job()
|
||||
if self.http_pool:
|
||||
self.http_pool.deregister()
|
||||
self.http_pool = None
|
||||
|
@ -121,6 +121,9 @@ class BasePayload(object):
|
||||
def __iter__(self):
|
||||
raise FuzzExceptPluginError("Method iter not implemented")
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def find_file(self, name):
|
||||
if os.path.exists(name):
|
||||
return name
|
||||
|
@ -1,5 +1,7 @@
|
||||
from wfuzz.exception import FuzzExceptMissingAPIKey, FuzzExceptResourceParseError
|
||||
from wfuzz.facade import Facade
|
||||
from wfuzz.utils import MyCounter
|
||||
|
||||
|
||||
# Python 2 and 3: alternative 4
|
||||
try:
|
||||
@ -13,6 +15,18 @@ import json
|
||||
|
||||
# python 2 and 3: iterator
|
||||
from builtins import object
|
||||
from threading import Thread
|
||||
from queue import Queue
|
||||
|
||||
import shodan
|
||||
|
||||
# TODO: test cases
|
||||
m = {
|
||||
'matches': [
|
||||
{'_shodan': {'id': '54e0ae62-9e22-404b-91b4-92f99e89c987', 'options': {}, 'ptr': True, 'module': 'auto', 'crawler': '62861a86c4e4b71dceed5113ce9593b98431f89a'}, 'hash': -1355923443, 'os': None, 'ip': 1240853908, 'isp': 'Comcast Cable', 'http': {'html_hash': -2142469325, 'robots_hash': None, 'redirects': [], 'securitytxt': None, 'title': '400 Bad Request', 'sitemap_hash': None, 'robots': None, 'favicon': None, 'host': '73.245.237.148', 'html': '<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">\n<html><head>\n<title>400 Bad Request</title>\n</head><body>\n<h1>Bad Request</h1>\n<p>Your browser sent a request that this server could not understand.<br />\nReason: You\'re speaking plain HTTP to an SSL-enabled server port.<br />\n Instead use the HTTPS scheme to access this URL, please.<br />\n</p>\n<p>Additionally, a 404 Not Found\nerror was encountered while trying to use an ErrorDocument to handle the request.</p>\n</body></html>\n', 'location': '/', 'components': {}, 'server': 'Apache', 'sitemap': None, 'securitytxt_hash': None}, 'port': 9445, 'hostnames': ['c-73-245-237-148.hsd1.fl.comcast.net'], 'location': {'city': 'Fort Lauderdale', 'region_code': 'FL', 'area_code': 954, 'longitude': -80.3704, 'country_code3': 'USA', 'country_name': 'United States', 'postal_code': '33331', 'dma_code': 528, 'country_code': 'US', 'latitude': 26.065200000000004}, 'timestamp': '2019-04-10T10:30:48.297701', 'domains': ['comcast.net'], 'org': 'Comcast Cable', 'data': 'HTTP/1.1 400 Bad Request\r\nDate: Wed, 10 Apr 2019 10:19:07 GMT\r\nServer: Apache\r\nContent-Length: 481\r\nConnection: close\r\nContent-Type: text/html; charset=iso-8859-1\r\n\r\n', 'asn': 'AS7922', 'transport': 'tcp', 'ip_str': '73.245.237.148'},
|
||||
{'_shodan': {'id': '4ace6fd1-8295-4aea-a086-2280598ca9e7', 'options': {}, 'ptr': True, 'module': 'auto', 'crawler': '62861a86c4e4b71dceed5113ce9593b98431f89a'}, 'product': 'Apache httpd', 'hash': 370611044, 'os': None, 'ip': 35226500, 'isp': 'EE High Speed Internet', 'http': {'html_$ ash': -163723763, 'robots_hash': None, 'redirects': [], 'securitytxt': None, 'title': '401 Authorization Required', 'sitemap_hash': None, 'robots': None, 'favicon': None, 'host': '2.25.131.132', 'html': '<HEAD><TITLE>401 Authorization Required</TITLE></HEAD>\n<BODY><H1>401 Authoriza$ ion Required</H1>\nBrowser not authentication-capable or authentication failed.\n</BODY>\n', 'location': '/', 'components': {}, 'server': 'Apache', 'sitemap': None, 'securitytxt_hash': None}, 'cpe': ['cpe:/a:apache:http_server'], 'port': 8085, 'hostnames': [], 'location': {'city': '$ helmsford', 'region_code': 'E4', 'area_code': None, 'longitude': 0.48330000000001405, 'country_code3': 'GBR', 'country_name': 'United Kingdom', 'postal_code': 'CM2', 'dma_code': None, 'country_code': 'GB', 'latitude': 51.733300000000014}, 'timestamp': '2019-04-10T11:03:59.955967', '$ omains': [], 'org': 'EE High Speed Internet', 'data': 'HTTP/1.1 401 Unauthorized\r\nServer: Apache\r\nConnection: Close\r\nContent-type: text/html\r\nWWW-Authenticate: Digest realm="DSLForum CPE Management", algorithm=MD5, qop=auth, stale=FALSE, nonce="3d7a3f71e72e095dba31fd77d4db74$5", opaque="5ccc069c403ebaf9f0171e9517f40e41"\r\n\r\n', 'asn': 'AS12576', 'transport': 'tcp', 'ip_str': '2.25.131.132'},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class BingIter(object):
|
||||
@ -115,3 +129,112 @@ class BingIter(object):
|
||||
return elem.encode('utf-8')
|
||||
else:
|
||||
return elem
|
||||
|
||||
|
||||
class ShodanIter():
|
||||
SHODAN_RES_PER_PAGE = 100
|
||||
MAX_ENQUEUED_RES = SHODAN_RES_PER_PAGE + 1
|
||||
NUM_OF_WORKERS = 1
|
||||
SLOW_START = True
|
||||
|
||||
def __init__(self, dork, page, limit):
|
||||
key = Facade().sett.get('plugins', 'shodan_apikey')
|
||||
if not key:
|
||||
raise FuzzExceptMissingAPIKey("A Shodan api key is needed. Please check ~/.wfuzz/wfuzz.ini")
|
||||
|
||||
self.api = shodan.Shodan(key)
|
||||
self._dork = dork
|
||||
self._page = MyCounter(page)
|
||||
self._page_limit = self._page() + limit if limit > 0 else -1
|
||||
|
||||
self.results_queue = Queue(self.MAX_ENQUEUED_RES)
|
||||
self.page_queue = Queue()
|
||||
|
||||
self._threads = []
|
||||
|
||||
self._started = False
|
||||
self._cancel_job = False
|
||||
|
||||
def _do_search(self):
|
||||
while 1:
|
||||
page = self.page_queue.get()
|
||||
if page is None:
|
||||
self.page_queue.task_done()
|
||||
break
|
||||
|
||||
if self._cancel_job:
|
||||
self.page_queue.task_done()
|
||||
continue
|
||||
|
||||
if self._page_limit > 0 and page >= self._page_limit:
|
||||
self.page_queue.task_done()
|
||||
self.results_queue.put(None)
|
||||
continue
|
||||
|
||||
try:
|
||||
results = self.api.search(self._dork, page=page)
|
||||
for item in results['matches']:
|
||||
if not self._cancel_job:
|
||||
self.results_queue.put(item)
|
||||
|
||||
self.page_queue.task_done()
|
||||
if not self._cancel_job:
|
||||
self.page_queue.put(self._page.inc())
|
||||
except shodan.APIError as e:
|
||||
self.page_queue.task_done()
|
||||
if "Invalid page size" in str(e):
|
||||
self.results_queue.put(None)
|
||||
elif "Insufficient query credits" in str(e):
|
||||
self.results_queue.put(None)
|
||||
else:
|
||||
self.results_queue.put(e)
|
||||
continue
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def _start(self):
|
||||
for th_n in range(self.NUM_OF_WORKERS):
|
||||
worker = Thread(target=self._do_search)
|
||||
worker.setName('_do_search_{}'.format(str(th_n)))
|
||||
self._threads.append(worker)
|
||||
worker.start()
|
||||
|
||||
self.page_queue.put(self._page())
|
||||
if not self.SLOW_START:
|
||||
for _ in range(self.NUM_OF_WORKERS - 1):
|
||||
self.page_queue.put(self._page.inc())
|
||||
|
||||
def _stop(self):
|
||||
self._cancel_job = True
|
||||
|
||||
for th in self._threads:
|
||||
self.page_queue.put(None)
|
||||
|
||||
self.page_queue.join()
|
||||
|
||||
for th in self._threads:
|
||||
th.join()
|
||||
|
||||
self._threads = []
|
||||
|
||||
self.results_queue.put(None)
|
||||
|
||||
def __next__(self):
|
||||
if not self._started:
|
||||
self._start()
|
||||
self._started = True
|
||||
|
||||
res = self.results_queue.get()
|
||||
self.results_queue.task_done()
|
||||
|
||||
if res is None:
|
||||
self._stop()
|
||||
self._cancel_job = False
|
||||
self._started = False
|
||||
raise StopIteration
|
||||
elif isinstance(res, Exception):
|
||||
self._stop()
|
||||
raise res
|
||||
|
||||
return res
|
||||
|
@ -232,8 +232,8 @@ class doble_nibble_hex:
|
||||
for c in strt:
|
||||
if not c == "%":
|
||||
if s.search(c):
|
||||
fin += c
|
||||
continue
|
||||
fin += c
|
||||
continue
|
||||
fin += con % ord(c)
|
||||
else:
|
||||
fin += c
|
||||
|
@ -6,6 +6,7 @@ from wfuzz.fuzzobjects import FuzzResult
|
||||
from wfuzz.fuzzobjects import FuzzRequest
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.utils import rgetattr
|
||||
|
||||
|
||||
@moduleman_plugin
|
||||
@ -41,7 +42,7 @@ class autorize(BasePayload):
|
||||
def __next__(self):
|
||||
next_item = next(self._it)
|
||||
|
||||
return next_item if not self.attr else next_item.get_field(self.attr)
|
||||
return next_item if not self.attr else rgetattr(next_item, self.attr)
|
||||
|
||||
def _gen_wfuzz(self, output_fn):
|
||||
try:
|
||||
|
@ -2,10 +2,15 @@ from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.exception import FuzzExceptBadFile
|
||||
from wfuzz.fuzzobjects import FuzzResult, FuzzRequest
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
from wfuzz.utils import rgetattr
|
||||
|
||||
import re
|
||||
|
||||
CRLF = "\r\n"
|
||||
import sys
|
||||
if sys.version_info < (3, 0):
|
||||
from io import open
|
||||
|
||||
CRLF = "\n"
|
||||
DELIMITER = "%s%s" % ('=' * 54, CRLF)
|
||||
CRLF_DELIMITER = CRLF + DELIMITER
|
||||
HEADER = re.compile(r'(\d{1,2}:\d{2}:\d{2} (AM|PM|))[ \t]+(\S+)([ \t]+\[(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|unknown host)\])?')
|
||||
@ -44,13 +49,13 @@ class burplog(BasePayload):
|
||||
def __next__(self):
|
||||
next_item = next(self._it)
|
||||
|
||||
return next_item if not self.attr else next_item.get_field(self.attr)
|
||||
return next_item if not self.attr else rgetattr(next_item, self.attr)
|
||||
|
||||
def parse_burp_log(self, burp_log):
|
||||
burp_file = None
|
||||
|
||||
try:
|
||||
burp_file = open(self.find_file(burp_log), 'rb')
|
||||
burp_file = open(self.find_file(burp_log), 'r', encoding="utf-8", errors="surrogateescape")
|
||||
|
||||
history = 'START'
|
||||
|
||||
|
@ -2,6 +2,8 @@ from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.exception import FuzzExceptBadFile, FuzzExceptBadOptions
|
||||
from wfuzz.fuzzobjects import FuzzResult, FuzzRequest
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
from wfuzz.utils import rgetattr
|
||||
|
||||
|
||||
import datetime
|
||||
import string
|
||||
@ -19,6 +21,8 @@ class burpstate(BasePayload):
|
||||
author = ("Xavi Mendez (@xmendez)", )
|
||||
version = "0.1"
|
||||
description = (
|
||||
"*ALERT*: https://portswigger.net/blog/goodbye-state-files-we-wont-miss-you",
|
||||
"",
|
||||
"Returns fuzz results' from a Burp saved state file. This payload's code is based on burp2xml.py:",
|
||||
"Developed by Paul Haas, <phaas AT redspin DOT com> under Redspin. Inc.",
|
||||
"Licensed under the GNU Public License version 3.0 (2008-2009)",
|
||||
@ -69,51 +73,51 @@ class burpstate(BasePayload):
|
||||
def __next__(self):
|
||||
next_item = next(self._it)
|
||||
|
||||
return next_item if not self.attr else next_item.get_field(self.attr)
|
||||
return next_item if not self.attr else rgetattr(next_item, self.attr)
|
||||
|
||||
def milliseconds_to_date(self, milliseconds):
|
||||
'''Convert milliseconds since Epoch (from Java) to Python date structure:
|
||||
See: http://java.sun.com/j2se/1.4.2/docs/api/java/util/Date.html
|
||||
'''Convert milliseconds since Epoch (from Java) to Python date structure:
|
||||
See: http://java.sun.com/j2se/1.4.2/docs/api/java/util/Date.html
|
||||
|
||||
There is no direct way to convert milliseconds since Epoch to Python object
|
||||
So we convert the milliseconds to seconds first as a POSIX timestamp which
|
||||
can be used to get a valid date, and then use the parsed values from that
|
||||
object along with converting mili -> micro seconds in a new date object.'''
|
||||
try:
|
||||
d = datetime.datetime.fromtimestamp(milliseconds / 1000)
|
||||
date = datetime.datetime(d.year, d.month, d.day, d.hour, d.minute, d.second, (milliseconds % 1000) * 1000)
|
||||
except ValueError: # Bad date, just return the milliseconds
|
||||
date = str(milliseconds)
|
||||
return None
|
||||
return date
|
||||
There is no direct way to convert milliseconds since Epoch to Python object
|
||||
So we convert the milliseconds to seconds first as a POSIX timestamp which
|
||||
can be used to get a valid date, and then use the parsed values from that
|
||||
object along with converting mili -> micro seconds in a new date object.'''
|
||||
try:
|
||||
d = datetime.datetime.fromtimestamp(milliseconds / 1000)
|
||||
date = datetime.datetime(d.year, d.month, d.day, d.hour, d.minute, d.second, (milliseconds % 1000) * 1000)
|
||||
except ValueError: # Bad date, just return the milliseconds
|
||||
date = str(milliseconds)
|
||||
return None
|
||||
return date
|
||||
|
||||
def burp_binary_field(self, field, i):
|
||||
'''Strip Burp Suite's binary format characters types from our data.
|
||||
The first character after the leading tag describes the type of the data.'''
|
||||
if len(field) <= i:
|
||||
return None, -1
|
||||
elif field[i] == '\x00': # 4 byte integer value
|
||||
return str(struct.unpack('>I', field[i + 1:i + 5])[0]), 5
|
||||
elif field[i] == '\x01': # Two possible unsigned long long types
|
||||
if field[i + 1] == '\x00': # (64bit) 8 Byte Java Date
|
||||
ms = struct.unpack('>Q', field[i + 1:i + 9])[0]
|
||||
date = self.milliseconds_to_date(ms)
|
||||
value = date.ctime() if date else 0 # Use the ctime string format for date
|
||||
else: # Serial Number only used ocasionally in Burp
|
||||
value = str(struct.unpack('>Q', field[i + 1:i + 9])[0])
|
||||
return value, 9
|
||||
elif field[i] == '\x02': # Boolean Object True/False
|
||||
return str(struct.unpack('?', field[i + 1:i + 2])[0]), 2
|
||||
elif field[i] == '\x03' or field[i] == '\x04': # 4 byte length + string
|
||||
length = struct.unpack('>I', field[i + 1:i + 5])[0]
|
||||
# print "Saw string of length", length, "at", i + 5, i + 5+length
|
||||
value = field[i + 5:i + 5 + length]
|
||||
if '<' in value or '>' in value or '&' in value: # Sanatize HTML w/CDATA
|
||||
value = '<![CDATA[' + value.replace(']]>', ']]><![CDATA[') + ']]>'
|
||||
value = ''.join(c for c in value if c in nvprint) # Remove nonprintables
|
||||
return value, 5 + length # ** TODO: Verify length by matching end tag **
|
||||
print("Unknown binary format", repr(field[i]))
|
||||
'''Strip Burp Suite's binary format characters types from our data.
|
||||
The first character after the leading tag describes the type of the data.'''
|
||||
if len(field) <= i:
|
||||
return None, -1
|
||||
elif field[i] == '\x00': # 4 byte integer value
|
||||
return str(struct.unpack('>I', field[i + 1:i + 5])[0]), 5
|
||||
elif field[i] == '\x01': # Two possible unsigned long long types
|
||||
if field[i + 1] == '\x00': # (64bit) 8 Byte Java Date
|
||||
ms = struct.unpack('>Q', field[i + 1:i + 9])[0]
|
||||
date = self.milliseconds_to_date(ms)
|
||||
value = date.ctime() if date else 0 # Use the ctime string format for date
|
||||
else: # Serial Number only used ocasionally in Burp
|
||||
value = str(struct.unpack('>Q', field[i + 1:i + 9])[0])
|
||||
return value, 9
|
||||
elif field[i] == '\x02': # Boolean Object True/False
|
||||
return str(struct.unpack('?', field[i + 1:i + 2])[0]), 2
|
||||
elif field[i] == '\x03' or field[i] == '\x04': # 4 byte length + string
|
||||
length = struct.unpack('>I', field[i + 1:i + 5])[0]
|
||||
# print "Saw string of length", length, "at", i + 5, i + 5+length
|
||||
value = field[i + 5:i + 5 + length]
|
||||
if '<' in value or '>' in value or '&' in value: # Sanatize HTML w/CDATA
|
||||
value = '<![CDATA[' + value.replace(']]>', ']]><![CDATA[') + ']]>'
|
||||
value = ''.join(c for c in value if c in nvprint) # Remove nonprintables
|
||||
return value, 5 + length # ** TODO: Verify length by matching end tag **
|
||||
print("Unknown binary format", repr(field[i]))
|
||||
return None, -1
|
||||
|
||||
def strip_cdata(self, data):
|
||||
if data.startswith('<![CDATA['):
|
||||
@ -125,41 +129,41 @@ class burpstate(BasePayload):
|
||||
return data
|
||||
|
||||
def burp_to_xml(self, filename):
|
||||
'''Unzip Burp's file, remove non-printable characters, CDATA any HTML,
|
||||
include a valid XML header and trailer, and return a valid XML string.'''
|
||||
'''Unzip Burp's file, remove non-printable characters, CDATA any HTML,
|
||||
include a valid XML header and trailer, and return a valid XML string.'''
|
||||
|
||||
z = zipfile.ZipFile(self.find_file(filename)) # Open Burp's zip file
|
||||
burp = z.read('burp', 'rb') # Read-in the main burp file
|
||||
m = TAG.match(burp, 0) # Match a tag at the start of the string
|
||||
while m:
|
||||
index = m.end()
|
||||
etag = m.group().replace('<', '</') # Matching tag
|
||||
z = zipfile.ZipFile(self.find_file(filename)) # Open Burp's zip file
|
||||
burp = z.read('burp', 'rb') # Read-in the main burp file
|
||||
m = TAG.match(burp, 0) # Match a tag at the start of the string
|
||||
while m:
|
||||
index = m.end()
|
||||
etag = m.group().replace('<', '</') # Matching tag
|
||||
|
||||
m = TAG.match(burp, index) # Attempt to get the next tag
|
||||
if not m: # Data folows
|
||||
# Read the type of data using Burp's binary data headers
|
||||
value, length = self.burp_binary_field(burp, index)
|
||||
if value is None:
|
||||
break
|
||||
m = TAG.match(burp, index) # Attempt to get the next tag
|
||||
if not m: # Data folows
|
||||
# Read the type of data using Burp's binary data headers
|
||||
value, length = self.burp_binary_field(burp, index)
|
||||
if value is None:
|
||||
break
|
||||
|
||||
index += length + len(etag) # Point our index to the next tag
|
||||
m = TAG.match(burp, index) # And retrieve it
|
||||
index += length + len(etag) # Point our index to the next tag
|
||||
m = TAG.match(burp, index) # And retrieve it
|
||||
|
||||
if self.params["checkversion"] and etag == "</version>" and value not in ["65", "67"]:
|
||||
raise FuzzExceptBadFile("Unknown burp log version %s" % value)
|
||||
if self.params["checkversion"] and etag == "</version>" and value not in ["65", "67"]:
|
||||
raise FuzzExceptBadFile("Unknown burp log version %s" % value)
|
||||
|
||||
if etag == "</https>":
|
||||
https_tag = value == "True"
|
||||
if etag == "</https>":
|
||||
https_tag = value == "True"
|
||||
|
||||
if etag in self.request_tags:
|
||||
raw_request = self.strip_cdata(value)
|
||||
if etag in self.request_tags:
|
||||
raw_request = self.strip_cdata(value)
|
||||
|
||||
if etag in self.response_tags:
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_request, "http" if not https_tag else "https", self.strip_cdata(value))
|
||||
frr = FuzzResult(history=fr)
|
||||
if etag in self.response_tags:
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_request, "http" if not https_tag else "https", self.strip_cdata(value))
|
||||
frr = FuzzResult(history=fr)
|
||||
|
||||
raw_request = ""
|
||||
https_tag = ""
|
||||
raw_request = ""
|
||||
https_tag = ""
|
||||
|
||||
yield frr.update()
|
||||
yield frr.update()
|
||||
|
@ -1,7 +1,7 @@
|
||||
from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.exception import FuzzExceptBadFile
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
from wfuzz.utils import open_file_detect_encoding
|
||||
from wfuzz.utils import FileDetOpener
|
||||
|
||||
|
||||
@moduleman_plugin
|
||||
@ -18,6 +18,8 @@ class file(BasePayload):
|
||||
|
||||
parameters = (
|
||||
("fn", "", True, "Filename of a valid dictionary"),
|
||||
("count", 'True', False, "Indicates if the number of words in the file should be counted."),
|
||||
("encoding", 'Auto', False, "Indicates the file encoding."),
|
||||
)
|
||||
|
||||
default_parameter = "fn"
|
||||
@ -26,23 +28,27 @@ class file(BasePayload):
|
||||
BasePayload.__init__(self, params)
|
||||
|
||||
try:
|
||||
self.f = open_file_detect_encoding(self.find_file(self.params["fn"]))
|
||||
encoding = self.params['encoding'] if self.params['encoding'].lower() != 'auto' else None
|
||||
self.f = FileDetOpener(self.find_file(self.params["fn"]), encoding)
|
||||
except IOError as e:
|
||||
raise FuzzExceptBadFile("Error opening file. %s" % str(e))
|
||||
|
||||
self.__count = None
|
||||
|
||||
def __next__(self):
|
||||
line = self.f.readline()
|
||||
line = next(self.f)
|
||||
if not line:
|
||||
self.f.close()
|
||||
raise StopIteration
|
||||
return line.strip()
|
||||
|
||||
def count(self):
|
||||
if self.params["count"].lower() == 'false':
|
||||
return -1
|
||||
|
||||
if self.__count is None:
|
||||
self.__count = len(self.f.readlines())
|
||||
self.f.seek(0)
|
||||
self.__count = len(list(self.f))
|
||||
self.f.reset()
|
||||
|
||||
return self.__count
|
||||
|
||||
|
@ -45,10 +45,10 @@ class names(BasePayload):
|
||||
str3 = ""
|
||||
str4 = ""
|
||||
for i in range(0, len(parts) - 1):
|
||||
str1 = str1 + parts[i] + "."
|
||||
str2 = str2 + parts[i]
|
||||
str3 = str3 + parts[i][0] + "."
|
||||
str4 = str4 + parts[i][0]
|
||||
str1 = str1 + parts[i] + "."
|
||||
str2 = str2 + parts[i]
|
||||
str3 = str3 + parts[i][0] + "."
|
||||
str4 = str4 + parts[i][0]
|
||||
str5 = str1 + parts[-1]
|
||||
str6 = str2 + parts[-1]
|
||||
str7 = str4 + parts[-1]
|
||||
|
55
src/wfuzz/plugins/payloads/shodanp.py
Normal file
55
src/wfuzz/plugins/payloads/shodanp.py
Normal file
@ -0,0 +1,55 @@
|
||||
from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.plugin_api.payloadtools import ShodanIter
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
|
||||
|
||||
@moduleman_plugin
|
||||
class shodanp(BasePayload):
|
||||
name = "shodanp"
|
||||
author = ("Xavi Mendez (@xmendez)",)
|
||||
version = "0.1"
|
||||
description = (
|
||||
"Queries the Shodan API",
|
||||
)
|
||||
|
||||
summary = "Returns URLs of a given Shodan API search (needs api key)."
|
||||
category = ["default"]
|
||||
priority = 99
|
||||
|
||||
parameters = (
|
||||
("search", "", True, "Shodan search string."),
|
||||
("page", "0", False, "Offset page, starting at zero."),
|
||||
("limit", "0", False, "Number of pages (1 query credit = 100 results). Zero for all."),
|
||||
)
|
||||
|
||||
default_parameter = "search"
|
||||
|
||||
def __init__(self, params):
|
||||
BasePayload.__init__(self, params)
|
||||
|
||||
search = params["search"]
|
||||
page = int(params["page"])
|
||||
limit = int(params["limit"])
|
||||
|
||||
self._it = ShodanIter(search, page, limit)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def count(self):
|
||||
return -1
|
||||
|
||||
def close(self):
|
||||
self._it._stop()
|
||||
|
||||
def __next__(self):
|
||||
match = next(self._it)
|
||||
|
||||
port = match['port']
|
||||
scheme = 'https' if 'ssl' in match or port == 443 else 'http'
|
||||
|
||||
if match['hostnames']:
|
||||
for hostname in match['hostnames']:
|
||||
return "{}://{}:{}".format(scheme, hostname, port)
|
||||
else:
|
||||
return "{}://{}:{}".format(scheme, match['ip_str'], port)
|
@ -5,6 +5,7 @@ from wfuzz.externals.moduleman.plugin import moduleman_plugin
|
||||
from wfuzz.exception import FuzzExceptBadFile
|
||||
from wfuzz.fuzzobjects import FuzzResult
|
||||
from wfuzz.plugin_api.base import BasePayload
|
||||
from wfuzz.utils import rgetattr
|
||||
|
||||
|
||||
@moduleman_plugin
|
||||
@ -45,7 +46,7 @@ class wfuzzp(BasePayload):
|
||||
def __next__(self):
|
||||
next_item = next(self._it)
|
||||
|
||||
return next_item if not self.attr else next_item.get_field(self.attr)
|
||||
return next_item if not self.attr else rgetattr(next_item, self.attr)
|
||||
|
||||
def _gen_wfuzz(self, output_fn):
|
||||
try:
|
||||
|
@ -234,7 +234,7 @@ class raw(BasePrinter):
|
||||
self.f.write(" %4d L\t %5d W\t %5d Ch %20.20s %51.51s \"%s\"\n" % (res.lines, res.words, res.chars, server[:17], location[:48], res.description))
|
||||
|
||||
for i in res.plugins_res:
|
||||
self.f.write(" |_ %s\n" % i.issue)
|
||||
self.f.write(" |_ %s\n" % i.issue)
|
||||
|
||||
def _print(self, res):
|
||||
if res.exception:
|
||||
@ -245,7 +245,7 @@ class raw(BasePrinter):
|
||||
self.f.write(" %4d L\t %5d W\t %5d Ch\t \"%s\"\n" % (res.lines, res.words, res.chars, res.description))
|
||||
|
||||
for i in res.plugins_res:
|
||||
self.f.write(" |_ %s\n" % i.issue)
|
||||
self.f.write(" |_ %s\n" % i.issue)
|
||||
|
||||
def result(self, res):
|
||||
if res.type == FuzzResult.result:
|
||||
|
@ -1,18 +1,23 @@
|
||||
import re
|
||||
import sys
|
||||
import getopt
|
||||
from collections import defaultdict
|
||||
|
||||
from wfuzz.utils import allowed_fields, get_path
|
||||
from wfuzz.filter import PYPARSING
|
||||
from wfuzz.facade import Facade
|
||||
from wfuzz.options import FuzzSession
|
||||
from wfuzz.exception import FuzzException, FuzzExceptBadOptions, FuzzExceptBadInstall
|
||||
from .common import help_banner
|
||||
from .common import help_banner, exec_banner
|
||||
from .common import usage
|
||||
from .common import brief_usage
|
||||
from .common import verbose_usage
|
||||
from wfuzz import __version__ as version
|
||||
from .output import table_print
|
||||
|
||||
short_opts = "hLAZX:vcb:e:R:d:z:r:f:t:w:V:H:m:f:o:s:p:w:u:"
|
||||
long_opts = ['efield=', 'no-cache', 'ee=', 'zE=', 'zD=', 'field=', 'ip=', 'filter-help', 'AAA', 'AA', 'slice=', 'zP=', 'oF=', 'recipe=', 'dump-recipe=', 'req-delay=', 'conn-delay=', 'sc=', 'sh=', 'sl=', 'sw=', 'ss=', 'hc=', 'hh=', 'hl=', 'hw=', 'hs=', 'ntlm=', 'basic=', 'digest=', 'follow', 'script-help=', 'script=', 'script-args=', 'prefilter=', 'filter=', 'interact', 'help', 'version', 'dry-run', 'prev']
|
||||
|
||||
|
||||
class CLParser:
|
||||
def __init__(self, argv):
|
||||
@ -35,6 +40,9 @@ class CLParser:
|
||||
table_print([x[cols:] for x in Facade().proxy(registrant).get_plugins_ext(category)])
|
||||
sys.exit(0)
|
||||
|
||||
def show_plugins_names(self, registrant):
|
||||
print("\n".join(Facade().proxy(registrant).get_plugins_names("$all$")))
|
||||
|
||||
def show_plugin_ext_help(self, registrant, category="$all$"):
|
||||
for p in Facade().proxy(registrant).get_plugins(category):
|
||||
print("Name: %s %s" % (p.name, p.version))
|
||||
@ -54,12 +62,12 @@ class CLParser:
|
||||
def parse_cl(self):
|
||||
# Usage and command line help
|
||||
try:
|
||||
opts, args = getopt.getopt(self.argv[1:], "hLAZX:vcb:e:R:d:z:r:f:t:w:V:H:m:f:o:s:p:w:u:", ['AAA', 'AA', 'slice=', 'zP=', 'oF=', 'recipe=', 'dump-recipe=', 'req-delay=', 'conn-delay=', 'sc=', 'sh=', 'sl=', 'sw=', 'ss=', 'hc=', 'hh=', 'hl=', 'hw=', 'hs=', 'ntlm=', 'basic=', 'digest=', 'follow', 'script-help=', 'script=', 'script-args=', 'prefilter=', 'filter=', 'interact', 'help', 'version', 'dry-run', 'prev'])
|
||||
opts, args = getopt.getopt(self.argv[1:], short_opts, long_opts)
|
||||
optsd = defaultdict(list)
|
||||
|
||||
payload_cache = {}
|
||||
for i, j in opts:
|
||||
if i in ["-z", "--zP", "--slice", "-w"]:
|
||||
if i in ["-z", "--zP", "--slice", "-w", "--zD", "--zE"]:
|
||||
if i in ["-z", "-w"]:
|
||||
if payload_cache:
|
||||
optsd["payload"].append(payload_cache)
|
||||
@ -93,9 +101,6 @@ class CLParser:
|
||||
|
||||
cli_url = optsd["-u"][0]
|
||||
|
||||
if url == "FUZZ" or cli_url == "FUZZ":
|
||||
options["seed_payload"] = True
|
||||
|
||||
if cli_url:
|
||||
url = cli_url
|
||||
|
||||
@ -104,7 +109,8 @@ class CLParser:
|
||||
|
||||
# parse options from recipe first
|
||||
if "--recipe" in optsd:
|
||||
options.import_from_file(optsd["--recipe"][0])
|
||||
for recipe in optsd["--recipe"]:
|
||||
options.import_from_file(recipe)
|
||||
|
||||
# command line has priority over recipe
|
||||
self._parse_options(optsd, options)
|
||||
@ -115,12 +121,14 @@ class CLParser:
|
||||
self._parse_scripts(optsd, options)
|
||||
|
||||
if "--dump-recipe" in optsd:
|
||||
error = options.validate()
|
||||
if error:
|
||||
raise FuzzExceptBadOptions(error)
|
||||
print(exec_banner)
|
||||
|
||||
for error_msg in options.validate():
|
||||
print("WARNING: {}".format(error_msg))
|
||||
|
||||
print("")
|
||||
|
||||
options.export_to_file(optsd["--dump-recipe"][0])
|
||||
print(help_banner)
|
||||
print("Recipe written to %s." % (optsd["--dump-recipe"][0],))
|
||||
sys.exit(0)
|
||||
|
||||
@ -148,6 +156,15 @@ class CLParser:
|
||||
self.show_verbose_usage()
|
||||
sys.exit(0)
|
||||
|
||||
if "--filter-help" in optsd:
|
||||
text_regex = re.compile("Filter Language\n---------------\n\n(.*?)Filtering results", re.MULTILINE | re.DOTALL)
|
||||
try:
|
||||
print(text_regex.search(open(get_path("../docs/user/advanced.rst")).read()).group(1))
|
||||
except IOError:
|
||||
print(text_regex.search(open(get_path("../../docs/user/advanced.rst")).read()).group(1))
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
# Extensions help
|
||||
if "--script-help" in optsd:
|
||||
script_string = optsd["--script-help"][0]
|
||||
@ -156,6 +173,30 @@ class CLParser:
|
||||
|
||||
self.show_plugin_ext_help("scripts", category=script_string)
|
||||
|
||||
if "--ee" in optsd:
|
||||
if "payloads" in optsd["--ee"]:
|
||||
self.show_plugins_names("payloads")
|
||||
elif "encoders" in optsd["--ee"]:
|
||||
self.show_plugins_names("encoders")
|
||||
elif "iterators" in optsd["--ee"]:
|
||||
self.show_plugins_names("iterators")
|
||||
elif "printers" in optsd["--ee"]:
|
||||
self.show_plugins_names("printers")
|
||||
elif "scripts" in optsd["--ee"]:
|
||||
self.show_plugins_names("scripts")
|
||||
elif "fields" in optsd["--ee"]:
|
||||
print('\n'.join(allowed_fields))
|
||||
elif "files" in optsd["--ee"]:
|
||||
print('\n'.join(Facade().sett.get('general', 'lookup_dirs').split(",")))
|
||||
elif "registrants" in optsd["--ee"]:
|
||||
print('\n'.join(Facade().get_registrants()))
|
||||
elif "options" in optsd["--ee"]:
|
||||
print("\n".join(["-{}".format(opt) for opt in short_opts.replace(":", "")]))
|
||||
print("\n".join(["--{}".format(opt.replace("=", "")) for opt in long_opts]))
|
||||
else:
|
||||
raise FuzzExceptBadOptions("Unknown category. Valid values are: payloads, encoders, iterators, printers or scripts.")
|
||||
sys.exit(0)
|
||||
|
||||
if "-e" in optsd:
|
||||
if "payloads" in optsd["-e"]:
|
||||
self.show_plugins_help("payloads")
|
||||
@ -186,7 +227,7 @@ class CLParser:
|
||||
|
||||
def _check_options(self, optsd):
|
||||
# Check for repeated flags
|
||||
opt_list = [i for i in optsd if i not in ["-z", "--zP", "--slice", "payload", "-w", "-b", "-H", "-p"] and len(optsd[i]) > 1]
|
||||
opt_list = [i for i in optsd if i not in ["--recipe", "-z", "--zP", "--zD", "--slice", "payload", "-w", "-b", "-H", "-p"] and len(optsd[i]) > 1]
|
||||
if opt_list:
|
||||
raise FuzzExceptBadOptions("Bad usage: Only one %s option could be specified at the same time." % " ".join(opt_list))
|
||||
|
||||
@ -274,12 +315,23 @@ class CLParser:
|
||||
else:
|
||||
name = vals[0]
|
||||
|
||||
default_param_cli = payload["--zD"] if "--zD" in payload else None
|
||||
if default_param_cli and default_param:
|
||||
raise FuzzExceptBadOptions("--zD and -z parameters are exclusive.")
|
||||
elif default_param_cli:
|
||||
default_param = default_param_cli
|
||||
|
||||
if extraparams:
|
||||
params = dict([x.split("=", 1) for x in extraparams.split(",")])
|
||||
if default_param:
|
||||
params['default'] = default_param
|
||||
|
||||
encoders = vals[2] if len(vals) == 3 else None
|
||||
encoders_cli = payload["--zE"] if "--zE" in payload else None
|
||||
if encoders_cli and encoders:
|
||||
raise FuzzExceptBadOptions("--zE and -z encoders are exclusive.")
|
||||
elif encoders_cli:
|
||||
encoders = encoders_cli
|
||||
|
||||
if encoders:
|
||||
params['encoder'] = encoders.split("-")
|
||||
@ -329,6 +381,25 @@ class CLParser:
|
||||
if "--follow" in optsd or "-L" in optsd:
|
||||
options['follow'] = True
|
||||
|
||||
if "--field" in optsd:
|
||||
options['description'] = optsd["--field"][0]
|
||||
options["show_field"] = True
|
||||
elif "--efield" in optsd:
|
||||
options['description'] = optsd["--efield"][0]
|
||||
options["show_field"] = False
|
||||
else:
|
||||
options["show_field"] = None
|
||||
|
||||
if "--ip" in optsd:
|
||||
splitted = optsd["--ip"][0].partition(":")
|
||||
if not splitted[0]:
|
||||
raise FuzzExceptBadOptions("An IP must be specified")
|
||||
|
||||
options["connect_to_ip"] = {
|
||||
"ip": splitted[0],
|
||||
"port": splitted[2] if splitted[2] else "80"
|
||||
}
|
||||
|
||||
if "-d" in optsd:
|
||||
options['postdata'] = optsd["-d"][0]
|
||||
|
||||
@ -364,7 +435,7 @@ class CLParser:
|
||||
vals = p.split(":")
|
||||
|
||||
if len(vals) == 2:
|
||||
proxy.append((vals[0], vals[1], "HTML"))
|
||||
proxy.append((vals[0], vals[1], "HTTP"))
|
||||
elif len(vals) == 3:
|
||||
proxy.append((vals[0], vals[1], vals[2]))
|
||||
else:
|
||||
@ -410,6 +481,9 @@ class CLParser:
|
||||
if "--prev" in optsd:
|
||||
options["previous"] = True
|
||||
|
||||
if "--no-cache" in optsd:
|
||||
options["no_cache"] = True
|
||||
|
||||
if "-c" in optsd:
|
||||
options["colour"] = True
|
||||
|
||||
|
@ -80,10 +80,11 @@ usage = '''%s\n\nOptions:
|
||||
verbose_usage = '''%s\n\nOptions:
|
||||
\t-h/--help : This help
|
||||
\t--help : Advanced help
|
||||
\t--filter-help : Filter language specification
|
||||
\t--version : Wfuzz version details
|
||||
\t-e <type> : List of available encoders/payloads/iterators/printers/scripts
|
||||
\t
|
||||
\t--recipe <filename> : Reads options from a recipe
|
||||
\t--recipe <filename> : Reads options from a recipe. Repeat for various recipes.
|
||||
\t--dump-recipe <filename> : Prints current options as a recipe
|
||||
\t--oF <filename> : Saves fuzz results to a file. These can be consumed later using the wfuzz payload.
|
||||
\t
|
||||
@ -94,6 +95,8 @@ verbose_usage = '''%s\n\nOptions:
|
||||
\t--interact : (beta) If selected,all key presses are captured. This allows you to interact with the program.
|
||||
\t--dry-run : Print the results of applying the requests without actually making any HTTP request.
|
||||
\t--prev : Print the previous HTTP requests (only when using payloads generating fuzzresults)
|
||||
\t--efield <expr> : Show the specified language expression together with the current payload
|
||||
\t--field <expr> : Do not show the payload but only the specified language expression
|
||||
\t
|
||||
\t-p addr : Use Proxy in format ip:port:type. Repeat option for using various proxies.
|
||||
\t Where type could be SOCKS4,SOCKS5 or HTTP if omitted.
|
||||
@ -102,11 +105,13 @@ verbose_usage = '''%s\n\nOptions:
|
||||
\t-s N : Specify time delay between requests (0 default)
|
||||
\t-R depth : Recursive path discovery being depth the maximum recursion level.
|
||||
\t-L,--follow : Follow HTTP redirections
|
||||
\t--ip host:port : Specify an IP to connect to instead of the URL's host in the format ip:port
|
||||
\t-Z : Scan mode (Connection errors will be ignored).
|
||||
\t--req-delay N : Sets the maximum time in seconds the request is allowed to take (CURLOPT_TIMEOUT). Default 90.
|
||||
\t--conn-delay N : Sets the maximum time in seconds the connection phase to the server to take (CURLOPT_CONNECTTIMEOUT). Default 90.
|
||||
\t
|
||||
\t-A, --AA, --AAA : Alias for --script=default,verbose,discovery -v -c
|
||||
\t--no-cache : Disable plugins cache. Every request will be scanned.
|
||||
\t--script= : Equivalent to --script=default
|
||||
\t--script=<plugins> : Runs script's scan. <plugins> is a comma separated list of plugin-files or plugin-categories
|
||||
\t--script-help=<plugins> : Show help about scripts.
|
||||
@ -119,6 +124,8 @@ verbose_usage = '''%s\n\nOptions:
|
||||
\t Encoders category can be used. ie. url
|
||||
\t Use help as a payload to show payload plugin's details (you can filter using --slice)
|
||||
\t--zP <params> : Arguments for the specified payload (it must be preceded by -z or -w).
|
||||
\t--zD <default> : Default parameter for the specified payload (it must be preceded by -z or -w).
|
||||
\t--zE <encoder> : Encoder for the specified payload (it must be preceded by -z or -w).
|
||||
\t--slice <filter> : Filter payload\'s elements using the specified expression. It must be preceded by -z.
|
||||
\t-w wordlist : Specify a wordlist file (alias for -z file,wordlist).
|
||||
\t-V alltype : All parameters bruteforcing (allvars and allpost). No need for FUZZ keyword.
|
||||
@ -167,42 +174,40 @@ class Term:
|
||||
bgCyan = "\x1b[46m"
|
||||
bgWhite = "\x1b[47m"
|
||||
|
||||
noColour = ""
|
||||
|
||||
def get_colour(self, code):
|
||||
cc = ""
|
||||
|
||||
if code == 0:
|
||||
cc = Term.fgYellow
|
||||
wc = 12
|
||||
elif code >= 400 and code < 500:
|
||||
cc = Term.fgRed
|
||||
wc = 12
|
||||
elif code >= 300 and code < 400:
|
||||
cc = Term.fgBlue
|
||||
wc = 11
|
||||
elif code >= 200 and code < 300:
|
||||
cc = Term.fgGreen
|
||||
wc = 10
|
||||
else:
|
||||
cc = Term.fgMagenta
|
||||
wc = 1
|
||||
|
||||
return (cc, wc)
|
||||
return cc
|
||||
|
||||
def delete_line(self):
|
||||
sys.stdout.write("\r" + Term.delete)
|
||||
|
||||
def set_colour(self, colour):
|
||||
cc, wc = colour
|
||||
|
||||
sys.stdout.write(cc)
|
||||
sys.stdout.write(colour)
|
||||
|
||||
def write(self, string, colour):
|
||||
cc, wc = colour
|
||||
|
||||
sys.stdout.write(cc + string + Term.reset)
|
||||
sys.stdout.write(colour + string + Term.reset)
|
||||
|
||||
def go_up(self, lines):
|
||||
sys.stdout.write("\033[" + str(lines) + "A")
|
||||
|
||||
def erase_lines(self, lines):
|
||||
for i in range(lines):
|
||||
if lines <= 1:
|
||||
sys.stdout.write("\r" + Term.delete)
|
||||
sys.stdout.write(Term.oneup)
|
||||
else:
|
||||
for i in range(lines - 1):
|
||||
sys.stdout.write("\r" + Term.delete)
|
||||
sys.stdout.write(Term.oneup)
|
||||
|
@ -1,11 +1,16 @@
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
import threading
|
||||
try:
|
||||
from itertools import zip_longest
|
||||
except ImportError:
|
||||
from itertools import izip_longest as zip_longest
|
||||
|
||||
from wfuzz.fuzzobjects import FuzzResult
|
||||
|
||||
from .common import exec_banner, Term
|
||||
from .getch import _Getch
|
||||
from .output import getTerminalSize, wrap_always
|
||||
|
||||
usage = '''\r\n
|
||||
Interactive keyboard commands:\r\n
|
||||
@ -136,22 +141,20 @@ class Controller:
|
||||
|
||||
|
||||
class View:
|
||||
widths = [10, 8, 6, 6, 9, getTerminalSize()[0] - 65]
|
||||
verbose_widths = [10, 10, 8, 6, 6, 9, 30, 30, getTerminalSize()[0] - 145]
|
||||
|
||||
def __init__(self, session_options):
|
||||
self.colour = session_options["colour"]
|
||||
self.verbose = session_options["verbose"]
|
||||
self.previous = session_options["previous"]
|
||||
self.term = Term()
|
||||
self.printed_lines = 1
|
||||
|
||||
def _print_verbose(self, res, print_nres=True):
|
||||
txt_colour = ("", 8) if not res.is_baseline or not self.colour else (Term.fgCyan, 8)
|
||||
txt_colour = Term.noColour if not res.is_baseline or not self.colour else Term.fgCyan
|
||||
if self.previous and self.colour and not print_nres:
|
||||
txt_colour = Term.fgCyan, 8
|
||||
|
||||
self.term.set_colour(txt_colour)
|
||||
|
||||
if print_nres:
|
||||
self.term.write("%05d: " % (res.nres), txt_colour)
|
||||
self.term.write("%.3fs C=" % (res.timer), txt_colour)
|
||||
txt_colour = Term.fgCyan
|
||||
|
||||
location = ""
|
||||
if 'Location' in res.history.headers.response:
|
||||
@ -163,54 +166,101 @@ class View:
|
||||
if 'Server' in res.history.headers.response:
|
||||
server = res.history.headers.response['Server']
|
||||
|
||||
if res.exception:
|
||||
self.term.write("XXX", self.term.get_colour(res.code) if self.colour else ("", 8))
|
||||
else:
|
||||
self.term.write("%03d" % (res.code), self.term.get_colour(res.code) if self.colour else ("", 8))
|
||||
|
||||
self.term.write(" %4d L\t %5d W\t %5d Ch %20.20s %51.51s \"%s\"" % (res.lines, res.words, res.chars, server[:17], location[:48], res.description), txt_colour)
|
||||
|
||||
sys.stdout.flush()
|
||||
|
||||
def _print(self, res, print_nres=True):
|
||||
txt_colour = ("", 8) if not res.is_baseline or not self.colour else (Term.fgCyan, 8)
|
||||
if self.previous and self.colour and not print_nres:
|
||||
txt_colour = Term.fgCyan, 8
|
||||
rows = [
|
||||
("%09d:" % res.nres if print_nres else " |_", txt_colour),
|
||||
("%.3fs" % res.timer, txt_colour),
|
||||
("%s" % "XXX" if res.exception else str(res.code), self.term.get_colour(res.code) if self.colour else txt_colour),
|
||||
("%d L" % res.lines, txt_colour),
|
||||
("%d W" % res.words, txt_colour),
|
||||
("%d Ch" % res.chars, txt_colour),
|
||||
(server, txt_colour),
|
||||
(location, txt_colour),
|
||||
("\"%s\"" % res.description, txt_colour),
|
||||
]
|
||||
|
||||
self.term.set_colour(txt_colour)
|
||||
self.printed_lines = self._print_line(rows, self.verbose_widths)
|
||||
|
||||
if print_nres:
|
||||
self.term.write("%06d: C=" % (res.nres), txt_colour)
|
||||
else:
|
||||
self.term.write(" C=", txt_colour)
|
||||
def _print_header(self, rows, maxWidths):
|
||||
print("=" * (3 * len(maxWidths) + sum(maxWidths[:-1]) + 10))
|
||||
self._print_line(rows, maxWidths)
|
||||
sys.stdout.write("\n\r")
|
||||
print("=" * (3 * len(maxWidths) + sum(maxWidths[:-1]) + 10))
|
||||
print("")
|
||||
|
||||
if res.exception:
|
||||
self.term.write("XXX", self.term.get_colour(res.code) if self.colour else ("", 8))
|
||||
else:
|
||||
self.term.write("%03d" % (res.code), self.term.get_colour(res.code) if self.colour else ("", 8))
|
||||
self.term.write(" %4d L\t %5d W\t %5d Ch\t \"%s\"" % (res.lines, res.words, res.chars, res.description), txt_colour)
|
||||
def _print_line(self, rows, maxWidths):
|
||||
def wrap_row(rows, maxWidths):
|
||||
newRows = [wrap_always(item[0], width).split('\n') for item, width in zip(rows, maxWidths)]
|
||||
return [[substr or '' for substr in item] for item in zip_longest(*newRows)]
|
||||
|
||||
new_rows = wrap_row(rows, maxWidths)
|
||||
|
||||
for row in new_rows[:-1]:
|
||||
sys.stdout.write(" ".join([colour + str.ljust(str(item), width) + Term.reset for (item, width, colour) in zip(row, maxWidths, [colour[1] for colour in rows])]))
|
||||
sys.stdout.write("\n\r")
|
||||
|
||||
for row in new_rows[-1:]:
|
||||
sys.stdout.write(" ".join([colour + str.ljust(str(item), width) + Term.reset for (item, width, colour) in zip(row, maxWidths, [colour[1] for colour in rows])]))
|
||||
|
||||
sys.stdout.flush()
|
||||
return len(new_rows)
|
||||
|
||||
def _print(self, res, print_nres=True):
|
||||
txt_colour = Term.noColour if not res.is_baseline or not self.colour else Term.fgCyan
|
||||
if self.previous and self.colour and not print_nres:
|
||||
txt_colour = Term.fgCyan
|
||||
|
||||
rows = [
|
||||
("%09d:" % res.nres if print_nres else " |_", txt_colour),
|
||||
("%s" % "XXX" if res.exception else str(res.code), self.term.get_colour(res.code) if self.colour else txt_colour),
|
||||
("%d L" % res.lines, txt_colour),
|
||||
("%d W" % res.words, txt_colour),
|
||||
("%d Ch" % res.chars, txt_colour),
|
||||
("\"%s\"" % res.description, txt_colour),
|
||||
]
|
||||
|
||||
self.term.set_colour(txt_colour)
|
||||
self.printed_lines = self._print_line(rows, self.widths)
|
||||
|
||||
def header(self, summary):
|
||||
print(exec_banner)
|
||||
print("Target: %s\r" % summary.url)
|
||||
if summary.total_req > 0:
|
||||
print("Total requests: %d\r\n" % summary.total_req)
|
||||
else:
|
||||
if summary:
|
||||
print("Target: %s\r" % summary.url)
|
||||
if summary.total_req > 0:
|
||||
print("Total requests: %d\r\n" % summary.total_req)
|
||||
else:
|
||||
print("Total requests: <<unknown>>\r\n")
|
||||
|
||||
if self.verbose:
|
||||
print("==============================================================================================================================================\r")
|
||||
print("ID C.Time Response Lines Word Chars Server Redirect Payload \r")
|
||||
print("==============================================================================================================================================\r\n")
|
||||
rows = [
|
||||
("ID", Term.noColour),
|
||||
("C.Time", Term.noColour),
|
||||
("Response", Term.noColour),
|
||||
("Lines", Term.noColour),
|
||||
("Word", Term.noColour),
|
||||
("Chars", Term.noColour),
|
||||
("Server", Term.noColour),
|
||||
("Redirect", Term.noColour),
|
||||
("Payload", Term.noColour),
|
||||
]
|
||||
|
||||
widths = self.verbose_widths
|
||||
else:
|
||||
print("==================================================================\r")
|
||||
print("ID Response Lines Word Chars Payload \r")
|
||||
print("==================================================================\r\n")
|
||||
rows = [
|
||||
("ID", Term.noColour),
|
||||
("Response", Term.noColour),
|
||||
("Lines", Term.noColour),
|
||||
("Word", Term.noColour),
|
||||
("Chars", Term.noColour),
|
||||
("Payload", Term.noColour),
|
||||
]
|
||||
|
||||
widths = self.widths
|
||||
|
||||
self._print_header(rows, widths)
|
||||
|
||||
def result(self, res):
|
||||
self.term.delete_line()
|
||||
self.term.erase_lines(self.printed_lines)
|
||||
|
||||
if self.verbose:
|
||||
self._print_verbose(res)
|
||||
@ -218,19 +268,29 @@ class View:
|
||||
self._print(res)
|
||||
|
||||
if res.type == FuzzResult.result:
|
||||
if self.previous and len(res.payload) > 0 and isinstance(res.payload[0], FuzzResult):
|
||||
sys.stdout.write("\n\r |__ ")
|
||||
if self.previous and len(res.payload) > 0 and isinstance(res.payload[0].content, FuzzResult):
|
||||
sys.stdout.write("\n\r")
|
||||
if self.verbose:
|
||||
self._print_verbose(res.payload[0], print_nres=False)
|
||||
self._print_verbose(res.payload[0].content, print_nres=False)
|
||||
else:
|
||||
self._print(res.payload[0], print_nres=False)
|
||||
sys.stdout.write("\n\r")
|
||||
self._print(res.payload[0].content, print_nres=False)
|
||||
|
||||
for i in res.plugins_res:
|
||||
print(" |_ %s\r" % i.issue)
|
||||
if res.plugins_res:
|
||||
sys.stdout.write("\n\r")
|
||||
|
||||
for i in res.plugins_res[:-1]:
|
||||
sys.stdout.write(" |_ %s\r" % i.issue)
|
||||
sys.stdout.write("\n\r")
|
||||
|
||||
for i in res.plugins_res[-1:]:
|
||||
sys.stdout.write(" |_ %s\r" % i.issue)
|
||||
|
||||
for i in range(self.printed_lines):
|
||||
sys.stdout.write("\n\r")
|
||||
|
||||
def footer(self, summary):
|
||||
self.term.delete_line()
|
||||
sys.stdout.write("\r\n")
|
||||
self.term.erase_lines(self.printed_lines + 1)
|
||||
sys.stdout.write("\n\r")
|
||||
sys.stdout.write("\n\r")
|
||||
|
||||
print(summary)
|
||||
|
@ -2,7 +2,57 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import six
|
||||
from threading import Lock
|
||||
import functools
|
||||
|
||||
from chardet.universaldetector import UniversalDetector
|
||||
import chardet
|
||||
from .exception import FuzzExceptInternalError
|
||||
|
||||
allowed_fields = [
|
||||
"description",
|
||||
"nres",
|
||||
"code",
|
||||
"chars",
|
||||
"lines",
|
||||
"words",
|
||||
"md5",
|
||||
"l",
|
||||
"h",
|
||||
"w",
|
||||
"c",
|
||||
"history",
|
||||
"plugins",
|
||||
|
||||
"url",
|
||||
"content",
|
||||
|
||||
"history.url",
|
||||
"history.method",
|
||||
"history.scheme",
|
||||
"history.host",
|
||||
"history.content",
|
||||
"history.raw_content"
|
||||
"history.is_path",
|
||||
"history.pstrip",
|
||||
"history.cookies",
|
||||
"history.headers",
|
||||
"history.params",
|
||||
|
||||
"r",
|
||||
"r.reqtime",
|
||||
"r.url",
|
||||
"r.method",
|
||||
"r.scheme",
|
||||
"r.host",
|
||||
"r.content",
|
||||
"r.raw_content"
|
||||
"r.is_path",
|
||||
"r.pstrip",
|
||||
"r.cookies.",
|
||||
"r.headers.",
|
||||
"r.params.",
|
||||
]
|
||||
|
||||
|
||||
def json_minify(string, strip_space=True):
|
||||
@ -133,6 +183,92 @@ def convert_to_unicode(text):
|
||||
return text
|
||||
|
||||
|
||||
class FileDetOpener:
|
||||
typical_encodings = [
|
||||
'UTF-8',
|
||||
'ISO-8859-1',
|
||||
'Windows-1251',
|
||||
'Shift JIS',
|
||||
'Windows-1252',
|
||||
'GB2312',
|
||||
'EUC-KR',
|
||||
'EUC-JP',
|
||||
'GBK',
|
||||
'ISO-8859-2',
|
||||
'Windows-1250',
|
||||
'ISO-8859-15',
|
||||
'Windows-1256',
|
||||
'ISO-8859-9',
|
||||
'Big5',
|
||||
'Windows-1254',
|
||||
]
|
||||
|
||||
def __init__(self, file_path, encoding=None):
|
||||
self.cache = []
|
||||
self.file_des = open(file_path, mode='rb')
|
||||
self.det_encoding = encoding
|
||||
self.encoding_forced = False
|
||||
|
||||
def close(self):
|
||||
self.file_des.close()
|
||||
|
||||
def reset(self):
|
||||
self.file_des.seek(0)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
decoded_line = None
|
||||
line = None
|
||||
last_error = None
|
||||
|
||||
while decoded_line is None:
|
||||
|
||||
while self.det_encoding is None:
|
||||
detect_encoding = self.detect_encoding().get('encoding', 'utf-8')
|
||||
self.det_encoding = detect_encoding if detect_encoding is not None else 'utf-8'
|
||||
|
||||
if line is None:
|
||||
if self.cache:
|
||||
line = self.cache.pop()
|
||||
else:
|
||||
line = next(self.file_des)
|
||||
if not line:
|
||||
raise StopIteration
|
||||
|
||||
try:
|
||||
decoded_line = line.decode(self.det_encoding)
|
||||
except UnicodeDecodeError:
|
||||
if last_error is not None and last_error:
|
||||
self.det_encoding = last_error.pop()
|
||||
elif last_error is None and not self.encoding_forced:
|
||||
last_error = list(reversed(self.typical_encodings))
|
||||
last_error.append(chardet.detect(line).get('encoding'))
|
||||
elif not last_error:
|
||||
raise FuzzExceptInternalError("Unable to decode wordlist file!")
|
||||
|
||||
decoded_line = None
|
||||
|
||||
return decoded_line
|
||||
|
||||
def detect_encoding(self):
|
||||
detector = UniversalDetector()
|
||||
detector.reset()
|
||||
|
||||
for line in self.file_des:
|
||||
detector.feed(line)
|
||||
self.cache.append(line)
|
||||
if detector.done:
|
||||
break
|
||||
|
||||
detector.close()
|
||||
|
||||
return detector.result
|
||||
|
||||
next = __next__ # for Python 2
|
||||
|
||||
|
||||
def open_file_detect_encoding(file_path):
|
||||
def detect_encoding(file_path):
|
||||
detector = UniversalDetector()
|
||||
@ -151,3 +287,125 @@ def open_file_detect_encoding(file_path):
|
||||
return open(file_path, "r", encoding=detect_encoding(file_path).get('encoding', 'utf-8'))
|
||||
else:
|
||||
return open(file_path, "r")
|
||||
|
||||
|
||||
class MyCounter:
|
||||
def __init__(self, count=0):
|
||||
self._count = count
|
||||
self._mutex = Lock()
|
||||
|
||||
def inc(self):
|
||||
return self._operation(1)
|
||||
|
||||
def dec(self):
|
||||
return self._operation(-1)
|
||||
|
||||
def _operation(self, dec):
|
||||
with self._mutex:
|
||||
self._count += dec
|
||||
return self._count
|
||||
|
||||
def __call__(self):
|
||||
with self._mutex:
|
||||
return self._count
|
||||
|
||||
|
||||
def _check_allowed_field(attr):
|
||||
if [field for field in allowed_fields if attr.startswith(field)]:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _get_alias(attr):
|
||||
attr_alias = {
|
||||
'l': 'lines',
|
||||
'h': 'chars',
|
||||
'w': 'words',
|
||||
'c': 'code',
|
||||
'r': 'history',
|
||||
}
|
||||
|
||||
if attr in attr_alias:
|
||||
return attr_alias[attr]
|
||||
|
||||
return attr
|
||||
|
||||
|
||||
def rsetattr(obj, attr, new_val, operation):
|
||||
if not _check_allowed_field(attr):
|
||||
raise AttributeError("Unknown field {}".format(attr))
|
||||
|
||||
pre, _, post = attr.rpartition('.')
|
||||
|
||||
pre_post = None
|
||||
if len(attr.split('.')) > 3:
|
||||
pre_post = post
|
||||
pre, _, post = pre.rpartition('.')
|
||||
|
||||
post = _get_alias(post)
|
||||
|
||||
try:
|
||||
obj_to_set = rgetattr(obj, pre) if pre else obj
|
||||
prev_val = rgetattr(obj, attr)
|
||||
if pre_post is not None:
|
||||
prev_val = DotDict({pre_post: prev_val})
|
||||
|
||||
if operation is not None:
|
||||
val = operation(prev_val, new_val)
|
||||
else:
|
||||
if isinstance(prev_val, DotDict):
|
||||
val = {k: new_val for k, v in prev_val.items()}
|
||||
else:
|
||||
val = new_val
|
||||
|
||||
return setattr(obj_to_set, post, val)
|
||||
except AttributeError:
|
||||
raise AttributeError("rsetattr: Can't set '{}' attribute of {}.".format(post, obj_to_set.__class__))
|
||||
|
||||
|
||||
def rgetattr(obj, attr, *args):
|
||||
def _getattr(obj, attr):
|
||||
attr = _get_alias(attr)
|
||||
try:
|
||||
return getattr(obj, attr, *args)
|
||||
except AttributeError:
|
||||
raise AttributeError("rgetattr: Can't get '{}' attribute from '{}'.".format(attr, obj.__class__))
|
||||
|
||||
if not _check_allowed_field(attr):
|
||||
raise AttributeError("Unknown field {}".format(attr))
|
||||
|
||||
return functools.reduce(_getattr, [obj] + attr.split('.'))
|
||||
|
||||
|
||||
class DotDict(dict):
|
||||
__setattr__ = dict.__setitem__
|
||||
__delattr__ = dict.__delitem__
|
||||
|
||||
def __getattr__(*args):
|
||||
if args[1] not in args[0]:
|
||||
raise KeyError("DotDict: Non-existing field {}".format(args[1]))
|
||||
|
||||
# python 3 val = dict.get(*args, None)
|
||||
val = dict.get(*args)
|
||||
return DotDict(val) if type(val) is dict else val
|
||||
# return DotDict(val) if type(val) is dict else DotDict({args[1]: val})
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, str):
|
||||
return DotDict({k: v + other for k, v in self.items() if v})
|
||||
elif isinstance(other, DotDict):
|
||||
# python 3 return DotDict({**self, **other})
|
||||
new_dic = DotDict(self)
|
||||
new_dic.update(other)
|
||||
return new_dic
|
||||
|
||||
def __radd__(self, other):
|
||||
if isinstance(other, str):
|
||||
return DotDict({k: other + v for k, v in self.items() if v})
|
||||
|
||||
|
||||
def value_in_any_list_item(value, list_obj):
|
||||
if isinstance(list_obj, list):
|
||||
return len([item for item in list_obj if value.lower() in item.lower()]) > 0
|
||||
elif isinstance(list_obj, str):
|
||||
return value.lower() in list_obj.lower()
|
||||
|
@ -9,6 +9,8 @@ from .ui.console.mvc import Controller, KeyPress, View
|
||||
from .ui.console.common import help_banner2
|
||||
from .ui.console.clparser import CLParser
|
||||
|
||||
from .fuzzobjects import FuzzResult
|
||||
|
||||
|
||||
def main():
|
||||
kb = None
|
||||
@ -68,13 +70,17 @@ def main_filter():
|
||||
\n\twfpayload [Options]\n\n
|
||||
\nOptions:\n
|
||||
\t--help : This help
|
||||
\t--slice <filter> : Filter payload\'s elements using the specified expression.
|
||||
\t-v : Verbose output
|
||||
\t-z payload : Specify a payload for each FUZZ keyword used in the form of type,parameters,encoder.
|
||||
\t A list of encoders can be used, ie. md5-sha1. Encoders can be chained, ie. md5@sha1.
|
||||
\t Encoders category can be used. ie. url
|
||||
\t--zD default : Default argument for the specified payload (it must be preceded by -z or -w).
|
||||
\t--zP <params> : Arguments for the specified payload (it must be preceded by -z or -w).
|
||||
\t--slice <filter> : Filter payload\'s elements using the specified expression.
|
||||
\t-w wordlist : Specify a wordlist file (alias for -z file,wordlist).
|
||||
\t-m iterator : Specify an iterator for combining payloads (product by default)
|
||||
\t--field <expr> : Do not show the payload but the specified language expression
|
||||
\t--efield <expr> : Show the specified language expression together with the current payload
|
||||
""")
|
||||
|
||||
from .api import payload
|
||||
@ -82,7 +88,7 @@ def main_filter():
|
||||
import getopt
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "hz:m:w:", ["help", "slice=", "zP="])
|
||||
opts, args = getopt.getopt(sys.argv[1:], "vhz:m:w:", ["field=", "help", "slice=", "zD=", "zP=", "efield="])
|
||||
except getopt.GetoptError as err:
|
||||
print((str(err)))
|
||||
usage()
|
||||
@ -92,22 +98,44 @@ def main_filter():
|
||||
usage()
|
||||
sys.exit()
|
||||
|
||||
field = None
|
||||
raw_output = False
|
||||
|
||||
for o, value in opts:
|
||||
if o in ("-h", "--help"):
|
||||
usage()
|
||||
sys.exit()
|
||||
if o in ("--efield"):
|
||||
field = value
|
||||
if o in ("--field"):
|
||||
field = value
|
||||
raw_output = True
|
||||
|
||||
try:
|
||||
for res in payload(**CLParser(sys.argv).parse_cl()):
|
||||
session_options = CLParser(sys.argv).parse_cl()
|
||||
printer = None
|
||||
|
||||
for res in payload(**session_options):
|
||||
if len(res) > 1:
|
||||
raise FuzzExceptBadOptions("wfpayload can only be used to generate one word dictionaries")
|
||||
else:
|
||||
r = res[0]
|
||||
|
||||
if "FuzzResult" in str(r.__class__):
|
||||
r._description = r.url
|
||||
# TODO: all should be same object type and no need for isinstance
|
||||
if isinstance(r, FuzzResult):
|
||||
if raw_output:
|
||||
print(r.eval(field if field is not None else "url"))
|
||||
else:
|
||||
if printer is None:
|
||||
printer = View(session_options)
|
||||
printer.header(None)
|
||||
|
||||
print(r)
|
||||
if field:
|
||||
r._description = field
|
||||
r._show_field = False
|
||||
printer.result(r)
|
||||
else:
|
||||
print(r)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
@ -1,13 +1,13 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import copy
|
||||
import os
|
||||
import unittest
|
||||
import tempfile
|
||||
|
||||
import wfuzz
|
||||
|
||||
|
||||
LOCAL_DOMAIN = "http://localhost"
|
||||
URL_LOCAL = "%s:8000/dir" % (LOCAL_DOMAIN)
|
||||
HTTPD_PORT = 8000
|
||||
@ -38,10 +38,61 @@ REPLACE_HOSTNAMES = [
|
||||
# conn delays?
|
||||
# script args
|
||||
|
||||
testing_savedsession_tests = [
|
||||
]
|
||||
|
||||
testing_tests = [
|
||||
]
|
||||
|
||||
savedsession_tests = [
|
||||
# parse post params
|
||||
("test_novalue_post_fuzz", "-z list --zD a -u {}/anything -d FUZZ".format(HTTPBIN_URL), "-z wfuzzp --zD $$PREVFILE$$ -u FUZZ --filter r.params.post.a:=1 --field r.params.post.a", ["1"], None),
|
||||
("test_json_post_fuzz2", "-z list --zD anything -u {}/FUZZ -d {{\"a\":\"2\"}} -H Content-Type:application/json".format(HTTPBIN_URL), "-z wfuzzp --zD $$PREVFILE$$ -u FUZZ --field r.params.post.a", ["2"], None),
|
||||
("test_json_post_fuzz3", "-z list --zD anything -u {}/FUZZ -d {{\"a\":\"2\"}} -H Content-Type:application/json".format(HTTPBIN_URL), "-z wfuzzp --zD $$PREVFILE$$ -u FUZZ --filter r.params.post.a:=1 --field r.params.post.a", ["1"], None),
|
||||
|
||||
# field fuzz values
|
||||
("test_desc_fuzz", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ FUZZ", ["http://localhost:9000/1"], None),
|
||||
("test_desc_attr", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ FUZZ[url]", ["http://localhost:9000/1"], None),
|
||||
("test_desc_concat_number", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ FUZZ[url]FUZZ[c]", ["http://localhost:9000/1 - 404"], None),
|
||||
("test_desc_url_number", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ FUZZ[c]", ["http://localhost:9000/1 - 404"], "Pycurl error 7:"),
|
||||
|
||||
# set values
|
||||
("test_desc_concat_number", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice r.c:=302 FUZZ[url]FUZZ[c]", ["http://localhost:9000/1 - 302"], None),
|
||||
("test_desc_rewrite_url", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --prefilter=r.url:=r.url|replace('1','2') FUZZ", ["http://localhost:9000/2"], None),
|
||||
("test_desc_rewrite_url2", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice r.url:=r.url|replace('1','2') FUZZ[url]", ["http://localhost:9000/2"], None),
|
||||
|
||||
# fuzz value slice filters
|
||||
("test_desc_concat_fuzz_symbol_op", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --prefilter FUZZ[r.url]=+'2' FUZZ", ["http://localhost:9000/12"], None),
|
||||
("test_fuzz_symbol_code", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice FUZZ[c]=404 FUZZ", ["http://localhost:9000/1"], None),
|
||||
("test_fuzz_value_code", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice c=404 FUZZ", ["http://localhost:9000/1"], None),
|
||||
|
||||
# fuzz value exceptions
|
||||
("test_fuzz_symbol_code", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice FUZ1Z[c]=404 FUZZ", ["http://localhost:9000/1"], "Unknown field"),
|
||||
("test_fuzz_symbol_code2", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice FUZ2Z[c]=404 FUZZ", ["http://localhost:9000/1"], "Non existent FUZZ payload"),
|
||||
("test_desc_assign_fuzz_symbol_op", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice FUZZ[r.url]:=FUZZ[r.url|replace('1','2')] FUZZ[url]", ["http://localhost:9000/2"], None),
|
||||
("test_fuzz_param_int", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --slice r.params.get:=2 FUZZ", ["http://localhost:9000/2"], "Non existent FUZZ payload"),
|
||||
|
||||
# filter based on various payloads
|
||||
("test_fuzz_fuz2z_code", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,404-302-200 --prefilter FUZZ[code]=FUZ2Z FUZZ[url]/FUZ2Z", ['http://localhost:9000/1 - 404'], None),
|
||||
("test_fuzz_fuz2z_code2", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,404-302-200 --prefilter FUZZ[code]=FUZ2Z FUZZ[url]", ['http://localhost:9000/1'], None),
|
||||
("test_fuzz_fuz2z_code3", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,404-302-200 --prefilter FUZZ[code]=FUZ2Z FUZZ", ['http://localhost:9000/1'], None),
|
||||
|
||||
# set values various payloads
|
||||
("test_set_fuzz_from_fuz2z_full", "-z range,1-1 {}/FUZZ?param=1".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,6-3 --prefilter r.params.get.param:=FUZ2Z FUZZ", ["http://localhost:9000/1?param=6", "http://localhost:9000/1?param=3"], None),
|
||||
("test_set_fuzz_from_fuz2z_full2", "-z range,1-1 {}/FUZZ?param=1".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,6-3 --prefilter FUZZ[r.params.get.param]:=FUZ2Z FUZZ", ["http://localhost:9000/1?param=6", "http://localhost:9000/1?param=3"], None),
|
||||
("test_set_fuzz_from_fuz2z_full_all", "-z range,1-1 {}/FUZZ?param=1¶m2=2".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z range,6-6 --prefilter r.params.all:=FUZ2Z FUZZ", ["http://localhost:9000/1?param=6¶m2=6"], None),
|
||||
("test_app_fuzz_from_fuz2z_full_all", "-z range,1-1 {}/FUZZ?param=1¶m2=2".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z range,6-6 --prefilter r.params.all=+FUZ2Z FUZZ", ["http://localhost:9000/1?param=16¶m2=26"], None),
|
||||
# fails ("test_set_fuzz_from_fuz2z_url", "-z range,1-1 {}/FUZZ?param=1".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ -z list,6-3 --prefilter r.params.get.param:=FUZ2Z FUZZ[url]", ["http://localhost:9000/1?param=6", "http://localhost:9000/1?param=3"], None),
|
||||
|
||||
# test different field
|
||||
("test_field", "-z range,1-1 {}/FUZZ".format(HTTPBIN_URL), "-z wfuzzp,$$PREVFILE$$ --field c FUZZ", [404], None),
|
||||
|
||||
]
|
||||
|
||||
basic_tests = [
|
||||
# different connect host ip
|
||||
# travis has an old pycurl version ("test_static_strquery_set_ip", "http://wfuzz.org/FUZZ?var=1&var2=2", [["anything"], ['PUT', 'GET', 'DELETE']], dict(connect_to_ip={'ip': '127.0.0.1', 'port': '9000'}, method='FUZ2Z', filter="content~'url' and content~'http://wfuzz.org'"), [(200, '/anything')] * 3, None),
|
||||
|
||||
# encoding tests
|
||||
("test_encode_cookie2_utf8_return", "%s/anything" % HTTPBIN_URL, [["は国"]], dict(cookie=["test=FUZZ"], filter="content~'test=\\\\u00e3\\\\u0081\\\\u00af\\\\u00e5\\\\u009b\\\\u00bd'"), [(200, '/anything')], None),
|
||||
("test_encode_header_utf8_return", "%s/headers" % HTTPBIN_URL, [["は国"]], dict(headers=[("myheader", "FUZZ")], filter="content~'Myheader' and content~'\\\\u00e3\\\\u0081\\\\u00af\\\\u00e5\\\\u009b\\\\u00bd'"), [(200, '/headers')], None),
|
||||
@ -52,7 +103,7 @@ basic_tests = [
|
||||
("test_encode_url_filter", "%s/FUZZ" % HTTPBIN_URL, [["は国"]], dict(filter="url~'は国'"), [(404, '/は国')], None),
|
||||
# ("test_encode_var", "%s/anything?var=FUZZ" % HTTPBIN_URL, [["は国"]], dict(filter="content~'\"は国\"'"), [(200, '/anything')], None),
|
||||
("test_encode_var", "%s/anything?var=FUZZ" % HTTPBIN_URL, [["は国"]], dict(filter="content~'\"\\\\u306f\\\\u56fd\"'"), [(200, '/anything')], None),
|
||||
("test_encode_redirect", "%s/redirect-to?url=FUZZ" % HTTPBIN_URL, [["は国"]], dict(filter="headers.response.Location='%C3%A3%C2%81%C2%AF%C3%A5%C2%9B%C2%BD'"), [(302, '/redirect-to')], None),
|
||||
("test_encode_redirect", "%s/redirect-to?url=FUZZ" % HTTPBIN_URL, [["は国"]], dict(filter="r.headers.response.Location='%C3%A3%C2%81%C2%AF%C3%A5%C2%9B%C2%BD'"), [(302, '/redirect-to')], None),
|
||||
# ("test_encode_cookie", "%s/cookies" % HTTPBIN_URL, [["は国"]], dict(cookie=["cookie1=FUZZ"], follow=True, filter="content~FUZZ"), [(200, '/cookies')], None),
|
||||
("test_encode_cookie", "%s/cookies" % HTTPBIN_URL, [["は国"]], dict(cookie=["cookie1=FUZZ"], follow=True, filter="content~'\\\\u306f\\\\u56fd'"), [(200, '/cookies')], None),
|
||||
|
||||
@ -72,13 +123,17 @@ basic_tests = [
|
||||
("test_basic_auth", "%s/basic-auth/FUZZ/FUZZ" % HTTPBIN_URL, [["userpass"]], dict(auth=("basic", "FUZZ:FUZZ")), [(200, '/basic-auth/userpass/userpass')], None),
|
||||
("test_digest_auth", "%s/digest-auth/auth/FUZZ/FUZZ" % HTTPBIN_URL, [["userpass"]], dict(auth=("digest", "FUZZ:FUZZ")), [(200, '/digest-auth/auth/userpass/userpass')], None),
|
||||
("test_delayed_response", "%s/delay/FUZZ" % HTTPBIN_URL, [["2"]], dict(req_delay=1), [(200, '/delay/2')], 'Operation timed out'),
|
||||
("test_static_strquery_set", "%s/FUZZ?var=1&var2=2" % HTTPBIN_URL, [["anything"], ['PUT', 'GET', 'POST', 'DELETE']], dict(method='FUZ2Z', filter="content~'\"args\":{\"var\":\"1\",\"var2\":\"2\"}'"), [(200, '/anything')] * 4, None),
|
||||
("test_static_strquery_set_multiple_method", "%s/FUZZ?var=1&var2=2" % HTTPBIN_URL, [["anything"], ['PUT', 'GET', 'POST', 'DELETE']], dict(method='FUZ2Z', filter="content~FUZ2Z and content~'\"var\": \"1\"' and content~'\"var2\": \"2\"'"), [(200, '/anything')] * 4, None),
|
||||
("test_static_strquery_set_multiple_method_gre", "%s/FUZZ?var=1&var2=2" % HTTPBIN_URL, [["anything"], ['PUT', 'GET', 'POST', 'DELETE']], dict(method='FUZ2Z', filter="content|gre('\"method\": \"(.*)?\",')=FUZ2Z and content~'\"var\": \"1\"' and content~'\"var2\": \"2\"'"), [(200, '/anything')] * 4, None),
|
||||
|
||||
# set static HTTP values
|
||||
("test_static_strquery_set", "%s:8000/FUZZ?var=1&var=2" % LOCAL_DOMAIN, [["echo"]], dict(filter="content~'query=var=1&var=2'"), [(200, '/echo')], None),
|
||||
("test_static_postdata_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(postdata="a=2", filter="content~'POST_DATA=a=2'"), [(200, '/echo')], None),
|
||||
("test_static_postdata2_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(postdata="2", filter="content~'POST_DATA=2'"), [(200, '/echo')], None),
|
||||
("test_empty_postdata", "%s/FUZZ" % HTTPBIN_URL, [["anything"]], dict(postdata='', filter="content~'POST' and method='POST'"), [(200, '/anything')], None),
|
||||
("test_static_strquery_set", "%s:8000/FUZZ?var=1&var=2" % LOCAL_DOMAIN, [["echo"]], dict(filter="content=~'query=var=1&var=2$'"), [(200, '/echo')], None),
|
||||
("test_static_postdata_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(postdata="a=2", filter="content=~'POST_DATA=a=2$'"), [(200, '/echo')], None),
|
||||
("test_static_postdata2_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(postdata="2", filter="content=~'POST_DATA=2$'"), [(200, '/echo')], None),
|
||||
("test_empty_postdata", "%s/FUZZ" % HTTPBIN_URL, [["anything"]], dict(postdata='', filter="content~'POST' and content~'\"form\": {},' and r.method='POST'"), [(200, '/anything')], None),
|
||||
("test_static_postdata3_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(headers=[("Content-Type", "application/json")], postdata="2", filter="content=~'POST_DATA=2$' and content=~'command=POST$' and content~'Content-Type: application/json'"), [(200, '/echo')], None),
|
||||
("test_static_postdata3_set2", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(headers=[("Content-Type", "aaaa")], postdata="a=2&b=3", filter="(content=~'POST_DATA=a=2&b=3$' or content=~'POST_DATA=b=3&a=2$') and content=~'command=POST$' and content~'Content-Type: aaaa'"), [(200, '/echo')], None),
|
||||
("test_static_postdata3_set3", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(headers=[("Content-Type", "application/json")], postdata="{\"a\": \"2\"}", filter="content=~'POST_DATA={\"a\": \"2\"}$' and content=~'command=POST$' and content~'Content-Type: application/json'"), [(200, '/echo')], None),
|
||||
("test_static_method_set", "%s/FUZZ" % URL_LOCAL, [["dir"]], dict(method="OPTIONS", filter="content~'Message: Unsupported method (\\\'OPTIONS\\\')'"), [(501, '/dir/dir')], None),
|
||||
("test_static_header_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(headers=[("myheader", "isset")], filter="content~'Myheader: isset'"), [(200, '/echo')], None),
|
||||
("test_static_cookie_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(cookie=["cookie1=value1", ], filter="content~'Cookie: cookie1=value1'"), [(200, '/echo')], None),
|
||||
@ -106,7 +161,7 @@ basic_tests = [
|
||||
("test_url_hostname2_fuzz", "http://FUZZ/dir/a", [["localhost:8000"]], dict(), [(200, '/dir/a')], None),
|
||||
("test_url_schema_fuzz", "FUZZ://localhost:8000/dir/a", [["http"]], dict(), [(200, '/dir/a')], None),
|
||||
("test_url_all_url_fuzz", "FUZZ", [["http://localhost:8000/dir/a"]], dict(), [(200, '/dir/a')], None),
|
||||
("test_url_all_url_fuzz2", "FUZZ", [["http://webscantest.com/datastore/search_get_by_name.php?name=Rake"]], dict(), [(200, '/datastore/search_get_by_name.php')], None),
|
||||
("test_url_all_url_fuzz2", "FUZZ", [["%s/anything/datastore/search_get_by_name.php?name=Rake" % HTTPBIN_URL]], dict(), [(200, '/anything/datastore/search_get_by_name.php')], None),
|
||||
|
||||
# edge cases
|
||||
("test_vhost_fuzz", "%s" % ECHO_URL, [["onevalue", "twovalue"]], dict(headers=[("Host", "FUZZ")], filter="content~'Host:' and content~FUZZ"), [(200, '/echo'), (200, '/echo')], None),
|
||||
@ -119,13 +174,14 @@ basic_tests = [
|
||||
# prefilter, slice
|
||||
("test_prefilter", "%s/FUZZ" % URL_LOCAL, [["a", "a", "a", "a", "a", "a"]], dict(prefilter="FUZZ|u()", ss="one"), [(200, '/dir/a')], None),
|
||||
("test_slice", "%s/FUZZ" % URL_LOCAL, None, dict(payloads=[("list", dict(default="a-a-a-a-a"), "FUZZ|u()")], ss="one"), [(200, '/dir/a')], None),
|
||||
("test_slice2", "%s/FUZZ" % URL_LOCAL, None, dict(payloads=[("range", dict(default="1-10"), "FUZZ='1'")]), [(404, '/dir/1')], None),
|
||||
|
||||
# follow
|
||||
("test_follow", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["redirect"]], dict(follow=True, filter="content~'path=/echo'"), [(200, '/echo')], None),
|
||||
|
||||
# all params
|
||||
("test_all_params_get", "%s:8000/echo?var=1&var2=2" % LOCAL_DOMAIN, [["avalue"]], dict(allvars="allvars", filter="content~'query=var=avalue&var2=2' or content~'var=1&var2=avalue'"), [(200, '/echo'), (200, '/echo')], None),
|
||||
("test_all_params_post", "%s" % ECHO_URL, [["onevalue"]], dict(allvars="allpost", postdata="a=1&b=2", filter="content~'POST_DATA=a=onevalue&b=2' or content~'POST_DATA=a=1&b=onevalue'"), [(200, '/echo'), (200, '/echo')], None),
|
||||
("test_all_params_post", "%s" % ECHO_URL, [["onevalue"]], dict(allvars="allpost", postdata="a=1&b=2", filter="content~'command=POST' and (content~'a=onevalue' and content~'b=2') or (content~'a=1' and content~'b=onevalue')"), [(200, '/echo'), (200, '/echo')], None),
|
||||
|
||||
# simple filter
|
||||
("test_codes_HC", "%s/FUZZ" % URL_LOCAL, [["a", "b", "c"]], dict(hc=[404]), [(200, '/dir/a'), (200, '/dir/b'), (200, '/dir/c')], None),
|
||||
@ -152,6 +208,7 @@ basic_tests = [
|
||||
("test_filter_hw", "%s/FUZZ" % URL_LOCAL, [["a", "b", "c"]], dict(filter="h=28 or w=6"), [(200, '/dir/a')], None),
|
||||
("test_filter_intext", "%s/FUZZ" % URL_LOCAL, [["a", "b", "c"]], dict(filter="content~'one'"), [(200, '/dir/a'), (200, '/dir/b')], None),
|
||||
("test_filter_intext2", "%s/FUZZ" % URL_LOCAL, [["a", "b", "c"]], dict(filter="content!~'one'"), [(200, '/dir/c')], None),
|
||||
("test_dict_filter_strquery_fuzz", "%s:8000/echo?var=FUZZ" % LOCAL_DOMAIN, [["value1"]], dict(filter="r.params.get~'value1'"), [(200, '/echo')], None),
|
||||
|
||||
# baseline
|
||||
("test_baseline", "%s/FUZZ{notthere}" % URL_LOCAL, [["a", "b", "c"]], dict(), [(200, '/dir/a'), (200, '/dir/b'), (200, '/dir/c'), (404, "/dir/notthere")], None),
|
||||
@ -172,6 +229,7 @@ basic_tests = [
|
||||
# plugins
|
||||
("test_robots", "%s:8000/plugins/FUZZ" % LOCAL_DOMAIN, [["robots.txt"]], dict(script="robots"), [(404, '/cal_endar/'), (404, '/crawlsnags/'), (404, '/osrun/'), (200, '/plugins/robots.txt'), (200, '/static/')], None),
|
||||
("test_robots_hc", "%s:8000/plugins/FUZZ" % LOCAL_DOMAIN, [["robots.txt"]], dict(hc=[404], script="robots"), [(200, '/plugins/robots.txt'), (200, '/static/')], None),
|
||||
("test_plugins_filter", "%s/FUZZ" % HTTPBIN_URL, [["anything"]], dict(script='headers', filter="plugins~'unicorn'"), [(200, '/anything')], None),
|
||||
]
|
||||
|
||||
scanmode_tests = [
|
||||
@ -188,7 +246,7 @@ error_tests = [
|
||||
("test_all_params_no_var", "%s:8000/echo" % LOCAL_DOMAIN, [["avalue"]], dict(allvars="allvars", filter="content~'query=var=avalue&var2=2' or content~'var=1&var2=avalue'"), [(200, '/echo'), (200, '/echo')], "No variables on specified variable set"),
|
||||
("test_bad_port", "%s:6666/FUZZ" % LOCAL_DOMAIN, [list(range(1))], dict(), [], 'Failed to connect to localhost port 6666'),
|
||||
("test_bad_num_payloads", "%s:8000/FUZZ" % LOCAL_DOMAIN, [list(range(1)), list(range(1))], dict(), [], 'FUZZ words and number of payloads do not match'),
|
||||
("test_bad_proxy", "%s:8000/FUZZ" % LOCAL_DOMAIN, [list(range(1))], dict(proxies=[("localhost", 888, "HTML")]), [], 'Failed to connect to localhost port 888'),
|
||||
("test_bad_proxy", "%s:8000/FUZZ" % LOCAL_DOMAIN, [list(range(1))], dict(proxies=[("localhost", 888, "HTTP")]), [], 'Failed to connect to localhost port 888'),
|
||||
("test_bad_num_dic", "%s:8000/iterators/FUZZ" % LOCAL_DOMAIN, [list(range(1))], dict(iterator="zip"), [], 'Several dictionaries must be used when specifying an iterator'),
|
||||
]
|
||||
|
||||
@ -222,6 +280,10 @@ def wfuzz_me_test_generator(url, payloads, params, expected_list, extra_params):
|
||||
if proxied_payloads:
|
||||
proxied_payloads = [[payload.replace(original_host, proxied_host) for payload in payloads_list] for payloads_list in proxied_payloads]
|
||||
|
||||
if 'connect_to_ip' in extra_params and extra_params['connect_to_ip']:
|
||||
extra_params['connect_to_ip']['ip'] = 'httpbin'
|
||||
extra_params['connect_to_ip']['port'] = '80'
|
||||
|
||||
with wfuzz.FuzzSession(url=proxied_url) as s:
|
||||
same_list = [(x.code, x.history.urlparse.path) for x in s.get_payloads(proxied_payloads).fuzz(**extra_params)]
|
||||
|
||||
@ -293,7 +355,7 @@ def wfuzz_me_test_generator_recipe(url, payloads, params, expected_list):
|
||||
ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]
|
||||
|
||||
# repeat test with recipe as only parameter
|
||||
with wfuzz.FuzzSession(recipe=filename) as s:
|
||||
with wfuzz.FuzzSession(recipe=[filename]) as s:
|
||||
if payloads is None:
|
||||
same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
|
||||
else:
|
||||
@ -304,6 +366,26 @@ def wfuzz_me_test_generator_recipe(url, payloads, params, expected_list):
|
||||
return test
|
||||
|
||||
|
||||
def wfuzz_me_test_generator_previous_session(prev_session_cli, next_session_cli, expected_list):
|
||||
def test(self):
|
||||
temp_name = next(tempfile._get_candidate_names())
|
||||
defult_tmp_dir = tempfile._get_default_tempdir()
|
||||
|
||||
filename = os.path.join(defult_tmp_dir, temp_name)
|
||||
|
||||
# first session
|
||||
with wfuzz.get_session(prev_session_cli) as s:
|
||||
ret_list = [x.eval(x._description) if x._description else x.description for x in s.fuzz(save=filename)]
|
||||
|
||||
# second session wfuzzp as payload
|
||||
with wfuzz.get_session(next_session_cli.replace("$$PREVFILE$$", filename)) as s:
|
||||
ret_list = [x.eval(x._description) if x._description else x.description for x in s.fuzz()]
|
||||
|
||||
self.assertEqual(sorted(ret_list), sorted(expected_list))
|
||||
|
||||
return test
|
||||
|
||||
|
||||
def create_test(test_name, url, payloads, params, expected_res, extra_params, exception_str):
|
||||
test_fn = wfuzz_me_test_generator(url, payloads, params, expected_res, extra_params)
|
||||
if exception_str:
|
||||
@ -332,12 +414,13 @@ def duplicate_tests_diff_params(test_list, group, next_extra_params, previous_ex
|
||||
if group == "_proxy_" and "encode" in test_name:
|
||||
continue
|
||||
|
||||
next_extra = dict(list(params.items()) + list(next_extra_params.items()))
|
||||
next_extra = copy.deepcopy(params)
|
||||
next_extra.update(next_extra_params)
|
||||
new_test = "%s_%s" % (test_name, group)
|
||||
|
||||
prev_extra = params
|
||||
prev_extra = copy.deepcopy(params)
|
||||
if previous_extra_params:
|
||||
prev_extra = dict(list(params.items()) + list(previous_extra_params.items()))
|
||||
prev_extra.update(previous_extra_params)
|
||||
|
||||
create_test(new_test, url, payloads, prev_extra, None, next_extra, exception_str)
|
||||
|
||||
@ -358,16 +441,34 @@ def duplicate_tests(test_list, group, test_gen_fun):
|
||||
setattr(DynamicTests, new_test, test_fn)
|
||||
|
||||
|
||||
def create_savedsession_tests(test_list, test_gen_fun):
|
||||
"""
|
||||
generates wfuzz tests that run 2 times with recipe input, expecting same results.
|
||||
|
||||
"""
|
||||
for test_name, prev_cli, next_cli, expected_res, exception_str in test_list:
|
||||
test_fn = test_gen_fun(prev_cli, next_cli, expected_res)
|
||||
if exception_str:
|
||||
test_fn_exc = wfuzz_me_test_generator_exception(test_fn, exception_str)
|
||||
setattr(DynamicTests, test_name, test_fn_exc)
|
||||
else:
|
||||
setattr(DynamicTests, test_name, test_fn)
|
||||
|
||||
|
||||
def create_tests():
|
||||
"""
|
||||
Creates all dynamic tests
|
||||
|
||||
"""
|
||||
if testing_savedsession_tests:
|
||||
create_savedsession_tests(testing_savedsession_tests, wfuzz_me_test_generator_previous_session)
|
||||
return
|
||||
|
||||
if testing_tests:
|
||||
create_tests_from_list(testing_tests)
|
||||
duplicate_tests(testing_tests, "recipe", wfuzz_me_test_generator_recipe)
|
||||
duplicate_tests(testing_tests, "saveres", wfuzz_me_test_generator_saveres)
|
||||
duplicate_tests_diff_params(testing_tests, "_proxy_", dict(proxies=[("localhost", 8080, "HTML")]), None)
|
||||
duplicate_tests_diff_params(testing_tests, "_proxy_", dict(proxies=[("localhost", 8080, "HTTP")]), None)
|
||||
else:
|
||||
# this are the basics
|
||||
basic_functioning_tests = [error_tests, scanmode_tests, basic_tests]
|
||||
@ -375,6 +476,9 @@ def create_tests():
|
||||
for t in basic_functioning_tests:
|
||||
create_tests_from_list(t)
|
||||
|
||||
# description tests
|
||||
create_savedsession_tests(savedsession_tests, wfuzz_me_test_generator_previous_session)
|
||||
|
||||
# duplicate tests with recipe
|
||||
duplicate_tests(basic_tests, "recipe", wfuzz_me_test_generator_recipe)
|
||||
|
||||
@ -382,7 +486,7 @@ def create_tests():
|
||||
duplicate_tests(basic_tests, "saveres", wfuzz_me_test_generator_saveres)
|
||||
|
||||
# duplicate tests with proxy
|
||||
duplicate_tests_diff_params(basic_tests, "_proxy_", dict(proxies=[("localhost", 8080, "HTML")]), None)
|
||||
duplicate_tests_diff_params(basic_tests, "_proxy_", dict(proxies=[("localhost", 8080, "HTTP")]), None)
|
||||
|
||||
|
||||
create_tests()
|
||||
|
@ -1,7 +1,13 @@
|
||||
import unittest
|
||||
import sys
|
||||
from io import BytesIO
|
||||
import gzip
|
||||
import pickle as pickle
|
||||
|
||||
import wfuzz
|
||||
from wfuzz.facade import Facade
|
||||
from wfuzz.fuzzobjects import FuzzRequest
|
||||
from wfuzz.fuzzobjects import FuzzResult
|
||||
|
||||
try:
|
||||
# Python >= 3.3
|
||||
@ -64,6 +70,62 @@ class APITests(unittest.TestCase):
|
||||
self.assertEqual(data.get('url'), 'http://127.0.0.1/FUZZ')
|
||||
self.assertEqual(data.get('payloads'), [('range', {'default': '0-4', 'encoder': None}, None)])
|
||||
|
||||
def test_payload_description(self):
|
||||
class mock_saved_session(object):
|
||||
def __init__(self, description, show_field):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
fuzz_res._description = description
|
||||
fuzz_res._show_field = show_field
|
||||
|
||||
self.outfile = BytesIO()
|
||||
|
||||
with gzip.GzipFile(fileobj=self.outfile, mode="wb") as f:
|
||||
pickle.dump(fuzz_res, f)
|
||||
|
||||
self.outfile.seek(0)
|
||||
self.outfile.name = "mockfile"
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
return self.outfile.read(*args, **kwargs)
|
||||
|
||||
def seek(self, *args, **kwargs):
|
||||
return self.outfile.seek(*args, **kwargs)
|
||||
|
||||
def tell(self):
|
||||
return self.outfile.tell()
|
||||
|
||||
# load plugins before mocking file object
|
||||
Facade().payloads
|
||||
|
||||
m = mock.MagicMock(name='open', spec=open)
|
||||
m.return_value = mock_saved_session("r.params.all", True)
|
||||
|
||||
mocked_fun = "builtins.open" if sys.version_info >= (3, 0) else "__builtin__.open"
|
||||
with mock.patch(mocked_fun, m):
|
||||
payload_list = list(wfuzz.payload(**{'show_field': True, 'description': 'r', 'payloads': [('wfuzzp', {'default': 'mockedfile', 'encoder': None}, None)]}))
|
||||
self.assertEqual([res[0].description for res in payload_list], [{'param': '1', 'param2': '2'}])
|
||||
|
||||
m = mock.MagicMock(name='open', spec=open)
|
||||
m.return_value = mock_saved_session("url", None)
|
||||
|
||||
mocked_fun = "builtins.open" if sys.version_info >= (3, 0) else "__builtin__.open"
|
||||
with mock.patch(mocked_fun, m):
|
||||
payload_list = list(wfuzz.payload(**{'show_field': True, 'description': 'r', 'payloads': [('wfuzzp', {'default': 'mockedfile', 'encoder': None}, None)]}))
|
||||
self.assertEqual([res[0].description for res in payload_list], ['http://www.wfuzz.org/path?param=1¶m2=2'])
|
||||
|
||||
m = mock.MagicMock(name='open', spec=open)
|
||||
m.return_value = mock_saved_session("r.scheme", False)
|
||||
|
||||
mocked_fun = "builtins.open" if sys.version_info >= (3, 0) else "__builtin__.open"
|
||||
with mock.patch(mocked_fun, m):
|
||||
payload_list = list(wfuzz.payload(**{'show_field': True, 'description': 'r', 'payloads': [('wfuzzp', {'default': 'mockedfile', 'encoder': None}, None)]}))
|
||||
self.assertEqual([res[0].description for res in payload_list], ['http://www.wfuzz.org/path?param=1¶m2=2 | http'])
|
||||
|
||||
def test_payload(self):
|
||||
payload_list = list(wfuzz.payload(**{'payloads': [('range', {'default': '0-4', 'encoder': None}, None)]}))
|
||||
self.assertEqual(payload_list, [('0',), ('1',), ('2',), ('3',), ('4',)])
|
||||
@ -79,10 +141,28 @@ class APITests(unittest.TestCase):
|
||||
payload_list = list(wfuzz.payload(**{'payloads': [('dirwalk', {'default': 'foo', 'encoder': None}, None)]}))
|
||||
self.assertEqual(payload_list, [('baz',), ('bar/spam',), ('bar/eggs',)])
|
||||
|
||||
class mock_file(object):
|
||||
def __init__(self):
|
||||
self.my_iter = iter([b"one", b"two"])
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
return next(self.my_iter)
|
||||
|
||||
def seek(self, pos):
|
||||
self.my_iter = iter([b"one", b"two"])
|
||||
|
||||
next = __next__ # for Python 2
|
||||
|
||||
m = mock.MagicMock(name='open', spec=open)
|
||||
m.return_value = mock_file()
|
||||
|
||||
mocked_fun = "builtins.open" if sys.version_info >= (3, 0) else "__builtin__.open"
|
||||
with mock.patch(mocked_fun, mock.mock_open(read_data="one\ntwo\n")):
|
||||
with mock.patch(mocked_fun, m):
|
||||
payload_list = list(wfuzz.payload(**{'payloads': [('file', {'default': 'mockedfile', 'encoder': None}, None)]}))
|
||||
self.assertEqual(payload_list, [('one',), ('two',)])
|
||||
self.assertEqual(sorted(payload_list), sorted([('one',), ('two',)]))
|
||||
|
||||
payload_list = list(wfuzz.payload(**{'payloads': [('hexrange', {'default': '09-10', 'encoder': None}, None)]}))
|
||||
self.assertEqual(payload_list, [('09',), ('0a',), ('0b',), ('0c',), ('0d',), ('0e',), ('0f',), ('10',)])
|
||||
@ -114,7 +194,3 @@ class APITests(unittest.TestCase):
|
||||
|
||||
payload_list = list(wfuzz.payload(**{'iterator': 'product', 'payloads': [('range', {'default': '0-2', 'encoder': None}, None), ('range', {'default': '0-2', 'encoder': None}, None)]}))
|
||||
self.assertEqual(sorted(payload_list), sorted([('0', '0'), ('0', '1'), ('0', '2'), ('1', '0'), ('1', '1'), ('1', '2'), ('2', '0'), ('2', '1'), ('2', '2')]))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -10,6 +10,42 @@ class CLParserTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(cm.exception.code, 0)
|
||||
|
||||
def test_ip_option(self):
|
||||
options = CLParser(['wfuzz', '--ip', '127.0.0.1']).parse_cl()
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
self.assertEqual(options.data['connect_to_ip']['ip'], '127.0.0.1')
|
||||
self.assertEqual(options.data['connect_to_ip']['port'], '80')
|
||||
|
||||
options = CLParser(['wfuzz', '--ip', '127.0.0.1:22']).parse_cl()
|
||||
|
||||
self.assertEqual(options.data['connect_to_ip']['ip'], '127.0.0.1')
|
||||
self.assertEqual(options.data['connect_to_ip']['port'], '22')
|
||||
|
||||
options = CLParser(['wfuzz', '--ip', '127.0.0.1:']).parse_cl()
|
||||
|
||||
self.assertEqual(options.data['connect_to_ip']['ip'], '127.0.0.1')
|
||||
self.assertEqual(options.data['connect_to_ip']['port'], '80')
|
||||
|
||||
with self.assertRaises(Exception) as cm:
|
||||
options = CLParser(['wfuzz', '--ip', ':80']).parse_cl()
|
||||
self.assertTrue("An IP must be specified" in str(cm.exception))
|
||||
|
||||
def test_ze_zd_option(self):
|
||||
with self.assertRaises(Exception) as cm:
|
||||
options = CLParser(['wfuzz', '-z', 'range,0-10', '--zD', '0-10', 'url']).parse_cl()
|
||||
self.assertTrue("exclusive" in str(cm.exception))
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range', '--zD', '0-1', '--zE', 'md5', 'url']).parse_cl()
|
||||
self.assertEqual(options.data['payloads'], [('range', {'default': '0-1', 'encoder': ['md5']}, None)])
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range,0-1', '--zE', 'md5', 'url']).parse_cl()
|
||||
self.assertEqual(options.data['payloads'], [('range', {'default': '0-1', 'encoder': ['md5']}, None)])
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range', '--zD', '0-1', '--zE', 'md5', 'url']).parse_cl()
|
||||
self.assertEqual(options.data['payloads'], [('range', {'default': '0-1', 'encoder': ['md5']}, None)])
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range', '--zD', '0-1']).parse_cl()
|
||||
self.assertEqual(options.data['payloads'], [('range', {'default': '0-1', 'encoder': None}, None)])
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range,0-1']).parse_cl()
|
||||
self.assertEqual(options.data['payloads'], [('range', {'default': '0-1', 'encoder': None}, None)])
|
||||
|
17
tests/test_dotdict.py
Normal file
17
tests/test_dotdict.py
Normal file
@ -0,0 +1,17 @@
|
||||
import unittest
|
||||
|
||||
from wfuzz.utils import DotDict
|
||||
|
||||
|
||||
class FilterDotDict(unittest.TestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FilterDotDict, self).__init__(*args, **kwargs)
|
||||
self.maxDiff = 1000
|
||||
|
||||
def test_code_set(self):
|
||||
dd = DotDict({'a': '1'})
|
||||
dd2 = DotDict({'a': '2'})
|
||||
|
||||
self.assertEqual(dd + "test", {'a': "1test"})
|
||||
self.assertEqual("test" + dd, {'a': "test1"})
|
||||
self.assertEqual(dd + dd2, {'a': "2"})
|
194
tests/test_filterintro.py
Normal file
194
tests/test_filterintro.py
Normal file
@ -0,0 +1,194 @@
|
||||
import unittest
|
||||
|
||||
# Python 2 and 3: urlib.parse
|
||||
|
||||
from wfuzz.fuzzobjects import FuzzRequest
|
||||
from wfuzz.fuzzobjects import FuzzResult
|
||||
from wfuzz.filter import FuzzResFilter
|
||||
|
||||
|
||||
raw_req = """GET / HTTP/1.1
|
||||
Host: www.wfuzz.org
|
||||
User-Agent: curl/7.58.0
|
||||
Accept: */*
|
||||
"""
|
||||
|
||||
raw_resp = b"""HTTP/1.1 302 Found
|
||||
Content-Type: text/html; charset=utf-8
|
||||
Content-Language: en
|
||||
Location: https://wfuzz.readthedocs.io/en/latest/
|
||||
Vary: Accept-Language, Cookie
|
||||
Server: nginx/1.14.0 (Ubuntu)
|
||||
X-Fallback: True
|
||||
X-Served: Django
|
||||
X-Deity: web01
|
||||
Date: Wed, 23 Jan 2019 21:43:59 GMT
|
||||
Content-Length: 0
|
||||
"""
|
||||
|
||||
|
||||
class FilterTest(unittest.TestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FilterTest, self).__init__(*args, **kwargs)
|
||||
self.maxDiff = 1000
|
||||
|
||||
def get_filtered_fuzzrequest(self, filter_str):
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_req, "http", raw_resp, b"")
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string=filter_str)
|
||||
ffilter.is_visible(fuzz_res)
|
||||
|
||||
return fuzz_res
|
||||
|
||||
def test_code_set(self):
|
||||
self.assertEqual(self.get_filtered_fuzzrequest("r.code:=429").code, 429)
|
||||
self.assertEqual(self.get_filtered_fuzzrequest("r.c:=404").code, 404)
|
||||
self.assertEqual(self.get_filtered_fuzzrequest("r.c=+404").code, 706)
|
||||
self.assertEqual(self.get_filtered_fuzzrequest("r.c=-404").code, 706)
|
||||
|
||||
def test_url_set(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.url=+'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.url, "http://www.wfuzz.org/path?param=1¶m2=2test")
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.url:='test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.url, "http://test/")
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.url=-'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.url, "testhttp://test/")
|
||||
|
||||
def test_nonexisting(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
with self.assertRaises(Exception) as context:
|
||||
ffilter = FuzzResFilter(filter_string="url=-'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertTrue("rsetattr: Can't set" in str(context.exception))
|
||||
|
||||
with self.assertRaises(Exception) as context:
|
||||
ffilter = FuzzResFilter(filter_string="notthere=-'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertTrue("rgetattr: Can't get" in str(context.exception))
|
||||
|
||||
with self.assertRaises(Exception) as context:
|
||||
ffilter = FuzzResFilter(filter_string="r.params.get.notthere=-'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertTrue("DotDict: Non-existing field" in str(context.exception))
|
||||
|
||||
def test_params_set_no_value(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param"
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.params.all=+'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.get, {'param': None})
|
||||
|
||||
def test_params_set(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.params.get.param=+'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.get.param, "1test")
|
||||
self.assertEqual(fuzz_res.history.params.get, {'param': "1test", 'param2': "2"})
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.params.get.param=-'test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.get.param, "test1test")
|
||||
self.assertEqual(fuzz_res.history.params.get, {'param': "test1test", 'param2': "2"})
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.params.get.param:='test'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.get.param, "test")
|
||||
self.assertEqual(fuzz_res.history.params.get, {'param': "test", 'param2': "2"})
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.params.get.param2='2'")
|
||||
self.assertEqual(ffilter.is_visible(fuzz_res), True)
|
||||
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
ffilter = FuzzResFilter(filter_string="r.params.all=+'2'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.all, {'param': "12", 'param2': "22"})
|
||||
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
ffilter = FuzzResFilter(filter_string="r.params.all:='2'")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(fuzz_res.history.params.all, {'param': "2", 'param2': "2"})
|
||||
|
||||
def test_urlp(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path/test.php?param=1¶m2=2"
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.scheme='http'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.netloc='www.wfuzz.org'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.path='/path/test.php'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.ffname='test.php'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.fext='.php'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.fname='test'")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.urlp.hasquery")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="not r.urlp.isbllist")
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
def test_ispath(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/path?param=1¶m2=2"
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.is_path")
|
||||
self.assertEqual(False, ffilter.is_visible(fuzz_res))
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.pstrip")
|
||||
self.assertEqual(ffilter.is_visible(fuzz_res), "http://www.wfuzz.org/path-gparam-gparam2")
|
||||
|
||||
def test_lwh(self):
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_req, "http", raw_resp, b"Some line\n and words\nasdsdas")
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="h=28 or w=6 or l=2")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual(True, ffilter.is_visible(fuzz_res))
|
||||
|
||||
def test_location(self):
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_req, "http", raw_resp, b"Some line\n and words\nasdsdas")
|
||||
|
||||
fuzz_res = FuzzResult(history=fr)
|
||||
|
||||
ffilter = FuzzResFilter(filter_string="r.headers.response.Location")
|
||||
ffilter.is_visible(fuzz_res)
|
||||
self.assertEqual('https://wfuzz.readthedocs.io/en/latest/', ffilter.is_visible(fuzz_res))
|
@ -110,22 +110,22 @@ class ModuleFilterTests(unittest.TestCase):
|
||||
self.assertTrue("Multiple plugins found" in str(context.exception))
|
||||
|
||||
def test_simple_filter(self):
|
||||
with mock.patch('imp.find_module') as mocked_find_module:
|
||||
with mock.patch('imp.load_module') as mocked_load_module:
|
||||
mocked_find_module.return_value = (None, '/any/project.py', ('.py', 'U', 1))
|
||||
mocked_load_module.return_value = sys.modules[__name__]
|
||||
with mock.patch('imp.find_module') as mocked_find_module:
|
||||
with mock.patch('imp.load_module') as mocked_load_module:
|
||||
mocked_find_module.return_value = (None, '/any/project.py', ('.py', 'U', 1))
|
||||
mocked_load_module.return_value = sys.modules[__name__]
|
||||
|
||||
br = BRegistrant(FileLoader(**{"filename": 'project1.py', "base_path": 'any'}))
|
||||
|
||||
with self.assertRaises(Exception) as context:
|
||||
modulefilter.PYPARSING = False
|
||||
br.get_plugins_names('not aggressive')
|
||||
self.assertTrue("Pyparsing missing, complex filters not allowed." in str(context.exception))
|
||||
br = BRegistrant(FileLoader(**{"filename": 'project1.py', "base_path": 'any'}))
|
||||
|
||||
with self.assertRaises(Exception) as context:
|
||||
modulefilter.PYPARSING = False
|
||||
self.assertEqual(sorted(br.get_plugins_names("test*")), sorted(['test_plugin1', 'test_plugin2', 'test_plugin3']))
|
||||
self.assertEqual(sorted(br.get_plugins_names("test_plugin1,test_plugin2")), sorted(['test_plugin1', 'test_plugin2']))
|
||||
self.assertEqual(sorted(br.get_plugins_names("test_plugin5")), sorted([]))
|
||||
br.get_plugins_names('not aggressive')
|
||||
self.assertTrue("Pyparsing missing, complex filters not allowed." in str(context.exception))
|
||||
|
||||
modulefilter.PYPARSING = False
|
||||
self.assertEqual(sorted(br.get_plugins_names("test*")), sorted(['test_plugin1', 'test_plugin2', 'test_plugin3']))
|
||||
self.assertEqual(sorted(br.get_plugins_names("test_plugin1,test_plugin2")), sorted(['test_plugin1', 'test_plugin2']))
|
||||
self.assertEqual(sorted(br.get_plugins_names("test_plugin5")), sorted([]))
|
||||
|
||||
def test_plugin_decorator(self):
|
||||
with self.assertRaises(Exception) as context:
|
||||
@ -135,7 +135,3 @@ class ModuleFilterTests(unittest.TestCase):
|
||||
|
||||
test_plugin4()
|
||||
self.assertTrue("Required method method4 not implemented" in str(context.exception))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -3,6 +3,8 @@ import unittest
|
||||
# Python 2 and 3: urlib.parse
|
||||
|
||||
from wfuzz.fuzzobjects import FuzzRequest
|
||||
from wfuzz.fuzzobjects import FuzzResultFactory
|
||||
from wfuzz.ui.console.clparser import CLParser
|
||||
from wfuzz import __version__ as wfuzz_version
|
||||
|
||||
|
||||
@ -13,6 +15,46 @@ User-Agent: Wfuzz/{}
|
||||
|
||||
""".format(wfuzz_version)
|
||||
|
||||
raw_response_header = b"""HTTP/1.0 200 Connection established
|
||||
|
||||
HTTP/1.1 404 Not Found
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
Referrer-Policy: no-referrer
|
||||
Content-Length: 1564
|
||||
Date: Wed, 24 Apr 2019 22:03:52 GMT
|
||||
Alt-Svc: quic=":443"; ma=2592000; v="46,44,43,39"
|
||||
Connection: close
|
||||
|
||||
"""
|
||||
|
||||
raw_response_body = b'<!DOCTYPE html>\n<html lang=en>\n <meta charset=utf-8>\n <meta name=viewport content="initial-scale=1, minimum-scale=1, width=device-width">\n <title>Error 404 (Not Found)!!1</title>\n <style>\n *{margin:0;padding:0}html,code{font:15px/22px arial,sans-serif}html{background:#fff;color:#222;padding:15px}body{margin:7% auto 0;max-width:390px;min-height:180px;padding:30px 0 15px}* > body{background:url(//www.google.com/images/errors/robot.png) 100% 5px no-repeat;padding-right:205px}p{margin:11px 0 22px;overflow:hidden}ins{color:#777;text-decoration:none}a img{border:0}@media screen and (max-width:772px){body{background:none;margin-top:0;max-width:none;padding-right:0}}#logo{background:url(//www.google.com/images/branding/googlelogo/1x/googlelogo_color_150x54dp.png) no-repeat;margin-left:-5px}@media only screen and (min-resolution:192dpi){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat 0% 0%/100% 100%;-moz-border-image:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) 0}}@media only screen and (-webkit-min-device-pixel-ratio:2){#logo{background:url(//www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png) no-repeat;-webkit-background-size:100% 100%}}#logo{display:inline-block;height:54px;width:150px}\n </style>\n <a href=//www.google.com/><span id=logo aria-label=Google></span></a>\n <p><b>404.</b> <ins>That\xe2\x80\x99s an error.</ins>\n <p>The requested URL <code>/one</code> was not found on this server. <ins>That\xe2\x80\x99s all we know.</ins>\n'
|
||||
|
||||
|
||||
class FuzzResultFactoryTest(unittest.TestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FuzzResultFactoryTest, self).__init__(*args, **kwargs)
|
||||
self.maxDiff = 1000
|
||||
|
||||
def test_baseline(self):
|
||||
options = CLParser(['wfuzz', '-z', 'range,1-1', 'http://localhost:9000/FUZZ{first}']).parse_cl()
|
||||
seed = FuzzResultFactory.from_options(options)
|
||||
baseline = FuzzResultFactory.from_baseline(seed, options)
|
||||
|
||||
self.assertEqual(baseline.description, 'first')
|
||||
|
||||
options = CLParser(['wfuzz', '-z', 'range,1-1', '-z', 'range,2-2', 'http://localhost:9000/FUZZ{first}/FUZ2Z{second}']).parse_cl()
|
||||
seed = FuzzResultFactory.from_options(options)
|
||||
baseline = FuzzResultFactory.from_baseline(seed, options)
|
||||
|
||||
self.assertEqual(baseline.description, 'first - second')
|
||||
|
||||
def test_from_conn(self):
|
||||
fr = FuzzRequest()
|
||||
fr.update_from_raw_http(raw_req, 'https', raw_response_header, raw_response_body)
|
||||
|
||||
self.assertEqual(fr.code, 404)
|
||||
self.assertEqual(fr.content.count("\n"), 11)
|
||||
|
||||
|
||||
class FuzzRequestTest(unittest.TestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
@ -61,6 +103,21 @@ class FuzzRequestTest(unittest.TestCase):
|
||||
self.assertEqual(fr.path, "FUZZ")
|
||||
self.assertEqual(fr.follow, False)
|
||||
|
||||
fr.url = "http://www.wfuzz.org:80/a"
|
||||
self.assertEqual(fr.host, "www.wfuzz.org:80")
|
||||
|
||||
fr.url = "https://www.wfuzz.org:80/a"
|
||||
self.assertEqual(fr.host, "www.wfuzz.org:80")
|
||||
|
||||
fr.url = "www.wfuzz.org:80/a"
|
||||
self.assertEqual(fr.host, "www.wfuzz.org:80")
|
||||
|
||||
fr.url = "www.wfuzz.org:80"
|
||||
self.assertEqual(fr.host, "www.wfuzz.org:80")
|
||||
|
||||
fr.url = "www.wfuzz.org"
|
||||
self.assertEqual(fr.host, "www.wfuzz.org")
|
||||
|
||||
def test_setpostdata(self):
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/"
|
||||
@ -98,6 +155,19 @@ class FuzzRequestTest(unittest.TestCase):
|
||||
self.assertEqual(fr.method, "POST")
|
||||
self.assertEqual(fr.params.post, {'a': '1'})
|
||||
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/"
|
||||
fr.params.post = "{'a': '1'}"
|
||||
self.assertEqual(fr.method, "POST")
|
||||
self.assertEqual(fr.params.post, {"{'a': '1'}": None})
|
||||
|
||||
fr = FuzzRequest()
|
||||
fr.url = "http://www.wfuzz.org/"
|
||||
fr.params.post = '1'
|
||||
fr.headers.request = {'Content-Type': 'application/json'}
|
||||
self.assertEqual(fr.method, "POST")
|
||||
self.assertEqual(fr.params.post, {'1': None})
|
||||
|
||||
def test_setgetdata(self):
|
||||
fr = FuzzRequest()
|
||||
|
||||
|
2
tox.ini
2
tox.ini
@ -21,7 +21,7 @@ commands = coverage erase
|
||||
deps = coverage
|
||||
|
||||
[testenv:end]
|
||||
commands = coverage report --skip-covered --include '*python3.5/site-packages/wfuzz*' -m
|
||||
commands = coverage report --skip-covered --include '*python3.6/site-packages/wfuzz*' -m
|
||||
deps = coverage
|
||||
|
||||
[testenv:codecov]
|
||||
|
66
wfuzz_bash_completion
Normal file
66
wfuzz_bash_completion
Normal file
@ -0,0 +1,66 @@
|
||||
# wfuzz bash completion file
|
||||
# by Xavier Mendez (xavi.mendez@gmail.com) aka Javi
|
||||
|
||||
_wfuzz() {
|
||||
|
||||
COMPREPLY=()
|
||||
local cur prev
|
||||
cur=${COMP_WORDS[COMP_CWORD]}
|
||||
prev=${COMP_WORDS[COMP_CWORD-1]}
|
||||
WFUZZ_EX="wfuzz"
|
||||
|
||||
# Change to your wordlists' base directory
|
||||
WLDIR=$($WFUZZ_EX --ee files)
|
||||
|
||||
common_options="-z[PAYLOAD] --zD[DEFAULT] --zE[ENCODERS] --hc[HIDE_HTTP_CODES] -d[POST_DATA] "
|
||||
|
||||
case "$prev" in
|
||||
-u)
|
||||
COMPREPLY=( $( compgen -W "http https" -- $cur ) )
|
||||
;;
|
||||
-w)
|
||||
COMPREPLY=( $(compgen -W "$(find $WLDIR -type f -iname "*.txt")" -- $cur) )
|
||||
;;
|
||||
-w)
|
||||
COMPREPLY=( $(compgen -W "$(find $WLDIR -type f -iname "*.txt")" -- $cur) )
|
||||
;;
|
||||
-z)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee payloads)" -- $cur))
|
||||
;;
|
||||
-e)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee registrants)" -- $cur))
|
||||
;;
|
||||
-m)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee iterators)" -- $cur))
|
||||
;;
|
||||
-o)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee printers)" -- $cur))
|
||||
;;
|
||||
--script-help)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee scripts)" -- $cur))
|
||||
;;
|
||||
--script)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee scripts)" -- $cur))
|
||||
;;
|
||||
--field)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee fields)" -- $cur))
|
||||
;;
|
||||
--zE)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee encoders)" -- $cur))
|
||||
;;
|
||||
-V)
|
||||
COMPREPLY=( $( compgen -W "allvars allpost allheaders" -- $cur ) )
|
||||
;;
|
||||
-X)
|
||||
COMPREPLY=( $( compgen -W "FUZZ OPTIONS PUT DELETE POST GET TRACE CONNECT HEAD" -- $cur ) )
|
||||
;;
|
||||
--hc)
|
||||
COMPREPLY=( $( compgen -W "400 401 301 302 500 404 200" -- $cur ) )
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "$($WFUZZ_EX --ee options)" -- $cur))
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _wfuzz -o default wfuzz
|
@ -845,3 +845,27 @@ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\/etc/passwd
|
||||
.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\/etc/passwd
|
||||
.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\.\\..\\/etc/passwd
|
||||
%00../etc/passwd
|
||||
%00../%00../etc/passwd
|
||||
%00../%00../%00../etc/passwd
|
||||
%00../%00../%00../%00../etc/passwd
|
||||
%00../%00../%00../%00../%00../etc/passwd
|
||||
%00../%00../%00../%00../%00../%00../etc/passwd
|
||||
%00../%00../%00../%00../%00../%00../%00../etc/passwd
|
||||
%00../%00../%00../%00../%00../%00../%00../%00../etc/passwd
|
||||
.%00./etc/passwd
|
||||
.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./.%00./.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./.%00./.%00./.%00./.%00./etc/passwd
|
||||
.%00./.%00./.%00./.%00./.%00./.%00./.%00./.%00./etc/passwd
|
||||
..%00/etc/passwd
|
||||
..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/..%00/..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/..%00/..%00/..%00/..%00/etc/passwd
|
||||
..%00/..%00/..%00/..%00/..%00/..%00/..%00/..%00/etc/passwd
|
||||
|
Loading…
Reference in New Issue
Block a user