# s3knock.py
# forked from theanti9/s3knock
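#
# Probes Amazon S3 for bucket names built by joining a base term and each
# wordlist entry with a set of separators, and reports buckets that exist,
# flagging those that are publicly listable.
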
import urllib2
import random
import signal
import sys
from sys import argv

from bs4 import BeautifulSoup
USAGE = """Usage:
python s3knock.py wordlist term position
Example: python s3knock.py wordlist.txt tumblr 1
"""
SEPARATORS = ["", "-", ".","_"]
class bcolors:
public = '\033[92m'
exists = '\033[93m'
problem = '\033[91m'
stop = '\033[0m'
discovered = []
def main(wordlist, base, position):
    with open(wordlist) as wordlist_file:
        lines = [line.rstrip('\n') for line in wordlist_file]
    print "Starting search..."
    for line in lines:
        if not line:
            # Skip blank wordlist entries.
            continue
        for sep in SEPARATORS:
            if position == 1:
                # Base term first; drop a trailing dot from the word so the
                # hostname does not contain "..".
                site = "http://%s%s%s.s3.amazonaws.com/" % (base, sep, line if line[-1] != "." else line[:-1])
            else:
                site = "https://%s%s%s.s3.amazonaws.com/" % (line, sep, base)
            # Overwrite the status line in place: clear it, print, move the cursor back up.
            sys.stdout.write("\033[K")
            print "Testing: %s" % site
            sys.stdout.write("\033[F")
            # Two browser-like header sets; one is picked at random per request.
            hdr1 = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
                    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
                    'Accept-Encoding': 'none',
                    'Accept-Language': 'en-US,en;q=0.8',
                    'Connection': 'keep-alive'}
            hdr2 = {'User-Agent': 'Mozilla/5.0 AppleWebKit/537.11 Chrome/23.0.1271.64 Safari/537.16',
                    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                    'Accept-Charset': 'utf-8;q=0.7,*;q=0.3',
                    'Accept-Encoding': 'none',
                    'Accept-Language': 'en-US,en;q=0.8',
                    'Connection': 'keep-alive'}
            req = urllib2.Request(site, headers=random.choice([hdr1, hdr2]))
            try:
                # A 200 response means the bucket exists and allows anonymous listing.
                urllib2.urlopen(req)
                print bcolors.public + '[*] found : ' + site + " Public! " + bcolors.stop
                discovered.append(site)
            except urllib2.HTTPError as e:
                # Non-200: S3 returns an XML error document whose <Code> tells an
                # existing-but-protected bucket (e.g. AccessDenied) apart from NoSuchBucket.
                xml = e.fp.read()
                soup = BeautifulSoup(xml, features='xml')
                for q in soup.find_all('Error'):
                    if q.find('Code').text != 'NoSuchBucket':
                        print bcolors.exists + '[*] found : ' + site + " " + q.find('Code').text + bcolors.stop
            except urllib2.URLError:
                print 'INFO: Invalid domain format. No DNS resolution for site %s' % site
    print_summary()
def print_summary():
    print ""
    if not discovered:
        print "No public sites found!"
        return
    print "Summary of public sites found: "
    for s in discovered:
        print s


def signal_handler(signal, frame):
    print "\nCtrl+C detected. Exiting..."
    print_summary()
    sys.exit(0)
if __name__ == '__main__':
    if len(argv) < 4:
        print "ERROR: Not enough arguments given"
        print USAGE
        sys.exit(1)
    wordlist = argv[1]
    base = argv[2]
    try:
        position = int(argv[3])
    except ValueError:
        print "ERROR: position argument not a number"
        sys.exit(1)
    signal.signal(signal.SIGINT, signal_handler)
    main(wordlist, base, position)