Python source code examples: lib.core.data.conf.url
The examples below show how the global configuration attribute conf.url (from lib.core.data) is read and written; most come from sqlmap itself, the rest from tools that reuse its conventions.
Example 1
def _saveToResultsFile():
    if not conf.resultsFP:
        return

    results = {}
    techniques = dict(map(lambda x: (x[1], x[0]), getPublicTypeMembers(PAYLOAD.TECHNIQUE)))

    for inj in kb.injections:
        if inj.place is None or inj.parameter is None:
            continue

        key = (inj.place, inj.parameter)
        if key not in results:
            results[key] = []

        results[key].extend(inj.data.keys())

    for key, value in results.items():
        place, parameter = key
        line = "%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(map(lambda x: techniques[x][0].upper(), sorted(value))), os.linesep)
        conf.resultsFP.writelines(line)

    if not results:
        line = "%s,,,%s" % (conf.url, os.linesep)
        conf.resultsFP.writelines(line)
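The CSV line joins the first letter of each detected technique for an injection point. A standalone Python 3 sketch of just that formatting step (the technique map below is illustrative; the real one comes from getPublicTypeMembers(PAYLOAD.TECHNIQUE)):

import os

# Illustrative id-to-name map; not the exact PAYLOAD.TECHNIQUE members.
techniques = {1: "BOOLEAN", 2: "ERROR", 4: "STACKED", 5: "TIME", 6: "UNION"}

url, place, parameter = "http://www.target.com/vuln.php?id=1", "GET", "id"
found = [5, 1, 6]  # technique ids recorded for this injection point

line = "%s,%s,%s,%s%s" % (url, place, parameter,
                          "".join(techniques[x][0].upper() for x in sorted(found)),
                          os.linesep)
print(line)  # http://www.target.com/vuln.php?id=1,GET,id,BTU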
Example 2
def _setCrawler():
    if not conf.crawlDepth:
        return

    if not any((conf.bulkFile, conf.sitemapUrl)):
        crawl(conf.url)
    else:
        if conf.bulkFile:
            targets = getFileItems(conf.bulkFile)
        else:
            targets = parseSitemap(conf.sitemapUrl)

        for i in xrange(len(targets)):
            try:
                target = targets[i]
                crawl(target)

                if conf.verbose in (1, 2):
                    status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
            except Exception as ex:
                errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, ex)
                logger.error(errMsg)
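The status line starts with a carriage return so each update overwrites the previous one on the same terminal line. A standalone Python 3 rendering of the same formatting (print stands in for sqlmap's dataToStdout):

import time

targets = ["http://www.target.com/a", "http://www.target.com/b", "http://www.target.com/c"]

for i in range(len(targets)):
    status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
    print("\r[%s] [INFO] %s" % (time.strftime("%X"), status), end="")
print()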
Example 3
def getAutoDirectories():
    retVal = set()

    if kb.absFilePaths:
        infoMsg = "retrieved web server absolute paths: "
        infoMsg += "'%s'" % ", ".join(ntToPosixSlashes(path) for path in kb.absFilePaths)
        logger.info(infoMsg)

        for absFilePath in kb.absFilePaths:
            if absFilePath:
                directory = directoryPath(absFilePath)
                directory = ntToPosixSlashes(directory)
                retVal.add(directory)
    else:
        warnMsg = "unable to automatically parse any web server path"
        logger.warn(warnMsg)

    _ = extractRegexResult(r"//[^/]+?(?P<result>/.*)/", conf.url)  # web directory
    if _:
        retVal.add(_)

    return list(retVal)
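extractRegexResult returns the named "result" group of the first match, if any. A minimal re-based stand-in, applied to the web-directory pattern above (the real helper in sqlmap may pass extra flags):

import re

def extract_regex_result(regex, content):
    # Minimal stand-in for sqlmap's extractRegexResult(): return the
    # named "result" group of the first match, or None.
    match = re.search(regex, content or "")
    return match.group("result") if match else None

url = "http://www.target.com/app/admin/login.php?id=1"
print(extract_regex_result(r"//[^/]+?(?P<result>/.*)/", url))  # /app/admin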
Example 4
def maskSensitiveData(msg):
    """
    Masks sensitive data in the supplied message
    """

    retVal = getUnicode(msg)

    for item in filter(None, map(lambda x: conf.get(x), ("hostname", "data", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile"))):
        regex = SENSITIVE_DATA_REGEX % re.sub(r"(\W)", r"\\\1", getUnicode(item))
        while extractRegexResult(regex, retVal):
            value = extractRegexResult(regex, retVal)
            retVal = retVal.replace(value, '*' * len(value))

    if not conf.get("hostname"):
        match = re.search(r"(?i)sqlmap.+(-u|--url)(\s+|=)([^ ]+)", retVal)
        if match:
            retVal = retVal.replace(match.group(3), '*' * len(match.group(3)))

    if getpass.getuser():
        retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), "*" * len(getpass.getuser()), retVal)

    return retVal
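The masking itself is a plain length-preserving substitution: every occurrence of a secret is replaced by the same number of asterisks. A self-contained sketch (SENSITIVE_DATA_REGEX here is a simplified guess at the real template, which lives in lib.core.settings):

import re

# Simplified stand-in for sqlmap's SENSITIVE_DATA_REGEX template;
# the real pattern also anchors on surrounding whitespace/quotes.
SENSITIVE_DATA_REGEX = r"(?P<result>%s)"

def mask(msg, secret):
    # Escape regex metacharacters in the secret, then blank out every
    # occurrence with a same-length run of asterisks.
    regex = SENSITIVE_DATA_REGEX % re.sub(r"(\W)", r"\\\1", secret)
    match = re.search(regex, msg)
    while match:
        value = match.group("result")
        msg = msg.replace(value, '*' * len(value))
        match = re.search(regex, msg)
    return msg

print(mask("login with admin:s3cret@host", "s3cret"))  # login with admin:******@host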
Example 5
def _checkTor():
    if not conf.checkTor:
        return

    infoMsg = "checking Tor connection"
    logger.info(infoMsg)

    try:
        page, _, _ = Request.getPage(url="https://check.torproject.org/", raise404=False)
    except SqlmapConnectionException:
        page = None

    if not page or 'Congratulations' not in page:
        errMsg = "it seems that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
        raise SqlmapConnectionException(errMsg)
    else:
        infoMsg = "Tor is properly being used"
        logger.info(infoMsg)
Example 6
def getHostHeader(url):
    """
    Returns proper Host header value for a given target URL

    >>> getHostHeader('http://www.target.com/vuln.php?id=1')
    'www.target.com'
    """

    retVal = url

    if url:
        retVal = urlparse.urlparse(url).netloc

        if re.search(r"http(s)?://\[.+\]", url, re.I):
            retVal = extractRegexResult(r"http(s)?://\[(?P<result>.+)\]", url)
        elif any(retVal.endswith(':%d' % _) for _ in (80, 443)):
            retVal = retVal.split(':')[0]

    return retVal
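The function leans on urlparse's netloc plus two corrections: bracketed IPv6 hosts have their brackets stripped, and default ports 80/443 are dropped. A runnable Python 3 check of the same logic (urllib.parse is the Python 3 home of the urlparse module used above):

import re
from urllib.parse import urlparse

def get_host_header(url):
    ret = urlparse(url).netloc
    if re.search(r"http(s)?://\[.+\]", url, re.I):
        # IPv6 literal: keep the address, drop brackets and port
        ret = re.search(r"http(s)?://\[(?P<result>.+)\]", url).group("result")
    elif any(ret.endswith(':%d' % port) for port in (80, 443)):
        ret = ret.split(':')[0]  # drop default ports
    return ret

print(get_host_header("http://www.target.com:80/vuln.php?id=1"))  # www.target.com
print(get_host_header("https://[2001:db8::1]:8443/index.php"))    # 2001:db8::1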
Example 7
def saveResults(domain, msg):
    '''
    @description: save results to a file named "<domain>.txt", deduplicating URLs
    @param {domain: target domain, msg: message to save}
    @return: null
    '''
    filename = domain + '.txt'
    conf.output_path = os.path.join(paths.OUTPUT_PATH, filename)

    # check whether the output file exists; create it if it does not
    if not os.path.exists(conf.output_path):
        with open(conf.output_path, 'w+') as temp:
            pass

    with open(conf.output_path, 'r+') as result_file:
        old = result_file.read()
        if msg + '\n' in old:
            pass
        else:
            result_file.write(msg + '\n')
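Note that the duplicate check above is a substring test against the entire file, so a message whose text happens to end an already-saved line is also treated as a duplicate. A line-exact variant, as a sketch:

import os

def save_result(path, msg):
    # Line-exact deduplication: skip msg only if it already appears
    # as a complete line, not merely as the tail of one.
    existing = set()
    if os.path.exists(path):
        with open(path) as f:
            existing = set(line.rstrip('\n') for line in f)
    if msg not in existing:
        with open(path, 'a') as f:
            f.write(msg + '\n')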
Example 8
def __init__(self):
    Cmd.__init__(self)
    os.system("clear")
    banner()
    conf.url = None
    conf.urlFile = None
    conf.cookie = None
    # random User-Agent support
    #conf.randomAgent = False
    conf.threads = 1
    # whether an HTML report is wanted
    conf.report = None
    conf.timeout = 3
    conf.httpHeaders = HTTP_DEFAULT_HEADER
    self.prompt = "ZEROScan > "
Example 9
def __goInference(payload, expression):
    start = time.time()

    if (conf.eta or conf.threads > 1) and kb.dbms:
        _, length, _ = queryOutputLength(expression, payload)
    else:
        length = None

    dataToSessionFile("[%s][%s][%s][%s][" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], expression))

    count, value = bisection(payload, expression, length=length)
    duration = int(time.time() - start)

    if conf.eta and length:
        infoMsg = "retrieved: %s" % value
        logger.info(infoMsg)

    infoMsg = "performed %d queries in %d seconds" % (count, duration)
    logger.info(infoMsg)

    return value
Example 10
def setInjection():
    """
    Save information retrieved about injection place and parameter in the
    session file.
    """

    if kb.injPlace == "User-Agent":
        kb.injParameter = conf.agent

    condition = (
        kb.injPlace and kb.injParameter and (not kb.resumedQueries
        or (kb.resumedQueries.has_key(conf.url) and
        (not kb.resumedQueries[conf.url].has_key("Injection point")
         or not kb.resumedQueries[conf.url].has_key("Injection parameter")
         or not kb.resumedQueries[conf.url].has_key("Injection type")
        )))
    )

    if condition:
        dataToSessionFile("[%s][%s][%s][Injection point][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], kb.injPlace))
        dataToSessionFile("[%s][%s][%s][Injection parameter][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], kb.injParameter))
        dataToSessionFile("[%s][%s][%s][Injection type][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], kb.injType))
Example 11
def setDbms(dbms):
    """
    @param dbms: database management system to be set into the knowledge
    base as fingerprint.
    @type dbms: C{str}
    """

    condition = (
        not kb.resumedQueries
        or (kb.resumedQueries.has_key(conf.url) and
            not kb.resumedQueries[conf.url].has_key("DBMS"))
    )

    if condition:
        dataToSessionFile("[%s][%s][%s][DBMS][%s]\n" % (conf.url, kb.injPlace, conf.parameters[kb.injPlace], dbms))

    firstRegExp = "(%s|%s)" % ("|".join([alias for alias in MSSQL_ALIASES]),
                               "|".join([alias for alias in MYSQL_ALIASES]))
    dbmsRegExp = re.search("^%s" % firstRegExp, dbms, re.I)

    if dbmsRegExp:
        dbms = dbmsRegExp.group(1)

    kb.dbms = dbms
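The alias regex normalizes the many spellings of a DBMS name down to the matched prefix. A runnable sketch with guessed alias tuples (the real MSSQL_ALIASES/MYSQL_ALIASES live in lib.core.settings):

import re

# Guessed subsets of sqlmap's alias tuples, for illustration only.
MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
MYSQL_ALIASES = ("mysql", "my")

def normalize_dbms(dbms):
    first = "(%s|%s)" % ("|".join(MSSQL_ALIASES), "|".join(MYSQL_ALIASES))
    match = re.search("^%s" % first, dbms, re.I)
    return match.group(1) if match else dbms

print(normalize_dbms("MySQL 5.0.11"))  # MySQL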
Example 12
def webBackdoorRunCmd(self, cmd):
    if self.webBackdoorUrl is None:
        return

    output = None

    if not cmd:
        cmd = conf.osCmd

    cmdUrl = "%s?cmd=%s" % (self.webBackdoorUrl, cmd)
    page, _, _ = Request.getPage(url=cmdUrl, direct=True, silent=True, timeout=BACKDOOR_RUN_CMD_TIMEOUT)

    if page is not None:
        output = re.search(r"<pre>(.+?)</pre>", page, re.I | re.S)

        if output:
            output = output.group(1)

    return output
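Command output is scraped from between the first pair of pre tags in the returned page; re.S lets the match span newlines. The extraction step in isolation:

import re

page = "<html><body><pre>uid=33(www-data) gid=33(www-data)</pre></body></html>"

match = re.search(r"<pre>(.+?)</pre>", page, re.I | re.S)
if match:
    print(match.group(1))  # uid=33(www-data) gid=33(www-data)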
Example 13
def getLatestRevision():
    """
    Retrieves latest revision from the official repository

    >>> from lib.core.settings import VERSION; getLatestRevision() == VERSION
    True
    """

    retVal = None
    req = urllib2.Request(url="https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/lib/core/settings.py")

    try:
        content = urllib2.urlopen(req).read()
        retVal = extractRegexResult(r"VERSION\s*=\s*[\"'](?P<result>[\d.]+)", content)
    except:
        pass

    return retVal
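The version is pulled straight out of the raw settings.py text with a named-group regex. The extraction tested offline against a sample line (the sample mirrors how VERSION is declared, but the value is made up):

import re

# Offline test of the extraction regex against a sample settings.py line.
content = 'VERSION = "1.0.5.0"\nTYPE = "stable"\n'

match = re.search(r"VERSION\s*=\s*[\"'](?P<result>[\d.]+)", content)
print(match.group("result") if match else None)  # 1.0.5.0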
Example 14
def _findPageForms():
    if not conf.forms or conf.crawlDepth:
        return

    if conf.url and not checkConnection():
        return

    infoMsg = "searching for forms"
    logger.info(infoMsg)

    if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)):
        page, _ = Request.queryPage(content=True)
        findPageForms(page, conf.url, True, True)
    else:
        if conf.bulkFile:
            targets = getFileItems(conf.bulkFile)
        elif conf.sitemapUrl:
            targets = parseSitemap(conf.sitemapUrl)
        elif conf.googleDork:
            targets = [_[0] for _ in kb.targets]
            kb.targets.clear()

        for i in xrange(len(targets)):
            try:
                target = targets[i]
                page, _, _ = Request.getPage(url=target.strip(), crawling=True, raise404=False)
                findPageForms(page, target, False, True)

                if conf.verbose in (1, 2):
                    status = '%d/%d links visited (%d%%)' % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
            except KeyboardInterrupt:
                break
            except Exception as ex:
                errMsg = "problem occurred while searching for forms at '%s' ('%s')" % (target, ex)
                logger.error(errMsg)
Example 15
def _checkWebSocket():
    infoMsg = "checking for WebSocket"
    logger.debug(infoMsg)

    if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):
        try:
            from websocket import ABNF
        except ImportError:
            errMsg = "sqlmap requires third-party module 'websocket-client' "
            errMsg += "in order to use WebSocket functionality"
            raise SqlmapMissingDependence(errMsg)
Example 16
def parseTargetUrl():
    """
    Parse target URL and set some attributes into the configuration singleton.
    """

    if not conf.url:
        return

    originalUrl = conf.url

    if re.search(r"\[.+\]", conf.url) and not socket.has_ipv6:
        errMsg = "IPv6 addressing is not supported "
        errMsg += "on this platform"
        raise SqlmapGenericException(errMsg)

    if not re.search(r"^http[s]*://", conf.url, re.I) and \
       not re.search(r"^ws[s]*://", conf.url, re.I):
        if ":443/" in conf.url:
            conf.url = "https://" + conf.url
        else:
            conf.url = "http://" + conf.url

    if CUSTOM_INJECTION_MARK_CHAR in conf.url:
        conf.url = conf.url.replace('?', URI_QUESTION_MARKER)

    try:
        urlSplit = urlparse.urlsplit(conf.url)
    except ValueError as ex:
        errMsg = "invalid URL '%s' has been given ('%s'). " % (conf.url, ex)
        errMsg += "Please be sure that you don't have any leftover characters (e.g. '[' or ']') "
        errMsg += "in the hostname part"
        raise SqlmapGenericException(errMsg)
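The scheme-fixing branch only fires when the URL carries no explicit http(s):// or ws(s):// prefix, and it guesses https purely from a ':443/' substring. That logic in isolation:

import re

def add_scheme(url):
    # Mirror of the scheme-guessing branch above: leave explicit
    # schemes alone, otherwise infer https from a ':443/' substring.
    if not re.search(r"^http[s]*://", url, re.I) and \
       not re.search(r"^ws[s]*://", url, re.I):
        url = ("https://" if ":443/" in url else "http://") + url
    return url

print(add_scheme("www.target.com:443/index.php"))  # https://www.target.com:443/index.php
print(add_scheme("www.target.com/index.php"))      # http://www.target.com/index.php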
Example 17
def hashDBWrite(key, value, serialize=False):
    """
    Helper function for writing session data to HashDB
    """

    _ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
    conf.hashDB.write(_, value, serialize)
Example 18
def hashDBRetrieve(key, unserialize=False, checkConf=False):
    """
    Helper function for restoring session data from HashDB
    """

    _ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
    retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None

    if not kb.inferenceMode and not kb.fileReadMode and any(_ in (retVal or "") for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)):
        retVal = None

    return retVal
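Both helpers derive the HashDB key from the target identity (conf.url, or hostname plus port when no URL is set) joined with the entry key and a milestone constant, so session entries for different targets never collide. A sketch of the key derivation only (the milestone value is an illustrative placeholder, and the hashing HashDB applies internally is omitted):

# Sketch of the HashDB key derivation used by both helpers.
HASHDB_MILESTONE_VALUE = "MILESTONE"  # placeholder; the real value is a settings constant

def hashdb_key(key, url=None, hostname=None, port=None):
    target = url or "%s%s" % (hostname, port)
    return "%s%s%s" % (target, key, HASHDB_MILESTONE_VALUE)

print(hashdb_key("KB_INJECTIONS", url="http://www.target.com/vuln.php?id=1"))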