Skip to content

Commit 133ce25

Browse files
committed
Update
1 parent f94e1e0 commit 133ce25

File tree

4 files changed

+70
-43
lines changed

4 files changed

+70
-43
lines changed

lib/browser.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,8 @@ def __init__(self, link):
1717
self.is_attempted = False
1818

1919
def get_content(self):
    """Fetch self.link with a ``*`` appended (to provoke a SQL error
    page from a vulnerable backend) and return the response body
    lower-cased, or None on any request failure.

    Returns:
        str | None: lower-cased page text, or None when the request
        could not be completed.
    """
    try:
        # Narrowed from a bare ``except:`` — only swallow network/HTTP
        # errors; KeyboardInterrupt and SystemExit now propagate.
        return requests.get(self.link + '*').text.lower()
    except requests.RequestException:
        # Explicit None keeps the caller's ``if content:`` check working.
        return None
2524

@@ -30,7 +29,7 @@ def attempt(self):
3029
if content:
3130
self.is_attempted = True
3231

33-
if 'Invalid SQL' in content or 'error' in content:
32+
if 'sql' in content and 'error' in content and 'at line' in content:
3433
self.is_vulner = True
3534

3635
self.is_active = False

lib/search.py

+66-19
Original file line numberDiff line numberDiff line change
@@ -1,43 +1,90 @@
1-
# Date: 01/02/2019
2-
# Author: Mohamed
3-
# Description: Search for links
41

5-
import requests
2+
3+
import threading
64
from queue import Queue
7-
from bs4 import BeautifulSoup as bs
5+
from requests_html import HTMLSession
6+
7+
8+
class Search:
    """Crawl Bing result pages for a query and feed links that carry a
    query parameter into a thread-safe queue for consumers.

    Producer: find_links() (run via start(), typically on its own thread).
    Consumers: get_link() / is_active() / stop().
    """

    base_url = 'https://bing.com'
    parameters = '/search?q={}'

    def __init__(self, query):
        # query: the search string (dork) submitted to Bing.
        self.query = query

        self.is_alive = True       # cleared by stop() to abort the crawl loop
        self.is_searching = True   # becomes False once find_links() finishes

        self.links = Queue()       # produced by find_links(), drained by get_link()

        # Guards is_searching, which is written by the search thread and
        # read by consumers via is_active().
        self.lock = threading.RLock()

    def next_page(self, html):
        """Return the absolute URL of the next results page, or None when
        Bing's pager is absent (last page, or the markup changed)."""
        try:
            pager = html.find('.b_pag', first=True).find('.b_widePag')
            # The last pager cell links to the following results page.
            return self.base_url + pager[-1].attrs['href']
        except Exception:
            # Narrowed from a bare ``except:``; missing/unexpected markup
            # is treated as "no next page".
            return None

    def is_valid(self, link):
        """A link is only interesting if it carries a query parameter
        (an '=' in the URL) that can later be probed for injection."""
        return '=' in link

    def find_links(self):
        """Walk Bing result pages for self.query, queueing every valid
        link, until stop() is called or the pager runs out."""
        session = HTMLSession()
        session.headers['user-agent'] = (
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
            '(KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36'
        )

        url = self.base_url + self.parameters.format(self.query)

        while self.is_alive:
            try:
                html = session.get(url).html
            except Exception:
                # Narrowed from a bare ``except:``; on a network failure,
                # stop crawling rather than spin on the same URL.
                break

            for result in html.find('.b_algo'):
                # The whole h2 -> a -> href chain lives inside the try:
                # a result block missing its h2 previously raised an
                # uncaught AttributeError and killed the crawl.
                try:
                    link = (result.find('h2', first=True)
                                  .find('a', first=True)
                                  .attrs['href'])
                except (AttributeError, KeyError):
                    continue

                if self.is_valid(link):
                    self.links.put(link)

            next_page = self.next_page(html)
            if not next_page:
                break

            url = next_page

        with self.lock:
            self.is_searching = False

    def get_link(self):
        """Pop and return the next queued link, or None when empty."""
        from queue import Empty  # local: module top only imports Queue
        try:
            # get_nowait() avoids the qsize()/get() check-then-act race
            # when several consumer threads drain the queue.
            return self.links.get_nowait()
        except Empty:
            return None

    def start(self):
        """Blocking entry point: run the crawl on the calling thread."""
        self.find_links()

    def is_active(self):
        """True while the crawl is still running or undelivered links
        remain in the queue."""
        with self.lock:
            is_searching = self.is_searching

        return is_searching or self.links.qsize() > 0

    def stop(self):
        """Ask find_links() to exit at its next loop-condition check."""
        self.is_alive = False

lib/sql.py

+1-21
Original file line numberDiff line numberDiff line change
@@ -33,11 +33,10 @@ def search_manager(self):
3333

3434
while self.is_alive:
3535

36-
if not self.search.is_alive and not self.search.links.qsize():
36+
if not self.search.is_active():
3737
break
3838
else:
3939
link = self.search.get_link()
40-
4140
if link:
4241
with self.lock:
4342
self.links.append(link)
@@ -48,7 +47,6 @@ def search_manager(self):
4847
self.is_alive = False
4948

5049
def link_manager(self):
51-
# bots_per_proxy = 0
5250
is_started = False
5351

5452
while self.is_alive:
@@ -62,18 +60,9 @@ def link_manager(self):
6260

6361
browsers = []
6462
for link in self.links:
65-
# if not self.proxy or bots_per_proxy >= max_bots_per_proxy:
66-
# self.proxy = self.proxy_manager.get_proxy()
67-
# bots_per_proxy = 0
68-
69-
# if not self.proxy:
70-
# sleep(1.5)
71-
# continue
7263

7364
if not link in self.active_links and len(self.active_links) < max_active_browsers:
74-
# bots_per_proxy += 1
7565
self.active_links.append(link)
76-
# browser = Browser(link, self.proxy)
7766
browser = Browser(link)
7867
browsers.append(browser)
7968
self.browsers.append(browser)
@@ -113,9 +102,6 @@ def browser_manager(self):
113102
else:
114103
self.display.is_not_vulner(browser.link)
115104

116-
# else:
117-
# self.proxy_manager.bad_proxy(browser.proxy)
118-
119105
with self.lock:
120106
self.active_links.remove(browser.link)
121107
self.browsers.remove(browser)
@@ -144,17 +130,11 @@ def start(self):
144130
search_manager.daemon = True
145131
search_manager.start()
146132

147-
# self.display.info('Searching for proxies ...')
148-
# proxy_manager = Thread(target=self.proxy_manager.start)
149-
# proxy_manager.daemon = True
150-
# proxy_manager.start()
151-
152133
self.browser_manager()
153134

154135
def stop(self):
    """Shut the scanner down: stop the search producer, flag all manager
    loops to exit, and print the final summary."""
    # Stop the search thread first so no new links are produced while
    # the manager loops wind down.
    if self.search:
        self.search.stop()

    self.is_alive = False
    # Final report — total_found presumably counts vulnerable links
    # confirmed during this run (TODO confirm against browser_manager).
    self.display.shutdown(self.total_found)

requirements.txt

+1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
bs4
22
colorama
33
requests
4+
requests_html

0 commit comments

Comments
 (0)