import sys
-sys.path.append('service')
-from config_processing import read_config
-from logs_processing import logging

try:
    import requests.exceptions
    import re
    import requests
    import sqlite3
-    import time
    import os
except ImportError as e:
    print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
@@ -34,51 +30,41 @@ def get_columns_amount(dorking_db_path, table):
    conn.close()
    return row_count

-def solid_google_dorking(query, dorking_delay, delay_step, pages=100):
+def solid_google_dorking(query, pages=100):
    try:
        browser = mechanicalsoup.StatefulBrowser()
        browser.open("https://www.google.com/")
        browser.select_form('form[action="/search"]')
        browser["q"] = str(query)
        browser.submit_selected(btnName="btnG")
        result_query = []
-        request_count = 0
        for page in range(pages):
            for link in browser.links():
                target = link.attrs['href']
                if (target.startswith('/url?') and not
                        target.startswith("/url?q=http://webcache.googleusercontent.com")):
                    target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
                    result_query.append(target)
-                    request_count += 1
-                    if request_count % delay_step == 0:
-                        time.sleep(dorking_delay)
            try:
                browser.follow_link(nr=page + 1)
            except mechanicalsoup.LinkNotFoundError:
                break
-
        del result_query[-2:]
        return result_query
    except requests.exceptions.ConnectionError as e:
-        print(Fore.RED + "Error while establishing connection with domain. No results will appear. See journal for details" + Style.RESET_ALL)
-        logging.error(f'DORKING PROCESSING: ERROR. REASON: {e}')
+        print(Fore.RED + "Error while establishing connection with domain. No results will appear. Reason: {}".format(e) + Style.RESET_ALL)

def save_results_to_txt(folderpath, table, queries, pages=10):
    try:
-        config_values = read_config()
-        dorking_delay = int(config_values['dorking_delay (secs)'])
-        delay_step = int(config_values['delay_step'])
        txt_writepath = folderpath + '//04-dorking_results.txt'
        total_results = []
        total_dorks_amount = len(queries)
        with open(txt_writepath, 'w') as f:
            print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
-            print(Fore.GREEN + f"{dorking_delay} seconds delay after each {delay_step} dorking requests was configured" + Style.RESET_ALL)
            dorked_query_counter = 0
            for i, query in enumerate(queries, start=1):
                f.write(f"QUERY #{i}: {query}\n")
-                results = solid_google_dorking(query, dorking_delay, delay_step, pages)
+                results = solid_google_dorking(query, pages)
                if not results:
                    f.write("=> NO RESULT FOUND\n")
                    total_results.append((query, 0))
@@ -96,17 +82,12 @@ def save_results_to_txt(folderpath, table, queries, pages=10):
                count = 'no results'
            print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f'{query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
        return f'Successfully dorked domain with {table.upper()} dorks table', txt_writepath
-    except Exception as e:
+    except Exception:
        print(Fore.RED + 'Error appeared while trying to dork target. See journal for details')
-        logging.error(f'DORKING PROCESSING: ERROR. REASON: {e}')
        return 'Domain dorking failed. See journal for details', txt_writepath

def transfer_results_to_xlsx(table, queries, pages=10):
-    config_values = read_config()
-    dorking_delay = int(config_values['dorking_delay (secs)'])
-    delay_step = int(config_values['delay_step'])
    print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
-    print(Fore.GREEN + f"{dorking_delay} seconds delay after each {delay_step} dorking requests was configured" + Style.RESET_ALL)
    dorked_query_counter = 0
    total_dorks_amount = len(queries)
    dorking_return_list = []
@@ -126,15 +107,15 @@ def transfer_results_to_xlsx(table, queries, pages=10):

def dorks_files_check():
    dorks_path = 'dorking//'
-    dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db', 'adminpanels_dorking.db', 'webstructure_dorking.db']
+    dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
    dorks_files_counter = 0
    for dork_files in dorks_files:
        files_path = os.path.join(dorks_path, dork_files)
        if os.path.isfile(files_path):
            dorks_files_counter += 1
        else:
            pass
-    if dorks_files_counter == 5:
+    if dorks_files_counter == 3:
        print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
    else:
        print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)