@@ -8,6 +8,8 @@
 import networking_processor as np
 from pagesearch_main import normal_search, sitemap_inspection_search
 from logs_processing import logging
+from api_virustotal import api_virustotal_check
+from api_securitytrails import api_securitytrails_check
 
 try:
     import requests
@@ -65,7 +67,7 @@ def report_preprocessing(self, short_domain, report_file_type):
         os.makedirs(report_folder, exist_ok=True)
         return casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, files_ctime, report_ctime
 
-    def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag, dorking_flag):
+    def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag, dorking_flag, used_api_flag):
         casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, ctime, report_ctime = self.report_preprocessing(short_domain, report_file_type)
         logging.info(f'### THIS LOG PART FOR {casename} CASE, TIME: {ctime} STARTS HERE')
         print(Fore.GREEN + "Started scanning domain" + Style.RESET_ALL)
@@ -147,6 +149,16 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
                 dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
                 print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+            if used_api_flag != ['Empty']:
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+                if 1 in used_api_flag:
+                    api_virustotal_check(short_domain)
+                if 2 in used_api_flag:
+                    api_securitytrails_check(short_domain)
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+            else:
+                pass
+
             data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                           subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                           robots_txt_result, sitemap_xml_result, sitemap_links_status,
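api_securitytrails_check(short_domain), called above when 2 is present in used_api_flag, lives in the other new module the diff does not show. A comparable sketch, assuming the documented SecurityTrails endpoint GET /v1/domain/{hostname} and again using an environment variable purely as a stand-in for the real key storage:

    import os
    import requests

    def api_securitytrails_check(short_domain):
        # Placeholder key lookup, mirroring the VirusTotal sketch above.
        api_key = os.environ.get('SECURITYTRAILS_API_KEY', '')
        response = requests.get(
            f'https://api.securitytrails.com/v1/domain/{short_domain}',
            headers={'APIKEY': api_key},
        )
        if response.status_code == 200:
            # current_dns groups the domain's records by type (a, mx, ns, txt, ...).
            current_dns = response.json().get('current_dns', {})
            print(f"SecurityTrails record types for {short_domain}: {sorted(current_dns)}")
        else:
            print(f"SecurityTrails request failed with status code {response.status_code}")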
@@ -180,13 +192,23 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
 
             if dorking_flag == 'none':
                 dorking_status = 'Google Dorking mode was not selected for this scan'
-                dorking_file_path = 'Google Dorking mode was not selected for this scan'
+                dorking_results = ['Google Dorking mode was not selected for this scan']
             else:
                 dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
                 print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
-                dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
+                dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
                 print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+            if used_api_flag != ['Empty']:
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+                if 1 in used_api_flag:
+                    api_virustotal_check(short_domain)
+                if 2 in used_api_flag:
+                    api_securitytrails_check(short_domain)
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+            else:
+                pass
+
             data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                           subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                           robots_txt_result, sitemap_xml_result, sitemap_links_status,
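Besides the API block, this hunk reroutes dorking output for this branch (presumably the XLSX report path, given the name) from a text file written by dp.save_results_to_txt to in-memory results returned by dp.transfer_results_to_xlsx. The new helper is not shown in the diff; a guess at its shape, assuming dp.get_dorking_query returns an iterable of dorking query strings:

    def transfer_results_to_xlsx(table, dorking_queries):
        # Speculative reconstruction: flatten the query results into plain strings
        # and return a human-readable status line alongside them for the report writer.
        dorking_results = [str(query) for query in dorking_queries]
        dorking_status = f"Dorking with the {table} table returned {len(dorking_results)} queries"
        return dorking_status, dorking_results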
@@ -230,6 +252,16 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
                 dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
                 print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+            if used_api_flag != ['Empty']:
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+                if 1 in used_api_flag:
+                    api_virustotal_check(short_domain)
+                if 2 in used_api_flag:
+                    api_securitytrails_check(short_domain)
+                print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+            else:
+                pass
+
             data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                           subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                           robots_txt_result, sitemap_xml_result, sitemap_links_status,
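The same API-dispatch block is inlined verbatim in all three report-type branches. Restated as a standalone helper purely for readability (the function name is illustrative and the commit itself repeats the logic rather than extracting it; colorama styling is omitted and the two check functions are the ones imported at the top of this file), the added behaviour is simply:

    def run_api_scans(short_domain, used_api_flag):
        # ['Empty'] is the sentinel meaning no API scanning was selected.
        if used_api_flag == ['Empty']:
            return
        print("\n[EXTENDED SCAN START: API SCANNING]\n")
        if 1 in used_api_flag:            # 1 selects the VirusTotal check
            api_virustotal_check(short_domain)
        if 2 in used_api_flag:            # 2 selects the SecurityTrails check
            api_securitytrails_check(short_domain)
        print("\n[EXTENDED SCAN END: API SCANNING]\n")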