
Commit 84ac817

Merge pull request #91 from OSINT-TECHNOLOGIES/rolling
Stabilized v1.1.3
2 parents a22acbb + 2eaf834 commit 84ac817

14 files changed: +431, -250 lines

apis/api_keys.db (12 KB)
Binary file not shown.

apis/api_keys_reference.db (12 KB)
Binary file not shown.
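Both database files are binary, so the diff cannot display them. The only schema detail this commit exposes is the query used by the new API modules below (SELECT api_name, api_key FROM api_keys). A minimal sketch of how such a database could be created, assuming that single table and nothing else; the key values are placeholders:

    import sqlite3

    # Assumed schema: only the table name and the (api_name, api_key) columns
    # are confirmed by the queries in this commit; the column types are a guess.
    conn = sqlite3.connect('apis//api_keys.db')
    conn.execute("CREATE TABLE IF NOT EXISTS api_keys (api_name TEXT, api_key TEXT)")
    conn.executemany("INSERT INTO api_keys (api_name, api_key) VALUES (?, ?)",
                     [('VirusTotal', 'YOUR_VT_KEY'),        # placeholder key
                      ('SecurityTrails', 'YOUR_ST_KEY')])   # placeholder key
    conn.commit()
    conn.close()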

apis/api_securitytrails.py (new file, +59 lines)

@@ -0,0 +1,59 @@
+import requests
+import sqlite3
+from colorama import Fore, Style
+
+def api_securitytrails_check(domain):
+    conn = sqlite3.connect('apis//api_keys.db')
+    cursor = conn.cursor()
+    cursor.execute("SELECT api_name, api_key FROM api_keys")
+    rows = cursor.fetchall()
+    for row in rows:
+        api_name, api_key = row
+        if api_name == 'SecurityTrails':
+            api_key = str(row[1])
+    print(Fore.GREEN + 'Got SecurityTrails API key. Starting SecurityTrails scan...\n')
+
+    subdomains_url = f"https://api.securitytrails.com/v1/domain/{domain}/subdomains?apikey={api_key}"
+    response = requests.get(subdomains_url)
+
+    url = f"https://api.securitytrails.com/v1/domain/{domain}?apikey={api_key}"
+    general_response = requests.get(url)
+    general_data = general_response.json()
+
+    print(Fore.GREEN + "[DOMAIN GENERAL INFORMATION]\n")
+    print(Fore.GREEN + "Alexa Rank: " + Fore.LIGHTCYAN_EX + f"{general_data['alexa_rank']}")
+    print(Fore.GREEN + "Apex Domain: " + Fore.LIGHTCYAN_EX + f"{general_data['apex_domain']}")
+    print(Fore.GREEN + "Hostname: " + Fore.LIGHTCYAN_EX + f"{general_data['hostname']}" + Style.RESET_ALL)
+
+    print(Fore.GREEN + "\n[DNS RECORDS]" + Style.RESET_ALL)
+    for record_type, record_data in general_data['current_dns'].items():
+        print(Fore.GREEN + f"\n[+] {record_type.upper()} RECORDS:" + Style.RESET_ALL)
+        for value in record_data.get('values', []):
+            if record_type == 'a':
+                print(Fore.GREEN + "IP: " + Fore.LIGHTCYAN_EX + f"{value['ip']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['ip_organization']}")
+            elif record_type == 'mx':
+                print(Fore.GREEN + "Hostname: " + Fore.LIGHTCYAN_EX + f"{value['hostname']} " + Fore.GREEN + "| Priority: " + Fore.LIGHTCYAN_EX + f"{value['priority']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['hostname_organization']}")
+            elif record_type == 'ns':
+                print(Fore.GREEN + "Nameserver: " + Fore.LIGHTCYAN_EX + f"{value['nameserver']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['nameserver_organization']}")
+            elif record_type == 'soa':
+                print(Fore.GREEN + "Email: " + Fore.LIGHTCYAN_EX + f"{value['email']} " + Fore.GREEN + "| TTL: " + Fore.LIGHTCYAN_EX + f"{value['ttl']}")
+            elif record_type == 'txt':
+                print(Fore.GREEN + "Value: " + Fore.LIGHTCYAN_EX + f"{value['value']}")
+
+    if response.status_code == 200:
+        data = response.json()
+        print(Fore.GREEN + "\n[SUBDOMAINS DEEP ENUMERATION]\n")
+        print(Fore.GREEN + f"Found " + Fore.LIGHTCYAN_EX + f"{data['subdomain_count']} " + Fore.GREEN + "subdomains")
+        print(Fore.GREEN + "Subdomains list: ")
+        for i, subdomain in enumerate(data['subdomains'], start=1):
+            subdomain_url = f"http://{subdomain}.{domain}"
+            try:
+                response = requests.get(subdomain_url, timeout=5)
+                if response.status_code == 200:
+                    print(Fore.GREEN + f"{i}. " + Fore.LIGHTCYAN_EX + f"{subdomain_url} " + Fore.GREEN + "is alive")
+                else:
+                    pass
+            except Exception:
+                pass
+    else:
+        pass
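Review note: the key-lookup loop keeps iterating after its match, the sqlite connection is never closed, and if no 'SecurityTrails' row exists the later f-strings use whatever api_key last held. A minimal sketch of a stricter lookup, assuming the same api_keys schema; get_api_key is an illustrative helper, not part of this commit:

    from contextlib import closing
    import sqlite3
    import requests

    def get_api_key(db_path, api_name):
        # Fetch exactly one key by name; closing() guarantees the connection
        # is released even if the query raises.
        with closing(sqlite3.connect(db_path)) as conn:
            row = conn.execute("SELECT api_key FROM api_keys WHERE api_name = ?",
                               (api_name,)).fetchone()
        return row[0] if row else None

    key = get_api_key('apis//api_keys.db', 'SecurityTrails')
    if key:
        # Same URL pattern the module uses for the general domain endpoint.
        resp = requests.get(f"https://api.securitytrails.com/v1/domain/example.com?apikey={key}",
                            timeout=10)
        print(resp.status_code)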

apis/api_virustotal.py (new file, +49 lines)

@@ -0,0 +1,49 @@
+import requests
+import sqlite3
+from colorama import Fore, Style
+
+def check_domain(domain, api_key):
+    url = "https://www.virustotal.com/vtapi/v2/domain/report"
+    params = {
+        'domain': domain,
+        'apikey': api_key
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code == 200:
+        return response.json()
+    else:
+        print(f"Error: {response.status_code}")
+        return None
+
+
+def api_virustotal_check(domain):
+    conn = sqlite3.connect('apis//api_keys.db')
+    cursor = conn.cursor()
+    cursor.execute("SELECT api_name, api_key FROM api_keys")
+    rows = cursor.fetchall()
+    for row in rows:
+        api_name, api_key = row
+        if api_name == 'VirusTotal':
+            api_key = str(row[1])
+    print(Fore.GREEN + 'Got VirusTotal API key. Starting VirusTotal scan...\n')
+
+    result = check_domain(domain, api_key)
+
+    if result:
+        print(Fore.GREEN + "[VIRUSTOTAL DOMAIN REPORT]")
+        print(Fore.GREEN + f"Domain: {result.get('domain')}")
+        print(Fore.GREEN + f"Categories: {result.get('categories')}")
+        print(Fore.GREEN + f"Detected URLs: {len(result.get('detected_urls', []))}")
+        print(Fore.GREEN + f"Detected Samples: {len(result.get('detected_samples', []))}")
+        print(Fore.GREEN + f"Undetected Samples: {len(result.get('undetected_samples', []))}\n")
+        print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
+        conn.close()
+    else:
+        print(Fore.RED + "Failed to get domain report\n")
+        print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
+        conn.close()
+        pass
+
+
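For context, check_domain wraps VirusTotal's legacy v2 domain-report endpoint and returns the parsed JSON or None. A minimal usage sketch, assuming a ('VirusTotal', key) row exists in the database and the script is launched from the repository root so the relative apis// path resolves:

    from api_virustotal import api_virustotal_check

    # Prints the [VIRUSTOTAL DOMAIN REPORT] block for the target domain,
    # or the red failure message if the endpoint rejects the request.
    api_virustotal_check('example.com')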

datagather_modules/crawl_processor.py (+20, -20 lines)

@@ -115,25 +115,25 @@ def sm_gather(url):
     for link in links:
         parsed_url = urlparse(link)
         hostname = parsed_url.hostname
-        if hostname and hostname.endswith('facebook.com'):
+        if hostname and (hostname == 'facebook.com' or hostname.endswith('.facebook.com')):
             categorized_links['Facebook'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('twitter.com'):
+        elif hostname and (hostname == 'twitter.com' or hostname.endswith('.twitter.com')):
             categorized_links['Twitter'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('instagram.com'):
+        elif hostname and (hostname == 'instagram.com' or hostname.endswith('.instagram.com')):
             categorized_links['Instagram'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('t.me'):
+        elif hostname and (hostname == 't.me' or hostname.endswith('.t.me')):
             categorized_links['Telegram'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('tiktok.com'):
+        elif hostname and (hostname == 'tiktok.com' or hostname.endswith('.tiktok.com')):
             categorized_links['TikTok'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('linkedin.com'):
+        elif hostname and (hostname == 'linkedin.com' or hostname.endswith('.linkedin.com')):
             categorized_links['LinkedIn'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('vk.com'):
+        elif hostname and (hostname == 'vk.com' or hostname.endswith('.vk.com')):
             categorized_links['VKontakte'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('youtube.com'):
+        elif hostname and (hostname == 'youtube.com' or hostname.endswith('.youtube.com')):
             categorized_links['YouTube'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('wechat.com'):
+        elif hostname and (hostname == 'wechat.com' or hostname.endswith('.wechat.com')):
             categorized_links['WeChat'].append(urllib.parse.unquote(link))
-        elif hostname and hostname.endswith('ok.ru'):
+        elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
             categorized_links['Odnoklassniki'].append(urllib.parse.unquote(link))
 
     if not categorized_links['Odnoklassniki']:
@@ -214,25 +214,25 @@ def domains_reverse_research(subdomains, report_file_type):
     for inner_list in subdomain_socials_grouped:
         for link in inner_list:
             hostname = urlparse(link).hostname
-            if hostname and hostname.endswith('facebook.com'):
+            if hostname and (hostname == 'facebook.com' or hostname.endswith('.facebook.com')):
                 sd_socials['Facebook'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('twitter.com'):
+            elif hostname and (hostname == 'twitter.com' or hostname.endswith('.twitter.com')):
                 sd_socials['Twitter'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('instagram.com'):
+            elif hostname and (hostname == 'instagram.com' or hostname.endswith('.instagram.com')):
                 sd_socials['Instagram'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('t.me'):
+            elif hostname and (hostname == 't.me' or hostname.endswith('.t.me')):
                 sd_socials['Telegram'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('tiktok.com'):
+            elif hostname and (hostname == 'tiktok.com' or hostname.endswith('.tiktok.com')):
                 sd_socials['TikTok'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('linkedin.com'):
+            elif hostname and (hostname == 'linkedin.com' or hostname.endswith('.linkedin.com')):
                 sd_socials['LinkedIn'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('vk.com'):
+            elif hostname and (hostname == 'vk.com' or hostname.endswith('.vk.com')):
                 sd_socials['VKontakte'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('youtube.com'):
+            elif hostname and (hostname == 'youtube.com' or hostname.endswith('.youtube.com')):
                 sd_socials['YouTube'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('wechat.com'):
+            elif hostname and (hostname == 'wechat.com' or hostname.endswith('.wechat.com')):
                 sd_socials['WeChat'].append(urllib.parse.unquote(link))
-            elif hostname and hostname.endswith('ok.ru'):
+            elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
                 sd_socials['Odnoklassniki'].append(urllib.parse.unquote(link))
 
     sd_socials = {k: list(set(v)) for k, v in sd_socials.items()}
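Both hunks fix the same suffix-matching bug: hostname.endswith('facebook.com') also matches unrelated look-alike domains such as 'notfacebook.com'. A small sketch demonstrating the difference; the matches_domain helper is illustrative, not part of the commit:

    def matches_domain(hostname, domain):
        # True for the domain itself and any of its subdomains, but not for
        # registrations that merely end with the same string.
        return hostname == domain or hostname.endswith('.' + domain)

    print('notfacebook.com'.endswith('facebook.com'))         # True  (old check, false positive)
    print(matches_domain('notfacebook.com', 'facebook.com'))  # False (new check)
    print(matches_domain('m.facebook.com', 'facebook.com'))   # True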

datagather_modules/data_assembler.py (+35, -3 lines)

@@ -8,6 +8,8 @@
 import networking_processor as np
 from pagesearch_main import normal_search, sitemap_inspection_search
 from logs_processing import logging
+from api_virustotal import api_virustotal_check
+from api_securitytrails import api_securitytrails_check
 
 try:
     import requests
@@ -65,7 +67,7 @@ def report_preprocessing(self, short_domain, report_file_type):
         os.makedirs(report_folder, exist_ok=True)
         return casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, files_ctime, report_ctime
 
-    def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag, dorking_flag):
+    def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag, dorking_flag, used_api_flag):
         casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, ctime, report_ctime = self.report_preprocessing(short_domain, report_file_type)
         logging.info(f'### THIS LOG PART FOR {casename} CASE, TIME: {ctime} STARTS HERE')
         print(Fore.GREEN + "Started scanning domain" + Style.RESET_ALL)
@@ -147,6 +149,16 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
             dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
             print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+        if used_api_flag != ['Empty']:
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+            if 1 in used_api_flag:
+                api_virustotal_check(short_domain)
+            if 2 in used_api_flag:
+                api_securitytrails_check(short_domain)
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+        else:
+            pass
+
         data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                       subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                       robots_txt_result, sitemap_xml_result, sitemap_links_status,
@@ -180,13 +192,23 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
 
         if dorking_flag == 'none':
             dorking_status = 'Google Dorking mode was not selected for this scan'
-            dorking_file_path = 'Google Dorking mode was not selected for this scan'
+            dorking_results = ['Google Dorking mode was not selected for this scan']
         else:
             dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
             print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
-            dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
+            dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
             print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+        if used_api_flag != ['Empty']:
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+            if 1 in used_api_flag:
+                api_virustotal_check(short_domain)
+            if 2 in used_api_flag:
+                api_securitytrails_check(short_domain)
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+        else:
+            pass
+
         data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                       subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                       robots_txt_result, sitemap_xml_result, sitemap_links_status,
@@ -230,6 +252,16 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
             dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
             print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
 
+        if used_api_flag != ['Empty']:
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: API SCANNING]\n" + Style.RESET_ALL)
+            if 1 in used_api_flag:
+                api_virustotal_check(short_domain)
+            if 2 in used_api_flag:
+                api_securitytrails_check(short_domain)
+            print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: API SCANNING]\n" + Style.RESET_ALL)
+        else:
+            pass
+
         data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
                       subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
                       robots_txt_result, sitemap_xml_result, sitemap_links_status,
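The used_api_flag convention here is a list that is either ['Empty'] or integer IDs (1 = VirusTotal, 2 = SecurityTrails), and the same dispatch block is repeated verbatim in three branches. A hedged sketch of a table-driven variant that would avoid the duplication; the API_DISPATCH mapping and run_api_scans helper are illustrative, not part of the commit:

    from api_virustotal import api_virustotal_check
    from api_securitytrails import api_securitytrails_check

    # Mirrors the 1/2 flag convention introduced by this commit.
    API_DISPATCH = {
        1: api_virustotal_check,
        2: api_securitytrails_check,
    }

    def run_api_scans(short_domain, used_api_flag):
        # ['Empty'] is the commit's sentinel for "no API modules selected".
        if used_api_flag == ['Empty']:
            return
        for flag in used_api_flag:
            check = API_DISPATCH.get(flag)
            if check:
                check(short_domain)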
