Merge pull request #94 from OSINT-TECHNOLOGIES/rolling
Stabilized v1.1.5
OSINT-TECHNOLOGIES authored Dec 17, 2024
2 parents 2de7259 + ee6ab6f commit 3606040
Showing 14 changed files with 345 additions and 72 deletions.
14 changes: 14 additions & 0 deletions apis/api_securitytrails.py
@@ -13,6 +13,12 @@ def api_securitytrails_check(domain):
api_key = str(row[1])
print(Fore.GREEN + 'Got SecurityTrails API key. Starting SecurityTrails scan...\n')

alive_subdomains = []
txt_records = []
a_records_list = []
mx_records_list = []
ns_records_list = []
soa_records_list = []
subdomains_url = f"https://api.securitytrails.com/v1/domain/{domain}/subdomains?apikey={api_key}"
response = requests.get(subdomains_url)

@@ -31,14 +37,19 @@ def api_securitytrails_check(domain):
for value in record_data.get('values', []):
if record_type == 'a':
print(Fore.GREEN + "IP: " + Fore.LIGHTCYAN_EX + f"{value['ip']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['ip_organization']}")
a_records_list.append({'ip': value.get('ip', ''), 'organization': value.get('ip_organization', '')})
elif record_type == 'mx':
print(Fore.GREEN + "Hostname: " + Fore.LIGHTCYAN_EX + f"{value['hostname']} " + Fore.GREEN + "| Priority: " + Fore.LIGHTCYAN_EX + f"{value['priority']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['hostname_organization']}")
mx_records_list.append({'mx_hostname': value.get('hostname', ''), 'mx_priority': value.get('priority', ''), 'mx_organization': value.get('hostname_organization', '')})
elif record_type == 'ns':
print(Fore.GREEN + "Nameserver: " + Fore.LIGHTCYAN_EX + f"{value['nameserver']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['nameserver_organization']}")
ns_records_list.append({'ns_nameserver': value.get('nameserver', ''), 'ns_organization': value.get('nameserver_organization', '')})
elif record_type == 'soa':
print(Fore.GREEN + "Email: " + Fore.LIGHTCYAN_EX + f"{value['email']} " + Fore.GREEN + "| TTL: " + Fore.LIGHTCYAN_EX + f"{value['ttl']}")
soa_records_list.append({'soa_email': value.get('email', ''), 'soa_ttl': value.get('ttl', '')})
elif record_type == 'txt':
print(Fore.GREEN + "Value: " + Fore.LIGHTCYAN_EX + f"{value['value']}")
txt_records.append(value['value'])

if response.status_code == 200:
data = response.json()
@@ -51,9 +62,12 @@ def api_securitytrails_check(domain):
response = requests.get(subdomain_url, timeout=5)
if response.status_code == 200:
print(Fore.GREEN + f"{i}. " + Fore.LIGHTCYAN_EX + f"{subdomain_url} " + Fore.GREEN + "is alive")
alive_subdomains.append(subdomain_url)
else:
pass
except Exception:
pass
else:
pass

return general_data['alexa_rank'], general_data['apex_domain'], general_data['hostname'], alive_subdomains, txt_records, a_records_list, mx_records_list, ns_records_list, soa_records_list
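The patched function now returns nine values: three general fields plus the six new collections. A minimal sketch of how a caller might unpack them, assuming the import path matches the file layout above; the caller itself is not part of this commit:

```python
from apis.api_securitytrails import api_securitytrails_check

# Hypothetical caller; the nine-element return shape comes from the
# return statement in the hunk above.
(alexa_rank, apex_domain, hostname,
 alive_subdomains, txt_records,
 a_records, mx_records, ns_records, soa_records) = api_securitytrails_check('example.com')

# Each record list holds dicts built with value.get(..., ''), so fields
# missing from the SecurityTrails response arrive as empty strings
# instead of raising KeyError.
for record in a_records:
    print(record['ip'], record['organization'])
```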
2 changes: 2 additions & 0 deletions apis/api_virustotal.py
@@ -40,10 +40,12 @@ def api_virustotal_check(domain):
print(Fore.GREEN + f"Undetected Samples: {len(result.get('undetected_samples', []))}\n")
print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
conn.close()
return result.get('categories'), len(result.get('detected_urls', [])), len(result.get('detected_samples', [])), len(result.get('undetected_samples', []))
else:
print(Fore.RED + "Failed to get domain report\n")
print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
conn.close()
return 'Got no information from VirusTotal API', 'Got no information from VirusTotal API', 'Got no information from VirusTotal API', 'Got no information from VirusTotal API'
pass
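Both branches now return the same four-element shape, with a sentinel string filling every slot on failure. A hedged sketch of a caller that distinguishes the two cases (caller code is assumed, not from this commit):

```python
from apis.api_virustotal import api_virustotal_check

categories, detected_urls, detected_samples, undetected_samples = api_virustotal_check('example.com')

# On failure every slot holds the same sentinel string, so checking one
# slot is enough to tell the two cases apart.
if detected_urls == 'Got no information from VirusTotal API':
    print('VirusTotal lookup failed; skipping reputation data')
else:
    print(f'{detected_urls} detected URLs, {detected_samples} detected samples')
```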


14 changes: 10 additions & 4 deletions datagather_modules/crawl_processor.py
Expand Up @@ -26,7 +26,7 @@ def whois_gather(short_domain):
logging.info('WHOIS INFO GATHERING: OK')
w = whois.whois(short_domain)
if w.org is None:
- w['org'] = 'n/a'
+ w['org'] = 'Organization name was not extracted'
logging.info('WHOIS INFO GATHERING: OK')
return w
except Exception as e:
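The new placeholder keeps reports readable when WHOIS returns no organization. A minimal sketch of the fallback in isolation, assuming the python-whois package this module appears to use:

```python
import whois

w = whois.whois('example.com')
if w.org is None:
    # Mirrors the patched fallback: a readable phrase instead of 'n/a'.
    w['org'] = 'Organization name was not extracted'
print(w.org)
```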
@@ -110,7 +110,7 @@ def sm_gather(url):
links = [a['href'] for a in soup.find_all('a', href=True)]
categorized_links = {'Facebook': [], 'Twitter': [], 'Instagram': [],
'Telegram': [], 'TikTok': [], 'LinkedIn': [],
- 'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': []}
+ 'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': [], 'X.com': []}

for link in links:
parsed_url = urlparse(link)
@@ -135,6 +135,8 @@ def sm_gather(url):
categorized_links['WeChat'].append(urllib.parse.unquote(link))
elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
categorized_links['Odnoklassniki'].append(urllib.parse.unquote(link))
elif hostname and (hostname == 'x.com' or hostname.endswith('.x.com')):
categorized_links['X.com'].append(urllib.parse.unquote(link))

if not categorized_links['Odnoklassniki']:
categorized_links['Odnoklassniki'].append('Odnoklassniki links were not found')
@@ -156,6 +158,8 @@ def sm_gather(url):
categorized_links['Twitter'].append('Twitter links were not found')
if not categorized_links['Facebook']:
categorized_links['Facebook'].append('Facebook links were not found')
if not categorized_links['X.com']:
categorized_links['X.com'].append('X.com links were not found')

return categorized_links
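The X.com branch reuses the hostname test applied to the other networks: exact match or a dot-prefixed suffix match, which admits subdomains while rejecting look-alike domains. A standalone sketch of that rule (the helper name is illustrative):

```python
from urllib.parse import urlparse

def is_x_link(link):
    # Accepts x.com and any subdomain such as www.x.com,
    # but rejects look-alikes such as notx.com.
    hostname = urlparse(link).hostname
    return bool(hostname) and (hostname == 'x.com' or hostname.endswith('.x.com'))

assert is_x_link('https://www.x.com/example')
assert not is_x_link('https://notx.com/example')
```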

@@ -209,7 +213,7 @@ def domains_reverse_research(subdomains, report_file_type):
subdomain_socials_grouped = list(dict(subdomain_socials_grouped).values())

sd_socials = {'Facebook': [], 'Twitter': [], 'Instagram': [], 'Telegram': [], 'TikTok': [], 'LinkedIn': [],
- 'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': []}
+ 'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': [], 'X.com': []}

for inner_list in subdomain_socials_grouped:
for link in inner_list:
@@ -234,6 +238,8 @@ def domains_reverse_research(subdomains, report_file_type):
sd_socials['WeChat'].append(urllib.parse.unquote(link))
elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
sd_socials['Odnoklassniki'].append(urllib.parse.unquote(link))
elif hostname and (hostname == 'x.com' or hostname.endswith('.x.com')):
sd_socials['X.com'].append(urllib.parse.unquote(link))

sd_socials = {k: list(set(v)) for k, v in sd_socials.items()}

@@ -242,7 +248,7 @@ def domains_reverse_research(subdomains, report_file_type):
if not subdomain_ip:
subdomain_ip = ["No subdomains IP's were found"]

- if report_file_type == 'pdf' or report_file_type == 'html':
+ if report_file_type == 'html':
return subdomain_mails, sd_socials, subdomain_ip
elif report_file_type == 'xlsx':
return subdomain_urls, subdomain_mails, subdomain_ip, sd_socials
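With the 'pdf' alias removed, the return shape depends on exactly two report types, and they differ in both arity and order. A hedged sketch of the dispatch a caller would need (variable names are assumptions, not from this commit):

```python
# Hypothetical caller; subdomains would come from an earlier scan.
result = domains_reverse_research(subdomains, report_file_type)

if report_file_type == 'html':
    subdomain_mails, sd_socials, subdomain_ip = result
elif report_file_type == 'xlsx':
    subdomain_urls, subdomain_mails, subdomain_ip, sd_socials = result
```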
