7 changes: 4 additions & 3 deletions data/APIs/api.conf
@@ -1,12 +1,13 @@
 [VTOTAL]
-api_key =
+api_key = xxxx
 
 [CENSYS]
 api_id =
 secret =
 
 [SHODAN]
-api_key =
+api_key = xxxx
 
 [SECURITYTRAILS]
-api_key =
+api_key = xxxx
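
These sections are plain INI, so the keys can be read with Python's standard configparser. A minimal sketch, assuming the path shown in the diff; the loader name is illustrative, not the project's actual helper:

import configparser

def load_api_keys(path="data/APIs/api.conf"):
    # Parse the INI-style API config and return {section: {key: value}}
    parser = configparser.ConfigParser()
    parser.read(path)
    return {section: dict(parser[section]) for section in parser.sections()}

keys = load_api_keys()
print(keys.get("SHODAN", {}).get("api_key"))  # -> "xxxx" with the placeholder above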

21 changes: 21 additions & 0 deletions data/txt/domains.txt
@@ -17,8 +17,29 @@
 .us
 .uk
 .co.uk
 .in
 .asia
 .eu
 .aero
 .mobi
+.int
+.edu
+.coop
+.io
+.app
+.tech
+.store
+.website
+.online
+.club
+.blog
+.xyz
+.ai
+.dev
+.me
+.gg
+.host
+.cloud
+.inc
+.bot
+.wiki
3 changes: 3 additions & 0 deletions lib/tools/bruter.py
@@ -51,6 +51,9 @@ def nameserver(domain):
     checking = bruter(domain)
     good_dns = []
     print(info + 'Bruteforcing domain extensions and getting DNS records')
+    if checking is None:
+        print(tab + bad + "No domains to check - bruter function failed")
+        return good_dns
     print(tab + warn + f'Total domain extension used: {Y}{len(checking)}{W}')
     for item in checking:
         try:
152 changes: 125 additions & 27 deletions lib/tools/dnslookup.py
@@ -1,45 +1,143 @@
 import random
 from os import environ
+import socket
+import warnings
+import urllib3
 
 import thirdparty.requests as requests
-from thirdparty.dns.resolver import Resolver
+from thirdparty.dns.resolver import Resolver, NXDOMAIN, NoAnswer, NoNameservers, Timeout
 from thirdparty.html_similarity import similarity
+from urllib3.util.retry import Retry
+from requests.adapters import HTTPAdapter
 
-from ..utils.colors import Y, bad, good, info, tab
-from ..utils.settings import config
+from ..utils.colors import Y, bad, good, info, tab, warn
 
+# Suppress only the specific InsecureRequestWarning
+warnings.filterwarnings('ignore', category=urllib3.exceptions.InsecureRequestWarning)
+
+def create_session():
+    session = requests.Session()
+    retry_strategy = Retry(
+        total=3,
+        backoff_factor=0.5,
+        status_forcelist=[500, 502, 503, 504],
+        allowed_methods=frozenset(['GET', 'HEAD', 'OPTIONS'])
+    )
+    adapter = HTTPAdapter(max_retries=retry_strategy)
+    session.mount("http://", adapter)
+    session.mount("https://", adapter)
+    return session
 
 def scan(domain, host, userAgent, randomAgent, header):
+    session = create_session()
     headers = dict(x.replace(' ', '').split(':') for x in header.split(',')) if header is not None else {}
-    (headers.update({'User-agent': random.choice(open("data/txt/random_agents.txt").readlines()).rstrip("\n")})
-     if randomAgent is True else '')
-    headers.update({'User-agent': userAgent}) if userAgent is not None else ''
-    try:
-        print("\n" + Y + "Attempting to track real IP using: %s\n" % host)
-        print(info + "Checking if {0} is similar to {1}".format(host, domain))
-        get_domain = requests.get('http://' + domain, headers=headers, timeout=config['http_timeout_seconds'])
-        get_host = requests.get('http://' + host, headers=headers, timeout=config['http_timeout_seconds'])
-        page_similarity = similarity(get_domain.text, get_host.text)
-        if page_similarity > config['response_similarity_threshold']:
-            print(tab + good + 'HTML content is %d%% structurally similar to: %s'
-                  % (round(100 * page_similarity, 2), domain))
-        else:
-            print(tab + bad + 'Sorry, but HTML content is %d%% structurally similar to: %s'
-                  % (round(100 * page_similarity, 2), domain))
-    except Exception:
-        print(tab + bad + 'Connection cannot be established with: %s' % (host))
+    if randomAgent:
+        with open("data/txt/random_agents.txt") as f:
+            headers.update({'User-agent': random.choice(f.readlines()).strip()})
+    elif userAgent:
+        headers.update({'User-agent': userAgent})
+
+    print("\n" + Y + "Attempting to track real IP using: %s\n" % host)
+    print(info + "Checking if {0} is similar to {1}".format(host, domain))
+
+    try:
+        # Try HTTPS first, then fall back to HTTP
+        for protocol in ['https://', 'http://']:
+            try:
+                domain_url = protocol + domain
+                host_url = protocol + host
+
+                # Use a short timeout and verify=False for both requests
+                get_domain = session.get(domain_url, headers=headers, timeout=3, verify=False)
+                get_host = session.get(host_url, headers=headers, timeout=3, verify=False)
+
+                page_similarity = similarity(get_domain.text, get_host.text)
+                if page_similarity > 0.75:  # 75% similarity threshold
+                    print(tab + good + 'HTML content is %d%% structurally similar to: %s'
+                          % (round(100 * page_similarity, 2), domain))
+                else:
+                    print(tab + bad + 'HTML content is only %d%% structurally similar to: %s'
+                          % (round(100 * page_similarity, 2), domain))
+                return
+            except requests.exceptions.SSLError:
+                print(tab + warn + f'SSL error for {protocol}, trying the next protocol')
+                continue
+            except requests.exceptions.ReadTimeout:
+                print(tab + warn + f'Connection timed out for {protocol}, trying alternative')
+                continue
+            except requests.exceptions.RequestException as e:
+                print(tab + warn + f'Connection failed with {protocol}: {str(e)}')
+                continue
+
+        print(tab + bad + f'Connection cannot be established with: {host} (tried both HTTPS and HTTP)')
+
+    except Exception as e:
+        print(tab + bad + f'Unexpected error while scanning {host}: {str(e)}')
 
 def DNSLookup(domain, host):
     isAndroid = "ANDROID_DATA" in environ
     sys_r = Resolver(filename='/data/data/com.termux/files/usr/etc/resolv.conf') if isAndroid else Resolver()
-    dns = [host]
+    sys_r.timeout = 2   # Reduce timeout to 2 seconds
+    sys_r.lifetime = 4  # Total lookup time
+    sys_r.tries = 2     # Number of retries
+
     try:
-        dream_dns = [item.address for server in dns for item in sys_r.query(server)]
+        # Try to resolve the host first
+        try:
+            dream_dns = []
+            answers = sys_r.query(host, 'A')
+            for rdata in answers:
+                dream_dns.append(rdata.address)
+        except NXDOMAIN:
+            print(tab + bad + f'Domain {host} does not exist')
+            return None
+        except NoAnswer:
+            print(tab + bad + f'No DNS records found for {host}')
+            return None
+        except NoNameservers:
+            print(tab + bad + f'No nameservers available for {host}')
+            return None
+        except Timeout:
+            print(tab + bad + f'DNS lookup timed out for {host}')
+            return None
+        except Exception as e:
+            print(tab + bad + f'DNS resolution failed for {host}: {str(e)}')
+            return None
+
+        if not dream_dns:
+            print(tab + bad + f'No DNS records found for {host}')
+            return None
+
+        # Now try to resolve the domain using the host's nameservers
         dream_r = Resolver()
         dream_r.nameservers = dream_dns
-        answer = dream_r.query(domain, 'A')
-        for A in answer.rrset.items:
-            return A
-    except Exception:
-        pass
+        dream_r.timeout = 2   # Reduce timeout
+        dream_r.lifetime = 4  # Total lookup time
+        dream_r.tries = 2     # Number of retries
+
+        try:
+            answer = dream_r.query(domain, 'A')
+            if answer and answer.rrset and answer.rrset.items:
+                return answer.rrset.items[0]
+            else:
+                print(tab + bad + f'No A records found for {domain} using {host} nameservers')
+                return None
+        except NXDOMAIN:
+            print(tab + bad + f'Domain {domain} not found using {host} nameservers')
+            return None
+        except NoAnswer:
+            print(tab + bad + f'No answer from {host} nameservers for {domain}')
+            return None
+        except NoNameservers:
+            print(tab + bad + f'No working nameservers found from {host} for {domain}')
+            return None
+        except Timeout:
+            print(tab + bad + f'DNS lookup timed out using {host} nameservers')
+            return None
+        except Exception as e:
+            print(tab + bad + f'Failed to resolve {domain} using {host} nameservers: {str(e)}')
+            return None
+
+    except Exception as e:
+        print(tab + bad + f'Unexpected error in DNS lookup: {str(e)}')
+        return None
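
The DNSLookup flow above is a two-step resolution: the system resolver first fetches the candidate host's A records, then those addresses are used as nameservers to resolve the target domain. A standalone sketch of the same idea, assuming a current dnspython install (the stock package uses resolve() where the bundled thirdparty copy still uses the older query()); the function name is illustrative:

import dns.resolver

def lookup_via_host(domain, host):
    # Step 1: resolve the candidate host's own A records with the system resolver
    system = dns.resolver.Resolver()
    system.timeout = 2   # per-server timeout, as in the diff
    system.lifetime = 4  # total time budget for the lookup
    host_ips = [r.address for r in system.resolve(host, "A")]

    # Step 2: query those addresses directly for the target domain's A record
    custom = dns.resolver.Resolver(configure=False)  # skip /etc/resolv.conf
    custom.nameservers = host_ips
    custom.timeout = 2
    custom.lifetime = 4
    return [r.address for r in custom.resolve(domain, "A")]
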
74 changes: 58 additions & 16 deletions lib/tools/ispcheck.py
@@ -1,26 +1,59 @@
 import re
 import socket
 import time
+from urllib3.util.retry import Retry
+from requests.adapters import HTTPAdapter
 
-from lib.utils.colors import R, W, Y, tab, warn
+from lib.utils.colors import R, W, Y, tab, warn, bad, good
 from thirdparty import requests
 from thirdparty.bs4 import BeautifulSoup
 
 cloudlist = ['sucuri',
              'cloudflare',
              'incapsula']
 
+def create_session():
+    session = requests.Session()
+    retry_strategy = Retry(
+        total=3,                                # number of retries
+        backoff_factor=0.5,                     # wait 0.5, 1, 2, 4... seconds between retries
+        status_forcelist=[500, 502, 503, 504]   # retry on these status codes
+    )
+    adapter = HTTPAdapter(max_retries=retry_strategy)
+    session.mount("http://", adapter)
+    session.mount("https://", adapter)
+    return session
+
 def ISPCheck(domain):
     reg = re.compile(rf'(?i){"|".join(cloudlist)}')
+    session = create_session()
+
+    # Try to resolve the domain first
     try:
-        header = requests.get('http://' + domain, timeout=1).headers['server'].lower()
-        if reg.search(header):
-            return f' is protected by {Y}{header.capitalize()}{W}'
-        return None
-    except Exception:
-        req = requests.get(f'https://check-host.net/ip-info?host={domain}').text
-        UrlHTML = BeautifulSoup(req, "lxml")
-        print(f'{tab*2}{warn}Something has gone wrong. Retrying connection')
+        socket.gethostbyname(domain)
+    except socket.gaierror:
+        print(f'{tab*2}{warn}DNS resolution failed for {domain}')
+        return f' [{R}DNS resolution failed{W}]'
+
+    # Try direct connection first
+    try:
+        response = session.get('http://' + domain, timeout=3)
+        if 'server' in response.headers:
+            header = response.headers['server'].lower()
+            if reg.search(header):
+                return f' is protected by {Y}{header.capitalize()}{W}'
+            return None
+    except (requests.exceptions.RequestException, socket.error):
+        pass  # Continue to fallback method
+
+    # Fallback to check-host.net
+    try:
+        print(f'{tab*2}{warn}Direct connection failed, trying check-host.net...')
+        response = session.get(f'https://check-host.net/ip-info?host={domain}', timeout=5)
+        if response.status_code != 200:
+            return f' [{R}cannot retrieve information{W}]'
+
+        UrlHTML = BeautifulSoup(response.text, "lxml")
         if UrlHTML.findAll('div', {'class': 'error'}):
             return f' [{R}cannot retrieve information{W}]'
 
@@ -30,13 +63,22 @@ def ISPCheck(domain):
             if reg.search(org):
                 return f' is protected by {Y}{reg.match(org).group().capitalize()}{W}'
 
+        # Check ports as last resort
         ports = [80, 443]
 
         for port in ports:
-            checker = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            checker.settimeout(0.5)
-            if checker.connect_ex((domain, port)) == 0:
-                checker.close()
-                return
-            checker.close()
-        return f' [{R}http{W}/{R}https{W}] ports filtered or closed'
+            try:
+                checker = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                checker.settimeout(1)
+                if checker.connect_ex((domain, port)) == 0:
+                    checker.close()
+                    print(f'{tab*2}{good}Port {port} is open')
+                    return None
+                checker.close()
+            except socket.error:
+                continue
+
+        return f' [{R}http{W}/{R}https{W}] ports filtered or closed'
+
+    except Exception as e:
+        print(f'{tab*2}{bad}Error checking domain: {str(e)}')
+        return f' [{R}cannot retrieve information{W}]'
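
The last-resort probe relies on socket.connect_ex, which returns 0 on a successful TCP handshake and an errno value otherwise, so refused or filtered ports never raise. A self-contained sketch of that check (ports mirror the diff; the helper name and hostname are illustrative):

import socket

def port_open(host, port, timeout=1.0):
    # connect_ex reports refused/filtered ports via its return value;
    # DNS failures can still raise, hence the except clause
    checker = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    checker.settimeout(timeout)
    try:
        return checker.connect_ex((host, port)) == 0
    except socket.error:
        return False
    finally:
        checker.close()

for port in (80, 443):
    print(port, port_open("example.com", port))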