⚠️ Disclaimer: The tools and techniques discussed in this blog are meant only for educational, ethical OSINT and vulnerability-research purposes. The author is not responsible for any misuse!
Features
- Enter complete organization name
- It will retrieve all available ASNs
- For each ASN, collect all IPv4 and IPv6 subnets/ranges
- Passive deep recon
Tool Demo Execution

How can it be helpful for bug bounty hunters / red teamers / pentesters?
- Increase the attack surface
- Possibility to find more hidden endpoints / old endpoints / forgotten endpoints
- Highly useful in your existing vhost discovery workflow to find what others might have missed.
⛏️ Black IP Python Script
import requests
from bs4 import BeautifulSoup
import time
from datetime import datetime
import re
def get_data():
    """Interactively enumerate an organization's ASNs and announced prefixes.

    Prompts for an organization name on stdin, queries bgp.he.net for ASNs
    whose organization column matches that name exactly, then fetches the
    IPv4/IPv6 prefixes announced by each ASN.

    Side effects:
        - Network requests to https://bgp.he.net (rate-limited, 2 s apart).
        - Writes ``ASN_<timestamp>.txt`` and ``IPs_<timestamp>.txt`` in the
          current working directory.

    Returns:
        None. All results are reported via stdout and the output files.
    """
    org_name = input("Enter the Organization Name (e.g., National Aeronautics and Space Administration): ").strip()
    if not org_name:
        print("Organization name cannot be empty.")
        return

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }
    search_url = f"https://bgp.he.net/search?search%5Bsearch%5D={org_name.replace(' ', '+')}&commit=Search"
    try:
        # Timeout so a stalled connection cannot hang the script forever.
        response = requests.get(search_url, headers=headers, timeout=15)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"Error connecting to BGP.he.net: {e}")
        return

    soup = BeautifulSoup(response.text, 'html.parser')
    asn_list = []
    for row in soup.find_all('tr'):
        cols = row.find_all('td')
        if len(cols) >= 3:
            asn_text = cols[0].get_text(strip=True)
            org_text = cols[2].get_text(strip=True)
            # Exact organization-name match only, to avoid pulling in
            # unrelated ASNs from partial search hits.
            if org_text == org_name and asn_text.startswith('AS'):
                asn_list.append(asn_text)

    if not asn_list:
        print(f"No ASNs found for exact match: {org_name}")
        return
    print(f"Found ASNs: {', '.join(asn_list)}")

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    asn_filename = f"ASN_{timestamp}.txt"
    with open(asn_filename, "w", encoding="utf-8") as f:
        f.write("\n".join(asn_list))
    print(f"ASN list saved to {asn_filename}")

    all_prefixes = []
    seen = set()  # O(1) duplicate checks instead of O(n) list scans
    for asn in asn_list:
        print(f"Fetching IPs for {asn}...")
        # NOTE: the '#_prefixes' fragment is never sent to the server; it is
        # kept only for parity with the browser URL.
        prefix_url = f"https://bgp.he.net/{asn}#_prefixes"
        try:
            time.sleep(2)  # be polite: rate-limit requests to bgp.he.net
            res = requests.get(prefix_url, headers=headers, timeout=15)
            res.raise_for_status()
            p_soup = BeautifulSoup(res.text, 'html.parser')
            # Prefix links on the ASN page all point at /net/<cidr>.
            links = p_soup.find_all('a', href=re.compile(r'^/net/'))
            for link in links:
                prefix = link.get_text(strip=True)
                if prefix not in seen:
                    seen.add(prefix)
                    all_prefixes.append(prefix)
        except Exception as e:
            # Best effort per ASN: one failure should not abort the rest.
            print(f"Could not retrieve prefixes for {asn}: {e}")

    if all_prefixes:
        prefix_filename = f"IPs_{timestamp}.txt"
        with open(prefix_filename, "w", encoding="utf-8") as f:
            f.write("\n".join(all_prefixes))
        print(f"Successfully saved {len(all_prefixes)} unique prefixes to {prefix_filename}")
    else:
        print("No prefixes found.")
def print_banner():
    """Print the BLACK IP ASCII-art banner, colored red via ANSI escapes."""
    red, reset = "\033[91m", "\033[0m"
    art = fr"""{red}
 ______________________________________________________________
|  ___  _      _    ____ _  __    _  ____  |
| | _ )| |    /_\  / ___|| |/ /  (_)| _ \ |
| | _ \| |__ / _ \ (__  | <   | || |_) | |
| |___/|____|/_/ \_\____||_|\_\ |_|| __/ |
|  |_|  |
| Network Infrastructure Intelligence v1.0 |
|______________________________________________________________|
 Script by Legion Hunter
{reset}"""
    print(art)
# Script entry point: show the banner, then run the interactive recon flow.
if __name__ == "__main__":
    print_banner()
    get_data()

# Good luck in your next VAPT/Bug Bounty/VDP engagement 👌 Customize and add
# more sources as per your workflow :)
