
Passive DNS

The Passive DNS API retrieves historical DNS data for a given domain from FullHunt's internal passive DNS database, providing insight into DNS resolution history and infrastructure changes over time.

Domain Lookup

Retrieve historical passive DNS data for a specified domain.

HTTP Request

GET https://fullhunt.io/api/v1/nexus/passive-dns/lookup

Query Parameters

Parameter   Required   Type     Description
domain      Yes        string   Domain name to search for historical DNS data

Example Request

curl "https://fullhunt.io/api/v1/nexus/passive-dns/lookup?domain=kaspersky.com" \
-H "X-API-KEY: xxxx-xxxx-xxxx-xxxxxx"

Example Response

{
  "count": 7686,
  "data": [
    "activate.activation-v2.kaspersky.com",
    "activation-v2.geo.kaspersky.com",
    "activation-v2.kaspersky.com",
    "aes.geo.kaspersky.com",
    "americas.activation-v2.kaspersky.com",
    "americas.activate.activation-v2.kaspersky.com",
    "americas.refresh-bkg.activation-v2.kaspersky.com",
    "ksn-pixel.geoksn.kaspersky.com"
  ],
  "error": "",
  "status": 200
}

Response Fields

Field     Type      Description
count     integer   Total number of historical DNS records found
data      array     Array of hostnames/subdomains found in passive DNS data
error     string    Error message (empty on success)
status    integer   HTTP status code
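
If you build on this endpoint, it is worth checking both the HTTP status and the error field in the body. A minimal sketch using only the documented fields (the timeout value and the choice to raise RuntimeError are arbitrary client-side decisions, not part of the API):

import requests

def passive_dns_lookup(api_key, domain):
    """Query the passive DNS lookup endpoint and surface API-level errors."""
    response = requests.get(
        "https://fullhunt.io/api/v1/nexus/passive-dns/lookup",
        headers={"X-API-KEY": api_key},
        params={"domain": domain},
        timeout=30,  # arbitrary client-side timeout
    )
    response.raise_for_status()  # non-2xx HTTP responses
    payload = response.json()
    if payload.get("error"):  # API-level error reported in the body
        raise RuntimeError(f"Passive DNS lookup failed: {payload['error']}")
    return payload

data = passive_dns_lookup("your-api-key-here", "kaspersky.com")
print(f"{data['count']} historical records")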

Integration Example

import requests
import json
import re
from datetime import datetime
from collections import defaultdict

class PassiveDNSAnalyzer:
    def __init__(self, api_key):
        self.api_key = api_key
        self.headers = {"X-API-KEY": api_key}
        self.base_url = "https://fullhunt.io/api/v1/nexus/passive-dns"

    def lookup_domain(self, domain):
        """Get passive DNS data for a domain."""
        url = f"{self.base_url}/lookup"
        params = {"domain": domain}

        response = requests.get(url, headers=self.headers, params=params)

        if response.status_code == 200:
            return response.json()
        else:
            print(f"Error looking up domain {domain}: {response.status_code}")
            return None

    def analyze_subdomain_patterns(self, domain):
        """Analyze subdomain patterns from passive DNS data."""
        data = self.lookup_domain(domain)

        if not data or not data.get('data'):
            print(f"No passive DNS data found for {domain}")
            return None

        subdomains = data['data']

        print(f"🔍 Passive DNS Analysis for {domain}")
        print("=" * 50)
        print(f"Total historical DNS records: {data.get('count', 0)}")
        print(f"Unique subdomains/hosts: {len(subdomains)}")

        # Analyze naming patterns (hyphenation, nesting depth, digits)
        naming_patterns = self._extract_subdomain_patterns(subdomains, domain)

        print("\n📊 Subdomain Pattern Analysis:")

        # Service patterns
        service_keywords = ['api', 'www', 'mail', 'ftp', 'admin', 'test', 'dev', 'staging', 'prod']
        service_patterns = {keyword: 0 for keyword in service_keywords}

        for subdomain in subdomains:
            subdomain_lower = subdomain.lower()
            for keyword in service_keywords:
                if keyword in subdomain_lower:
                    service_patterns[keyword] += 1

        print("\n🔧 Service Patterns:")
        for service, count in sorted(service_patterns.items(), key=lambda x: x[1], reverse=True):
            if count > 0:
                print(f"  {service}: {count} subdomains")

        # Geographic patterns
        geo_patterns = self._extract_geographic_patterns(subdomains)
        if geo_patterns:
            print("\n🌍 Geographic Patterns:")
            for geo, count in sorted(geo_patterns.items(), key=lambda x: x[1], reverse=True)[:10]:
                print(f"  {geo}: {count} subdomains")

        # Numeric patterns
        numeric_patterns = self._extract_numeric_patterns(subdomains)
        print("\n🔢 Numeric Patterns:")
        print(f"  Subdomains with numbers: {numeric_patterns['with_numbers']}")
        print(f"  Fully numeric subdomains: {numeric_patterns['fully_numeric']}")

        return {
            'total_records': data.get('count', 0),
            'unique_subdomains': len(subdomains),
            'naming_patterns': naming_patterns,
            'service_patterns': service_patterns,
            'geographic_patterns': geo_patterns,
            'numeric_patterns': numeric_patterns,
            'all_subdomains': subdomains
        }

    def find_infrastructure_evolution(self, domain):
        """Analyze infrastructure evolution based on subdomain naming."""
        data = self.lookup_domain(domain)

        if not data or not data.get('data'):
            return None

        subdomains = data['data']

        print(f"🏗️ Infrastructure Evolution Analysis for {domain}")
        print("=" * 60)

        # Group by potential infrastructure types
        infrastructure_groups = {
            'cdn': [],
            'api': [],
            'database': [],
            'cache': [],
            'load_balancer': [],
            'monitoring': [],
            'testing': [],
            'production': [],
            'development': [],
            'staging': []
        }

        keywords = {
            'cdn': ['cdn', 'edge', 'static', 'assets', 'cache'],
            'api': ['api', 'rest', 'graphql', 'webhook', 'service'],
            'database': ['db', 'database', 'mysql', 'postgres', 'mongo', 'redis'],
            'cache': ['cache', 'redis', 'memcache', 'varnish'],
            'load_balancer': ['lb', 'balancer', 'proxy', 'nginx', 'haproxy'],
            'monitoring': ['monitor', 'metrics', 'logs', 'analytics', 'grafana'],
            'testing': ['test', 'qa', 'testing', 'sandbox'],
            'production': ['prod', 'production', 'live'],
            'development': ['dev', 'develop', 'development'],
            'staging': ['stage', 'staging', 'uat']
        }

        for subdomain in subdomains:
            subdomain_lower = subdomain.lower()

            for category, keyword_list in keywords.items():
                for keyword in keyword_list:
                    if keyword in subdomain_lower:
                        infrastructure_groups[category].append(subdomain)
                        break

        print("🏢 Infrastructure Categories:")
        for category, hosts in infrastructure_groups.items():
            if hosts:
                unique_hosts = sorted(set(hosts))
                print(f"\n  {category.replace('_', ' ').title()} ({len(unique_hosts)} hosts):")
                for host in unique_hosts[:5]:  # Show the first 5 unique hosts
                    print(f"    • {host}")
                if len(unique_hosts) > 5:
                    print(f"    ... and {len(unique_hosts) - 5} more")

        return infrastructure_groups

    def compare_domains(self, domain_list):
        """Compare passive DNS data across multiple domains."""
        print(f"🔄 Passive DNS Comparison Across {len(domain_list)} Domains")
        print("=" * 60)

        domain_data = {}

        for domain in domain_list:
            print(f"\nAnalyzing {domain}...")
            data = self.lookup_domain(domain)

            if data and data.get('data'):
                # Compare subdomain labels (the host minus the apex domain) so that
                # e.g. api.example.com and api.example.org count as shared infrastructure;
                # full hostnames under different apexes would never intersect.
                labels = set()
                for host in data['data']:
                    if host.endswith(f".{domain}"):
                        labels.add(host[:-(len(domain) + 1)])
                domain_data[domain] = {
                    'count': data.get('count', 0),
                    'subdomains': labels
                }
                print(f"  Found {len(labels)} unique subdomain labels")
            else:
                print("  No data found")
                domain_data[domain] = {'count': 0, 'subdomains': set()}

        # Find common subdomains
        if len(domain_data) > 1:
            all_subdomain_sets = [data['subdomains'] for data in domain_data.values()]

            # Intersection across all domains
            common_subdomains = set.intersection(*all_subdomain_sets)

            print("\n🤝 Shared Infrastructure:")
            print(f"  Common subdomains across all domains: {len(common_subdomains)}")

            if common_subdomains:
                print("  Examples:")
                for subdomain in sorted(common_subdomains)[:10]:
                    print(f"    • {subdomain}")

            # Find unique subdomains per domain
            print("\n🎯 Unique Infrastructure:")
            for domain, data in domain_data.items():
                other_subdomains = set()
                for other_domain, other_data in domain_data.items():
                    if other_domain != domain:
                        other_subdomains.update(other_data['subdomains'])

                unique_subdomains = data['subdomains'] - other_subdomains
                print(f"  {domain}: {len(unique_subdomains)} unique subdomains")

        return domain_data

    def _extract_subdomain_patterns(self, subdomains, base_domain):
        """Extract naming patterns from subdomain names."""
        patterns = defaultdict(int)

        for subdomain in subdomains:
            # Remove the base domain to isolate the subdomain part
            if subdomain.endswith(f".{base_domain}"):
                subdomain_part = subdomain[:-len(f".{base_domain}")]

                # Extract simple naming patterns
                if '-' in subdomain_part:
                    patterns['hyphenated'] += 1
                if '.' in subdomain_part:
                    patterns['multi_level'] += 1
                if any(char.isdigit() for char in subdomain_part):
                    patterns['with_numbers'] += 1

        return dict(patterns)

    def _extract_geographic_patterns(self, subdomains):
        """Extract geographic patterns from subdomains."""
        geo_keywords = [
            'us', 'eu', 'asia', 'na', 'emea', 'apac',
            'east', 'west', 'north', 'south',
            'america', 'europe', 'pacific',
            'london', 'paris', 'tokyo', 'sydney', 'singapore',
            'ny', 'sf', 'la', 'chicago'
        ]

        geo_patterns = defaultdict(int)

        for subdomain in subdomains:
            # Match on dot/hyphen-delimited labels so short tokens like 'us'
            # don't match inside unrelated words (e.g. 'status')
            tokens = set(re.split(r'[.-]', subdomain.lower()))
            for geo in geo_keywords:
                if geo in tokens:
                    geo_patterns[geo] += 1

        return dict(geo_patterns)

    def _extract_numeric_patterns(self, subdomains):
        """Extract numeric patterns from subdomains."""
        with_numbers = 0
        fully_numeric = 0

        for subdomain in subdomains:
            subdomain_parts = subdomain.split('.')
            main_part = subdomain_parts[0] if subdomain_parts else subdomain

            if any(char.isdigit() for char in main_part):
                with_numbers += 1

            if main_part.isdigit():
                fully_numeric += 1

        return {
            'with_numbers': with_numbers,
            'fully_numeric': fully_numeric
        }

    def export_analysis_report(self, domain, filename=None):
        """Export a comprehensive analysis to a JSON file."""
        analysis = self.analyze_subdomain_patterns(domain)
        infrastructure = self.find_infrastructure_evolution(domain)

        if not analysis:
            print("No data to export")
            return False

        report = {
            'timestamp': datetime.now().isoformat(),
            'domain': domain,
            'subdomain_analysis': analysis,
            'infrastructure_evolution': {k: list(v) for k, v in infrastructure.items()} if infrastructure else {},
            'summary': {
                'total_records': analysis.get('total_records', 0),
                'unique_subdomains': analysis.get('unique_subdomains', 0),
                'analysis_date': datetime.now().strftime('%Y-%m-%d')
            }
        }

        if filename is None:
            filename = f"passive_dns_analysis_{domain}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"

        try:
            with open(filename, 'w') as f:
                json.dump(report, f, indent=2)

            print(f"📁 Analysis report exported to: {filename}")
            return True

        except Exception as e:
            print(f"❌ Failed to export report: {e}")
            return False

# Usage Examples
api_key = "your-api-key-here"
analyzer = PassiveDNSAnalyzer(api_key)

# Basic domain lookup
domain = "kaspersky.com"
data = analyzer.lookup_domain(domain)
if data:
    print(f"Found {data.get('count', 0)} historical DNS records for {domain}")

# Detailed analysis
analysis = analyzer.analyze_subdomain_patterns(domain)

# Infrastructure evolution analysis
infrastructure = analyzer.find_infrastructure_evolution(domain)

# Compare multiple domains
domains = ["kaspersky.com", "kaspersky.org", "kasperskylab.com"]
comparison = analyzer.compare_domains(domains)

# Export a comprehensive report
analyzer.export_analysis_report(domain)

Advanced Analysis Examples

import json
import os
from datetime import datetime

import networkx as nx

# Assumes the PassiveDNSAnalyzer class from the integration example above.

def timeline_analysis(api_key, domain_list, output_dir="passive_dns_reports"):
    """Perform timeline analysis of multiple domains."""
    analyzer = PassiveDNSAnalyzer(api_key)

    print(f"📈 Timeline Analysis for {len(domain_list)} domains")
    print("=" * 50)

    os.makedirs(output_dir, exist_ok=True)

    timeline_data = {}

    for domain in domain_list:
        print(f"\nProcessing {domain}...")

        data = analyzer.lookup_domain(domain)

        if data and data.get('data'):
            timeline_data[domain] = {
                'timestamp': datetime.now().isoformat(),
                'total_records': data.get('count', 0),
                'subdomains': data['data'],
                'analysis': analyzer.analyze_subdomain_patterns(domain)
            }

            print(f"  Collected {len(data['data'])} subdomains")
        else:
            print(f"  No data found for {domain}")
            timeline_data[domain] = {
                'timestamp': datetime.now().isoformat(),
                'total_records': 0,
                'subdomains': [],
                'analysis': None
            }

    # Generate the timeline report
    timeline_report = {
        'analysis_date': datetime.now().isoformat(),
        'domains_analyzed': len(domain_list),
        'domain_data': timeline_data,
        'summary': {
            'total_subdomains': sum(len(d['subdomains']) for d in timeline_data.values()),
            'total_records': sum(d['total_records'] for d in timeline_data.values()),
            'domains_with_data': len([d for d in timeline_data.values() if d['total_records'] > 0])
        }
    }

    # Save the timeline report
    timeline_filename = f"{output_dir}/timeline_analysis_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
    with open(timeline_filename, 'w') as f:
        json.dump(timeline_report, f, indent=2)

    print("\n📊 Timeline Analysis Summary:")
    print(f"  Total subdomains discovered: {timeline_report['summary']['total_subdomains']}")
    print(f"  Total DNS records: {timeline_report['summary']['total_records']}")
    print(f"  Domains with data: {timeline_report['summary']['domains_with_data']}/{len(domain_list)}")
    print(f"  Report saved: {timeline_filename}")

    return timeline_report

def infrastructure_mapping(api_key, domain):
    """Create an infrastructure map based on passive DNS data."""
    analyzer = PassiveDNSAnalyzer(api_key)

    print(f"🗺️ Creating infrastructure map for {domain}")

    data = analyzer.lookup_domain(domain)

    if not data or not data.get('data'):
        print("No data available for mapping")
        return None

    subdomains = data['data']

    # Create a network graph with the root domain as the central node
    G = nx.Graph()
    G.add_node(domain, node_type='root_domain', size=100)

    # Categorize subdomains by keyword
    categories = {
        'api': ['api', 'rest', 'graphql'],
        'web': ['www', 'web', 'portal'],
        'mail': ['mail', 'smtp', 'imap', 'pop'],
        'cdn': ['cdn', 'static', 'assets'],
        'database': ['db', 'database', 'data'],
        'monitoring': ['monitor', 'metrics', 'logs'],
        'development': ['dev', 'test', 'staging'],
        'production': ['prod', 'live'],
        'geographic': ['us', 'eu', 'asia', 'americas']
    }

    # Add subdomain nodes with categories
    for subdomain in subdomains:
        subdomain_lower = subdomain.lower()

        # Determine the category from the first matching keyword
        node_category = 'other'
        for category, keywords in categories.items():
            if any(keyword in subdomain_lower for keyword in keywords):
                node_category = category
                break

        G.add_node(subdomain, node_type=node_category, size=20)
        G.add_edge(domain, subdomain)

    # Create the infrastructure summary
    infrastructure_summary = {
        'total_nodes': G.number_of_nodes(),
        'total_edges': G.number_of_edges(),
        'categories': {},
        'graph_data': {
            'nodes': list(G.nodes(data=True)),
            'edges': list(G.edges())
        }
    }

    # Count nodes by category
    for node, attrs in G.nodes(data=True):
        category = attrs.get('node_type', 'other')
        infrastructure_summary['categories'][category] = infrastructure_summary['categories'].get(category, 0) + 1

    print("🏗️ Infrastructure Map Summary:")
    print(f"  Total nodes: {infrastructure_summary['total_nodes']}")
    print("  Node categories:")
    for category, count in sorted(infrastructure_summary['categories'].items()):
        print(f"    {category}: {count}")

    return infrastructure_summary

# Usage for advanced analysis
api_key = "your-api-key-here"

# Timeline analysis across multiple domains
company_domains = ["acme.com", "acme.org", "acmecorp.com"]
timeline_report = timeline_analysis(api_key, company_domains)

# Infrastructure mapping
infrastructure_map = infrastructure_mapping(api_key, "acme.com")

Use Cases

Historical Investigation

  • Track infrastructure changes over time
  • Investigate suspicious domain activity
  • Analyze domain usage patterns and evolution

Attack Surface Discovery

  • Discover historical subdomains that may still be active (see the validation sketch after this list)
  • Identify forgotten or legacy infrastructure
  • Map the complete organizational digital footprint
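
A common follow-up is to check which historical hostnames still resolve today. A minimal sketch using only the standard library (the worker count is an arbitrary choice, and analyzer is the PassiveDNSAnalyzer instance from the integration example above):

import socket
from concurrent.futures import ThreadPoolExecutor

def still_resolves(hostname):
    """Return (hostname, bool) depending on whether the name resolves today."""
    try:
        socket.getaddrinfo(hostname, None)
        return hostname, True
    except socket.gaierror:
        return hostname, False

def validate_historical_hosts(hostnames, max_workers=20):
    """Split passive DNS results into still-active and historical-only hostnames."""
    active, inactive = [], []
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        for hostname, alive in pool.map(still_resolves, hostnames):
            (active if alive else inactive).append(hostname)
    return active, inactive

data = analyzer.lookup_domain("kaspersky.com")
if data and data.get('data'):
    active, inactive = validate_historical_hosts(data['data'])
    print(f"Still resolving: {len(active)}, historical only: {len(inactive)}")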

Threat Intelligence

  • Track threat actor infrastructure evolution
  • Investigate domain relationships and patterns
  • Correlate historical DNS data with security incidents (see the IOC-matching sketch after this list)
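
Correlation against an incident IOC list can be as simple as a set intersection over normalized hostnames. A minimal sketch (analyzer is the PassiveDNSAnalyzer instance from the integration example; the IOC list here is a made-up placeholder):

def correlate_with_iocs(analyzer, domain, ioc_hostnames):
    """Return passive DNS hostnames for the domain that appear in an IOC list."""
    data = analyzer.lookup_domain(domain)
    if not data or not data.get('data'):
        return []
    observed = {h.lower().rstrip('.') for h in data['data']}
    iocs = {h.lower().rstrip('.') for h in ioc_hostnames}
    return sorted(observed & iocs)

# Hypothetical IOC list taken from an incident report
matches = correlate_with_iocs(analyzer, "kaspersky.com",
                              ["activation-v2.kaspersky.com", "evil.example.net"])
print(f"IOC overlap: {matches}")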

Digital Forensics

  • Reconstruct historical network infrastructure
  • Investigate timeline of infrastructure changes
  • Support incident response with historical context

Security Research

  • Study naming conventions and patterns
  • Analyze organizational infrastructure evolution
  • Research DNS infrastructure trends

Best Practices

Data Analysis

  1. Pattern Recognition: Look for naming patterns that reveal infrastructure purpose
  2. Timeline Analysis: Compare current active infrastructure with historical data
  3. Categorization: Group subdomains by function, environment, or geography
  4. Validation: Cross-reference findings with current DNS resolution

Investigation Workflow

  1. Baseline Establishment: Start with known domains to establish patterns (the sketch after this list chains steps 1–4)
  2. Expansion: Use historical data to discover additional infrastructure
  3. Validation: Verify whether historical subdomains are still active
  4. Documentation: Maintain records of discoveries and their significance
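
A sketch of how these four steps might chain together, reusing the PassiveDNSAnalyzer class and the validate_historical_hosts helper sketched earlier (both come from this page's examples, not from a packaged API):

def investigate(analyzer, seed_domain):
    """Baseline, expansion, validation, documentation for one seed domain."""
    # 1. Baseline: pull historical data for a known domain
    baseline = analyzer.lookup_domain(seed_domain)
    hostnames = baseline.get('data', []) if baseline else []

    # 2. Expansion: deduplicate candidate infrastructure from the history
    candidates = sorted(set(hostnames))

    # 3. Validation: check which candidates still resolve today
    active, inactive = validate_historical_hosts(candidates)

    # 4. Documentation: return a record of the findings
    return {'domain': seed_domain,
            'candidates': len(candidates),
            'still_active': active,
            'historical_only': inactive}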

Security Considerations

  1. Privacy: Respect privacy implications of historical DNS data
  2. Context: Consider that historical data may not reflect current infrastructure
  3. Validation: Always validate findings with current data sources
  4. Legal Compliance: Ensure investigations comply with legal requirements

Integration Tips

  1. Asset Discovery: Feed historical data into asset management systems
  2. Monitoring: Set up monitoring for historically active but currently inactive infrastructure (see the snapshot-diff sketch below)
  3. Threat Intelligence: Combine with other data sources for comprehensive analysis
  4. Incident Response: Use historical context to support security investigations
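
One lightweight way to implement the monitoring tip is to snapshot each lookup to disk and diff it against the previous snapshot. A minimal sketch (the snapshot file layout is an assumption, not part of the API; analyzer is the PassiveDNSAnalyzer instance from the integration example):

import json
import os
from datetime import datetime

def snapshot_and_diff(analyzer, domain, state_dir="pdns_snapshots"):
    """Persist today's lookup and report hostnames added or removed since the last run."""
    os.makedirs(state_dir, exist_ok=True)
    state_file = os.path.join(state_dir, f"{domain}.json")

    data = analyzer.lookup_domain(domain)
    current = set(data['data']) if data and data.get('data') else set()

    previous = set()
    if os.path.exists(state_file):
        with open(state_file) as f:
            previous = set(json.load(f)['hostnames'])

    added, removed = current - previous, previous - current

    # Persist the new snapshot for the next run
    with open(state_file, 'w') as f:
        json.dump({'timestamp': datetime.now().isoformat(),
                   'hostnames': sorted(current)}, f, indent=2)

    if added:
        print(f"[{domain}] new hostnames since last snapshot: {sorted(added)[:10]}")
    if removed:
        print(f"[{domain}] hostnames no longer reported: {sorted(removed)[:10]}")
    return added, removed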