by x32x01
This is a very simple Python script that runs a list of Google dorks to look for pages that may be prone to SQL injection flaws, and these dorks still turn up plenty of results.
And as always, the script is open for everyone to improve and develop.
Python:
# -*- coding: utf-8 -*-
import os
import time
import json
import requests
from urllib.parse import quote_plus
from typing import List
import logging
# Logging setup
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# List of dorks (cleaned and updated)
DORKS = [
    "inurl:/list_blogs.php?sort_mode=",
    "inurl:eventdetails.php id=",
    "inurl:commodities.php id=",
    "inurl:recipe-view.php?id=",
    "inurl:product.php?mid=",
    "inurl:view_ad.php?id=",
    "inurl:imprimir.php?id=",
    "inurl:prodotti.php?id=",
    "inurl:index.cgi?aktion=shopview",
    "inurl:default.php?id=",
    "inurl:/*.php?id=",
    "inurl:articles.php?id=",
    "inurl:Content.asp?id=",
    "site:*.edu inurl:login",
    "inurl:php?id= filetype:php",
    "inurl:view.php?id="
]

class GoogleDorkScanner:
    def __init__(self, api_key: str = None, output_file: str = "dork_results.txt"):
        # Use SerpApi (recommended) or leave None to fall back to the free alternative
        self.api_key = api_key or os.getenv("SERPAPI_KEY")
        self.output_file = output_file
        self.all_results = set()  # To avoid duplicates

    def search_serpapi(self, query: str, num: int = 10) -> List[str]:
        if not self.api_key:
            logger.error("SerpApi key not found! Falling back to SearXNG instance.")
            return self.search_searxng(query, num)
        params = {
            "engine": "google",
            "q": query,
            "num": num,
            "api_key": self.api_key,
            "gl": "us",
            "hl": "en"
        }
        try:
            response = requests.get("https://serpapi.com/search", params=params, timeout=30)
            response.raise_for_status()
            data = response.json()
            results = []
            for result in data.get("organic_results", []):
                url = result.get("link")
                if url and url not in self.all_results:
                    self.all_results.add(url)
                    results.append(url)
                    logger.info(f"Found: {url}")
            return results
        except Exception as e:
            logger.error(f"SerpApi error: {e}")
            return self.search_searxng(query, num)

    def search_searxng(self, query: str, num: int = 10) -> List[str]:
        # Public SearXNG instances (may get rate-limited)
        instances = [
            "https://searx.be",
            "https://search.disroot.org",
            "https://searx.tuxcloud.net",
            "https://searxng.site"
        ]
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'}
        for instance in instances:
            try:
                url = f"{instance}/search"
                payload = {
                    'q': query,
                    'format': 'json',
                    'pageno': 1
                }
                r = requests.post(url, data=payload, headers=headers, timeout=15)
                if r.status_code == 200:
                    data = r.json()
                    results = []
                    for res in data.get("results", [])[:num]:
                        link = res.get("url")
                        if link and link not in self.all_results:
                            self.all_results.add(link)
                            results.append(link)
                            logger.info(f"Found: {link}")
                    if results:
                        return results
            except Exception as e:
                logger.debug(f"SearXNG instance {instance} failed: {e}")
                continue
        logger.warning("All SearXNG instances failed or returned no results.")
        return []

    def save_results(self):
        with open(self.output_file, "w", encoding="utf-8") as f:
            for url in sorted(self.all_results):
                f.write(url + "\n")
        logger.info(f"Saved {len(self.all_results)} unique URLs to {self.output_file}")

    def run(self, num_results_per_dork: int = 15, delay: float = 1.5):
        logger.info(f"Starting scan with {len(DORKS)} dorks...")
        for i, dork in enumerate(DORKS, 1):
            logger.info(f"[{i}/{len(DORKS)}] Searching: {dork}")
            results = self.search_serpapi(dork, num=num_results_per_dork)
            if not results:
                logger.warning(f"No results found for: {dork}")
            time.sleep(delay)  # Light delay between queries to avoid rate limits
        self.save_results()
        logger.info("Scanning completed successfully!")


if __name__ == "__main__":
    # Put your SerpApi key here or set the SERPAPI_KEY environment variable;
    # leave api_key unset to use the free SearXNG fallback.
    scanner = GoogleDorkScanner()  # or GoogleDorkScanner(api_key="YOUR_SERPAPI_KEY_HERE")
    scanner.run(num_results_per_dork=20, delay=1.5)
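If you save the script as, say, dork_scanner.py (the filename here is just an assumption for the example), you can also drive the class from your own code instead of the __main__ block, for instance to change the output file or the per-dork result count. A minimal sketch:
Python:
from dork_scanner import GoogleDorkScanner  # hypothetical module name for the script above

# api_key falls back to the SERPAPI_KEY environment variable when not passed
scanner = GoogleDorkScanner(output_file="my_results.txt")
scanner.run(num_results_per_dork=10, delay=2.0)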