forked from j1t3sh/SQL-Injection-Finder
Commit
add skeleton for other module + refactor readme + reporting
Showing 22 changed files with 819 additions and 375 deletions.
@@ -0,0 +1,7 @@
# Crawl

## Useful links

* https://github.com/0MeMo07/URL-Seeker
* https://github.com/RevoltSecurities/Subdominator
Empty file.
@@ -0,0 +1,164 @@
import sys
import threading
import concurrent.futures
from urllib.parse import urlparse

from termcolor import cprint
from tqdm import tqdm  # the callable is used directly below, so import it from the package

from attacks.xss.xss_striker import photon_crawler
from reporting.results_manager import (
    get_processed_crawled,
    save_crawling_query,
    crawling_results,
)
from vpn_proxies.proxies_manager import get_proxies_and_cycle
from scraping.web_scraper import scrape_links_from_url

def launch_crawling_attack(config, website_to_test):
    try:
        proxies, proxy_cycle = get_proxies_and_cycle(config)

        if config["do_web_scrap"]:
            # TODO: merge with the crawling branch below
            new_urls = []

            lock = threading.Lock()

            # Assign a proxy to each scraping task
            number_of_worker = len(proxies)
            search_tasks_with_proxy = []
            for website in website_to_test:
                proxy = next(proxy_cycle)
                search_tasks_with_proxy.append({"website": website, "proxy": proxy})

            with concurrent.futures.ThreadPoolExecutor(
                max_workers=number_of_worker
            ) as executor:
                future_to_search = {
                    executor.submit(
                        scrape_links_from_url, task["website"], task["proxy"]
                    ): task
                    for task in search_tasks_with_proxy
                }
                for future in tqdm(
                    concurrent.futures.as_completed(future_to_search),
                    desc="Updating links DB for XSS websites",
                    unit="site",
                    total=len(future_to_search),
                ):
                    with lock:
                        new_urls_temps = future.result()
                        new_urls += new_urls_temps

            cprint(f"Found {len(new_urls)} new links", color="green", file=sys.stderr)

            # TODO: also crawl the scraped pages for more links

            website_to_test += new_urls
            website_to_test = list(set(website_to_test))
        elif config["do_crawl"]:
            lock = threading.Lock()
            number_of_worker = len(proxies)
            search_tasks_with_proxy = []

            for website in website_to_test:
                cprint(
                    f"Testing {website} for crawling", color="yellow", file=sys.stderr
                )
                scheme = urlparse(website).scheme
                cprint(
                    "Target scheme: {}".format(scheme),
                    color="yellow",
                    file=sys.stderr,
                )
                host = urlparse(website).netloc

                main_url = scheme + "://" + host

                cprint("Target host: {}".format(host), color="yellow", file=sys.stderr)

                proxy = next(proxy_cycle)
                search_tasks_with_proxy.append({"website": website, "proxy": proxy})

            forms = []
            domURLs = []
            processed_xss_photon_crawl = get_processed_crawled(config)
            with concurrent.futures.ThreadPoolExecutor(
                max_workers=number_of_worker
            ) as executor:
                future_to_search = {
                    executor.submit(
                        photon_crawler,
                        task["website"],
                        config,
                        task["proxy"],
                        processed_xss_photon_crawl,
                    ): task
                    for task in search_tasks_with_proxy
                }
                for future in tqdm(
                    concurrent.futures.as_completed(future_to_search),
                    desc="Photon crawling links DB for XSS websites",
                    unit="site",
                    total=len(future_to_search),
                ):
                    with lock:
                        crawling_result = future.result()
                        # Look the task back up to recover the seed URL for this future
                        seedUrl = future_to_search[future]["website"]

                        cprint(
                            f"Forms: {crawling_result[0]}",
                            color="green",
                            file=sys.stderr,
                        )
                        cprint(
                            f"DOM URLs: {crawling_result[1]}",
                            color="green",
                            file=sys.stderr,
                        )
                        forms_temps = list(set(crawling_result[0]))
                        domURLs_temps = list(set(crawling_result[1]))

                        # Pad the shorter list so forms and DOM URLs stay the same length
                        difference = abs(len(domURLs_temps) - len(forms_temps))
                        if len(domURLs_temps) > len(forms_temps):
                            for _ in range(difference):
                                forms_temps.append(0)
                        elif len(forms_temps) > len(domURLs_temps):
                            for _ in range(difference):
                                domURLs_temps.append(0)

                        result = (seedUrl, forms_temps, domURLs_temps)
                        crawling_results.append((result, config))

                        domURLs += domURLs_temps
                        forms += forms_temps

            cprint(
                f"Total domURLs links: {len(domURLs)}",
                color="green",
                file=sys.stderr,
            )
            cprint(
                f"Total forms links: {len(forms)}",
                color="green",
                file=sys.stderr,
            )
    except KeyboardInterrupt:
        cprint(
            "Process interrupted by user during crawling attack phase ... Saving results",
            "red",
            file=sys.stderr,
        )
        # ThreadPoolExecutor workers are not daemons; clear their queues so the
        # process can exit promptly, see
        # https://stackoverflow.com/questions/49992329/the-workers-in-threadpoolexecutor-is-not-really-daemon
        concurrent.futures.thread._threads_queues.clear()
        for result, config in crawling_results:
            save_crawling_query(result, config)
        # TODO with attacks
        sys.exit(1)
    except Exception as e:
        cprint(f"Error: {e}", color="red", file=sys.stderr)
@@ -0,0 +1,18 @@
# Dorking

## Useful links

* https://github.com/Ishanoshada/GDorks/blob/main/dorks.txt
* https://github.com/BullsEye0/google_dork_list/tree/master
* https://github.com/Ishanoshada/GDorks/tree/main
* https://github.com/obheda12/GitDorker/blob/master/GitDorker.py
* https://medium.com/@dub-flow/the-easiest-way-to-find-cves-at-the-moment-github-dorks-29d18b0c6900
* https://book.hacktricks.xyz/generic-methodologies-and-resources/external-recon-methodology/github-leaked-secrets
* https://github.com/gwen001/github-search
* https://obheda12.medium.com/gitdorker-a-new-tool-for-manual-github-dorking-and-easy-bug-bounty-wins-92a0a0a6b8d5
* https://github.com/spekulatius/infosec-dorks
* Use the Google Hacking Database (https://www.exploit-db.com/google-hacking-database) for good SQLi dorks.

## TODOs

* Implement queries for other search engines (see https://github.com/epsylon/xsser/blob/master/core/dork.py); a rough sketch follows below.
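As a starting point for the TODO above, a minimal sketch of expanding one dork into per-engine search URLs. The engine URL templates and the build_dork_urls helper are illustrative assumptions, not code from this repository:

from urllib.parse import quote_plus

# Illustrative engine templates: assumptions for the sketch, not a documented API.
ENGINES = {
    "google": "https://www.google.com/search?q={q}",
    "bing": "https://www.bing.com/search?q={q}",
    "duckduckgo": "https://duckduckgo.com/html/?q={q}",
}


def build_dork_urls(dork, site=None):
    """Return one search URL per engine for a dork, optionally scoped to a site."""
    query = f"site:{site} {dork}" if site else dork
    return {name: tpl.format(q=quote_plus(query)) for name, tpl in ENGINES.items()}


# Example: an SQLi-style dork scoped to a single domain.
print(build_dork_urls('inurl:"product.php?id="', site="example.com"))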
@@ -1 +0,0 @@
from tqdm import tqdm