add pypy support + refactor

ElNiak committed Jun 19, 2024
1 parent 145212f · commit 54646c7
Showing 94 changed files with 3,031 additions and 347 deletions.
9 changes: 8 additions & 1 deletion .gitignore
@@ -2,4 +2,11 @@ venv/
vulnerable*.txt
potential*.txt
*.log
__pycache__/
__pycache__/
nordvpn_login.csv
nordvpn_login_token.csv
bounty_drive/outputs/reports/*.csv
bounty_drive/outputs/reports/*.zip
bounty_drive/outputs/html_google_todo/*.html
pypy3-venv/*
python3-venv/*
48 changes: 46 additions & 2 deletions INSTALL.md
@@ -2,10 +2,54 @@

## Pre-Commit

```bash
python3 -m pip install pre-commit
pre-commit install
# pre-commit installed at .git/hooks/pre-commit
```
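
`pre-commit install` expects a `.pre-commit-config.yaml` at the repository root. If your checkout does not have one yet, a minimal sketch (the hooks below are placeholders, not BountyDrive's actual configuration):

```yaml
# .pre-commit-config.yaml -- illustrative only; swap in the project's real hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
```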

## Classical

```bash
sudo apt-get install python3 python3-dev python3-venv
python3 --version
# Python 3.10.12
```

```bash
python3 -m venv python3-venv
source python3-venv/bin/activate
python3 -m pip install -U pip wheel
python3 -m pip install -r requirements.txt
```

Update `config.ini`
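
The exact keys depend on the version you checked out; `use_selenium` is the only key visible in this commit's diff, so everything else below is a placeholder:

```ini
; config.ini -- illustrative sketch; only use_selenium appears in this commit,
; all other keys are placeholders
[settings]
use_selenium = False
```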

Run with `python3 bounty_drive.py`

## PyPy

Not ready yet: some libraries still segfault under PyPy (urllib3; cryptography had to be downgraded).

Install PyPy from [here](https://doc.pypy.org/en/latest/install.html)

Packages compatible with PyPy are listed in `requirements_pypy.txt`; see:
* http://packages.pypy.org/
* https://doc.pypy.org/en/latest/cpython_differences.html

```bash
sudo apt-get install pypy3 pypy3-dev pypy3-venv
pypy3 --version
# Python 3.9.19 (7.3.16+dfsg-2~ppa1~ubuntu20.04, Apr 26 2024, 13:32:24)
# [PyPy 7.3.16 with GCC 9.4.0]
```

```bash
pypy3 -m venv pypy3-venv
source pypy3-venv/bin/activate
pypy3 -m pip install -U pip wheel
pypy3 -m pip install -r requirements_pypy.txt
```

Update `config.ini`

Run with `pypy3 bounty_drive.py`
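
To confirm which interpreter a given venv actually runs (handy with `python3-venv/` and `pypy3-venv/` side by side), a small sanity check; the snippet is illustrative and not part of the repository:

```python
# Prints the active interpreter; run inside the venv to confirm PyPy is in use.
import platform
import sys

impl = platform.python_implementation()  # "PyPy" or "CPython"
print(f"{impl} {platform.python_version()} at {sys.executable}")
```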
1 change: 1 addition & 0 deletions README.md
@@ -46,6 +46,7 @@ TODO: we should proxy proxy chains

# HAPPY HUNTING

sudo apt-get install portaudio19-dev

# Resources:
https://raw.githubusercontent.com/darklotuskdb/SSTI-XSS-Finder/main/Payloads.txt
11 changes: 11 additions & 0 deletions bounty_drive/attacks/dorks/github_config.py
@@ -0,0 +1,11 @@
#########################################################################################
# Global variables
#########################################################################################

# GitHub Dorking
GITHUB_API_URL = "https://api.github.com"
TOKENS_LIST = ["your_github_token"] # Add your GitHub tokens here
DORK_LIST = ["example_dork1", "example_dork2"] # Add your dorks here
QUERIES_LIST = ["example_query"] # Add your queries here
ORGANIZATIONS_LIST = ["example_organization"] # Add your organizations here
USERS_LIST = ["example_user"] # Add your users here
2 changes: 1 addition & 1 deletion bounty_drive/attacks/dorks/github_dorking.py
@@ -11,7 +11,7 @@
import requests
from termcolor import cprint

from utils.github_config import GITHUB_API_URL, TOKENS_LIST
from attacks.dorks.github_config import GITHUB_API_URL, TOKENS_LIST
from utils.app_config import *

token_index = 0
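The dorking module keeps a global `token_index`, suggesting round-robin rotation over `TOKENS_LIST`. A hedged sketch of how that could work; the helper names are assumptions, and only the `/search/code` endpoint and `token` auth header follow GitHub's documented API:

```python
# Sketch of round-robin token rotation for GitHub dorking; function names are
# hypothetical, not this commit's actual code.
import requests

from attacks.dorks.github_config import GITHUB_API_URL, TOKENS_LIST

token_index = 0

def next_token():
    """Rotate through the configured tokens to spread API rate limits."""
    global token_index
    token = TOKENS_LIST[token_index % len(TOKENS_LIST)]
    token_index += 1
    return token

def github_code_search(dork):
    """Run one code-search query with the next token."""
    response = requests.get(
        f"{GITHUB_API_URL}/search/code",
        params={"q": dork},
        headers={"Authorization": f"token {next_token()}"},
        timeout=30,
    )
    response.raise_for_status()
    return response.json().get("items", [])
```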
75 changes: 54 additions & 21 deletions bounty_drive/attacks/dorks/google_dorking.py
@@ -17,11 +17,11 @@
USER_AGENTS,
)

from utils.web_scraper import parse_google_search_results, render_js_and_get_text
from scraping.web_scraper import parse_google_search_results, render_js_and_get_text

from utils.proxies_manager import prepare_proxies, round_robin_proxies
from utils.request_manager import param_converter, start_request
from utils.results_manager import get_processed_dorks, safe_add_result
from vpn_proxies.proxies_manager import prepare_proxies, round_robin_proxies
from requester.request_manager import param_converter, start_request
from reporting.results_manager import get_processed_dorks, safe_add_result

dork_id_lock = threading.Lock()
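
The import block above reflects the refactor's new package layout (`vpn_proxies`, `requester`, `reporting`, `scraping` in place of a flat `utils`). A sketch of what the two imported proxy helpers plausibly look like, inferred from their call sites; signatures are assumptions, not the commit's implementation:

```python
# proxies_manager-style helpers -- a sketch inferred from call sites only.
import itertools

def round_robin_proxies(proxies):
    """Cycle endlessly over the proxy pool."""
    return itertools.cycle(proxies)

def prepare_proxies(proxy, config):
    """Build a requests-style proxies mapping from one proxy URL."""
    if proxy is None:
        return None
    return {"http": proxy, "https": proxy}
```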

@@ -32,6 +32,7 @@ def google_search_with_proxy(
category,
config,
domain,
processed_dorks,
retries=1,
advanced=False,
dork_id=0,
@@ -46,7 +47,7 @@

params = prepare_params(config)

dork_id = perform_searches(
return perform_searches(
full_query,
proxies,
category,
@@ -55,11 +56,10 @@
config,
advanced,
dork_id,
processed_dorks,
use_session=not (proxy == None),
)

return dork_id


def prepare_params(config):
return {
@@ -79,6 +79,7 @@ def perform_searches(
config,
advanced,
dork_id,
processed_dorks,
use_session,
):

@@ -92,6 +93,7 @@
config,
advanced,
dork_id,
processed_dorks,
use_session=use_session,
)

@@ -107,10 +109,30 @@ def execute_search_with_retries(
config,
advanced,
dork_id,
processed_dorks,
use_session=False,
):
base_url = "https://www.google.com/search"
headers = {"User-Agent": random.choice(USER_AGENTS)}
headers = {
"User-Agent": random.choice(USER_AGENTS),
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip,deflate",
"Connection": "close",
"DNT": "1",
"accept-language": "en-US,en;q=0.9",
"cache-control": "max-age=0",
"Upgrade-Insecure-Requests": "1",
}

if query in processed_dorks:
cprint(
f"Skipping already processed dork: {query}",
"yellow",
file=sys.stderr,
)
return dork_id

for retry_no in range(retries):
if use_session:
cprint(
@@ -127,7 +149,14 @@
headers=headers,
params=params,
is_json=False,
secured=True if "socks" in proxies["https"] else False,
secured=(
True
if proxies
and "https" in proxies
and proxies["https"]
and "socks" in proxies["https"]
else False
),
session=session,
cookies={
"CONSENT": "PENDING+987",
@@ -148,15 +177,24 @@
headers=headers,
params=params,
is_json=False,
secured=True if "socks" in proxies["https"] else False,
secured=(
True
if proxies
and "https" in proxies
and proxies["https"]
and "socks" in proxies["https"]
else False
),
cookies={
"CONSENT": "PENDING+987",
"SOCS": "CAESHAgBEhJnd3NfMjAyMzA4MTAtMF9SQzIaAmRlIAEaBgiAo_CmBg",
},
)

urls = []
if response:
urls = parse_google_search_results(proxies, advanced, query, response.text)
if not urls or len(urls) == 0:
if (not urls or len(urls) == 0) and config["use_selenium"]:
cprint(
f"Parsing for google search failed for {query} - retrying with selenium...",
"red",
@@ -168,10 +206,10 @@
urls = parse_google_search_results(
proxies, advanced, query, html_content
)
result = dork_id, category, urls, query
safe_add_result(result, config)
with dork_id_lock:
dork_id += 1
result = dork_id, category, urls, query
safe_add_result(result, config)
# with dork_id_lock:
# dork_id += 1
# TODO: to be faster, also record non-functional dorks
return dork_id
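
The `secured=` conditional duplicated across both request paths above is a candidate for extraction; a hypothetical helper, not part of this commit:

```python
# Hypothetical refactor of the repeated "secured" expression (not in this commit).
def is_socks_proxy(proxies):
    """True when an https proxy is configured and it is a SOCKS URL."""
    return bool(proxies and proxies.get("https") and "socks" in proxies["https"])

# Each call site would then shrink to: secured=is_socks_proxy(proxies)
```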

@@ -282,12 +320,6 @@ def load_google_dorks_and_search(config, categories):
file=sys.stderr,
)
processed_dorks = get_processed_dorks(config)
search_tasks = filter_search_tasks(search_tasks, processed_dorks)
cprint(
f"Number of dorks to process: {sum([len(search_tasks[task]) for task in search_tasks])}",
"yellow",
file=sys.stderr,
)

if not search_tasks:
cprint(f"No dorks to process.", "red", file=sys.stderr)
@@ -328,6 +360,7 @@
task["category"],
config,
task["domain"],
processed_dorks,
): task
for task in search_tasks_with_proxy
}
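The tail of the diff shows dork searches fanned out through a futures map, with `processed_dorks` now threaded into each worker. A sketch of that dispatch pattern; the leading `submit` arguments are hidden by the diff, so the task keys marked below are assumptions:

```python
# Sketch of the executor dispatch visible at the end of google_dorking.py;
# the task structure is partly assumed.
import concurrent.futures

def dispatch_searches(search_tasks_with_proxy, config, processed_dorks):
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future_to_task = {
            executor.submit(
                google_search_with_proxy,  # defined earlier in this module
                task["dork"],              # assumed key: the query itself
                task["proxy"],             # assumed key: proxy for this task
                task["category"],
                config,
                task["domain"],
                processed_dorks,
            ): task
            for task in search_tasks_with_proxy
        }
        for future in concurrent.futures.as_completed(future_to_task):
            future.result()  # re-raises any worker exception
```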
2 changes: 1 addition & 1 deletion bounty_drive/attacks/sqli/sqli.py
@@ -8,7 +8,7 @@
from tqdm import tqdm
from urllib3 import Retry
from requests.adapters import HTTPAdapter
from utils.proxies_manager import round_robin_proxies
from vpn_proxies.proxies_manager import round_robin_proxies


def run_sqlmap(url, proxy):
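The body of `run_sqlmap` is hidden by the diff. A hedged sketch of what such a wrapper could look like; it assumes the `sqlmap` CLI is on `PATH`, and only the flags used (`-u`, `--batch`, `--random-agent`, `--proxy`) are real sqlmap options:

```python
# Hypothetical body for run_sqlmap -- the commit does not show the real one.
import subprocess

def run_sqlmap(url, proxy):
    """Launch sqlmap against a single URL, optionally through a proxy."""
    cmd = ["sqlmap", "-u", url, "--batch", "--random-agent"]
    if proxy:
        cmd += ["--proxy", proxy]
    return subprocess.run(cmd, capture_output=True, text=True)
```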
(diffs for the remaining 87 of the 94 changed files are not loaded)
