diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 4f107b7023d..4066ae2dc45 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -37,7 +37,7 @@ jobs:
         poetry run pip install pyattck==7.1.2 maco
 
     - name: Run Ruff
-      run: poetry run ruff check . --line-length 132 --ignore E501,E402
+      run: poetry run ruff check . --output-format=github
 
     - name: Run unit tests
       run: poetry run python -m pytest --import-mode=append
@@ -59,13 +59,6 @@
       with:
         python-version: ${{ matrix.python-version }}
 
-    - name: Format with black
-      run: poetry run black .
-
-    # to be replaced with ruff
-    - name: Format imports with isort
-      run: poetry run isort .
-
     - name: Commit changes if any
       # Skip this step if being run by nektos/act
      if: ${{ !env.ACT }}
@@ -73,6 +66,8 @@
        git config user.name "GitHub Actions"
        git config user.email "action@github.com"
        if output=$(git status --porcelain) && [ ! -z "$output" ]; then
+          git pull
+          git add .
          git commit -m "style: Automatic code formatting" -a
          git push
        fi
diff --git a/agent/agent.py b/agent/agent.py
index b8968b7f016..e4ec2adbcc4 100644
--- a/agent/agent.py
+++ b/agent/agent.py
@@ -226,7 +226,6 @@ def handle(self, obj):
         self.close_connection = True
 
     def shutdown(self):
-
         # BaseServer also features a .shutdown() method, but you can't use
         # that from the same thread as that will deadlock the whole thing.
         if hasattr(self, "s"):
diff --git a/analyzer/linux/lib/common/results.py b/analyzer/linux/lib/common/results.py
index 365b089fa7d..b43ca019d05 100644
--- a/analyzer/linux/lib/common/results.py
+++ b/analyzer/linux/lib/common/results.py
@@ -38,7 +38,7 @@ def upload_to_host(file_path, dump_path, pids="", ppids="", metadata="", categor
             nc.send(buf, retry=True)
             buf = infd.read(BUFSIZE)
     except Exception as e:
-        log.error("Exception uploading file %s to host: %s", file_path, e, exc_info=True)
+        log.exception("Exception uploading file %s to host: %s", file_path, e)
     finally:
         if nc:
             nc.close()
diff --git a/analyzer/linux/modules/auxiliary/filecollector.py b/analyzer/linux/modules/auxiliary/filecollector.py
index c68da449ce5..83fe88f4987 100755
--- a/analyzer/linux/modules/auxiliary/filecollector.py
+++ b/analyzer/linux/modules/auxiliary/filecollector.py
@@ -51,7 +51,6 @@ def __init__(self, options, config):
             self.thread.join(0.5)
 
     def run(self):
-
        if not HAVE_PYINOTIFY:
            log.info("Missed dependency: pip3 install pyinotify")
            return False
diff --git a/analyzer/linux/modules/auxiliary/screenshots.py b/analyzer/linux/modules/auxiliary/screenshots.py
index 36f25818e7b..b1ef4c83100 100644
--- a/analyzer/linux/modules/auxiliary/screenshots.py
+++ b/analyzer/linux/modules/auxiliary/screenshots.py
@@ -11,6 +11,7 @@
 
 if HAVE_PIL and HAVE_DBUS_NEXT:
     from PIL import Image
+
     from lib.api.screenshot import Screenshot, ScreenshotGrabber, ScreenshotsUnsupported
 
 from lib.common.abstracts import Auxiliary
diff --git a/analyzer/linux/modules/packages/zip.py b/analyzer/linux/modules/packages/zip.py
index 0cc17c6b775..20e475b7071 100644
--- a/analyzer/linux/modules/packages/zip.py
+++ b/analyzer/linux/modules/packages/zip.py
@@ -17,7 +17,6 @@
 
 
 class Zip(Package):
-
     real_package = None
 
     def prepare(self):
diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py
index deb7a583b87..362b46be381 100644
--- a/analyzer/windows/analyzer.py
+++ b/analyzer/windows/analyzer.py
@@ -691,7 +691,7 @@ def analysis_loop(self, aux_modules):
                 try:
                     Process(pid=pid).upload_memdump()
                 except Exception as e:
-                    log.error(e, exc_info=True)
+                    log.exception(e)
                 log.info("Process with pid %s appears to have terminated", pid)
                 if pid in self.process_list.pids:
                     self.process_list.remove_pid(pid)
@@ -915,7 +915,7 @@ def dump_file(self, filepath, metadata="", pids="", ppids="", category="files"):
         except (IOError, socket.error) as e:
             log.error('Unable to upload dropped file at path "%s": %s', filepath, e)
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
     def delete_file(self, filepath, pid=None):
         """A file is about to removed and thus should be dumped right away."""
@@ -1508,8 +1508,7 @@ def dispatch(self, data):
             try:
                 response = fn(arguments)
             except Exception as e:
-                log.error(e, exc_info=True)
-                log.exception("Pipe command handler exception occurred (command %s args %s)", command, arguments)
+                log.exception("Pipe command handler exception occurred (command %s args %s). %s", command, arguments, str(e))
 
         return response
 
@@ -1536,7 +1535,7 @@ def dispatch(self, data):
 
     # When user set wrong package, Example: Emotet package when submit doc, package only is for EXE!
     except CuckooError:
-        log.info("You probably submitted the job with wrong package", exc_info=True)
+        log.exception("You probably submitted the job with wrong package")
         data["status"] = "exception"
         data["description"] = "You probably submitted the job with wrong package"
         try:
diff --git a/analyzer/windows/lib/api/process.py b/analyzer/windows/lib/api/process.py
index e4b2b6f7592..c9ad9fa898f 100644
--- a/analyzer/windows/lib/api/process.py
+++ b/analyzer/windows/lib/api/process.py
@@ -43,13 +43,13 @@
     CAPEMON64_NAME,
     LOADER32_NAME,
     LOADER64_NAME,
-    TTD32_NAME,
-    TTD64_NAME,
     LOGSERVER_PREFIX,
     PATHS,
     PIPE,
     SHUTDOWN_MUTEX,
     TERMINATE_EVENT,
+    TTD32_NAME,
+    TTD64_NAME,
 )
 from lib.common.defines import (
     KERNEL32,
@@ -601,7 +601,6 @@ def is_64bit(self):
         return False
 
     def write_monitor_config(self, interest=None, nosleepskip=False):
-
         config_path = os.path.join(Path.cwd(), "dll", f"{self.pid}.ini")
         log.info("Monitor config for %s: %s", self, config_path)
 
@@ -759,7 +758,7 @@ def upload_memdump(self):
         try:
             upload_to_host(file_path, os.path.join("memory", f"{self.pid}.dmp"), category="memory")
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
             log.error(os.path.join("memory", f"{self.pid}.dmp"))
             log.error(file_path)
         log.info("Memory dump of %s uploaded", self)
diff --git a/analyzer/windows/lib/common/results.py b/analyzer/windows/lib/common/results.py
index b6983a52f7d..b552bbe1c79 100644
--- a/analyzer/windows/lib/common/results.py
+++ b/analyzer/windows/lib/common/results.py
@@ -61,7 +61,7 @@ def upload_to_host(file_path, dump_path, pids="", ppids="", metadata="", categor
             size -= read_size
             buf = infd.read(BUFSIZE)
     except Exception as e:
-        log.error("Exception uploading file %s to host: %s", file_path, e, exc_info=True)
+        log.exception("Exception uploading file %s to host: %s", file_path, e)
 
 
 def upload_buffer_to_host(buffer, dump_path, filepath=False, pids="", ppids="", metadata="", category="", duplicated=False):
diff --git a/analyzer/windows/lib/core/pipe.py b/analyzer/windows/lib/core/pipe.py
index c8fecc6aba5..c5f399ae3e2 100644
--- a/analyzer/windows/lib/core/pipe.py
+++ b/analyzer/windows/lib/core/pipe.py
@@ -224,7 +224,7 @@ def stop(self):
                 if h.is_alive():
                     h.stop()
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
 
 def disconnect_pipes():
diff --git a/analyzer/windows/modules/auxiliary/dns_etw.py b/analyzer/windows/modules/auxiliary/dns_etw.py
index 4c52da10b60..ca427e1e7e2 100644
--- a/analyzer/windows/modules/auxiliary/dns_etw.py
+++ b/analyzer/windows/modules/auxiliary/dns_etw.py
@@ -43,7 +43,6 @@ def encode(data, encoding="utf-8"):
 
 if HAVE_ETW:
 
     class ETW_provider(ETW):
-
         def __init__(
             self,
             ring_buf_size=1024,
diff --git a/analyzer/windows/modules/auxiliary/evtx.py b/analyzer/windows/modules/auxiliary/evtx.py
index 41f47c3d16f..1899812f736 100644
--- a/analyzer/windows/modules/auxiliary/evtx.py
+++ b/analyzer/windows/modules/auxiliary/evtx.py
@@ -12,7 +12,6 @@
 
 
 class Evtx(Thread, Auxiliary):
-
     evtx_dump = "evtx.zip"
 
     windows_logs = [
diff --git a/analyzer/windows/modules/auxiliary/permissions.py b/analyzer/windows/modules/auxiliary/permissions.py
index 8b8bb711f1a..17b655119b9 100644
--- a/analyzer/windows/modules/auxiliary/permissions.py
+++ b/analyzer/windows/modules/auxiliary/permissions.py
@@ -34,7 +34,6 @@ def start(self):
 
         log.debug("Adjusting permissions for %s", locations)
         for location in locations:
-
             # First add a non-inherited permission for Admin Read+Execute
             # icacls /grant:r "BUILTIN\Administrators:(OI)(CI)(RX)" "BUILTIN\\Administrators:(RX)" /t /c /q
             modify_admin_params = [
diff --git a/analyzer/windows/modules/packages/pub.py b/analyzer/windows/modules/packages/pub.py
index 9da03cc3437..1602615d8b7 100644
--- a/analyzer/windows/modules/packages/pub.py
+++ b/analyzer/windows/modules/packages/pub.py
@@ -30,7 +30,6 @@ def __init__(self, options=None, config=None):
     The .pub filename extension will be added automatically."""
 
     def set_keys(self):
-
         baseOfficeKeyPath = r"Software\Microsoft\Office"
         installedVersions = []
         try:
diff --git a/analyzer/windows/modules/packages/pub2016.py b/analyzer/windows/modules/packages/pub2016.py
index 950c1fa6048..196a395aefb 100644
--- a/analyzer/windows/modules/packages/pub2016.py
+++ b/analyzer/windows/modules/packages/pub2016.py
@@ -26,7 +26,6 @@ def __init__(self, options=None, config=None):
     The .pub filename extension will be added automatically."""
 
     def set_keys(self):
-
         baseOfficeKeyPath = r"Software\Microsoft\Office"
         installedVersions = []
         try:
diff --git a/analyzer/windows/prescripts/prescript_detection.py b/analyzer/windows/prescripts/prescript_detection.py
index 46ff0f4510d..79fe746b516 100644
--- a/analyzer/windows/prescripts/prescript_detection.py
+++ b/analyzer/windows/prescripts/prescript_detection.py
@@ -599,7 +599,7 @@ def create_trigger(
 
 def change_execution_dir(dir):
     log.info(f"Changing execution directory to {dir}")
-    log.warn("Changing directory not available in prescript testing")
+    log.warning("Changing directory not available in prescript testing")
 
 
 def main(args):
diff --git a/analyzer/windows/tests/lib/common/test_abstracts.py b/analyzer/windows/tests/lib/common/test_abstracts.py
index 1607df26f29..2df0f991625 100644
--- a/analyzer/windows/tests/lib/common/test_abstracts.py
+++ b/analyzer/windows/tests/lib/common/test_abstracts.py
@@ -6,7 +6,6 @@
 
 
 class TestPackageConfiguration(unittest.TestCase):
-
     def test_private_package_configuration(self):
         # test analysis package
         package_module = self.__class__.__module__
diff --git a/analyzer/windows/tests/test_analyzer.py b/analyzer/windows/tests/test_analyzer.py
index a8db4ec4241..353b6401a68 100644
--- a/analyzer/windows/tests/test_analyzer.py
+++ b/analyzer/windows/tests/test_analyzer.py
@@ -81,7 +81,6 @@ def test_prepare(self, set_lock, init_logging, config, pipeserver):
 
 
 class TestAnalyzerChoosePackage(unittest.TestCase):
-
     def test_choose_package_shellcode(self):
         test = analyzer.Analyzer()
         test.config = MagicMock()
diff --git a/lib/cuckoo/common/abstracts.py b/lib/cuckoo/common/abstracts.py
index f92105ea15c..a0a8ce14c99 100644
--- a/lib/cuckoo/common/abstracts.py
+++ b/lib/cuckoo/common/abstracts.py
@@ -833,7 +833,6 @@ def set_path(self, analysis_path):
             CuckooReportError(e)
 
     def yara_detected(self, name):
-
         target = self.results.get("target", {})
         if target.get("category") in ("file", "static") and target.get("file"):
             for keyword in ("cape_yara", "yara"):
@@ -889,16 +888,22 @@ def yara_detected(self, name):
                 for yara_block in self.results["static"]["office"]["Macro"]["info"].get("macroname", []) or []:
                     for sub_block in self.results["static"]["office"]["Macro"]["info"]["macroname"].get(yara_block, []) or []:
                         if re.findall(name, sub_block["name"], re.I):
-                            yield "macro", os.path.join(macro_path, macroname), sub_block, self.results["static"]["office"]["Macro"][
-                                "info"
-                            ]
+                            yield (
+                                "macro",
+                                os.path.join(macro_path, macroname),
+                                sub_block,
+                                self.results["static"]["office"]["Macro"]["info"],
+                            )
 
         if self.results.get("static", {}).get("office", {}).get("XLMMacroDeobfuscator", False):
             for yara_block in self.results["static"]["office"]["XLMMacroDeobfuscator"].get("info", []).get("yara_macro", []) or []:
                 if re.findall(name, yara_block["name"], re.I):
-                    yield "macro", os.path.join(macro_path, "xlm_macro"), yara_block, self.results["static"]["office"][
-                        "XLMMacroDeobfuscator"
-                    ]["info"]
+                    yield (
+                        "macro",
+                        os.path.join(macro_path, "xlm_macro"),
+                        yara_block,
+                        self.results["static"]["office"]["XLMMacroDeobfuscator"]["info"],
+                    )
 
     def signature_matched(self, signame: str) -> bool:
         # Check if signature has matched (useful for ordered signatures)
@@ -964,7 +969,6 @@ def _get_ip_by_host(self, hostname):
         )
 
     def _get_ip_by_host_dns(self, hostname):
-
         ips = []
 
         try:
@@ -1722,7 +1726,7 @@ def update(self) -> bool:
         try:
             req = requests.get(self.downloadurl, headers=headers, verify=True)
         except requests.exceptions.RequestException as e:
-            log.warn("Error downloading feed for %s: %s", self.feedname, e)
+            log.warning("Error downloading feed for %s: %s", self.feedname, e)
             return False
         if req.status_code == 200:
             self.downloaddata = req.content
diff --git a/lib/cuckoo/common/admin_utils.py b/lib/cuckoo/common/admin_utils.py
index a25b6c9bd96..77bbe4f93e3 100644
--- a/lib/cuckoo/common/admin_utils.py
+++ b/lib/cuckoo/common/admin_utils.py
@@ -382,7 +382,7 @@ def execute_command_on_all(remote_command, servers: list, ssh_proxy: SSHClient):
         except TimeoutError as e:
             sys.exit(f"Did you forget to use jump box? {str(e)}")
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
 
 def bulk_deploy(files, yara_category, dry_run=False, servers: list = [], ssh_proxy: SSHClient = False):
diff --git a/lib/cuckoo/common/cape_utils.py b/lib/cuckoo/common/cape_utils.py
index 38c8ef99400..647667042e2 100644
--- a/lib/cuckoo/common/cape_utils.py
+++ b/lib/cuckoo/common/cape_utils.py
@@ -194,7 +194,7 @@ def static_config_parsers(cape_name, file_path, file_data):
                                 cape_config[cape_name].update({key: [value]})
                 parser_loaded = True
             except Exception as e:
-                log.error("CAPE: parsing error on %s with %s: %s", file_path, cape_name, e, exc_info=True)
+                log.exception("CAPE: parsing error on %s with %s: %s", file_path, cape_name, e)
 
     # DC3-MWCP
     if HAS_MWCP and not parser_loaded and cape_name and cape_name in mwcp_decoders:
@@ -260,7 +260,7 @@ def static_config_parsers(cape_name, file_path, file_data):
                 if "rules" in str(e):
                     log.warning("You probably need to compile yara-python with dotnet support")
                 else:
-                    log.error(e, exc_info=True)
+                    log.exception(e)
                 log.warning(
                     "malwareconfig parsing error for %s with %s: %s, you should submit issue/fix to https://github.com/kevthehermit/RATDecoders/",
                     file_path,
diff --git a/lib/cuckoo/common/cleaners_utils.py b/lib/cuckoo/common/cleaners_utils.py
index 203fef16cd1..d75df8e67b9 100644
--- a/lib/cuckoo/common/cleaners_utils.py
+++ b/lib/cuckoo/common/cleaners_utils.py
@@ -229,7 +229,7 @@ def delete_data(tid):
         elif repconf.elasticsearchdb.enabled:
             delete_analysis_and_related_calls(tid)
     except Exception as e:
-        log.error("failed to remove analysis info (may not exist) %s due to %s" % (tid, e), exc_info=True)
+        log.exception("failed to remove analysis info (may not exist) %s due to %s" % (tid, e))
     with db.session.begin():
         if db.delete_task(tid):
             delete_folder(os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % tid))
@@ -654,7 +654,6 @@ def binaries_clean_before_day(days: int):
 
 
 def execute_cleanup(args: dict, init_log=True):
-
     if init_log:
         init_console_logging()
diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py
index fa42507f698..1273fb625fc 100644
--- a/lib/cuckoo/common/demux.py
+++ b/lib/cuckoo/common/demux.py
@@ -184,7 +184,7 @@ def _sf_children(child: sfFile):  # -> bytes:
             path_to_extract = os.path.join(tmp_dir, sanitize_filename((child.filename).decode()))
             _ = path_write_file(path_to_extract, child.contents)
     except Exception as e:
-        log.error(e, exc_info=True)
+        log.exception(e)
     return (path_to_extract.encode(), child.platform, child.get_type(), child.get_size())
 
 
@@ -220,7 +220,7 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True):
                 else:
                     retlist.append(_sf_children(sf_child))
     except Exception as e:
-        log.error(e, exc_info=True)
+        log.exception(e)
 
     return list(filter(None, retlist)), ""
diff --git a/lib/cuckoo/common/dotnet_utils.py b/lib/cuckoo/common/dotnet_utils.py
index d2ba9be05a9..9c1a7dc12b7 100644
--- a/lib/cuckoo/common/dotnet_utils.py
+++ b/lib/cuckoo/common/dotnet_utils.py
@@ -14,7 +14,6 @@
 
 
 def dotnet_user_strings(file: str = False, data: bytes = False, dn_whitelisting: list = []):
-
     if not HAVE_DNFILE:
         return []
 
diff --git a/lib/cuckoo/common/email_utils.py b/lib/cuckoo/common/email_utils.py
index 46f0066ff9a..ca212ba19c3 100644
--- a/lib/cuckoo/common/email_utils.py
+++ b/lib/cuckoo/common/email_utils.py
@@ -28,7 +28,6 @@ def find_attachments_in_email(s, expand_attachment):
 
 
 def _find_attachments_in_email(mesg, expand_attachment, atts):
-
     # MHTML detection
     if mesg.get_content_maintype() == "multipart" and mesg.get_content_subtype() == "related":
         for part in mesg.walk():
diff --git a/lib/cuckoo/common/gcp.py b/lib/cuckoo/common/gcp.py
index 4accf6643a3..204f1c6c625 100644
--- a/lib/cuckoo/common/gcp.py
+++ b/lib/cuckoo/common/gcp.py
@@ -63,7 +63,7 @@ def list_instances(self) -> dict:
                     ]
                     servers.setdefault(instance["name"], ips)
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
         elif HAVE_GCP:
             try:
                 instance_client = compute_v1.InstancesClient()
@@ -88,7 +88,6 @@ def list_instances(self) -> dict:
         return servers
 
     def autodiscovery(self):
-
         while True:
             servers = self.list_instances()
             if not servers:
@@ -109,9 +108,9 @@ def autodiscovery(self):
                         if not r.ok:
                             log.error("Can't registger worker with IP: %s. status_code: %d ", ip, r.status_code)
                     except Exception as e:
-                        log.error(e, exc_info=True)
+                        log.exception(e)
                         break
                 except Exception as e:
-                    log.error(e, exc_info=True)
+                    log.exception(e)
 
             time.sleep(int(self.dist_cfg.GCP.autodiscovery))
diff --git a/lib/cuckoo/common/hypervisor_config.py b/lib/cuckoo/common/hypervisor_config.py
index 1f649177d76..a0ac89443da 100644
--- a/lib/cuckoo/common/hypervisor_config.py
+++ b/lib/cuckoo/common/hypervisor_config.py
@@ -8,7 +8,6 @@
 
 
 def proxmox_shutdown_vm(machineName: str):
-
     proxmox_server = proxmox_conf.proxmox.hostname
     # Not supporting multiple servers
     nodes = proxmox_conf.proxmox.nodes
diff --git a/lib/cuckoo/common/icon.py b/lib/cuckoo/common/icon.py
index 754b6628bca..cfc88ff5017 100644
--- a/lib/cuckoo/common/icon.py
+++ b/lib/cuckoo/common/icon.py
@@ -2,11 +2,10 @@
 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
 # See the file 'docs/LICENSE' for copying permission.
 
-from ctypes import POINTER, Structure, byref
+from ctypes import POINTER, Structure, byref, cast, create_string_buffer, pointer, sizeof, string_at
 from ctypes import c_ubyte as BYTE
 from ctypes import c_uint as DWORD
 from ctypes import c_ushort as WORD
-from ctypes import cast, create_string_buffer, pointer, sizeof, string_at
 
 
 class GRPICONDIR(Structure):
diff --git a/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py b/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py
index 0da287a8401..a9c32e39723 100644
--- a/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py
+++ b/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py
@@ -45,7 +45,6 @@
 
 
 def xlmdeobfuscate(filepath: str, task_id: str, password: str = "", on_demand: bool = False):
-
     if not HAVE_XLM_DEOBF or processing_conf.xlsdeobf.on_demand and not on_demand:
         return
     xlm_kwargs["file"] = filepath
@@ -68,4 +67,4 @@ def xlmdeobfuscate(filepath: str, task_id: str, password: str = "", on_demand: b
         if "no attribute 'workbook'" in str(e) or "Can't find workbook" in str(e):
             log.info("Workbook not found. Probably not an Excel file")
         else:
-            log.error(e, exc_info=True)
+            log.exception(e)
diff --git a/lib/cuckoo/common/integrations/capa.py b/lib/cuckoo/common/integrations/capa.py
index 13dd1c97858..5590c6cde43 100644
--- a/lib/cuckoo/common/integrations/capa.py
+++ b/lib/cuckoo/common/integrations/capa.py
@@ -292,6 +292,6 @@ def flare_capa_details(
     except EmptyReportError:
         log.info("FLARE CAPA -> No process data available")
     except Exception as e:
-        log.error(e, exc_info=True)
+        log.exception(e)
 
     return capa_output
diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py
index 972eca864ce..8c6beffea0a 100644
--- a/lib/cuckoo/common/integrations/file_extra_info.py
+++ b/lib/cuckoo/common/integrations/file_extra_info.py
@@ -559,7 +559,7 @@ def vbe_extract(file: str, **_) -> ExtractorReturnType:
     try:
         decoded = vbe_decode_file(file, data)
     except Exception as e:
-        log.error(e, exc_info=True)
+        log.exception(e)
 
     if not decoded:
         log.debug("VBE content wasn't decoded")
diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py
index 540a97d2e22..31bcf4136fc 100644
--- a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py
+++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py
@@ -17,7 +17,6 @@
 
 @time_tracker
 def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType:
-
     if not data_dictionary.get("pe", {}).get("overlay"):
         return {}
 
diff --git a/lib/cuckoo/common/integrations/floss.py b/lib/cuckoo/common/integrations/floss.py
index 5bc55331822..16260fdb767 100644
--- a/lib/cuckoo/common/integrations/floss.py
+++ b/lib/cuckoo/common/integrations/floss.py
@@ -124,7 +124,7 @@ def run(self):
                         results[stype].append(sval.string)
 
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         fm.set_log_config(fm.DebugLevel.DEFAULT, False)
 
diff --git a/lib/cuckoo/common/integrations/parse_dotnet.py b/lib/cuckoo/common/integrations/parse_dotnet.py
index c88c1b936f6..6a86fb30eb4 100644
--- a/lib/cuckoo/common/integrations/parse_dotnet.py
+++ b/lib/cuckoo/common/integrations/parse_dotnet.py
@@ -56,7 +56,7 @@ def _get_custom_attrs(self) -> List[Dict[str, str]]:
         except subprocess.CalledProcessError as e:
            log.error("Monodis: %s", str(e))
        except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
        return None
 
    def _get_assembly_refs(self) -> List[Dict[str, str]]:
@@ -84,7 +84,7 @@ def _get_assembly_refs(self) -> List[Dict[str, str]]:
        except subprocess.CalledProcessError as e:
            log.error("Monodis: %s", str(e))
        except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
        return None
 
    def _get_assembly_info(self) -> Dict[str, str]:
@@ -103,7 +103,7 @@ def _get_assembly_info(self) -> Dict[str, str]:
        except subprocess.CalledProcessError as e:
            log.error("Monodis: %s", str(e))
        except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
        return None
 
    def _get_type_refs(self) -> List[Dict[str, str]]:
@@ -128,7 +128,7 @@ def _get_type_refs(self) -> List[Dict[str, str]]:
        except subprocess.CalledProcessError as e:
            log.error("Monodis: %s", str(e))
        except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
        return None
 
    def run(self) -> Dict[str, Any]:
@@ -151,5 +151,5 @@ def run(self) -> Dict[str, Any]:
            else:
                return
        except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
        return None
diff --git a/lib/cuckoo/common/integrations/parse_hwp.py b/lib/cuckoo/common/integrations/parse_hwp.py
index 2b037bdf227..5d64bd24ba5 100644
--- a/lib/cuckoo/common/integrations/parse_hwp.py
+++ b/lib/cuckoo/common/integrations/parse_hwp.py
@@ -38,7 +38,7 @@ def unpack_hwp(self):
                 stream_content = zlib.decompress(contents, -15)
                 self.files[stream_name] = stream_content
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
     def extract_eps(self) -> List[bytes]:
         """Extract some information from Encapsulated Post Script files."""
diff --git a/lib/cuckoo/common/integrations/parse_java.py b/lib/cuckoo/common/integrations/parse_java.py
index 733ed296342..4d45863c3f9 100644
--- a/lib/cuckoo/common/integrations/parse_java.py
+++ b/lib/cuckoo/common/integrations/parse_java.py
@@ -41,7 +41,7 @@ def run(self) -> Dict[str, Any]:
                 p = Popen([self.decomp_jar, jar_file], stdout=PIPE)
                 results["decompiled"] = convert_to_printable(p.stdout.read())
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         with contextlib.suppress(Exception):
             Path(jar_file.decode()).unlink()
diff --git a/lib/cuckoo/common/integrations/parse_office.py b/lib/cuckoo/common/integrations/parse_office.py
index 1c7de942cca..b047d6bc604 100644
--- a/lib/cuckoo/common/integrations/parse_office.py
+++ b/lib/cuckoo/common/integrations/parse_office.py
@@ -129,7 +129,7 @@ def _get_xml_meta(self, filepath) -> Dict[str, Dict[str, str]]:
                     continue
                 metares["SummaryInformation"][n.split(":")[1]] = convert_to_printable(data[0].data)
             except (IndexError, AttributeError) as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
         for elem in app._get_documentElement().childNodes:
             try:
@@ -146,7 +146,7 @@ def _get_xml_meta(self, filepath) -> Dict[str, Dict[str, str]]:
                     continue
                 metares["DocumentSummaryInformation"][n] = convert_to_printable(data[0].data)
             except (IndexError, AttributeError) as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
         return metares
 
@@ -236,7 +236,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]:
                 if temp_results:
                     results["office_rtf"] = temp_results
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
         else:
             try:
                 vba = VBA_Parser(filepath)
@@ -254,7 +254,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]:
             except AttributeError:
                 log.warning("OleFile library bug: AttributeError! fix: poetry run pip install olefile")
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
         officeresults = {"Metadata": {}}
         macro_folder = os.path.join(CUCKOO_ROOT, "storage", "analyses", self.task_id, "macros")
@@ -301,7 +301,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]:
                     except ValueError as e:
                         log.error("Can't parse macros for %s - %s ", filepath, str(e))
                     except Exception as e:
-                        log.error(e, exc_info=True)
+                        log.exception(e)
                     for keyword, description in detect_autoexec(vba_code):
                         officeresults["Macro"]["Analysis"].setdefault("AutoExec", []).append(
                             (keyword.replace(".", "_"), description)
@@ -328,7 +328,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]:
                 if indicator.value and indicator.name in {"Word Document", "Excel Workbook", "PowerPoint Presentation"}:
                     officeresults["Metadata"]["DocumentType"] = indicator.name
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         if HAVE_XLM_DEOBF:
             tmp_xlmmacro = xlmdeobfuscate(filepath, self.task_id, self.options.get("password", ""))
diff --git a/lib/cuckoo/common/integrations/parse_pe.py b/lib/cuckoo/common/integrations/parse_pe.py
index f24c0397fa7..1765e468f90 100644
--- a/lib/cuckoo/common/integrations/parse_pe.py
+++ b/lib/cuckoo/common/integrations/parse_pe.py
@@ -219,7 +219,7 @@ def get_peid_signatures(self, pe: pefile.PE) -> list:
             if result:
                 return list(result)
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         return None
 
@@ -344,7 +344,7 @@ def get_resources(self, pe: pefile.PE) -> List[Dict[str, str]]:
                     except pefile.PEFormatError as e:
                         log.debug("get_resources error: %s", str(e))
                     except Exception as e:
-                        log.error(e, exc_info=True)
+                        log.exception(e)
                         continue
 
         return resources
@@ -368,7 +368,7 @@ def get_pdb_path(self, pe: pefile.PE) -> str:
                 length = struct.unpack_from("IIB", dbgdata)[1]
                 return dbgdata[12:length].decode("latin-1").rstrip("\0")
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         return None
 
@@ -401,7 +401,7 @@ def get_imported_symbols(self, pe: pefile.PE) -> Dict[str, dict]:
                     "imports": symbols,
                 }
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
                 continue
 
         return imports
@@ -538,7 +538,7 @@ def get_sections(self, pe: pefile.PE) -> List[Dict[str, str]]:
                     }
                 )
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
                 continue
 
         return sections
@@ -650,7 +650,7 @@ def get_icon_info(self, pe: pefile.PE) -> Tuple[str, str, str, str]:
                 return None, None, None, None
             return icon, fullhash, simphash, dhash
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
         return None, None, None, None
 
@@ -693,7 +693,7 @@ def get_versioninfo(self, pe: pefile.PE) -> List[dict]:
                             entry["value"] = f"0x0{entry['value'][2:5]} 0x0{entry['value'][7:10]}"
                         peresults.append(entry)
                 except Exception as e:
-                    log.error(e, exc_info=True)
+                    log.exception(e)
                     continue
 
         return peresults
@@ -853,12 +853,12 @@ def get_dll_exports(self) -> str:
                     else:
                         exports.append(re.sub("[^A-Za-z0-9_?@-]", "", exported_symbol.name))
            except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
            return ",".join(exports)
        except Exception as e:
            log.error("PE type not recognised")
-            log.error(e, exc_info=True)
+            log.exception(e)
 
        return ""
 
@@ -873,7 +873,7 @@ def choose_dll_export(self) -> str:
                 if exp.name.decode() in ("DllInstall", "DllRegisterServer", "xlAutoOpen"):
                     return exp.name.decode()
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
         return None
 
     def get_entrypoint(self, pe: pefile.PE) -> str:
diff --git a/lib/cuckoo/common/integrations/pdfminer.py b/lib/cuckoo/common/integrations/pdfminer.py
index d3b98bd3e52..f5861aa11bf 100644
--- a/lib/cuckoo/common/integrations/pdfminer.py
+++ b/lib/cuckoo/common/integrations/pdfminer.py
@@ -44,10 +44,10 @@ def _mine_for_urls(file_path: str) -> Set[str]:
                 try:
                     obj = doc.getobj(object_id)
                     urls.update(_search_for_url(obj))
-                except Exception as ex:
-                    log.error(ex, exc_info=True)
-    except Exception as ex:
-        log.error(ex, exc_info=True)
+                except Exception as e:
+                    log.exception(e)
+    except Exception as e:
+        log.exception(e)
 
     return urls
diff --git a/lib/cuckoo/common/integrations/peepdf.py b/lib/cuckoo/common/integrations/peepdf.py
index 27357ecb646..698f25d651e 100644
--- a/lib/cuckoo/common/integrations/peepdf.py
+++ b/lib/cuckoo/common/integrations/peepdf.py
@@ -59,7 +59,7 @@ def _set_base_uri(pdf):
                 if elem:
                     return elem.getValue()
     except Exception as e:
-        log.error(e, exc_info=True)
+        log.exception(e)
 
     return ""
 
@@ -109,7 +109,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]:
                             jslist, unescapedbytes, urlsfound, errors, ctxdummy = analyseJS(decoded_stream.strip())
                             jsdata = jslist[0]
                         except Exception as e:
-                            log.error(e, exc_info=True)
+                            log.exception(e)
                             continue
                         if errors or jsdata is None:
                             continue
@@ -137,7 +137,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]:
                         jslist, unescapedbytes, urlsfound, errors, ctxdummy = analyseJS(js_elem.value)
                         jsdata = jslist[0]
                     except Exception as e:
-                        log.error(e, exc_info=True)
+                        log.exception(e)
                         continue
                     if errors or not jsdata:
                         continue
diff --git a/lib/cuckoo/common/integrations/vba2graph.py b/lib/cuckoo/common/integrations/vba2graph.py
index fcaaf64708a..7b73b2d0b74 100644
--- a/lib/cuckoo/common/integrations/vba2graph.py
+++ b/lib/cuckoo/common/integrations/vba2graph.py
@@ -599,7 +599,6 @@ def vba_clean_whitespace(vba_content_lines):
 
     # process lines one by one
     for vba_line in vba_content_lines:
-
         # remove leading and trailing whitespace
         # & reduce multiple whitespaces into one space
         vba_line = " ".join(vba_line.split())
@@ -801,7 +800,6 @@ def vba_extract_properties(vba_content_lines):
 
     # process lines one by one
     for vba_line in vba_content_lines:
-
         # look for property start keywords
         prop_start_pos = max(vba_line.find("Property Let "), vba_line.find("Property Get "))
 
@@ -856,7 +854,6 @@ def create_call_graph(vba_func_dict):
         DG.add_node(func_name, keywords="")
     # analyze function calls
     for func_name in vba_func_dict:
-
         func_code = vba_func_dict[func_name]
         # split function code into tokens
         func_code_tokens = list(filter(None, re.split(r'["(, \\-!?:\\r\\n)&=.><]+', func_code)))
@@ -898,7 +895,6 @@ def find_keywords_in_graph(vba_func_dict, DG):
     """
     # analyze function calls
     for func_name in vba_func_dict:
-
         func_code = vba_func_dict[func_name]
         # split function code into lines
         func_code_lines = filter(None, re.split("\n", func_code))
diff --git a/lib/cuckoo/common/integrations/virustotal.py b/lib/cuckoo/common/integrations/virustotal.py
index c56f8fb4e66..4ff8820ad24 100644
--- a/lib/cuckoo/common/integrations/virustotal.py
+++ b/lib/cuckoo/common/integrations/virustotal.py
@@ -156,7 +156,6 @@
 
 
 def get_vt_consensus(namelist: list):
-
     finaltoks = defaultdict(int)
     for name in namelist:
         toks = re.findall(r"[A-Za-z0-9]+", name)
diff --git a/lib/cuckoo/common/load_extra_modules.py b/lib/cuckoo/common/load_extra_modules.py
index 1d38f2ca385..5e26fe97a2c 100644
--- a/lib/cuckoo/common/load_extra_modules.py
+++ b/lib/cuckoo/common/load_extra_modules.py
@@ -35,7 +35,6 @@ def ratdecodedr_load_decoders(path: str):
 
 
 def cape_load_custom_decoders(CUCKOO_ROOT: str):
-
     cape_modules = {}
     cape_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "CAPE")
     CAPE_DECODERS = {"cape": [os.path.basename(decoder)[:-3] for decoder in glob.glob(f"{cape_decoders}/[!_]*.py")]}
@@ -68,7 +67,6 @@ def cape_load_custom_decoders(CUCKOO_ROOT: str):
 
 
 def malduck_load_decoders(CUCKOO_ROOT: str):
-
     malduck_modules = {}
     malduck_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "malduck")
     MALDUCK_DECODERS = [os.path.basename(decoder)[:-3] for decoder in glob.glob(f"{malduck_decoders}/[!_]*.py")]
diff --git a/lib/cuckoo/common/logtbl.py b/lib/cuckoo/common/logtbl.py
index 8bba49c920b..0c1dbd8e8ec 100644
--- a/lib/cuckoo/common/logtbl.py
+++ b/lib/cuckoo/common/logtbl.py
@@ -8,6 +8,7 @@
 by hand.
 """
 
+
 table = (
     ("__process__", "__init__", ("",)),
     ("__thread__", "__init__", ("",)),
diff --git a/lib/cuckoo/common/objects.py b/lib/cuckoo/common/objects.py
index aa979094dc7..8b4a69ea528 100644
--- a/lib/cuckoo/common/objects.py
+++ b/lib/cuckoo/common/objects.py
@@ -353,7 +353,7 @@ def get_content_type(self):
             except magic.MagicException as e:
                 log.error("Magic error: %s", str(e))
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
         if not file_type and hasattr(magic, "open"):
             try:
                 ms = magic.open(magic.MAGIC_MIME | magic.MAGIC_SYMLINK)
@@ -361,7 +361,7 @@ def get_content_type(self):
                 file_type = ms.file(self.file_path)
                 ms.close()
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
         if file_type is None:
             try:
@@ -370,7 +370,7 @@ def get_content_type(self):
                 )
                 file_type = p.stdout.read().strip()
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
 
         return file_type
 
@@ -419,7 +419,7 @@ def get_type(self):
                     File.notified_pefile = True
                     log.warning("Unable to import pefile (install with `pip3 install pefile`)")
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
         if not self.file_type:
             self.file_type = self.get_content_type()
diff --git a/lib/cuckoo/common/url_validate.py b/lib/cuckoo/common/url_validate.py
index 9df8de2a461..8ce2b626509 100644
--- a/lib/cuckoo/common/url_validate.py
+++ b/lib/cuckoo/common/url_validate.py
@@ -17,7 +17,10 @@
     # protocol identifier
     r"(?:(?:https?|ftp|tcp|udp)://)"
     # user:pass authentication
-    r"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+" r"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?" r"(?:" r"(?P<private_ip>"
+    r"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+"
+    r"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?"
+    r"(?:"
+    r"(?P<private_ip>"
     # IP address exclusion
     # private & local networks
     r"(?:(?:10|127)" + ip_middle_octet + r"{2}" + ip_last_octet + r")|"
@@ -25,13 +28,19 @@
     r"(?:172\.(?:1[6-9]|2\d|3[0-1])" + ip_middle_octet + ip_last_octet + r"))"
     r"|"
     # private & local hosts
-    r"(?P<private_host>" r"(?:localhost))" r"|"
+    r"(?P<private_host>"
+    r"(?:localhost))"
+    r"|"
     # IP address dotted notation octets
     # excludes loopback network 0.0.0.0
     # excludes reserved space >= 224.0.0.0
     # excludes network & broadcast addresses
     # (first & last IP address of each class)
-    r"(?P<public_ip>" r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" r"" + ip_middle_octet + r"{2}" r"" + ip_last_octet + r")" r"|"
+    r"(?P<public_ip>"
+    r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
+    r"" + ip_middle_octet + r"{2}"
+    r"" + ip_last_octet + r")"
+    r"|"
     # IPv6 RegEx from https://stackoverflow.com/a/17871737
     r"\[("
     # 1:2:3:4:5:6:7:8
@@ -59,16 +68,23 @@
     r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}"
     # ::255.255.255.255 ::ffff:255.255.255.255 ::ffff:0:255.255.255.255
     # (IPv4-mapped IPv6 addresses and IPv4-translated addresses)
-    r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|" r"([0-9a-fA-F]{1,4}:){1,4}:" r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}"
+    r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|"
+    r"([0-9a-fA-F]{1,4}:){1,4}:"
+    r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}"
     # 2001:db8:3:4::192.0.2.33 64:ff9b::192.0.2.33
     # (IPv4-Embedded IPv6 Address)
-    r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" r")\]|"
+    r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])"
+    r")\]|"
     # host name
-    r"(?:(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)"
+    r"(?:(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*"
+    r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)"
     # domain name
-    r"(?:\.(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)*"
+    r"(?:\.(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*"
+    r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)*"
     # TLD identifier
-    r"(?:\.(?:(?:xn--[-]{0,2}[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]{2,})|" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff]{2,}))" r")"
+    r"(?:\.(?:(?:xn--[-]{0,2}[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]{2,})|"
+    r"[a-z\u00a1-\uffff\U00010000-\U0010ffff]{2,}))"
+    r")"
     # port number
     r"(?::\d{2,5})?"
     # resource path
@@ -76,7 +92,8 @@
     # query string
     r"(?:\?\S*)?"
     # fragment
-    r"(?:#\S*)?" r"$",
+    r"(?:#\S*)?"
+    r"$",
     re.UNICODE | re.IGNORECASE,
 )
diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py
index 410995a069c..55f5b31b2b4 100644
--- a/lib/cuckoo/common/web_utils.py
+++ b/lib/cuckoo/common/web_utils.py
@@ -85,9 +85,8 @@
 if dist_conf.distributed.enabled:
     try:
         # Tags
-        from lib.cuckoo.common.dist_db import Machine, Node
+        from lib.cuckoo.common.dist_db import Machine, Node, create_session
         from lib.cuckoo.common.dist_db import Task as DTask
-        from lib.cuckoo.common.dist_db import create_session
 
         HAVE_DIST = True
         dist_session = create_session(dist_conf.distributed.db)
@@ -1295,7 +1294,7 @@ def _malwarebazaar_dl(hash):
     except pyzipper.zipfile.BadZipFile:
         print(data.content)
     except Exception as e:
-        logging.error(e, exc_info=True)
+        log.exception(e)
 
     return sample
 
@@ -1501,7 +1500,7 @@ def submit_task(
             filename=filename,
         )
     if not task_id:
-        log.warn("Error adding CAPE task to database: %s", package)
+        log.warning("Error adding CAPE task to database: %s", package)
         return task_id
     log.info('CAPE detection on file "%s": %s - added as CAPE task with ID %s', target, package, task_id)
diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py
index a38322da31d..3b7d245ada3 100644
--- a/lib/cuckoo/core/database.py
+++ b/lib/cuckoo/core/database.py
@@ -1366,7 +1366,6 @@ def recon(
         cape=False,
         category=None,
     ):
-
         # Get file filetype to ensure self extracting archives run longer
         if not isinstance(filename, str):
             filename = bytes2str(filename)
diff --git a/lib/cuckoo/core/guest.py b/lib/cuckoo/core/guest.py
index 8c268b72ecd..4614d658681 100644
--- a/lib/cuckoo/core/guest.py
+++ b/lib/cuckoo/core/guest.py
@@ -388,7 +388,7 @@ def wait_for_completion(self):
                 )
                 continue
             except Exception as e:
-                log.error("Task #%s: Virtual machine %s /status failed. %s", self.task_id, self.vmid, e, exc_info=True)
+                log.exception("Task #%s: Virtual machine %s /status failed. %s", self.task_id, self.vmid, e)
                 continue
 
             if status["status"] in ("complete", "failed"):
diff --git a/lib/cuckoo/core/resultserver.py b/lib/cuckoo/core/resultserver.py
index 4d952001a9e..6407799c702 100644
--- a/lib/cuckoo/core/resultserver.py
+++ b/lib/cuckoo/core/resultserver.py
@@ -383,7 +383,6 @@ def parse_message(self, buffer):
             argdict = {argnames[i]: converters[i](arg) for i, arg in enumerate(args)}
 
         if apiname == "__process__":
-
             # pid = argdict["ProcessIdentifier"]
             ppid = argdict["ParentProcessIdentifier"]
             modulepath = argdict["ModulePath"]
@@ -464,7 +463,7 @@ def create_folders(self):
             try:
                 create_folder(self.storagepath, folder=folder.decode())
             except Exception as e:
-                log.error(e, exc_info=True)
+                log.exception(e)
                 # ToDo
                 # except CuckooOperationalError as e:
                 #     log.error("Unable to create folder %s", folder)
@@ -511,7 +510,7 @@ def handle(self, sock, addr):
             with protocol:
                 protocol.handle()
         except CuckooOperationalError as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
         finally:
             with self.task_mgmt_lock:
                 s.discard(ctx)
diff --git a/modules/machinery/az.py b/modules/machinery/az.py
index fba0172b769..4826e33c182 100644
--- a/modules/machinery/az.py
+++ b/modules/machinery/az.py
@@ -24,7 +24,6 @@
 
     HAVE_AZURE = True
 except ImportError:
-
     print("Missing machinery-required libraries.")
     print("poetry run pip install azure-identity msrest msrestazure azure-mgmt-compute azure-mgmt-network")
 
@@ -329,11 +328,9 @@ def _process_pre_existing_vmsss(self):
 
         # Delete incorrectly named VMSSs or mark them as existing
         for vmss in existing_vmsss:
-
             # If a VMSS does not have any tags or does not have the tag that we use to indicate that it is used for
             # Cuckoo (AUTO_SCALE_CAPE key-value pair), ignore
             if not vmss.tags or not vmss.tags.get(Azure.AUTO_SCALE_CAPE_KEY) == Azure.AUTO_SCALE_CAPE_VALUE:
-
                 # Ignoring... unless! They have one of the required names of the VMSSs that we are going to create
                 if vmss.name in self.required_vmsss.keys():
                     async_delete_vmss = Azure._azure_api_call(
@@ -663,7 +660,7 @@ def _add_machines_to_db(self, vmss_name):
                     self.delete_machine(vm)
                 raise
         except Exception as e:
-            log.error(repr(e), exc_info=True)
+            log.exception(repr(e))
 
         # If no machines on any VMSSs are in the db when we leave this method, CAPE will crash.
         if not self.machines() and self.required_vmsss[vmss_name]["retries"] > 0:
@@ -735,16 +732,16 @@ def _thr_wait_for_ready_machine(machine_name, machine_ip):
                 # We did it!
                 break
             except socket.timeout:
-                log.debug(f"{machine_name}: Initializing...")
+                log.debug("%s: Initializing...", machine_name)
            except socket.error:
-                log.debug(f"{machine_name}: Initializing...")
+                log.debug("%s: Initializing...", machine_name)
            if (timeit.default_timer() - start) >= timeout:
                # We didn't do it :(
                raise CuckooGuestCriticalTimeout(
-                    f"Machine {machine_name}: the guest initialization hit the critical timeout, analysis aborted."
+                    "Machine %s: the guest initialization hit the critical timeout, analysis aborted." % machine_name
                 )
             time.sleep(10)
-        log.debug(f"Machine {machine_name} was created and available in {round(timeit.default_timer() - start)}s")
+        log.debug("Machine %s was created and available in %d s", machine_name, round(timeit.default_timer() - start))
 
     @staticmethod
     def _azure_api_call(*args, **kwargs):
@@ -764,7 +761,7 @@ def _azure_api_call(*args, **kwargs):
 
         api_call = f"{operation}({args},{kwargs})"
         try:
-            log.debug(f"Trying {api_call}")
+            log.debug("Trying %s", api_call)
             results = operation(*args, **kwargs)
         except Exception as exc:
             # For ClientRequestErrors, they do not have the attribute 'error'
@@ -782,7 +779,7 @@ def _azure_api_call(*args, **kwargs):
             # Log the subscription limits
             headers = results._response.headers
             log.debug(
-                f"API Charge: {headers['x-ms-request-charge']}; Remaining Calls: {headers['x-ms-ratelimit-remaining-resource']}"
+                "API Charge: %s; Remaining Calls: %s", headers['x-ms-request-charge'], headers['x-ms-ratelimit-remaining-resource']
             )
         return results
 
@@ -915,7 +912,7 @@ def _thr_reimage_vmss(self, vmss_name):
                 )
                 _ = self._handle_poller_result(async_restart_vmss)
             else:
-                log.error(repr(e), exc_info=True)
+                log.exception(repr(e))
                 raise
         with self.db.session.begin():
             self._add_machines_to_db(vmss_name)
@@ -1058,7 +1055,7 @@ def _scale_machine_pool(self, tag, per_platform=False):
             if relevant_task_queue == initial_number_of_locked_relevant_machines == 0:
                 # The VMSS will scale in via the ScaleInPolicy.
                 machine_pools[vmss_name]["wait"] = True
-                log.debug(f"System is at rest, scale down {vmss_name} capacity and delete machines.")
+                log.debug("System is at rest, scale down %s capacity and delete machines.", vmss_name)
             # System is not at rest, but task queue is 0, therefore set machines in use to delete
             elif relevant_task_queue == 0:
                 machine_pools[vmss_name]["is_scaling_down"] = True
@@ -1079,7 +1076,7 @@ def _scale_machine_pool(self, tag, per_platform=False):
 
                     # We don't want to be stuck in this for longer than the timeout specified
                     if (timeit.default_timer() - start_time) > AZURE_TIMEOUT:
-                        log.debug(f"Breaking out of the while loop within the scale down section for {vmss_name}.")
+                        log.debug("Breaking out of the while loop within the scale down section for %s.", vmss_name)
                         break
                     # Get the updated number of relevant machines required
                     relevant_task_queue = self._get_number_of_relevant_tasks(tag)
@@ -1144,7 +1141,7 @@ def _scale_machine_pool(self, tag, per_platform=False):
                 return
 
             timediff = timeit.default_timer() - start_time
-            log.debug(f"The scaling of {vmss_name} took {round(timediff)}s")
+            log.debug("The scaling of %s took %d s", vmss_name, round(timediff))
             machine_pools[vmss_name]["size"] = number_of_relevant_machines_required
 
             # Alter the database based on if we scaled up or down
@@ -1159,13 +1156,13 @@ def _scale_machine_pool(self, tag, per_platform=False):
             machine_pools[vmss_name]["is_scaling"] = False
             if platform:
                 is_platform_scaling[platform] = False
-            log.debug(f"Scaling {vmss_name} has completed.")
+            log.debug("Scaling %s has completed.", vmss_name)
         except Exception as exc:
             machine_pools[vmss_name]["wait"] = False
             machine_pools[vmss_name]["is_scaling"] = False
             if platform:
                 is_platform_scaling[platform] = False
-            log.error(repr(exc), exc_info=True)
+            log.exception(repr(exc))
             log.debug(f"Scaling {vmss_name} has completed with errors {exc!r}.")
 
     @staticmethod
@@ -1182,7 +1179,7 @@ def _handle_poller_result(lro_poller_object):
             raise CuckooMachineError(repr(e))
         time_taken = timeit.default_timer() - start_time
         if time_taken >= AZURE_TIMEOUT:
-            raise CuckooMachineError(f"The task took {round(time_taken)}s to complete! Bad Azure!")
+            raise CuckooMachineError("The task took %ds to complete! Bad Azure!" % round(time_taken))
         else:
             return lro_poller_result
 
@@ -1306,7 +1303,7 @@ def _thr_reimage_list_reader(self):
                         operation=self.compute_client.virtual_machine_scale_sets.begin_reimage_all,
                     )
                 except Exception as exc:
-                    log.error(repr(exc), exc_info=True)
+                    log.exception(repr(exc))
                     # If InvalidParameter: 'The provided instanceId x is not an active Virtual Machine Scale Set VM instanceId.
                     # This means that the machine has been deleted
                     # If BadRequest: The VM x creation in Virtual Machine Scale Set <vmss_name> with ephemeral disk is not complete. Please trigger a restart if required'
@@ -1368,7 +1365,7 @@ def _thr_reimage_list_reader(self):
                         f"{'S' if reimaged else 'Uns'}uccessfully reimaging instances {instance_ids} in {vmss_to_reimage} took {round(timediff)}s"
                     )
             except Exception as e:
-                log.error(f"Exception occurred in the reimage thread: {e}. Trying again...")
+                log.error("Exception occurred in the reimage thread: %s. Trying again...", str(e))
 
     def _thr_delete_list_reader(self):
         global current_vmss_operations
@@ -1411,7 +1408,7 @@ def _thr_delete_list_reader(self):
                         operation=self.compute_client.virtual_machine_scale_sets.begin_delete_instances,
                     )
                 except Exception as exc:
-                    log.error(repr(exc), exc_info=True)
+                    log.exception(repr(exc))
                     with current_operations_lock:
                         current_vmss_operations -= 1
                     with vms_currently_being_deleted_lock:
@@ -1431,7 +1428,7 @@ def _thr_delete_list_reader(self):
                 if self.initializing and deleted:
                     # All machines should have been removed from the db and the VMSS at this point.
                     # To force the VMSS to scale to initial_pool_size, set the size to zero here.
-                    log.debug(f"Setting size to 0 for VMSS {vmss_to_delete_from} after successful deletion")
+                    log.debug("Setting size to 0 for VMSS %s after successful deletion", vmss_to_delete_from)
                     machine_pools[vmss_to_delete_from]["size"] = 0
 
                 with vms_currently_being_deleted_lock:
@@ -1444,4 +1441,4 @@ def _thr_delete_list_reader(self):
                     f"{'S' if deleted else 'Uns'}uccessfully deleting instances {instance_ids} in {vmss_to_delete_from} took {round(timeit.default_timer() - start_time)}s"
                 )
             except Exception as e:
-                log.error(f"Exception occurred in the delete thread: {e}. Trying again...")
+                log.error("Exception occurred in the delete thread: %s. Trying again...", str(e))
diff --git a/modules/machinery/vsphere.py b/modules/machinery/vsphere.py
index 600d19499f4..692d2566aa6 100644
--- a/modules/machinery/vsphere.py
+++ b/modules/machinery/vsphere.py
@@ -88,7 +88,7 @@ def _initialize_check(self):
 
             sslContext = ssl._create_unverified_context()
             self.connect_opts["sslContext"] = sslContext
-            log.warn("Turning off SSL certificate verification!")
+            log.warning("Turning off SSL certificate verification!")
 
         # Check that a snapshot is configured for each machine
         # and that it was taken in a powered-on state
diff --git a/modules/processing/behavior.py b/modules/processing/behavior.py
index 394a48f6842..1a95b1e8553 100644
--- a/modules/processing/behavior.py
+++ b/modules/processing/behavior.py
@@ -309,7 +309,7 @@ def _parse(self, row):
                 try:
                     argument["value"] = convert_to_printable(arg_value, self.conversion_cache)
                 except Exception:
-                    log.error(arg_value, exc_info=True)
+                    log.exception(arg_value)
                     continue
             if not self.reporting_mode:
                 if isinstance(arg_value_raw, bytes):
diff --git a/modules/processing/network.py b/modules/processing/network.py
index 166a4f755d9..0477d64e6a6 100644
--- a/modules/processing/network.py
+++ b/modules/processing/network.py
@@ -1103,7 +1103,6 @@ def _import_ja3_fprints(self):
         return ja3_fprints
 
     def run(self):
-
         if not path_exists(self.pcap_path):
             log.debug('The PCAP file does not exist at path "%s"', self.pcap_path)
             return {}
diff --git a/modules/processing/parsers/CAPE/Snake.py b/modules/processing/parsers/CAPE/Snake.py
deleted file mode 100644
index 50356034693..00000000000
--- a/modules/processing/parsers/CAPE/Snake.py
+++ /dev/null
@@ -1,174 +0,0 @@
-import base64
-import hashlib
-import logging
-import re
-import traceback
-
-import dnfile
-
-try:
-    from Cryptodome.Cipher import DES
-    from Cryptodome.Util.Padding import unpad
-except ModuleNotFoundError:
-    raise ModuleNotFoundError("Please run: pip3 install pycryptodomex")
-
-log = logging.getLogger(__name__)
-log.setLevel(logging.INFO)
-
-
-def is_base64(s):
-    pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$")
-    if not s or len(s) < 1:
-        return False
-    else:
-        return pattern.match(s)
-
-
-def pad(text):
-    n = len(text) % 8
-    return text + (b" " * n)
-
-
-def md5(string: bytes) -> bytes:
-    return bytes.fromhex(hashlib.md5(string).hexdigest())
-
-
-def handle_plain(dotnet_file, c2_type, user_strings):
-    user_strings_list = list(user_strings.values())
-    if c2_type == "Telegram":
-        token = dotnet_file.net.user_strings.get(user_strings_list[15]).value.__str__()
-        chat_id = dotnet_file.net.user_strings.get(user_strings_list[16]).value.__str__()
-        return {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"}
-    elif c2_type == "SMTP":
-        smtp_from = dotnet_file.net.user_strings.get(user_strings_list[7]).value.__str__()
-        smtp_password = dotnet_file.net.user_strings.get(user_strings_list[8]).value.__str__()
-        smtp_host = dotnet_file.net.user_strings.get(user_strings_list[9]).value.__str__()
-        smtp_to = dotnet_file.net.user_strings.get(user_strings_list[10]).value.__str__()
-        smtp_port = dotnet_file.net.user_strings.get(user_strings_list[11]).value.__str__()
-        return {
-            "Type": "SMTP",
-            "Host": smtp_host,
-            "Port": smtp_port,
-            "From Address": smtp_from,
-            "To Address": smtp_to,
-            "Password": smtp_password,
-        }
-    elif c2_type == "FTP":
-        ftp_username = dotnet_file.net.user_strings.get(user_strings_list[12]).value.__str__()
-        ftp_password = dotnet_file.net.user_strings.get(user_strings_list[13]).value.__str__()
-        ftp_host = dotnet_file.net.user_strings.get(user_strings_list[14]).value.__str__()
-        return {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password}
-
-
-def handle_encrypted(dotnet_file, data, c2_type, user_strings):
-    # Match decrypt string pattern
-    decrypt_string_pattern = re.compile(
-        Rb"""(?x)
-        \x72(...)\x70
-        \x7E(...)\x04
-        \x28...\x06
-        \x80...\x04
-        """
-    )
-
-    config_dict = None
-    decrypted_strings = []
-
-    matches2 = decrypt_string_pattern.findall(data)
-    for match in matches2:
-        string_index = int.from_bytes(match[0], "little")
-        user_string = dotnet_file.net.user_strings.get(string_index).value
-        # Skip user strings that are empty/not base64
-        if user_string == "Yx74dJ0TP3M=" or not is_base64(user_string):
-            continue
-        field_row_index = int.from_bytes(match[1], "little")
-        field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_row_index).Name.__str__()
-        key_index = user_strings[field_name]
-        key_str = dotnet_file.net.user_strings.get(key_index).value.__str__()
-        key = md5(key_str.encode())[:8]
-        des = DES.new(key, DES.MODE_ECB)
-
-        decoded_str = base64.b64decode(user_string)
-        padded_str = pad(decoded_str)
-        decrypted_text = des.decrypt(padded_str)
-        plaintext_bytes = unpad(decrypted_text, DES.block_size)
-        plaintext = plaintext_bytes.decode()
-        decrypted_strings.append(plaintext)
-
-    if decrypted_strings:
-        if c2_type == "Telegram":
-            token, chat_id = decrypted_strings
-            config_dict = {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"}
-        elif c2_type == "SMTP":
-            smtp_from, smtp_password, smtp_host, smtp_to, smtp_port = decrypted_strings
-            config_dict = {
-                "Type": "SMTP",
-                "Host": smtp_host,
-                "Port": smtp_port,
-                "From Address": smtp_from,
-                "To Address": smtp_to,
-                "Password": smtp_password,
-            }
-        elif c2_type == "FTP":
-            ftp_username, ftp_password, ftp_host = decrypted_strings
-            config_dict = {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password}
-    return config_dict
-
-
-def extract_config(data):
-
-    try:
-        dotnet_file = dnfile.dnPE(data=data)
-    except Exception as e:
-        log.debug(f"Exception when attempting to parse .NET file: {e}")
-        log.debug(traceback.format_exc())
-
-    # ldstr, stsfld
-    static_strings = re.compile(
-        Rb"""(?x)
-        \x72(...)\x70
-        \x80(...)\x04
-        """
-    )
-
-    # Get user strings and C2 type
-    user_strings = {}
-    c2_type = None
-    matches = static_strings.findall(data)
-    for match in matches:
-        try:
-            string_index = int.from_bytes(match[0], "little")
-            string_value = dotnet_file.net.user_strings.get(string_index).value.__str__()
-            field_index = int.from_bytes(match[1], "little")
-            field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_index).Name.__str__()
-            if string_value == "$%TelegramDv$":
-                c2_type = "Telegram"
-
-            elif string_value == "$%SMTPDV$":
-                c2_type = "SMTP"
-
-            elif string_value == "%FTPDV$":
-                c2_type = "FTP"
-            else:
-                user_strings[field_name] = string_index
-        except Exception as e:
-            log.debug(f"There was an exception parsing user strings: {e}")
-            log.debug(traceback.format_exc())
-
-    if c2_type is None:
-        raise ValueError("Could not identify C2 type.")
-
-    # Handle encrypted strings
-    config_dict = handle_encrypted(dotnet_file, data, c2_type, user_strings)
-    if config_dict is None:
-        # Handle plain strings
-        config_dict = handle_plain(dotnet_file, c2_type, user_strings)
-
-    return config_dict
-
-
-if __name__ == "__main__":
-    import sys
sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/reporting/maec5.py b/modules/reporting/maec5.py index bb237e251c7..603dbaddf7d 100644 --- a/modules/reporting/maec5.py +++ b/modules/reporting/maec5.py @@ -284,7 +284,6 @@ def add_dropped_files(self): # Grab list of all dropped files- remember # package['observable_objects'] is a dict where the key is object-ID for f in self.results["dropped"]: - # Create a new Malware Instance for each dropped file malwareInstance = self.create_malware_instance(f) diff --git a/modules/reporting/mongodb.py b/modules/reporting/mongodb.py index 6a0e82aec2b..f6a8d804693 100644 --- a/modules/reporting/mongodb.py +++ b/modules/reporting/mongodb.py @@ -167,12 +167,12 @@ def run(self, results): for j, parent_dict in enumerate(report[parent_key]): child_key, csize = self.debug_dict_size(parent_dict)[0] if csize > size_filter: - log.warn("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) + log.warning("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) del report[parent_key][j][child_key] else: child_key, csize = self.debug_dict_size(report[parent_key])[0] if csize > size_filter: - log.warn("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) + log.warning("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) del report[parent_key][child_key] try: mongo_insert_one("analysis", report) diff --git a/modules/reporting/tmpfsclean.py b/modules/reporting/tmpfsclean.py index 92e5b25e5f0..55b5e3e06f8 100644 --- a/modules/reporting/tmpfsclean.py +++ b/modules/reporting/tmpfsclean.py @@ -10,6 +10,7 @@ class TMPFSCLEAN(Report): "Remove/save memdump" + order = 9998 def run(self, results): diff --git a/modules/signatures/CAPE.py b/modules/signatures/CAPE.py index e03e40b0f0c..45b644d2a9c 100644 --- a/modules/signatures/CAPE.py +++ b/modules/signatures/CAPE.py @@ -140,7 +140,6 @@ def __init__(self, *args, **kwargs): filter_apinames = set(["NtAllocateVirtualMemory", "NtProtectVirtualMemory", "VirtualProtectEx"]) def on_call(self, call, process): - if process["process_name"] in ("WINWORD.EXE", "EXCEL.EXE", "POWERPNT.EXE"): return False if call["api"] == "NtAllocateVirtualMemory": @@ -571,7 +570,6 @@ def __init__(self, *args, **kwargs): self.transacted_hollowing = False def on_call(self, call, process): - if call["api"] == "RtlSetCurrentTransaction": self.transaction_set = True diff --git a/pyproject.toml b/pyproject.toml index c11a79ff695..57938c4efc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -111,13 +111,6 @@ httpretty = "^1.1.4" func-timeout = "^4.3.5" pre-commit = "^2.19.0" -[tool.ruff] -select = ["E", "F"] -ignore = ["E402","E501"] -exclude = [ - "./analyzer/linux/dbus_next", -] - [tool.black] line-length = 132 include = "\\.py(_disabled)?$" @@ -143,6 +136,44 @@ norecursedirs = "tests/zip_compound" requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" -[lint] -select = ["E", "F"] -ignore = ["E402","E501"] +[tool.ruff] +line-length = 132 +exclude = [ + "./analyzer/linux/dbus_next", +] + +[tool.ruff.lint] +select = [ + "F", # pyflakes + "E", # pycodestyle errors + "W", # pycodestyle warnings + "I", # isort + # "N", # pep8-naming + "G", # flake8-logging-format +] + +ignore = [ + "E501", # ignore due to conflict with formatter + "N818", # exceptions don't need the Error suffix + "E741", # allow ambiguous variable names + "E402", +] + +fixable = ["ALL"] + +[tool.ruff.lint.per-file-ignores] +"stubs/*" = [ + 
"N", # naming conventions don't matter in stubs + "F403", # star imports are okay in stubs + "F405", # star imports are okay in stubs +] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[tool.ruff.lint.isort] +known-first-party = ["libqtile", "test"] +default-section = "third-party" diff --git a/tests/integrity.py b/tests/integrity.py index b36bba3a635..d725de75b71 100644 --- a/tests/integrity.py +++ b/tests/integrity.py @@ -9,6 +9,7 @@ that there are no remaining tasks in the queue this utility will clean the entire database before starting various analyses. """ + import argparse import json import logging diff --git a/tests/test_objects.py b/tests/test_objects.py index d6859890375..8752f4b02b6 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -82,7 +82,7 @@ def test_get_ssdeep(self, empty_file): assert empty_file["file"].get_ssdeep() is not None except ImportError: assert empty_file["file"].get_ssdeep() is None - logging.warn("Need to install pydeep python module") + logging.warning("Need to install pydeep python module") def test_get_type(self, empty_file): assert empty_file["file"].get_type() == "empty" diff --git a/tests/test_tls_utils.py b/tests/test_tls_utils.py index 5d6a166644a..c0e2d5f48c4 100644 --- a/tests/test_tls_utils.py +++ b/tests/test_tls_utils.py @@ -15,7 +15,6 @@ class TestTlsUtils: - def test_tlslog_to_sslkeylogfile(self, tmpdir): input_log = f"{tmpdir}/tlsdump.log" dest_log = f"{tmpdir}/sslkeys.log" diff --git a/utils/community.py b/utils/community.py index 80bf41d50cc..e6902f95a85 100644 --- a/utils/community.py +++ b/utils/community.py @@ -57,9 +57,10 @@ def flare_capa(proxy=None): path_mkdir(capa_sigs_path) for url in signature_urls: signature_name = url.rsplit("/", 1)[-1] - with http.request("GET", url, preload_content=False) as sig, open( - os.path.join(capa_sigs_path, signature_name), "wb" - ) as out_sig: + with ( + http.request("GET", url, preload_content=False) as sig, + open(os.path.join(capa_sigs_path, signature_name), "wb") as out_sig, + ): shutil.copyfileobj(sig, out_sig) print("[+] FLARE CAPA rules/signatures installed") diff --git a/utils/db_migration/versions/2_3_1_square_hammer.py b/utils/db_migration/versions/2_3_1_square_hammer.py index 10c6d2d2efa..f0f3cb81f70 100644 --- a/utils/db_migration/versions/2_3_1_square_hammer.py +++ b/utils/db_migration/versions/2_3_1_square_hammer.py @@ -9,6 +9,7 @@ Create Date: 2021-05-02 18:24:43.075702 """ + from contextlib import suppress # revision identifiers, used by Alembic. diff --git a/utils/db_migration/versions/add_shrike_and_parent_id_columns.py b/utils/db_migration/versions/add_shrike_and_parent_id_columns.py index e33fbb91bbf..63acf17615c 100644 --- a/utils/db_migration/versions/add_shrike_and_parent_id_columns.py +++ b/utils/db_migration/versions/add_shrike_and_parent_id_columns.py @@ -9,6 +9,7 @@ Create Date: 2015-03-29 08:43:11.468664 """ + # revision identifiers, used by Alembic. revision = "f111620bb8" down_revision = "4b09c454108c" diff --git a/utils/db_migration/versions/add_task_tlp.py b/utils/db_migration/versions/add_task_tlp.py index 85ab54d9bd8..76108b3e1cd 100644 --- a/utils/db_migration/versions/add_task_tlp.py +++ b/utils/db_migration/versions/add_task_tlp.py @@ -9,6 +9,7 @@ Create Date: 2020-04-10 12:17:18.530901 """ + # revision identifiers, used by Alembic. 
revision = "7331c4d994fd" down_revision = "30d0230de7cd" diff --git a/utils/dist.py b/utils/dist.py index 365115f5387..ef57b959b21 100644 --- a/utils/dist.py +++ b/utils/dist.py @@ -51,9 +51,10 @@ TASK_REPORTED, TASK_RUNNING, Database, + _Database, + init_database, ) from lib.cuckoo.core.database import Task as MD_Task -from lib.cuckoo.core.database import _Database, init_database dist_conf = Config("distributed") main_server_name = dist_conf.distributed.get("main_server_name", "master") @@ -228,7 +229,7 @@ def _delete_many(node, ids, nodes, db): try: url = os.path.join(nodes[node].url, "tasks", "delete_many/") apikey = nodes[node].apikey - log.debug("Removing task id(s): {0} - from node: {1}".format(ids, nodes[node].name)) + log.debug("Removing task id(s): %s - from node: %s", ids, nodes[node].name) res = requests.post( url, headers={"Authorization": f"Token {apikey}"}, @@ -236,7 +237,7 @@ def _delete_many(node, ids, nodes, db): verify=False, ) if res and res.status_code != 200: - log.info("{} - {}".format(res.status_code, res.content)) + log.info("%d - %s", res.status_code, res.content) db.rollback() except Exception as e: @@ -314,7 +315,7 @@ def node_submit_task(task_id, node_id, main_task_id): files = dict(file=open(task.path, "rb")) r = requests.post(url, data=data, files=files, headers={"Authorization": f"Token {apikey}"}, verify=False) else: - log.debug("Target category is: {}".format(task.category)) + log.debug("Target category is: %s", task.category) db.close() return @@ -490,7 +491,7 @@ def failed_cleaner(self): db = session() while True: for node in db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).filter_by(enabled=True).all(): - log.info("Checking for failed tasks on: {}".format(node.name)) + log.info("Checking for failed tasks on: %s", node.name) for task in node_fetch_tasks("failed_analysis|failed_processing", node.url, node.apikey, action="delete"): t = db.query(Task).filter_by(task_id=task["id"], node_id=node.id).order_by(Task.id.desc()).first() if t is not None: @@ -576,7 +577,7 @@ def fetcher(self): """ except Exception as e: self.status_count[node.name] += 1 - log.error(e, exc_info=True) + log.exception(e) if self.status_count[node.name] == dead_count: log.info("[-] {} dead".format(node.name)) # node_data = db.query(Node).filter_by(name=node.name).first() @@ -925,7 +926,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p if "timeout=" in t.options: t.timeout = options.get("timeout", 0) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) # wtf are you doing in pendings? 
                 tasks = db.query(Task).filter_by(main_task_id=t.id).all()
                 if tasks:
@@ -1188,7 +1189,7 @@ def run(self):
                 continue
             db.commit()
         except Exception as e:
-            log.error("Got an exception when trying to check nodes status and submit tasks: {}.".format(e), exc_info=True)
+            log.exception("Got an exception when trying to check nodes status and submit tasks: %s.", e)
            # ToDo hard test this rollback, this normally only happens on db restart and similar
            db.rollback()
diff --git a/utils/fstab.py b/utils/fstab.py
index a4758b44a9f..75c3ce9c382 100644
--- a/utils/fstab.py
+++ b/utils/fstab.py
@@ -69,7 +69,6 @@ def add_nfs_entry(hostname: str, worker_folder: str):
 
 
 def remove_nfs_entry(hostname: str):
-
     worker_path = os.path.join(CUCKOO_ROOT, dist_conf.NFS.mount_folder, hostname)
 
     with lock:
diff --git a/utils/process.py b/utils/process.py
index 5b019fca31a..8805691de65 100644
--- a/utils/process.py
+++ b/utils/process.py
@@ -289,10 +289,10 @@ def processing_finished(future):
         log.error("[%d] Processing Timeout %s. Function: %s", task_id, error, error.args[1])
         Database().set_status(task_id, TASK_FAILED_PROCESSING)
     except pebble.ProcessExpired as error:
-        log.error("[%d] Exception when processing task: %s", task_id, error, exc_info=True)
+        log.exception("[%d] Exception when processing task: %s", task_id, error)
         Database().set_status(task_id, TASK_FAILED_PROCESSING)
     except Exception as error:
-        log.error("[%d] Exception when processing task: %s", task_id, error, exc_info=True)
+        log.exception("[%d] Exception when processing task: %s", task_id, error)
         Database().set_status(task_id, TASK_FAILED_PROCESSING)
 
     pending_future_map.pop(future)
diff --git a/utils/route.py b/utils/route.py
index 16517d47d3e..5bbf62726ff 100755
--- a/utils/route.py
+++ b/utils/route.py
@@ -1,21 +1,21 @@
 #!/usr/bin/python
 """
-    Aux script for VPN setup
-
-    Get a look on utils/vpn2cape.py
-    Example:
-        /etc/iproute2/rt_tables
-            5 host1
-            6 host2
-            7 host3
-
-        conf/routing.conf
-        [vpn5]
-        name = X.ovpn
-        description = X
-        interface = tunX
-        rt_table = host1
+Aux script for VPN setup
+
+Get a look on utils/vpn2cape.py
+Example:
+    /etc/iproute2/rt_tables
+        5 host1
+        6 host2
+        7 host3
+
+    conf/routing.conf
+    [vpn5]
+    name = X.ovpn
+    description = X
+    interface = tunX
+    rt_table = host1
 """
 
 import os
diff --git a/web/analysis/templatetags/analysis_tags.py b/web/analysis/templatetags/analysis_tags.py
index bf75af0fe6d..791dfffcd2b 100644
--- a/web/analysis/templatetags/analysis_tags.py
+++ b/web/analysis/templatetags/analysis_tags.py
@@ -71,7 +71,6 @@ def get_detection_by_pid(dictionary, key):
         return
     detections = dictionary.get(str(key), "")
     if detections:
-
         if len(detections) > 1:
             output = " -> ".join([malware_name_url_pattern.format(malware_name=name) for name in detections])
         else:
diff --git a/web/analysis/urls.py b/web/analysis/urls.py
index 46f5ed47c96..540ab467f82 100644
--- a/web/analysis/urls.py
+++ b/web/analysis/urls.py
@@ -2,9 +2,10 @@
 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
 # See the file "docs/LICENSE" for copying permission.
 
-from analysis import views
 from django.urls import re_path
 
+from analysis import views
+
 urlpatterns = [
     re_path(r"^$", views.index, name="analysis"),
     re_path(r"^page/(?P<page>\d+)/$", views.index, name="index"),
diff --git a/web/apiv2/urls.py b/web/apiv2/urls.py
index aa512323add..bfe7b616595 100644
--- a/web/apiv2/urls.py
+++ b/web/apiv2/urls.py
@@ -2,12 +2,12 @@
 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
 # See the file "docs/LICENSE" for copying permission.
-from apiv2 import views
-
 # from django.conf.urls import include
 from django.urls import path, re_path
 from rest_framework.authtoken.views import obtain_auth_token
 
+from apiv2 import views
+
 urlpatterns = [
     re_path(r"^$", views.index, name="apiv2"),
     # disabled due to token auth
diff --git a/web/apiv2/views.py b/web/apiv2/views.py
index 882f4005cd0..b89b60fc14c 100644
--- a/web/apiv2/views.py
+++ b/web/apiv2/views.py
@@ -1265,7 +1265,7 @@ def tasks_report(request, task_id, report_format="json", make_zip=False):
                     else:
                         zf.write(filepath, filedir)
         except Exception as e:
-            log.error(e, exc_info=True)
+            log.exception(e)
 
     # exception for lite report that is under reports/lite.json
     if report_format.lower() == "lite":
@@ -2455,7 +2455,7 @@ def _stream_iterator(fp, guest_name, chunk_size=1024):
             return Response(resp)
         return StreamingHttpResponse(streaming_content=r.iter_content(chunk_size=1024), content_type="application/octet-stream")
     except requests.exceptions.RequestException as ex:
-        log.error(ex, exc_info=True)
+        log.exception(ex)
         resp = {"error": True, "error_value": f"Requests exception: {ex}"}
         return Response(resp)
diff --git a/web/compare/urls.py b/web/compare/urls.py
index 3b32527f6f1..610c56b9c02 100644
--- a/web/compare/urls.py
+++ b/web/compare/urls.py
@@ -2,9 +2,10 @@
 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
 # See the file "docs/LICENSE" for copying permission.
 
-from compare import views
 from django.urls import re_path
 
+from compare import views
+
 urlpatterns = [
     re_path(r"^(?P<left_id>\d+)/$", views.left, name="compare_left"),
     re_path(r"^(?P<left_id>\d+)/(?P<right_id>\d+)/$", views.both, name="compare_both"),
diff --git a/web/dashboard/urls.py b/web/dashboard/urls.py
index ff2e6b25406..de7ca558275 100644
--- a/web/dashboard/urls.py
+++ b/web/dashboard/urls.py
@@ -2,9 +2,10 @@
 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
 # See the file "docs/LICENSE" for copying permission.
 
-from dashboard import views
 from django.urls import re_path
 
+from dashboard import views
+
 urlpatterns = [
     re_path(r"^$", views.index),
 ]
diff --git a/web/guac/urls.py b/web/guac/urls.py
index bf0bb539242..dbf41e6f560 100644
--- a/web/guac/urls.py
+++ b/web/guac/urls.py
@@ -1,4 +1,5 @@
 from django.urls import re_path
+
 from guac import views
 
 urlpatterns = [
diff --git a/web/submission/urls.py b/web/submission/urls.py
index 756d8efc0e8..a2b2f8fde69 100644
--- a/web/submission/urls.py
+++ b/web/submission/urls.py
@@ -3,6 +3,7 @@
 # See the file 'docs/LICENSE' for copying permission.
 
 from django.urls import re_path
+
 from submission import views
 
 urlpatterns = [
diff --git a/web/users/migrations/0001_initial.py b/web/users/migrations/0001_initial.py
index 04677ea1c82..e7631ace34b 100644
--- a/web/users/migrations/0001_initial.py
+++ b/web/users/migrations/0001_initial.py
@@ -4,7 +4,6 @@
 
 class Migration(migrations.Migration):
-
     initial = True
 
     dependencies = [
diff --git a/web/users/migrations/0002_reports.py b/web/users/migrations/0002_reports.py
index 35bc9b28e74..bdffb93d232 100644
--- a/web/users/migrations/0002_reports.py
+++ b/web/users/migrations/0002_reports.py
@@ -5,7 +5,6 @@
 
 class Migration(migrations.Migration):
-
     dependencies = [
         ("users", "0001_initial"),
     ]
diff --git a/web/users/migrations/0003_rename_field_subscription.py b/web/users/migrations/0003_rename_field_subscription.py
index 54ab307011f..11c8e6f8434 100644
--- a/web/users/migrations/0003_rename_field_subscription.py
+++ b/web/users/migrations/0003_rename_field_subscription.py
@@ -24,7 +24,6 @@ def reverse_migrate(apps, schema_editor):
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
         ("users", "0002_reports"),
     ]
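
Note on the recurring logging changes in this diff: `log.warn` is a deprecated alias of `log.warning`; `log.exception(...)` called from an `except` block is equivalent to `log.error(..., exc_info=True)`, so the traceback is still recorded; and eager `str.format()` calls are replaced with %-style arguments so the message is only rendered if the record is actually emitted, which is what ruff's flake8-logging-format ("G") rules enforce. A minimal sketch of the before/after pattern follows; the `process_sample` helper and logger name are illustrative only, not part of the codebase:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("demo")


def process_sample(path):
    # Hypothetical helper that always fails, so the except block runs.
    raise OSError(f"cannot read {path}")


try:
    process_sample("/tmp/sample.bin")
except Exception as e:
    # Before: message formatted eagerly, even when the level would discard
    # it, and exc_info must be passed explicitly.
    log.error("Exception processing {}: {}".format("/tmp/sample.bin", e), exc_info=True)

    # After: lazy %-style arguments, and log.exception() attaches the active
    # traceback automatically (it is log.error with exc_info=True).
    log.exception("Exception processing %s: %s", "/tmp/sample.bin", e)
```

Both calls produce the same traceback in the output; the second form is shorter, defers string interpolation to the logging framework, and passes the G010/G201-style checks enabled in the new `[tool.ruff.lint]` section.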