Commit 2f25345

sync (#2490)

doomedraven authored Feb 12, 2025
1 parent df2152c commit 2f25345

Showing 3 changed files with 43 additions and 32 deletions.
52 changes: 29 additions & 23 deletions lib/cuckoo/common/admin_utils.py
@@ -25,9 +25,16 @@

try:
from paramiko import AutoAddPolicy, ProxyCommand, SSHClient, SSHConfig
from paramiko.ssh_exception import AuthenticationException, BadHostKeyException, PasswordRequiredException, ProxyCommandFailure
from paramiko.ssh_exception import (
AuthenticationException,
BadHostKeyException,
PasswordRequiredException,
ProxyCommandFailure,
SSHException,
)
from scp import SCPClient, SCPException


conf = SSHConfig()
conf.parse(open(os.path.expanduser("~/.ssh/config")))

@@ -36,11 +43,13 @@
print("poetry run pip install -U paramiko scp")
HAVE_PARAMIKO = False


from lib.cuckoo.common.colors import green, red
from lib.cuckoo.common.sshclient import SSHJumpClient
from utils.community_blocklist import blocklist

try:
from admin_conf import ( # POSTPROCESS,
from admin_conf import (
CAPE_DIST_URL,
CAPE_PATH,
EXCLUDE_CAPE_FILES,
@@ -58,8 +67,6 @@
except ModuleNotFoundError:
sys.exit("[-] You need to create admin_conf.py, see admin_conf.py_example")

# Only needed when jumping over nodes
from lib.cuckoo.common.sshclient import SSHJumpClient

# this is bad, but getLogger doesn't work; it can cause duplicated log entries if used outside
logging.basicConfig(level=logging.INFO)
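
For context, a minimal sketch of the optional-dependency pattern this hunk converges on: guard the paramiko/scp imports, parse ~/.ssh/config up front, and fall back cleanly when the libraries are missing. The `with` block and existence check are my assumptions, not the diff's — they avoid the file handle that `conf.parse(open(...))` leaks — and HAVE_PARAMIKO mirrors the module's own flag:

import os

HAVE_PARAMIKO = True
try:
    from paramiko import SSHConfig
    from paramiko.ssh_exception import SSHException  # noqa: F401 (newly needed above)

    conf = SSHConfig()
    ssh_config = os.path.expanduser("~/.ssh/config")
    if os.path.exists(ssh_config):
        # Unlike conf.parse(open(...)), this closes the handle deterministically.
        with open(ssh_config) as f:
            conf.parse(f)
except ImportError:
    print("poetry run pip install -U paramiko scp")
    HAVE_PARAMIKO = False
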
@@ -232,7 +239,6 @@ def file_recon(file, yara_category="CAPE"):
if not Path(file).exists():
return

global POSTPROCESS
LOCAL_SHA256 = False
filename = os.path.basename(file)
OWNER = "cape:cape"
@@ -245,10 +251,11 @@ def file_recon(file, yara_category="CAPE"):
if b"SignatureMock.run" in f:
return
if b"(TcrSignature):" in f or b"(Signature)" in f:
TARGET = f"{CAPE_PATH}modules/signatures/{filename}"
TARGET = f"{CAPE_PATH}custom/signatures/{filename}"
elif filename in ("loader.exe", "loader_x64.exe"):
TARGET = f"{CAPE_PATH}/analyzer/windows/bin/{filename}"
POSTPROCESS = False
elif "/binary/" in file or "/binaries/" in file:
TARGET = f"{CAPE_PATH}custom/yara/binaries/{filename}"
elif b"def _generator(self" in f:
TARGET = f"{VOL_PATH}{filename}"
OWNER = "root:staff"
@@ -274,18 +281,15 @@ def file_recon(file, yara_category="CAPE"):
TARGET = f"{CAPE_PATH}/lib/cuckoo/common/{filename}"
elif b"class Analyzer:" in f and b"class PipeHandler(Thread):" in f and b"class PipeServer(Thread):" in f:
TARGET = f"{CAPE_PATH}analyzer/windows/{filename}"
POSTPROCESS = False
elif filename in ("capemon.dll", "capemon_x64.dll"):
TARGET = f"{CAPE_PATH}analyzer/windows/dll/{filename}"
POSTPROCESS = False
# generic deployer of files
elif file.startswith("CAPEv2/"):
# Remove CAPEv2/ from path to build new path
TARGET = f"{CAPE_PATH}" + file[7:]
elif filename.endswith(".service"):
TARGET = "/lib/systemd/system/{filename}"
OWNER = "root:root"
POSTPROCESS = "systemctl daemon-reload"
elif "Extractors/StandAlone/" in file:
TARGET = f"{CAPE_PATH}custom/parsers/"
stem = "Extractors/StandAlone"
@@ -331,10 +335,10 @@ def _connect_via_jump_box(server: str, ssh_proxy: SSHClient):
server,
username=JUMP_BOX_USERNAME,
key_filename=host.get("identityfile"),
# look_for_keys=True,
# allow_agent=True,
banner_timeout=200,
look_for_keys=False,
allow_agent=True,
# disabled_algorithms=dict(pubkeys=["rsa-sha2-512", "rsa-sha2-256"]),
# port=ssh_port,
)
sockets[server] = ssh
else:
@@ -350,14 +354,14 @@ def _connect_via_jump_box(server: str, ssh_proxy: SSHClient):
server,
username=REMOTE_SERVER_USER,
key_filename=host.get("identityfile"),
# look_for_keys=False,
# allow_agent=True,
# port=ssh_port,
banner_timeout=200,
look_for_keys=False,
allow_agent=True,
sock=ProxyCommand(host.get("proxycommand")),
)
except (BadHostKeyException, AuthenticationException, PasswordRequiredException) as e:
sys.exit(
f"Connect error: {str(e)}. Also pay attention to this log for more details /var/log/auth.log and paramiko might need update"
f"Connect error: {str(e)}. Also pay attention to this log for more details /var/log/auth.log and paramiko might need update.\nAlso ensure that you have added your public ssh key to /root/.ssh/authorized_keys"
)
except ProxyCommandFailure as e:
# TODO: reconnect
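
A condensed sketch of the ProxyCommand path these hunks settle on — resolve the host in ~/.ssh/config, authenticate via the ssh-agent without scanning for key files, and handle the paramiko exception set (now including SSHException) explicitly. REMOTE_SERVER_USER stands in for the admin_conf value; everything else uses paramiko's public API as in the diff:

import os
import sys

from paramiko import AutoAddPolicy, ProxyCommand, SSHClient, SSHConfig
from paramiko.ssh_exception import AuthenticationException, BadHostKeyException, PasswordRequiredException

REMOTE_SERVER_USER = "cape"  # stand-in; the real value lives in admin_conf.py

conf = SSHConfig()
with open(os.path.expanduser("~/.ssh/config")) as f:
    conf.parse(f)

def connect_via_proxy(server: str) -> SSHClient:
    # assumes the host entry defines ProxyCommand and IdentityFile
    host = conf.lookup(server)
    ssh = SSHClient()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    try:
        ssh.connect(
            server,
            username=REMOTE_SERVER_USER,
            key_filename=host.get("identityfile"),
            banner_timeout=200,
            look_for_keys=False,  # don't scan ~/.ssh for keys
            allow_agent=True,     # but do ask the ssh-agent
            sock=ProxyCommand(host.get("proxycommand")),
        )
    except (BadHostKeyException, AuthenticationException, PasswordRequiredException) as e:
        sys.exit(f"Connect error: {e}")
    return ssh
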
@@ -375,12 +379,14 @@ def execute_command_on_all(remote_command, servers: list, ssh_proxy: SSHClient):
log.info("[+] Service %s", green("restarted successfully and is UP"))
else:
if ssh_out:
log.info(green(f"[+] {server} - {ssh_out}"))
log.info(green("[+] %s - %s", str(server.split('.')[1]), ssh_out))
else:
log.info(green(f"[+] {server}"))
log.info(green("[+] %s", str(server.split('.')[1])))
ssh.close()
except TimeoutError as e:
sys.exit(f"Did you forget to use jump box? {str(e)}")
except SSHException as e:
log.error("Can't read remote bufffer: %s", str(e))
except Exception as e:
log.exception(e)

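One note on the logging changes in this hunk: green() and red() from lib.cuckoo.common.colors wrap a single finished string in ANSI escapes (single-argument helpers, judging by their use elsewhere in this file), so lazy %-style logging arguments can't be passed through them — the string has to be formatted before it is colorized. A tiny sketch with a hypothetical server name:

import logging

from lib.cuckoo.common.colors import green

log = logging.getLogger(__name__)

server = "cape.worker1.lan"  # hypothetical
ssh_out = "OK"

# Wrong: green() takes only the text to colorize; extra args raise TypeError.
# log.info(green("[+] %s - %s", server, ssh_out))

# Works: format first, then colorize the finished string.
log.info(green("[+] %s - %s" % (server.split(".")[1], ssh_out)))
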
@@ -393,7 +399,7 @@ def bulk_deploy(files, yara_category, dry_run=False, servers: list = [], ssh_pro
files.remove(original_name)
continue

if file.endswith(("processor_tests.py", "reporter_tests.py", "admin.py")):
if file.endswith(("processor_tests.py", "reporter_tests.py", "admin.py", ".conf")):
files.remove(original_name)
continue

@@ -462,7 +468,7 @@ def deploy_file(queue, ssh_proxy: SSHClient):
_, ssh_stdout, _ = ssh.exec_command(f"sha256sum {remote_file} | cut -d' ' -f1")
remote_sha256 = ssh_stdout.read().strip().decode("utf-8")
if local_sha256 == remote_sha256:
log.info("[+] %s - Hashes are %s: %s - %s", server, green("correct"), local_sha256, remote_file)
log.info("[+] %s - Hashes are %s: %s - %s", server.split(".")[1], green("correct"), local_sha256, remote_file)
else:
log.info(
"[-] %s - Hashes are %s: \n\tLocal: %s\n\tRemote: %s - %s",
@@ -507,9 +513,9 @@ def delete_file(queue, ssh_proxy: SSHClient):
error = 1

if not error:
log.info(green(f"Completed! {remote_file}\n"))
log.info(green("Completed! %s\n", remote_file))
else:
log.info(red(f"Completed with errors. {remote_file}\n"))
log.info(red("Completed with errors. %s\n", remote_file))
queue.task_done()

return error_list
18 changes: 10 additions & 8 deletions lib/cuckoo/common/demux.py
@@ -218,19 +218,21 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True):
# check if path is not empty
if tmp_child and tmp_child[0]:
retlist.extend(tmp_child)

# child is not available; the original file should be put into the list
tmp_child = _sf_children(sf_child)
# check if path is not empty
if tmp_child and tmp_child[0]:
retlist.append(tmp_child)
if not retlist:
tmp_child = _sf_children(sf_child)
# check if path is not empty
if tmp_child and tmp_child[0]:
retlist.append(tmp_child)
else:
tmp_child = _sf_children(sf_child)
# check if path is not empty
if tmp_child and tmp_child[0]:
retlist.append(tmp_child)
except Exception as e:
log.exception(e)
return retlist, ""
return list(filter(None, retlist)), ""


def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = True, platform: str = ""): # -> tuple[bytes, str]:
Expand Down Expand Up @@ -262,7 +264,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool =
{
os.path.basename(
filename
): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option"
).decode(): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option"
}
)
return retlist, error_list
Expand Down Expand Up @@ -332,13 +334,13 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool =
# original file
if not retlist:
if error_msg:
error_list.append({os.path.basename(filename), error_msg})
error_list.append({os.path.basename(filename).decode(): error_msg})
new_retlist.append((filename, platform))
else:
for filename, platform, magic_type, file_size in retlist:
# verify not Windows binaries here:
if platform == "linux" and not linux_enabled and "Python" not in magic_type:
error_list.append({os.path.basename(filename): "Linux processing is disabled"})
error_list.append({os.path.basename(filename).decode(): "Linux processing is disabled"})
continue

if file_size > web_cfg.general.max_sample_size:
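A side note on the error_list fixes in this file: with a comma, {basename, error_msg} is a set literal rather than the intended one-entry dict, and a bytes basename renders as b'...' in reports — hence the added .decode(). A quick illustration with hypothetical values:

import os

filename = b"/tmp/upload/sample.exe"  # demux handles paths as bytes
error_msg = "Linux processing is disabled"

wrong = {os.path.basename(filename), error_msg}           # set: {b'sample.exe', '...'}
right = {os.path.basename(filename).decode(): error_msg}  # dict: {'sample.exe': '...'}

assert isinstance(wrong, set) and isinstance(right, dict)
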
5 changes: 4 additions & 1 deletion utils/dist.py
@@ -302,6 +302,8 @@ def node_get_report_nfs(task_id, worker_name, main_task_id) -> bool:

try:
shutil.copytree(worker_path, analyses_path, ignore=dist_ignore_patterns, ignore_dangling_symlinks=True, dirs_exist_ok=True)
except shutil.Error:
log.error("Files doens't exist on worker")
except Exception as e:
log.exception(e)
return False
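
For reference, the failure mode this hunk distinguishes: shutil.copytree collects per-file errors (for example, files removed on the worker mid-copy) and raises them in bulk as shutil.Error, which is now logged separately from truly unexpected exceptions. A minimal sketch — the paths and ignore patterns are illustrative, not dist.py's actual values:

import logging
import shutil

log = logging.getLogger(__name__)

dist_ignore_patterns = shutil.ignore_patterns("binary", "dump_sorted.pcap")  # illustrative

def fetch_report(worker_path: str, analyses_path: str) -> bool:
    try:
        shutil.copytree(
            worker_path,
            analyses_path,
            ignore=dist_ignore_patterns,
            ignore_dangling_symlinks=True,  # skip symlinks whose targets vanished
            dirs_exist_ok=True,             # merge into an existing directory (3.8+)
        )
    except shutil.Error:
        # per-file copy errors, e.g. source files missing on the worker
        log.error("Files don't exist on worker")
        return False
    except Exception as e:
        log.exception(e)
        return False
    return True
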
@@ -469,7 +471,8 @@ def node_submit_task(task_id, node_id, main_task_id):
"Failed to submit: main_task_id: %d task %d to node: %s, code: %d, msg: %s",
task.main_task_id, task_id, node.name, r.status_code, r.content
)

if b"File too big, enable" in r.content:
main_db.set_status(task.main_task_id, TASK_BANNED)
if task.task_id:
log.debug("Submitted task to worker: %s - %d - %d", node.name, task.task_id, task.main_task_id)

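And the intent of the last hunk in isolation: a sample that one worker rejects as too big will be rejected by every worker, so rather than leaving the task to be retried, the main task is marked banned. A compact sketch with stand-ins for dist.py's main_db and TASK_BANNED:

TASK_BANNED = "banned"  # stand-in; dist.py imports the real constant

def report_failed_submit(r, task, main_db, log):
    # r is the requests.Response from the worker's submit endpoint
    log.error(
        "Failed to submit: main_task_id: %d, code: %d, msg: %s",
        task.main_task_id, r.status_code, r.content,
    )
    # An oversized sample fails on every node; ban it rather than retry it.
    if b"File too big, enable" in r.content:
        main_db.set_status(task.main_task_id, TASK_BANNED)
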