From f541bf91b0d9b2782fae71e414e5a2807f9a2b51 Mon Sep 17 00:00:00 2001
From: Frikky
Date: Mon, 6 Jan 2025 12:57:39 +0100
Subject: [PATCH] Delete shuffle-tools-fork directory

---
 shuffle-tools-fork/1.0.0/Dockerfile         |  27 ---
 shuffle-tools-fork/1.0.0/api.yaml           |  34 ----
 shuffle-tools-fork/1.0.0/docker-compose.yml |  15 --
 shuffle-tools-fork/1.0.0/requirements.txt   |  11 --
 shuffle-tools-fork/1.0.0/run.sh             |  11 --
 shuffle-tools-fork/1.0.0/src/app.py         | 184 ------------------
 shuffle-tools-fork/1.0.0/src/concurrency.py | 201 -------------------
 shuffle-tools-fork/1.0.0/src/switch.py      | 203 --------------------
 8 files changed, 686 deletions(-)
 delete mode 100644 shuffle-tools-fork/1.0.0/Dockerfile
 delete mode 100644 shuffle-tools-fork/1.0.0/api.yaml
 delete mode 100644 shuffle-tools-fork/1.0.0/docker-compose.yml
 delete mode 100644 shuffle-tools-fork/1.0.0/requirements.txt
 delete mode 100644 shuffle-tools-fork/1.0.0/run.sh
 delete mode 100644 shuffle-tools-fork/1.0.0/src/app.py
 delete mode 100644 shuffle-tools-fork/1.0.0/src/concurrency.py
 delete mode 100644 shuffle-tools-fork/1.0.0/src/switch.py

diff --git a/shuffle-tools-fork/1.0.0/Dockerfile b/shuffle-tools-fork/1.0.0/Dockerfile
deleted file mode 100644
index 5c1a8af4..00000000
--- a/shuffle-tools-fork/1.0.0/Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-# Base our app image off of the WALKOFF App SDK image
-FROM frikky/shuffle:app_sdk as base
-
-# We're going to stage away all of the bloat from the build tools so lets create a builder stage
-FROM base as builder
-
-# Install all alpine build tools needed for our pip installs
-RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev git
-
-# Install all of our pip packages in a single directory that we can copy to our base image later
-RUN mkdir /install
-WORKDIR /install
-COPY requirements.txt /requirements.txt
-RUN pip install --no-cache-dir --prefix="/install" -r /requirements.txt
-
-# Switch back to our base image and copy in all of our built packages and source code
-FROM base
-COPY --from=builder /install /usr/local
-COPY src /app
-
-# Install any binary dependencies needed in our final image
-# RUN apk --no-cache add --update my_binary_dependency
-RUN apk --no-cache add jq git curl
-
-# Finally, lets run our app!
-WORKDIR /app
-CMD ["python", "app.py", "--log-level", "DEBUG"]
diff --git a/shuffle-tools-fork/1.0.0/api.yaml b/shuffle-tools-fork/1.0.0/api.yaml
deleted file mode 100644
index 3f3d116a..00000000
--- a/shuffle-tools-fork/1.0.0/api.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-app_version: 1.0.0
-name: Shuffle Tools Fork
-description: A tool app for Shuffle. Gives access to most missing features along with Liquid.
-tags:
-  - Testing
-  - Shuffle
-categories:
-  - Other
-contact_info:
-  name: "@frikkylikeme"
-  url: https://shuffler.io
-  email: frikky@shuffler.io
-actions:
-  - name: execute_python
-    description: Runs python with the data input. Any prints will be returned.
-    parameters:
-      - name: code
-        description: The code to run. Can be a file ID from within Shuffle.
-        required: true
-        multiline: true
-        example: print("hello world")
-        schema:
-          type: string
-      - name: packages
-        description: Newline-separated list of pip packages to install before the code runs.
- required: true - multiline: true - example: pandas\nnumpy\nmatplotlib - schema: - type: string - -large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AgXDjM6hEZGWwAAD+lJREFUeNrtXb/vJTcRH7/v3iVBCqRBiCAQAtHwq4AWRElHwX8AoqbmXwDRpiH/QyQkGoogUSAhKIKUAE1IdSRSREhQQk7c3XtD8X55vePxjNfe3bk3H+nu+96uPf54POtnj8fe8OQX30JwOIxhtzYBh6MGOsPF0z9p2iWwpd8LjX6W5vWUYaiqlBuvLT5b5TQDPlRwmMSAABBg+kCer+XuAeQf4tL9tAxJ/hIfZGSm8rhyEfjytfxr9FeSX+KjvVfipNVpWlaPNhsAEPCS7Ao8FYnRlbO4ksLnjiSQvIanv4FNjwJ5pXIlMq6MQpIqqPnQKQKbjuPDtZlG55o6UHXWtVncZZTbbNBVB1P5dJYguCbJJ1WjOG8PVOioSm5HPrVt1rwuyN+K+PSZnNV1M/MmEFubfFjjU9tmK9XBJ2cOk3DDdZiEG67DJOrGuA7HyvAe12ESAxa73KPrN1z8gUikCCdvcD5NXnpQpA8nNhh9m5Yn4ZMrV8dHV/8a/dRA0x419a3lI9GBtM2GcrGYFXRNUU5TyluTOpdXwqeUt6YOpby9DUTLZylOcRlzdBTf2yV3ZBFOmKSHQh5KpjSSSpqG4s6VkUubqw8W8knTSnWk0Y+2jF5tlmuDUloJn6T8gRVcEpJ+3srChHSNt8RJsq4p+S41LC13KTcu/RJt1pLPKY1Pzhwm4YbrMAk3XIdJTMe4aeCjJhBVk0YiQ1MWZHhLgmO5QNVWfKRlavlIIQnurQmcnaMjSbBxhtMwYUxODpLcl2tUhvPlNE6VkiuoFVLXKT6ZfBjxRIIzOSlgWpLSB8uZ0g3BjeVDlFGEos0mfKKL7CQrY2ES7pM2i/OX22w4/sWReEhEnUOTxx3a+FrawQGZh04/rWe6oJBKo5zT4zLjPHE9ZHym5YzToogzfQcmfLgOhuLF/Sjm2izVDyXnrKtcmmmdaKumf+RyCw5Xn7OmzQaJF0fiEZG6BjXpYUYaSVkaPrXeHe4eVaZEr3Prqrmmrbc2T8lrmOMjn5xJHeJLYkk+PfzNTxOflrwF0EeHbU0Zt2wsW+PTkncB7g5zmMSwzUfS4eDhPa7DJK5jXGorsnZxonbRIbeAoOUjkUvlp+qxFp9YNuWL0nBqsVCkqUsrHQnuX+Nx5/qcJDI0kWgtJh7ihYCN8aG+13DqOXlbWUfD+fN0AUEmp3RcUWlVEwCynb5ssYLnxHViJT6ULCykb8EnzUfpqBWfVAdcnt5tprGhIe10WnjHpB2FtMPWcpM66yXyOad4Lz4Srq34SHhwZfRos1w9Y/jkzGESvj3dYRLe4zpMwg3XYRJuuA6T4M/Hzfk/OGd9OP2HOE2f8wtBlCebJrkfp+Gc3AGmiSiuaVlpwkmajL4osPUm9FMqIzBOJolfjGuzEtdUwWl53Dm7Eh9pzIdps+FiYJyi1N+Rvs/6OLCQBul8Ip8R08ik3EwhLZz1Wv8XmU7ZZqX7OT2gUIB2oaRBm+2ovDm5nM+ulEeiD8yka8UnJ1PCP82r9YWW8iCU5XO8W/PhPmvllNKW7lEyszsgNKuzkspJFZFL15uPtIweq7A1xiKpz1J8tGXP+dE53/fJmcMk6hcgJO8XqokEKi5uYzTG29LqSev95JqyKsoOOxjNpKQBD7VFc5GBJRsi+NQHkkv6+7m/UxTufwLCCy+CbAruyOLDdwEf/uf6vbbNJukzlogZC6wMdhAcM7ohHPawe/GrcO+HPwe4u782G7sIAE9++0vYv/YKwO6usfCaka0etgwXAGB3D8JznwIYnlmbiW0M92FbQy0d+MmZ3Xo5JDDcvuXJ2ZYqtyUuTwuM6nSXctcufHCOZqkjPScXhbIcdeD0XUpfKyNNy8nlyhuozLkM8XxR6pjm7tc4Fdx620I7lWq10JCm0ZanWoBwm3FsBe1WznpadbTg4A9PI2xx7FUKHopQjg7TKqNnpbioIUcFUGUsy1CS8fFYBYdJuOE6TMIN12ESgyiKiwO1bQOJe1w+6p42Etmhwmi6kLZXfC2G9IUj2vulY2wIPrv4onRhIXcRqS0DiWxkhF0uIb37wG22LRCSuVCyekC2GSXj9CG3YyT+krWh+KPAhkTvgGDKqbqnWbBwY+2Pnm3Wy4aMRYc1MuPDvp0skwgAh8PaJGbh5k4kx0f/hce/ewnw/QenXQCTFJDfQy45PzFNn5NHsoPy/u6gzE+nObzz91P9Z+6kWAm2zg6bDMoq8OQxHN78Axze/htAaB1EbQhhdzyfgRqIGoCxoUIjhDuA3ZDpcR0W4C3nMInbNVw7v4oOAsehArVFPL0uOjMM+DlM+pk7t7/BDuwcJsM6gcM7WweOX05nFCHNi12ASRfLo3QaX9O0GWTylOTnZIMwf4YPPTlD4iMm7aZwAGOUf3Rf48wjHNzVOMkKFA8pp0RHZ1mjdihs5R61PWbsWlphgs/E5gptNvFfSLY8QPk7dVbh+UNg8qfnJsZ8Bo0hzF0Y2Nqvc0s+Vbs5YL5OLfPRcorT2hvjtuxyHWZhzHCX6AMcFtB2B0RvtKZqqe6OEYz1uA7HEbdruN7ZmsZtGq4brXnQhlsbLFkDrY9mC9giH41/dSlONfeEIBcgss7nXopInPdkYN95J3XD1bMgkJUNFOxsDNLgyiynhYyX5dnAhnLyhzmO4V7IO8+xyZEgx5UqvJ41rOUTdhBOr2w6KjZc+B1FBkLGVUoAABQEcmPu6rPPw73v/gh2n/wMANYEhAd4/NqvYf/Wn5pEyPW2IUrOzQWSHyHdkEJgN8D97/0Edp/7GgDu9fnDDvD9t+HRqy8BPvxQ9i6xEXUEuPcMDF//Puw+/aVqDewfvA77f/zx9M40e7jNeNw5CDu4++K34e4r36kWcXj3TYDfvwz8D79ml1clDPuxx9FhuUik0rblVihFWLX+7ZFEXE2ioLBNg9fUSRopVsOjJbioskZlDuyAvmflpOWsOUNu/cBQ8jW/1A0np11RG+GjwG36cQHqFWnBcG4Axgx37d/I1uXXcvCnx6BXoQXf3mOAzvVpooJzaOcWdKBH1fZ07dCsFZpNgmfZbaOJ2dxnpwkNFC3C9MBcGxo0OugxwV8LWKm5lg9sFQdszKGhLAla2dCuduuOZcypx+UXdk0OK5e/hXKNTc4cjiPGhtvTX1njI6Z2+vbuKtaKspLooXdkXs1u5yUR7/LdROMsraSSIfTa6pqWodE9Mvla6sCI8d7uUMEXIEzjdg3XYRr2osOePIbDR+9B
GO7re78QAD/+AODwpK5sBDg6dGyGAtL1sYnLGDe3+2BNTNycYQf7B2/Aw5d/XB9HejjA4YN3jgHUNQ132MOTv/wG9v98A+CgFBCO/+FH/wJ89PBaSY1OULZzQyQL2skayVwg/7Dk3Ky2IlcEgEcfw/7dt+YJnRP1f9jDoz+/AvM0FU4c1u8mes59e+ZXDhXmPE+tForD+lH73Q6EluiozfaldnzWQUWQzdprPk87lg44nkTKN+DT/10S7lW4VYz8wWucOTAPtl5e4mgfjmu0/b3HdZiEG67DJNxwbxlGhwkAuZeXAJS3Qpfemq7dds1tS5dsbc6dAyQpS5uGe+lKrJLSGUqlCb2GcwUuCxBzt71T2/g7t9mQniofv0yjWOtMYdSLM6Sy0pd5iLdFSQtUyiJtRnjmGOdhqq5bo5WzUXAYzns2Lu2tjaqb0WaTHRBrR9cvEVG4VF3WkLsGnzXqohzjbk3dt4hG/jDDxy8BLL5y5miBZi1wa9vT14dJ0o2qft6/1GhQZ1SV9uJxd3cQ7j+XD7RJ40JK38/XAPKz4ly+OG+KwOTDwn0uDSKEZ58/vgH+hmHLcA97uPvCN+G5H/wMoCaQ/KkAAtzdg/DCZ9cmsipsGS4ce5u7z38DYHhmbTL2YfjBH28DOM80s+MoxllVvfkwKudSbiL0dB0NTya2iGpNYmIzl+/EdexjQ8PEGE4FhdPHMAlbLhcsdWaPnfDEAxQJnbx53TEPJ51j3N7CrEfbSNt+arzXt57X2RBx94LsUGHOGRQtF7Fa8HFQQOabJmc5XQ8b8iAbh0mYNFzvdefD+nRhyPowqWitc2VbRyutGCF18+ilU2mEXWX51zFuKbqlZ/RLy0gixzagiS6sgL2hghuwAywarsMBxgzXO9u2sBzZWHwHRLwrQ5rWYQBIfuwCKnZJEpvEYSg9dRoncnejtdxFbBRLqFQzr5fSudH3nDmOaH26yHIwNcZ1NIZNmwWArYU1Fg8HDLB/7wH879VfAey2Rd0a9g/+2ubUyZUOdAz//umXjT136GPd2cDNnM9bC4Pd1gbOx3WsDh/jOkzCDddhEpcjmKiFhvGLQwDitJNrYTz05H7MS+N56hiq0mbYCfeIj2STb2s+cSJEOrguJ4fScaneOW7kOWZJm4VCmaPFg8wKgcSGuLpzR49Rerm8vIRaaECgvyB1Tbl9qOZoMiykHeVhVoZKwW9N+CSJuPwsH4YY12aTa5TxYyZPpsxSDG/Rhgp1lyxUnK/7UMFhEm64DpNIlnzTAdXcsJml8rdO1yt/K+R45EJUluS9zHaWITuQJb9rsVT+HvuKe+RvhdIIcE3ey4Rj+VDBYRJuuA6TcMN1mMT15SWMZ5h10Oc86+dr50s14QWch7rEh5PHef+psgsyqB0iI2e+hE+pDlpvvkQ/uVUMDfdSnTq12TA58injFUdOMPB5AeiALtHcUrstXrqSINnaoVjxyE5ra1ZipHMsTV2kMiQ8NDw7tdmqQ4WtzNEd9uBjXIdJuOE6TMLoy0sct46KHndNS6d2pW5tp+rW+Jw5rVl2qpP5Oqrcnr52w9RMgbfA8db5tAsp8DGuwyTaGW6DB7ppn9CCzxKnvKz9Kz7j/prUi0cwqQLQDBtvrp5uvMc/Wf00oFAT5FjscbcwMloCt1LPWvTUT41sH+M6TMIN12ESw3UPd8gPtrh7JeTyXvZGn0KD0jSlMms5Sfhw92vkUvXT5tPWt3WbSfjMsSFl3ujlJdy+4xkjnFze+PWrNWXWclqaT6t82vq2bjMJnzk2pMzrQwWHSbjhOkzCDdchxpZchpezwySQvHhiyVMLevPRctXwqeWmfcv5GaVTGKRy557YIHnhpETeoCl05grhbPlL89HK1vCp5darvZbgo+XEwYcKDpNww3WYxC6/U5PY5oun66MzPHH8L05PpqHKghn+TpjyictkZQLPh4u6yeknvXeWU+JD6TDHJ/cbn93Bi8nnDKdJm8EG2+zIZwBudlbjUOYOpj1frClPwyf3OZuXuaEx3lgWZixKxIfZ911rvJO65PRFVmZjbYY+VHDYhBuuwyTccB0mcdkB0cr5z70pW/pm7Bo+LesgqUsrPjVye9WXkqld8FiizRCi6LBWjmTRPGGG/JZ5ejvoa1ai1qwvlWarbeZDBYdJuOE6TKKP4W7xJdFb4+R8ZvH5P852gxhpwOZ9AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIwLTA4LTIzVDE0OjUyOjAwKzAyOjAwetRgVgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMC0wOC0yM1QxNDo1MTo1OCswMjowMJuxI+oAAAAASUVORK5CYII= -# yamllint disable-line rule:line-length diff --git a/shuffle-tools-fork/1.0.0/docker-compose.yml b/shuffle-tools-fork/1.0.0/docker-compose.yml deleted file mode 100644 index 4919dcf4..00000000 --- a/shuffle-tools-fork/1.0.0/docker-compose.yml +++ /dev/null @@ -1,15 +0,0 @@ -version: '3.4' -services: - shuffle-tools-fork: - build: - context: . 
-      dockerfile: Dockerfile
-# image: walkoff_registry:5000/walkoff_app_HelloWorld-v1-0
-    deploy:
-      mode: replicated
-      replicas: 10
-      restart_policy:
-        condition: none
-    restart: "no"
-    secrets:
-      - secret1
diff --git a/shuffle-tools-fork/1.0.0/requirements.txt b/shuffle-tools-fork/1.0.0/requirements.txt
deleted file mode 100644
index 67a560ec..00000000
--- a/shuffle-tools-fork/1.0.0/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-ioc_finder==7.2.1
-py7zr==0.11.3
-rarfile==4.0
-pyminizip==0.2.4
-requests==2.25.1
-xmltodict==0.11.0
-json2xml==5.0.5
-ipaddress==1.0.23
-google.auth==1.23.0
-paramiko==3.1.0
-shufflepy
\ No newline at end of file
diff --git a/shuffle-tools-fork/1.0.0/run.sh b/shuffle-tools-fork/1.0.0/run.sh
deleted file mode 100644
index bd26dbf6..00000000
--- a/shuffle-tools-fork/1.0.0/run.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-# Build testing
-NAME=frikky/shuffle:shuffle-tools-fork_1.0.0
-docker rmi $NAME --force
-docker build . -t frikky/shuffle:shuffle-tools-fork_1.0.0
-
-# Run testing
-#docker run -e SHUFFLE_SWARM_CONFIG=run -e SHUFFLE_APP_EXPOSED_PORT=33334 frikky/shuffle:shuffle-tools_1.1.0
-echo $NAME
-#docker service create --env SHUFFLE_SWARM_CONFIG=run --env SHUFFLE_APP_EXPOSED_PORT=33334 $NAME
-
-#cat walkoff_app_sdk/app_base.py #cat walkoff_app_sdk/app_sdk.py
diff --git a/shuffle-tools-fork/1.0.0/src/app.py b/shuffle-tools-fork/1.0.0/src/app.py
deleted file mode 100644
index 5d7b0038..00000000
--- a/shuffle-tools-fork/1.0.0/src/app.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import hmac
-import datetime
-import json
-import time
-import markupsafe
-import os
-import re
-import subprocess
-import tempfile
-import zipfile
-import base64
-import importlib
-import ipaddress
-import hashlib
-import shufflepy
-from io import StringIO
-from contextlib import redirect_stdout
-import random
-import string
-
-import xmltodict
-from json2xml import json2xml
-from json2xml.utils import readfromstring
-
-from ioc_finder import find_iocs
-from dateutil.parser import parse as dateutil_parser
-from google.auth import crypt
-from google.auth import jwt
-
-import py7zr
-import pyminizip
-import rarfile
-import requests
-import tarfile
-import binascii
-import struct
-
-import paramiko
-import concurrent.futures
-import multiprocessing
-
-from pip._internal import main as pip_main
-from pip._internal.commands.show import search_packages_info
-
-from walkoff_app_sdk.app_base import AppBase
-
-class Tools(AppBase):
-    __version__ = "1.2.0"
-    app_name = (
-        "Shuffle Tools Fork" # this needs to match "name" in api.yaml for WALKOFF to work
-    )
-
-    def __init__(self, redis, logger, console_logger=None):
-        """
-        Each app should have this __init__ to set up Redis and logging.
-        :param redis:
-        :param logger:
-        :param console_logger:
-        """
-        super().__init__(redis, logger, console_logger)
-
-    def dynamic_import(package_name: str):
-        """Import a package and return the module"""
-        return importlib.import_module(package_name.split('==')[0].split('>=')[0].split('<=')[0].split('>')[0].split('<')[0])
-
-
-    def get_missing_packages(required_packages: list) -> list:
-        """
-        Returns a list of packages that aren't currently installed.
-
-        Args:
-            required_packages: List of package names (can include version specs)
-
-        Returns:
-            List of package names that aren't installed
-        """
-        missing = []
-        for package in required_packages:
-            # Remove version specifiers if present (e.g., 'pandas>=1.0.0' -> 'pandas')
-            package_name = package.split('==')[0].split('>=')[0].split('<=')[0].split('>')[0].split('<')[0].strip()
-
-            # Check if package exists in environment
-            if not list(search_packages_info([package_name])):
-                missing.append(package)
-
-        return missing
-
-    def install_packages(self, packages=[]) -> None:
-        """
-        Install Python packages using pip's Python interface.
-
-        Args:
-            packages: List of package names to install
-        """
-
-        packages_not_found = self.get_missing_packages(packages)
-
-        for package in packages_not_found:
-            try:
-                pip_main(['install', package])
-                print(f"Successfully installed {package}")
-            except Exception as e:
-                print(f"Failed to install {package}: {str(e)}")
-
-    def execute_python(self, code, packages) -> dict:
-        if os.getenv("SHUFFLE_ALLOW_PACKAGE_INSTALL") == "true":
-            allow_package_install = True
-
-        packages = packages.split("\n") if packages else []
-
-        if packages:
-            if allow_package_install:
-                self.install_packages(packages)
-                self.dynamic_import(packages)
-
-        if len(code) == 36 and "-" in code:
-            filedata = self.get_file(code)
-            if filedata["success"] == False:
-                return {
-                    "success": False,
-                    "message": f"Failed to get file for ID {code}",
-                }
-
-            if ".py" not in filedata["filename"]:
-                return {
-                    "success": False,
-                    "message": f"Filename needs to contain .py",
-                }
-
-
-        # Write the code to a file
-        # 1. Take the data into a file
-        # 2. Subprocess execute file?
-        try:
-            f = StringIO()
-            def custom_print(*args, **kwargs):
-                return print(*args, file=f, **kwargs)
-
-            #with redirect_stdout(f): # just in case
-            # Add globals in it too
-            globals_copy = globals().copy()
-            globals_copy["print"] = custom_print
-
-            # Add self to globals_copy
-            for key, value in locals().copy().items():
-                if key not in globals_copy:
-                    globals_copy[key] = value
-
-            globals_copy["self"] = self
-
-            exec(code, globals_copy)
-
-            s = f.getvalue()
-            f.close() # why: https://www.youtube.com/watch?v=6SA6S9Ca5-U
-
-            #try:
-            #    s = s.encode("utf-8")
-            #except Exception as e:
-
-            try:
-                return {
-                    "success": True,
-                    "message": json.loads(s.strip()),
-                }
-            except Exception as e:
-                try:
-                    return {
-                        "success": True,
-                        "message": s.strip(),
-                    }
-                except Exception as e:
-                    return {
-                        "success": True,
-                        "message": s,
-                    }
-
-        except Exception as e:
-            return {
-                "success": False,
-                "message": f"exception: {e}",
-            }
-
-if __name__ == "__main__":
-    Tools.run()
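For reference, the execute_python action above captures user output by swapping the print() builtin that exec() sees for one that writes into a StringIO buffer, then tries to JSON-decode whatever was printed. A minimal standalone sketch of that capture pattern, assuming nothing beyond the standard library (run_user_code is an illustrative name, not part of the app):

import json
from io import StringIO

def run_user_code(code):
    # Capture anything the user code prints by giving exec() a globals dict
    # whose "print" writes into an in-memory buffer instead of stdout.
    buffer = StringIO()

    def custom_print(*args, **kwargs):
        return print(*args, file=buffer, **kwargs)

    exec_globals = globals().copy()
    exec_globals["print"] = custom_print

    exec(code, exec_globals)

    output = buffer.getvalue().strip()
    try:
        # Prefer structured output when the printed text happens to be valid JSON.
        return {"success": True, "message": json.loads(output)}
    except Exception:
        return {"success": True, "message": output}

print(run_user_code('print(json.dumps({"hello": "world"}))'))
# {'success': True, 'message': {'hello': 'world'}}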
diff --git a/shuffle-tools-fork/1.0.0/src/concurrency.py b/shuffle-tools-fork/1.0.0/src/concurrency.py
deleted file mode 100644
index 420d1686..00000000
--- a/shuffle-tools-fork/1.0.0/src/concurrency.py
+++ /dev/null
@@ -1,201 +0,0 @@
-import time
-import json
-import ipaddress
-import concurrent.futures
-from functools import partial
-from ioc_finder import find_iocs
-
-class Test():
-    def split_text(self, text):
-        # Split text into chunks of 10kb. Add each 10k to array
-        # In case e.g. 1.2.3.4 lands exactly on 20k boundary, it may be useful to overlap here.
-        # (just shitty code to reduce chance of issues) while still going fast
-
-        arr_one = []
-        max_len = 2500
-        current_string = ""
-        overlaps = 100
-
-
-        for i in range(0, len(text)):
-            current_string += text[i]
-            if len(current_string) > max_len:
-                # Appending just in case even with overlaps
-                if len(text) > i+overlaps:
-                    current_string += text[i+1:i+overlaps]
-                else:
-                    current_string += text[i+1:]
-
-                arr_one.append(current_string)
-                current_string = ""
-
-        if len(current_string) > 0:
-            arr_one.append(current_string)
-
-        #print("DATA:", arr_one)
-        print("Strings:", len(arr_one))
-        #exit()
-
-        return arr_one
-
-    def _format_result(self, result):
-        final_result = {}
-
-        for res in result:
-            for key, val in res.items():
-                if key in final_result:
-                    if isinstance(val, list) and len(val) > 0:
-                        for i in val:
-                            final_result[key].append(i)
-                    elif isinstance(val, dict):
-                        #print(key,":::",val)
-                        if key in final_result:
-                            if isinstance(val, dict):
-                                for k,v in val.items():
-                                    #print("k:",k,"v:",v)
-                                    val[k].append(v)
-                                    #print(val)
-                        #final_result[key].append([i for i in val if len(val) > 0])
-                else:
-                    final_result[key] = val
-
-        return final_result
-
-    def worker_function(self, inputdata):
-        return find_iocs(inputdata["data"], included_ioc_types=inputdata["ioc_types"])
-
-    def _with_concurency(self, array_of_strings, ioc_types):
-        results = []
-        #start = time.perf_counter()
-
-        # Workers dont matter..?
-        # What can we use instead?
-
-        results = []
-        workers = 4
-        with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
-            # Submit the find_iocs function for each string in the array
-            futures = [executor.submit(
-                find_iocs,
-                text=string,
-                included_ioc_types=ioc_types,
-            ) for string in array_of_strings]
-
-            # Wait for all tasks to complete
-            concurrent.futures.wait(futures)
-
-            # Retrieve the results if needed
-            results = [future.result() for future in futures]
-
-        return self._format_result(results)
-
-    def parse_ioc_new(self, input_string, input_type="all"):
-        if input_type == "":
-            input_type = "all"
-
-        #ioc_types = ["domains", "urls", "email_addresses", "ipv6s", "ipv4s", "ipv4_cidrs", "md5s", "sha256s", "sha1s", "cves"]
-        ioc_types = ["domains", "urls", "email_addresses", "ipv4s", "ipv4_cidrs", "md5s", "sha256s", "sha1s", "cves"]
-
-        # urls = 10.4 -> 9.1
-        # emails = 10.4 -> 9.48
-        # ipv6s = 10.4 -> 7.37
-        # ipv4 cidrs = 10.4 -> 10.44
-
-        if input_type == "" or input_type == "all":
-            ioc_types = ioc_types
-        else:
-            input_type = input_type.split(",")
-            for item in input_type:
-                item = item.strip()
-
-            ioc_types = input_type
-
-        input_string = str(input_string)
-        if len(input_string) > 10000:
-            iocs = self._with_concurency(self.split_text(input_string), ioc_types=ioc_types)
-        else:
-            iocs = find_iocs(input_string, included_ioc_types=ioc_types)
-
-        newarray = []
-        for key, value in iocs.items():
-            if input_type != "all":
-                if key not in input_type:
-                    continue
-
-            if len(value) == 0:
-                continue
-
-            for item in value:
-                # If in here: attack techniques.
-                # Shouldn't be 3 levels so no recursion necessary
-                if isinstance(value, dict):
-                    for subkey, subvalue in value.items():
-                        if len(subvalue) == 0:
-                            continue
-
-                        for subitem in subvalue:
-                            data = {
-                                "data": subitem,
-                                "data_type": "%s_%s" % (key[:-1], subkey),
-                            }
-
-                            if data not in newarray:
-                                newarray.append(data)
-                else:
-                    data = {"data": item, "data_type": key[:-1]}
-                    if data not in newarray:
-                        newarray.append(data)
-
-        # Reformatting IP
-        i = -1
-        for item in newarray:
-            i += 1
-            if "ip" not in item["data_type"]:
-                continue
-
-            newarray[i]["data_type"] = "ip"
-            try:
-                newarray[i]["is_private_ip"] = ipaddress.ip_address(item["data"]).is_private
-            except Exception as e:
-                print("Error parsing %s: %s" % (item["data"], e))
-
-        try:
-            newarray = json.dumps(newarray)
-        except json.decoder.JSONDecodeError as e:
-            return "Failed to parse IOC's: %s" % e
-
-        return newarray
-
-# Make it not run this for multithreads
-if __name__ == "__main__":
-
-    input_string = ""
-    with open("testdata.txt", "r") as f:
-        input_string = f.read()
-
-    try:
-        json_data = json.loads(input_string)
-        # If array, loop
-        if isinstance(json_data, list):
-            cnt = 0
-            start = time.perf_counter()
-            for item in json_data:
-                cnt += 1
-                classdata = Test()
-
-                ret = classdata.parse_ioc_new(item)
-                #print("OUTPUT1: ", ret)
-
-                #if cnt == 5:
-                #    break
-
-            print("Total time taken:", time.perf_counter()-start)
-        else:
-            classdata = Test()
-            ret = classdata.parse_ioc_new(input_string)
-            print("OUTPUT2: ", ret)
-    except Exception as e:
-        classdata = Test()
-        ret = classdata.parse_ioc_new(json_data)
-        print("OUTPUT3: ", ret)
-
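For reference, concurrency.py above speeds up IOC extraction on large inputs by cutting the text into overlapping chunks and fanning the chunks out over a thread pool, then merging the per-chunk results. A minimal standalone sketch of that chunk-plus-overlap and fan-out shape, with a stand-in worker instead of find_iocs so it runs without ioc_finder installed (chunk_with_overlap, count_digits and fan_out are illustrative names, not part of the app):

import concurrent.futures

def chunk_with_overlap(text, max_len=2500, overlap=100):
    # Same idea as split_text above: fixed-size chunks, each extended by a small
    # overlap so a value split across a boundary is still seen whole in one chunk.
    chunks = []
    start = 0
    while start < len(text):
        chunks.append(text[start:start + max_len + overlap])
        start += max_len
    return chunks

def count_digits(chunk):
    # Stand-in for find_iocs(); any per-chunk extraction function fits here.
    return sum(ch.isdigit() for ch in chunk)

def fan_out(text, workers=4):
    chunks = chunk_with_overlap(text)
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        futures = [executor.submit(count_digits, chunk) for chunk in chunks]
        concurrent.futures.wait(futures)
        return [future.result() for future in futures]

print(fan_out("10.0.0.1 appeared 42 times " * 500))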
diff --git a/shuffle-tools-fork/1.0.0/src/switch.py b/shuffle-tools-fork/1.0.0/src/switch.py
deleted file mode 100644
index 78ede505..00000000
--- a/shuffle-tools-fork/1.0.0/src/switch.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# self, sourcevalue, condition, destinationvalue
-def validate_condition(sourcevalue, check, destinationvalue):
-    if check == "=" or check == "==" or check.lower() == "equals":
-        if str(sourcevalue).lower() == str(destinationvalue).lower():
-            return True
-    elif check == "!=" or check.lower() == "does not equal":
-        if str(sourcevalue).lower() != str(destinationvalue).lower():
-            return True
-    elif check.lower() == "startswith":
-        if str(sourcevalue).lower().startswith(str(destinationvalue).lower()):
-            return True
-
-
-    elif check.lower() == "endswith":
-        if str(sourcevalue).lower().endswith(str(destinationvalue).lower()):
-            return True
-    elif check.lower() == "contains":
-        if destinationvalue.lower() in sourcevalue.lower():
-            return True
-
-    elif check.lower() == "is empty" or check.lower() == "is_empty":
-        try:
-            if len(json.loads(sourcevalue)) == 0:
-                return True
-        except Exception as e:
-            print("[ERROR] Failed to check if empty as list: {e}")
-
-        if len(str(sourcevalue)) == 0:
-            return True
-
-    elif check.lower() == "contains_any_of":
-        newvalue = [destinationvalue.lower()]
-        if "," in destinationvalue:
-            newvalue = destinationvalue.split(",")
-        elif ", " in destinationvalue:
-            newvalue = destinationvalue.split(", ")
-
-        for item in newvalue:
-            if not item:
-                continue
-
-            if item.strip() in sourcevalue:
-                return True
-
-    elif check.lower() == "larger than" or check.lower() == "bigger than" or check == ">" or check == ">=":
-        try:
-            if str(sourcevalue).isdigit() and str(destinationvalue).isdigit():
-                if int(sourcevalue) > int(destinationvalue):
-                    return True
-
-        except AttributeError as e:
-            print("[WARNING] Condition larger than failed with values %s and %s: %s" % (sourcevalue, destinationvalue, e))
-
-        try:
-            destinationvalue = len(json.loads(destinationvalue))
-        except Exception as e:
-            print("[WARNING] Failed to convert destination to list: {e}")
-        try:
-            # Check if it's a list in autocast and if so, check the length
-            if len(json.loads(sourcevalue)) > int(destinationvalue):
-                return True
-        except Exception as e:
-            print("[WARNING] Failed to check if larger than as list: {e}")
-
-
-    elif check.lower() == "smaller than" or check.lower() == "less than" or check == "<" or check == "<=":
-        print("In smaller than check: %s %s" % (sourcevalue, destinationvalue))
-
-        try:
-            if str(sourcevalue).isdigit() and str(destinationvalue).isdigit():
-                if int(sourcevalue) < int(destinationvalue):
-                    return True
-
-        except AttributeError as e:
-            pass
-
-        try:
-            destinationvalue = len(json.loads(destinationvalue))
-        except Exception as e:
-            print("[WARNING] Failed to convert destination to list: {e}")
-
-        try:
-            # Check if it's a list in autocast and if so, check the length
-            if len(json.loads(sourcevalue)) < int(destinationvalue):
-                return True
-        except Exception as e:
-            print("[WARNING] Failed to check if smaller than as list: {e}")
-
-    elif check.lower() == "re" or check.lower() == "matches regex":
-        try:
-            found = re.search(str(destinationvalue), str(sourcevalue))
-        except re.error as e:
-            return False
-        except Exception as e:
-            return False
-
-        if found == None:
-            return False
-
-        return True
-    else:
-        print("[DEBUG] Condition: can't handle %s yet. Setting to true" % check)
-
-    return False
-
-def evaluate_conditions(condition_structure):
-    operator = condition_structure.get('operator')
-
-    # Base case: Single condition
-    if 'source' in condition_structure:
-        source = condition_structure['source']
-        condition = condition_structure['condition']
-        destination = condition_structure['destination']
-
-        # self.
-        return validate_condition(source, condition, destination)
-
-    # Recursive case: Logical operator
-    elif operator == "AND":
-        return all(evaluate_conditions(sub_condition) for sub_condition in condition_structure['conditions'])
-
-    elif operator == "OR":
-        return any(evaluate_conditions(sub_condition) for sub_condition in condition_structure['conditions'])
-
-    elif operator == "NOT":
-        return not evaluate_conditions(condition_structure['conditions'][0])
-
-    else:
-        raise ValueError(f"Unknown operator: {operator}")
-
-
-def switch(conditions):
-    to_return = {
-        "success": True,
-        "run_else": True,
-    }
-
-    for condition in conditions:
-        if "id" not in condition:
-            print("Condition ID not found")
-            continue
-
-        evaluated = False
-        try:
-            evaluated = evaluate_conditions(condition)
-        except Exception as e:
-            print(f"Failed to evaluate condition {condition['id']}: {e}")
-
-        if evaluated == True:
-            to_return["run_else"] = False
-
-        to_return[condition["id"]] = evaluated
-
-    return to_return
-
-# Example usage
-
-condition_structure = {
-    "id": "lol",
-    "operator": "AND",
-    "conditions": [
-        { # true
-            "source": "20", # age
-            "condition": ">",
-            "destination": 18
-        },
-        { # true
-            "operator": "OR",
-            "conditions": [
-                {
-                    "source": "active", # status
-                    "condition": "==",
-                    "destination": "active"
-                },
-                {
-                    "source": "1500", # balance
-                    "condition": ">=",
-                    "destination": 1000
-                }
-            ]
-        },
-        {
-            "operator": "NOT",
-            "conditions": [
-                {
-                    "source": "user", # user
-                    "condition": "==",
-                    "destination": "admin"
-                }
-            ]
-        }
-    ]
-}
-
-newcondition = condition_structure.copy()
-testconditions = [condition_structure]
-newcondition['id'] = "lol2"
-testconditions.append(newcondition)
-
-result = switch(testconditions)
-print()
-print()
-print("Output: ", result)
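For reference, switch.py above evaluates a nested condition tree: a leaf {source, condition, destination} is checked by validate_condition(), AND/OR/NOT nodes combine their children recursively, and switch() records one boolean per top-level condition ID plus a run_else flag. Note that the deleted file never imports json or re even though validate_condition() calls json.loads() and re.search(), so those checks cannot work as written. A minimal standalone sketch of the same recursive shape, covering only two of the condition types (evaluate is an illustrative helper, not part of the app):

import re

def evaluate(node):
    # Leaf: a single {source, condition, destination} comparison.
    if "source" in node:
        if node["condition"] in ("=", "==", "equals"):
            return str(node["source"]).lower() == str(node["destination"]).lower()
        if node["condition"] == "matches regex":
            return re.search(str(node["destination"]), str(node["source"])) is not None
        raise ValueError(f"Unhandled condition: {node['condition']}")

    # Branch: AND/OR/NOT combine the results of the child nodes.
    operator = node.get("operator")
    children = node.get("conditions", [])
    if operator == "AND":
        return all(evaluate(child) for child in children)
    if operator == "OR":
        return any(evaluate(child) for child in children)
    if operator == "NOT":
        return not evaluate(children[0])
    raise ValueError(f"Unknown operator: {operator}")

print(evaluate({
    "operator": "AND",
    "conditions": [
        {"source": "active", "condition": "==", "destination": "ACTIVE"},
        {"operator": "NOT", "conditions": [
            {"source": "user", "condition": "matches regex", "destination": "^admin$"},
        ]},
    ],
}))
# True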