diff --git a/.github/SECURITY.md b/.github/SECURITY.md index ed2632d15..e485605a4 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -31,7 +31,6 @@ Thank you for helping to keep reNgine and its users safe! **What do I get in return?** * Much thanks from Maintainer and the community -* Monetary Rewards * CVE ID(s) ## Past Security Vulnerabilities @@ -41,6 +40,7 @@ Thanks to these individuals for reporting Security Issues in reNgine. ### 2024 * [HIGH] [Command Injection](https://github.com/yogeshojha/rengine/security/advisories/GHSA-fx7f-f735-vgh4) in Waf Detector, Reported by [n-thumann](https://github.com/n-thumann) +* [MEDIUM] [Stored XSS](https://github.com/yogeshojha/rengine/security/advisories/GHSA-96q4-fj2m-jqf7) in Vulnerability Page, Reported by [Touhid M Shaikh](https://github.com/touhidshaikh) ### 2022 @@ -72,6 +72,6 @@ Thanks to these individuals for reporting Security Issues in reNgine. * [LOW] [Stored XSS](https://huntr.dev/bounties/693a7d23-c5d4-448e-bbf6-50b3f0ad8544/) on Target Summary via Todo, Reported by [TheLabda](https://github.com/thelabda) -* [LOW] [Stored XSS](https://huntr.dev/bounties/81c48a07-9cb8-4da8-babc-28a4076a5e92/) on Nuclei Template Summary via maliclous Nuclei Template, Reported by [Walleson Moura](https://github.com/phor3nsic) +* [LOW] [Stored XSS](https://huntr.dev/bounties/81c48a07-9cb8-4da8-babc-28a4076a5e92/) on Nuclei Template Summary via malicious Nuclei Template, Reported by [Walleson Moura](https://github.com/phor3nsic) * [MEDIUM] [Path Traversal/LFI](https://huntr.dev/bounties/5df1a485-7a1e-411d-9664-0f4343e8512a/), reported by [Koen Molenaar](https://github.com/k0enm) diff --git a/.github/workflows/auto-comment.yml b/.github/workflows/auto-comment.yml index 6efc55b7d..e5fa9e7c8 100644 --- a/.github/workflows/auto-comment.yml +++ b/.github/workflows/auto-comment.yml @@ -1,37 +1,98 @@ -name: 👋 Auto Comment -on: [issues, pull_request] +name: 💬 Auto 
Comment + +on: + issues: + types: [opened] + pull_request: + types: [opened, closed] + pull_request_target: + types: [opened, closed] + +permissions: + issues: write + pull-requests: write + jobs: - run: + auto_comment: runs-on: ubuntu-latest steps: - - uses: bubkoo/auto-comment@v1.1.2 + - name: 🤖 Auto Comment on Issues and PRs + uses: actions/github-script@v7 with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - issuesOpened: > - 👋 Hi @{{ author }}, - - Issues is only for reporting a bug/feature request. Please read documentation before raising an issue https://rengine.wiki - - For very limited support, questions, and discussions, please join reNgine Discord channel: https://discord.gg/azv6fzhNCE - - Please include all the requested and relevant information when opening a bug report. Improper reports will be closed without any response. - - pullRequestOpened: > - 👋 Hi @{{ author }}, + github-token: ${{secrets.GITHUB_TOKEN}} + script: | + const { owner, repo } = context.repo; + const author = context.payload.sender.login; + + if (context.eventName === 'issues' && context.payload.action === 'opened') { + const issueTitle = context.payload.issue.title.toLowerCase(); + let commentBody; + + if (issueTitle.includes('feat')) { + commentBody = `Hey @${author}! 🚀 Thanks for this exciting feature idea! - Thank you for sending this pull request. + We love seeing fresh concepts that could take reNgine to the next level. 🌟 + + To help us understand your vision better, could you: + + 📝 Provide a detailed description of the feature + 🎯 Explain the problem it solves or the value it adds + 💡 Share any implementation ideas you might have + + Your input is invaluable in shaping the future of reNgine. Let's innovate together! 💪`; + } else { + commentBody = `Hey @${author}! 👋 Thanks for flagging this bug! 🐛🔍 - Please make sure you have followed our [contribution guidelines](https://github.com/yogeshojha/rengine/blob/master/.github/CONTRIBUTING.md). 
+ You're our superhero bug hunter! 🦸♂️🦸♀️ Before we suit up to squash this bug, could you please: + + 📚 Double-check our documentation: https://rengine.wiki + 🕵️ Make sure it's not a known issue + 📝 Provide all the juicy details about this sneaky bug + + Once again - thanks for your vigilance! 🛠️🚀`; + } + + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner, + repo, + body: commentBody + }); + } else if ((context.eventName === 'pull_request' || context.eventName === 'pull_request_target') && context.payload.action === 'opened') { + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner, + repo, + body: `Woohoo @${author}! 🎉 You've just dropped some hot new code! 🔥 - We will review this PR as soon as possible. Thank you for your patience. + Hang tight while we review this! You rock! 🤘` + }); + } else if ((context.eventName === 'pull_request' || context.eventName === 'pull_request_target') && context.payload.action === 'closed') { + const isPRMerged = context.payload.pull_request.merged; + let commentBody; - pullRequestClosed: > - 🚀 Hi @{{ author }}, + if (isPRMerged) { + commentBody = `Holy smokes! 🤯 You've just made reNgine even more awesome! - You are amazing! Thank you for your contributions. Your contributions are what makes reNgine awesome! + Your code is now part of the reNgine hall of fame. 🏆 + + Keep the cool ideas coming - maybe next time you'll break the internet! 💻💥 - This pull request has now been closed. + Virtual high fives all around! 🙌`; + } else { + commentBody = `Hey, thanks for your contribution! 🙏 - We look forward to your more contributions and support. + We appreciate the time and effort you put into this PR. Sadly this is not the right fit for reNgine at the moment. + + While we couldn't merge it this time, we value your interest in improving reNgine. + + Feel free to reach out if you have any questions. 
Thanks again!`; + } - Thanks + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner, + repo, + body: commentBody + }); + } \ No newline at end of file diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 000000000..7c068aa2c --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,60 @@ +name: Update Version and Changelog and Readme + +on: + release: + types: [published] + +jobs: + update-version-and-changelog: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Get latest release info + id: get_release + uses: actions/github-script@v6 + with: + script: | + const release = await github.rest.repos.getLatestRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + }); + core.setOutput('tag_name', release.data.tag_name); + core.setOutput('body', release.data.body); + + - name: Update version file + run: echo ${{ steps.get_release.outputs.tag_name }} > web/.version + + - name: Update CHANGELOG.md + run: | + echo "# Changelog" > CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + echo "## ${{ steps.get_release.outputs.tag_name }}" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + echo "${{ steps.get_release.outputs.body }}" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + if [ -f CHANGELOG.md ]; then + sed '1,2d' CHANGELOG.md >> CHANGELOG.md.new + fi + mv CHANGELOG.md.new CHANGELOG.md + + - name: Update README.md + run: | + sed -i 's|https://img.shields.io/badge/version-.*-informational|https://img.shields.io/badge/version-${{ steps.get_release.outputs.tag_name }}-informational|g' README.md + + - name: Commit and push changes + run: | + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add web/.version CHANGELOG.md README.md + if git diff --staged --quiet; then + echo 
"No changes to commit" + else + git commit -m "reNgine release: ${{ steps.get_release.outputs.tag_name }} :rocket:" + git push origin HEAD:${{ github.event.repository.default_branch }} + fi diff --git a/.github/workflows/build-pr.yml b/.github/workflows/build-pr.yml index eb9feec1d..d5cdad0b7 100644 --- a/.github/workflows/build-pr.yml +++ b/.github/workflows/build-pr.yml @@ -1,4 +1,4 @@ -name: 🌄 Build Docker image for pull request +name: 🏗️ Build Docker image for pull request on: pull_request: @@ -8,33 +8,42 @@ on: jobs: build: - name: Build Docker image + name: 🐳 Build Docker image runs-on: ubuntu-latest strategy: matrix: platform: - linux/amd64 - linux/arm64 + # - linux/arm/v7 steps: - - name: Checkout the git repo + - name: 📥 Checkout the git repo uses: actions/checkout@v4 - - name: Log in to Docker Hub - uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + - name: 🖥️ Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: 🏗️ Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - - name: Extract metadata (tags, labels) for Docker + - name: 🏷️ Extract metadata (tags, labels) for Docker id: meta - uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + uses: docker/metadata-action@v5 with: images: yogeshojha/rengine + tags: | + type=raw,value=pr-${{ github.event.pull_request.number }} + type=sha,prefix=sha- + type=ref,event=branch + type=ref,event=pr - - name: Build Docker image - uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + - name: 🏗️ Build Docker image + uses: docker/build-push-action@v5 with: context: web/ + platforms: ${{ matrix.platform }} push: false tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fd438d55d..df11eb559 100644 --- 
a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,14 +1,16 @@ -name: Build Docker image +name: 🚀 Build and Push Docker image on: push: branches: [ master ] + release: + types: [published] schedule: - - cron: '0 18 * * 5' + - cron: '0 0 */5 * *' # Run every 5 days at midnight UTC jobs: - build: - name: Build Docker image + build-and-push: + name: 🐳 Build and Push Docker image runs-on: ubuntu-latest strategy: matrix: @@ -16,25 +18,41 @@ jobs: - linux/amd64 - linux/arm64 steps: - - name: Checkout the git repo + - name: 📥 Checkout the git repo uses: actions/checkout@v4 - - name: Log in to Docker Hub - uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + - name: 🖥️ Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: 🛠️ Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: 🔑 Log in to Docker Hub + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Extract metadata (tags, labels) for Docker + - name: 🏷️ Extract metadata (tags, labels) for Docker id: meta - uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + uses: docker/metadata-action@v5 with: images: yogeshojha/rengine + tags: | + type=raw,value=${{ matrix.platform }}-latest,enable={{is_default_branch}} + type=semver,pattern=${{ matrix.platform }}-{{version}} + type=semver,pattern=${{ matrix.platform }}-{{major}}.{{minor}} + type=semver,pattern=${{ matrix.platform }}-{{major}} + type=sha,prefix=${{ matrix.platform }}-sha- + type=schedule,pattern=${{ matrix.platform }}-{{date 'YYYYMMDD'}} - - name: Build Docker image - uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + - name: 🏗️ Build and push Docker image + uses: docker/build-push-action@v5 with: context: web/ - push: true + platforms: ${{ matrix.platform }} + push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + 
cache-from: type=gha + cache-to: type=gha,mode=max \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c3b3262d..19fb9a984 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,86 @@ # Changelog +## v2.2.0 + +## What's Changed + +### Summary +- Introducing Bounty Hub: Central platform for managing and importing bug bounty programs +- New Built-in notification system for important events and updates +- Enhanced subdomain discovery using Chaos project dataset +- Bug Bounty Mode as user preference to enable or disable features related to bug bounty +- Path exclusion feature for scans +- New visually appealing PDF report template +- Regex support for out-of-scope subdomains +- Stop All Scans killswitch to halt multiple running scans at once +- Smart rescans that automatically import and apply previous scan configurations +- Improved Start Scan UI for consistent configuration across multiple scans +- Support for bulk uploads of nuclei and gf patterns +- API key protection (masking in settings view) + +* feat: Allow uploading of multiple gf patterns #1318 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1319 +* feat: Introduce stop multiple scans #1270 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1321 +* feat: Mask API keys Fixes #1213 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1331 +* feat: Allow uploading multiple nuclei patterns #461 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1320 +* feat: Introduce github action for auto updating version and changelog on every release by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1348 +* chores: Removes external IP from reNgine ui by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1350 +* feat: Implement URL Path Exclusion Feature with Regex Support Fixes #1264 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1354 +* feat: Consistent start scan ui across 
schedule scan, multiple scans. Now supports import, out of scope subdomains, starting path, excluded path for all types of scan #1357 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1361 +* Update of template.html with conditional statement by @DamianHusted in https://github.com/yogeshojha/rengine/pull/1378 +* feat: feat ability to delete multiple scheduled scan #1360 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1382 +* feat: Enhanced Out of Scope Subdomain Checking, Support for regex in out of scope scan parameter #1358 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1380 +* feat: Store and showcase scan related configuration such as imported subdomains, out of scope subdomains, starting point url and excluded paths fixes #1356 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1383 +* Update celery-entrypoint.sh by @SJ029626 in https://github.com/yogeshojha/rengine/pull/1390 +* feat: Prefill the scan parameters during rescan with the scan configuration values that were being used in earlier scan #1381 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1386 +* feat: Added additional templates for PDF reports #1387 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1391 +* Replace CVE-2024-41661 with CVE-2023-50094 by @shelbyc in https://github.com/yogeshojha/rengine/pull/1393 +* hotfix: Workflow autocomment issues by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1396 +* Fix comment workflow on fork PRs by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1400 +* Hotfix/workflow cmt1 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1401 +* fix author name by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1403 +* Update of the uninstall.sh script by @DamianHusted in https://github.com/yogeshojha/rengine/pull/1385 +* feat: Builtin notification system in 
reNgine #1392 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1394 +* feat: Show what's new popup when update happens and new features are released #1395 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1405 +* feat: Add Chaos for subdomain enumeration #173 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1406 +* Version 2.1.3 contains a patch for CVE-2024-43381 by @shelbyc in https://github.com/yogeshojha/rengine/pull/1412 +* feat: Introducing Bounty Hub, a central hub to import and manage your hackerone programs to reNgine by @null-ref-0000 in https://github.com/yogeshojha/rengine/pull/1410 +* feat: Add ability to delete multiple organizations by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1417 +* feat: Enable bug bounty mode as User Preference to separate bug bounty related features #1411 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1418 +* bug: remove watchmedo usage in production #1419 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1424 +* feat: Create organization when quick adding targets #492 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1425 +* reNgine 2.2.0 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1349 + +## New Contributors +* @DamianHusted made their first contribution in https://github.com/yogeshojha/rengine/pull/1378 +* @SJ029626 made their first contribution in https://github.com/yogeshojha/rengine/pull/1390 +* @shelbyc made their first contribution in https://github.com/yogeshojha/rengine/pull/1393 + +**Full Changelog**: https://github.com/yogeshojha/rengine/compare/v2.1.3...v2.2.0 + +## 2.1.3 + +**Release Date: Aug 18, 2024** + +## What's Changed + +### Security Update + +* (Security) CVE-2024-43381 Stored Cross-Site Scripting (XSS) via DNS Record Poisoning reported by @touhidshaikh Advisory 
https://github.com/yogeshojha/rengine/security/advisories/GHSA-96q4-fj2m-jqf7 + +### Bug Fixes + +* remove redundant docker environment variables by @jxdv in https://github.com/yogeshojha/rengine/pull/1353 +* fix: reNgine installation issue due to orjson and langchain #1362 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1363 +* #1364 Fix whois lookup and improve performance by executing various modules of whois lookup to run concurrently by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1368 +* chores: Add error handling for the curl command by @gitworkflows in https://github.com/yogeshojha/rengine/pull/1367 +* Update Github Actions Workflows by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1369 +* chores: Fix docker build on master by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1373 + +#### New Contributors +* @gitworkflows made their first contribution in https://github.com/yogeshojha/rengine/pull/1367 + +**Full Changelog**: https://github.com/yogeshojha/rengine/compare/v2.1.2...v2.1.3 + ## 2.1.2 **Release Date: July 30, 2024** @@ -7,7 +88,7 @@ ## What's Changed ### Security update -* (Security) CVE-2024-41661 Fix Authenticated command injection in WAF detection tool reported by @n-thumann Advisory https://github.com/yogeshojha/rengine/security/advisories/GHSA-fx7f-f735-vgh4 +* (Security) CVE-2023-50094 Fix Authenticated command injection in WAF detection tool reported by @n-thumann Advisory https://github.com/yogeshojha/rengine/security/advisories/GHSA-fx7f-f735-vgh4 ### Bug Fixes @@ -44,7 +125,7 @@ * Fix #1315 Fix for todo URLs not compatible with slugs by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1316 * Fixes #1122 But in port service lookup that caused multiple entries of Port with same port number but different service name/description by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1317 
-## New Contributors +#### New Contributors * @emmanuel-ferdman made their first contribution in https://github.com/yogeshojha/rengine/pull/1286 **Full Changelog**: https://github.com/yogeshojha/rengine/compare/v2.1.0...v2.1.1 @@ -66,7 +147,7 @@ * Release/2.1.0 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1147 * Dockerfile Build Multiple Platforms by @vncloudsco in https://github.com/yogeshojha/rengine/pull/1210 -## New Contributors +#### New Contributors * @fopina made their first contribution in https://github.com/yogeshojha/rengine/pull/1230 * @iuime made their first contribution in https://github.com/yogeshojha/rengine/pull/1137 * @null-ref-0000 made their first contribution in https://github.com/yogeshojha/rengine/pull/1275 @@ -91,7 +172,7 @@ * Fix/infoga removal by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1249 * Fix #1241 by @yogeshojha in https://github.com/yogeshojha/rengine/pull/1251 -## New Contributors +#### New Contributors * @Talanor made their first contribution in https://github.com/yogeshojha/rengine/pull/1245 * @specters312 made their first contribution in https://github.com/yogeshojha/rengine/pull/1239 * @TH3xACE made their first contribution in https://github.com/yogeshojha/rengine/pull/1224 @@ -121,7 +202,7 @@ * Fix uninitialised variable cmd in custom_subdomain_tools by @cpandya2909 in https://github.com/yogeshojha/rengine/pull/1207 * [FIX] security: OS Command Injection vulnerability (x2) #1219 by @0xtejas in https://github.com/yogeshojha/rengine/pull/1227 -## New Contributors :rocket: +### New Contributors :rocket: * @yarysp made their first contribution in https://github.com/yogeshojha/rengine/pull/1199 * @jostasik made their first contribution in https://github.com/yogeshojha/rengine/pull/1226 * @cpandya2909 made their first contribution in https://github.com/yogeshojha/rengine/pull/1207 @@ 
-144,7 +225,7 @@ * Change Redirect URL after login to prevent 500 error by @psyray in https://github.com/yogeshojha/rengine/pull/1124 * fix-1030: Add missing slug on target summary link by @psyray in https://github.com/yogeshojha/rengine/pull/1123 -## New Contributors +### New Contributors * @Deathpoolxrs made their first contribution in https://github.com/yogeshojha/rengine/pull/1149 * @ErdemOzgen made their first contribution in https://github.com/yogeshojha/rengine/pull/1126 @@ -196,7 +277,7 @@ * Fix report generation when `Ignore Informational Vulnerabilities` checked by @psyray in https://github.com/yogeshojha/rengine/pull/1100 * fix(tool_arsenal): incorrect regex version numbers by @AnonymousWP in https://github.com/yogeshojha/rengine/pull/1086 -## New Contributors +### New Contributors * @luizmlo made their first contribution in https://github.com/yogeshojha/rengine/pull/1029 :partying_face: * @aqhmal made their first contribution in https://github.com/yogeshojha/rengine/pull/1021 :partying_face: * @C0wnuts made their first contribution in https://github.com/yogeshojha/rengine/pull/973 :partying_face: diff --git a/README.md b/README.md index 4a7b45d2e..be6ebd458 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@
+ reNgine 2.2.0 comes with bounty hub where you can sync and import your hackerone programs, in-app notifications, chaos as subdomain enumeration tool, ability to upload multiple nuclei and gf patterns, support for regex in out of scope subdomain config, additional pdf report template and many more. + Check out What's new in reNgine 2.2.0! +
-Unleash the power of LLM toolkit! Now you can use local LLM models to generate attack surface and vulnerability reports!, Checkout the release-notes!
Note: Parts of this README were written or refined using AI language models.
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index f80e3d91b..29f8a3c8f 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -58,6 +58,7 @@ services: command: celery -A reNgine beat -l INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler depends_on: - celery + - db environment: - DEBUG=1 - CELERY_BROKER=redis://redis:6379/0 @@ -94,9 +95,6 @@ services: - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.2' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index e46db5430..141bf724e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -68,6 +68,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} depends_on: - celery + - db volumes: - ./web:/usr/src/app - github_repos:/usr/src/github @@ -96,9 +97,6 @@ services: - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.2' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index cc177285d..952330450 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -30,27 +30,27 @@ read -p "$(echo -e ${WARNING}"Are you sure you want to proceed? 
(y/Y/yes/YES to # change answer to lowecase for comparison ANSWER_LC=$(echo "$CONFIRM" | tr '[:upper:]' '[:lower:]') -if [[ "$ANSWER_LC" != "y" && "$ANSWER_LC" != "yes" ]]; then - print_status "${YELLOW}Uninstall aborted by user.${RESET}" +if [ -z "$CONFIRM" ] || { [ "$CONFIRM" != "y" ] && [ "$CONFIRM" != "Y" ] && [ "$CONFIRM" != "yes" ] && [ "$CONFIRM" != "Yes" ] && [ "$CONFIRM" != "YES" ]; }; then + print_status "${WARNING}Uninstall aborted by user.${RESET}" exit 0 fi print_status "${INFO}Proceeding with uninstalling reNgine${RESET}" print_status "Stopping all containers related to reNgine..." -docker stop $(docker ps -a -q --filter name=rengine-) 2>/dev/null +docker stop $(docker ps -a -q --filter name=rengine) 2>/dev/null print_status "Removing all containers related to reNgine..." -docker rm $(docker ps -a -q --filter name=rengine-) 2>/dev/null +docker rm $(docker ps -a -q --filter name=rengine) 2>/dev/null print_status "Removing all volumes related to reNgine..." -docker volume rm $(docker volume ls -q --filter name=rengine-) 2>/dev/null +docker volume rm $(docker volume ls -q --filter name=rengine) 2>/dev/null print_status "Removing all networks related to reNgine..." -docker network rm $(docker network ls -q --filter name=rengine-) 2>/dev/null +docker network rm $(docker network ls -q --filter name=rengine) 2>/dev/null print_status "Removing all images related to reNgine..." 
-docker rmi $(docker images -q --filter reference=rengine-) 2>/dev/null +docker rmi $(docker images -q --filter reference=rengine) 2>/dev/null print_status "Performing final cleanup" docker system prune -f --volumes --filter "label=com.docker.compose.project=rengine" diff --git a/web/.version b/web/.version new file mode 100644 index 000000000..a4b6ac3de --- /dev/null +++ b/web/.version @@ -0,0 +1 @@ +v2.2.0 diff --git a/web/Dockerfile b/web/Dockerfile index c5c0af4c9..22f3e68b1 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -15,66 +15,60 @@ LABEL name="reNgine" \ # Environment variables ENV DEBIAN_FRONTEND="noninteractive" \ - DATABASE="postgres" \ - PYTHONDONTWRITEBYTECODE=1 \ - PYTHONUNBUFFERED=1 \ - HOME="/root" \ - GOROOT="/usr/local/go" \ - GOPATH="/root/go" \ - PATH="$PATH:/usr/local/go/bin:/root/go/bin" \ - GO111MODULE=on - -# Install required packages and add Mozilla Team PPA -RUN ARCH=$(dpkg --print-architecture) \ - && echo "$SUPPORTED_ARCH" | grep -qw "$ARCH" || { \ - echo "Unsupported architecture: $ARCH"; exit 1; \ - } \ - && apt update -y \ - && apt install -y --no-install-recommends \ - python3.10 python3-dev python3-pip \ - build-essential cmake geoip-bin geoip-database \ - gcc git libpq-dev libpango-1.0-0 libpangoft2-1.0-0 \ - libpcap-dev netcat nmap x11-utils xvfb wget curl \ - python3-netaddr software-properties-common \ - gpg-agent \ - && add-apt-repository -y ppa:mozillateam/ppa \ - && apt update -y - -# Install Go based on architecture -RUN ARCH=$(dpkg --print-architecture) \ - && case "$ARCH" in \ - arm64) GOFILE="go${GOVERSION}.linux-arm64.tar.gz" ;; \ - amd64) GOFILE="go${GOVERSION}.linux-amd64.tar.gz" ;; \ - armhf|armv6|armv7) GOFILE="go${GOVERSION}.linux-armv6l.tar.gz" ;; \ - i386) GOFILE="go${GOVERSION}.linux-386.tar.gz" ;; \ - *) echo "Unsupported architecture: $ARCH"; exit 1 ;; \ - esac \ - && wget https://go.dev/dl/${GOFILE} \ - && tar -xvf ${GOFILE} -C /usr/local \ - && rm ${GOFILE} - -# Install Geckodriver based on 
architecture -RUN ARCH=$(dpkg --print-architecture) \ - && case "$ARCH" in \ - arm64) GECKOPATH="geckodriver-v${GECKOVERSION}-linux-aarch64.tar.gz" \ - GECKOREPO="https://github.com/khulnasoft-lab/geckodriver/releases/download/v${GECKOVERSION}/${GECKOPATH}" ;; \ - armv7l) GECKOPATH="geckodriver-v${GECKOVERSION}-linux-armv7l.tar.gz" \ - GECKOREPO="https://github.com/khulnasoft-lab/geckodriver/releases/download/v${GECKOVERSION}/${GECKOPATH}" ;; \ - amd64) GECKOPATH="geckodriver-v${GECKOVERSION}-linux64.tar.gz" ;; \ - armhf|armv6|i386) GECKOPATH="geckodriver-v${GECKOVERSION}-linux32.tar.gz" ;; \ - *) echo "Unsupported architecture: $ARCH"; exit 1 ;; \ - esac \ - && wget ${GECKOREPO:-https://github.com/mozilla/geckodriver/releases/download/v${GECKOVERSION}/${GECKOPATH}} \ - && tar -xvf ${GECKOPATH} -C /usr/local/bin \ - && rm ${GECKOPATH} + DATABASE="postgres" +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 +ENV GOROOT="/usr/local/go" +ENV GOPATH=$HOME/go +ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin" + +# Install Python +RUN apt update -y && \ + apt install -y \ + python3.10 \ + python3-dev \ + python3-pip + +# Install essential packages +RUN apt install -y --no-install-recommends \ + build-essential \ + cmake \ + geoip-bin \ + geoip-database \ + gcc \ + git \ + libpq-dev \ + libpango-1.0-0 \ + libpangoft2-1.0-0 \ + libpcap-dev \ + netcat \ + nmap \ + x11-utils \ + xvfb \ + wget \ + curl \ + python3-netaddr \ + software-properties-common + +RUN add-apt-repository ppa:mozillateam/ppa + +RUN ARCH=$(dpkg --print-architecture) \ + && curl -L https://go.dev/dl/go${GOVERSION}.linux-${ARCH}.tar.gz | tar -xzC /usr/local + +RUN ARCH=$(dpkg --print-architecture) \ + && if [ "${ARCH}" = "arm64" ]; then \ + GECKOPATH="geckodriver-v${GECKOVERSION}-linux-aarch64.tar.gz"; \ + elif [ "${ARCH}" = "amd64" ]; then \ + GECKOPATH="geckodriver-v${GECKOVERSION}-linux64.tar.gz"; \ + fi \ + && wget 
https://github.com/mozilla/geckodriver/releases/download/v${GECKOVERSION}/${GECKOPATH} \ + && tar -xvf ${GECKOPATH} \ + && rm ${GECKOPATH} \ + && mv geckodriver /usr/bin # Install Rust for orjson -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ - && echo "source $HOME/.cargo/env" >> $HOME/.bashrc - -ENV PATH="/root/.cargo/bin:$PATH" - -# Install Maturin for Python bindings +RUN set -e; curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +ENV PATH="/root/.cargo/bin:${PATH}" RUN pip3 install maturin # Set working directory @@ -88,6 +82,7 @@ RUN printf "\ github.com/tomnomnom/waybackurls@latest\n\ github.com/projectdiscovery/httpx/cmd/httpx@latest\n\ github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest\n\ + github.com/projectdiscovery/chaos-client/cmd/chaos@latest\n\ github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest\n\ github.com/projectdiscovery/naabu/v2/cmd/naabu@latest\n\ github.com/hakluke/hakrawler@latest\n\ @@ -105,7 +100,10 @@ RUN printf "\ # Update Nuclei and Nuclei-Templates RUN nuclei -update-templates -# Install Python dependencies +# update chaos +RUN chaos -update + +# Copy requirements COPY ./requirements.txt /tmp/requirements.txt RUN pip3 install --upgrade setuptools==72.1.0 \ && pip3 install -r /tmp/requirements.txt --no-cache-dir diff --git a/web/api/serializers.py b/web/api/serializers.py index 1fd0b7e91..a01c9b909 100644 --- a/web/api/serializers.py +++ b/web/api/serializers.py @@ -1,6 +1,5 @@ from dashboard.models import * -from django.contrib.humanize.templatetags.humanize import (naturalday, - naturaltime) +from django.contrib.humanize.templatetags.humanize import (naturalday, naturaltime) from django.db.models import F, JSONField, Value from recon_note.models import * from reNgine.common_func import * @@ -8,6 +7,60 @@ from scanEngine.models import * from startScan.models import * from targetApp.models import * +from dashboard.models import InAppNotification + + +class 
HackerOneProgramAttributesSerializer(serializers.Serializer): + """ + Serializer for HackerOne Program + IMP: THIS is not a model serializer, programs will not be stored in db + due to ever changing nature of programs, rather cache will be used on these serializers + """ + handle = serializers.CharField(required=False) + name = serializers.CharField(required=False) + currency = serializers.CharField(required=False) + submission_state = serializers.CharField(required=False) + triage_active = serializers.BooleanField(allow_null=True, required=False) + state = serializers.CharField(required=False) + started_accepting_at = serializers.DateTimeField(required=False) + bookmarked = serializers.BooleanField(required=False) + allows_bounty_splitting = serializers.BooleanField(required=False) + offers_bounties = serializers.BooleanField(required=False) + open_scope = serializers.BooleanField(allow_null=True, required=False) + fast_payments = serializers.BooleanField(allow_null=True, required=False) + gold_standard_safe_harbor = serializers.BooleanField(allow_null=True, required=False) + + def to_representation(self, instance): + return {key: value for key, value in instance.items()} + + +class HackerOneProgramSerializer(serializers.Serializer): + id = serializers.CharField() + type = serializers.CharField() + attributes = HackerOneProgramAttributesSerializer() + + + +class InAppNotificationSerializer(serializers.ModelSerializer): + class Meta: + model = InAppNotification + fields = [ + 'id', + 'title', + 'description', + 'icon', + 'is_read', + 'created_at', + 'notification_type', + 'status', + 'redirect_link', + 'open_in_new_tab', + 'project' + ] + read_only_fields = ['id', 'created_at'] + + def get_project_name(self, obj): + return obj.project.name if obj.project else None class SearchHistorySerializer(serializers.ModelSerializer): diff --git a/web/api/shared_api_tasks.py b/web/api/shared_api_tasks.py new file mode 100644 index 000000000..d21ca23fa --- /dev/null +++ 
b/web/api/shared_api_tasks.py @@ -0,0 +1,209 @@ +# include all the celery tasks to be used in the API, do not put in tasks.py +import requests + +from reNgine.common_func import create_inappnotification, get_hackerone_key_username +from reNgine.definitions import PROJECT_LEVEL_NOTIFICATION, HACKERONE_ALLOWED_ASSET_TYPES +from reNgine.celery import app +from reNgine.database_utils import bulk_import_targets + +@app.task(name='import_hackerone_programs_task', bind=False, queue='api_queue') +def import_hackerone_programs_task(handles, project_slug, is_sync = False): + """ + Runs in the background to import programs from HackerOne + + Args: + handles (list): List of handles to import + project_slug (str): Slug of the project + is_sync (bool): If the import is a sync operation + Returns: + None + rather creates inapp notifications + """ + def fetch_program_details_from_hackerone(program_handle): + url = f'https://api.hackerone.com/v1/hackers/programs/{program_handle}' + headers = {'Accept': 'application/json'} + creds = get_hackerone_key_username() + + if not creds: + raise Exception("HackerOne API credentials not configured") + + username, api_key = creds + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + + if response.status_code == 200: + return response.json() + else: + return None + + for handle in handles: + program_details = fetch_program_details_from_hackerone(handle) + if program_details: + # Thanks, some parts of this logics were originally written by @null-ref-0000 + # via PR https://github.com/yogeshojha/rengine/pull/1410 + try: + program_name = program_details['attributes']['name'] + + assets = [] + scopes = program_details['relationships']['structured_scopes']['data'] + for scope in scopes: + asset_type = scope['attributes']['asset_type'] + asset_identifier = scope['attributes']['asset_identifier'] + 
eligible_for_submission = scope['attributes']['eligible_for_submission'] + + # for now we should ignore the scope that are not eligible for submission + # in future release we will add this in target out_of_scope + + # we need to filter the scope that are supported by reNgine now + if asset_type in HACKERONE_ALLOWED_ASSET_TYPES and eligible_for_submission: + assets.append(asset_identifier) + + # in some cases asset_type is OTHER and may contain the asset + elif asset_type == 'OTHER' and ('.' in asset_identifier or asset_identifier.startswith('http')): + assets.append(asset_identifier) + + # cleanup assets + assets = list(set(assets)) + + # convert assets to list of dict with name and description + assets = [{'name': asset, 'description': None} for asset in assets] + new_targets_added = bulk_import_targets( + targets=assets, + project_slug=project_slug, + organization_name=program_name, + org_description='Imported from Hackerone', + h1_team_handle=handle + ) + + if new_targets_added: + create_inappnotification( + title=f"HackerOne Program Imported: {handle}", + description=f"The program '{program_name}' from hackerone has been successfully imported.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-circle", + status='success' + ) + + except Exception as e: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from hackerone with handle '{handle}'. {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + else: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from hackerone with handle '{handle}'. 
Program details could not be fetched.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + + if is_sync: + title = "HackerOne Program Sync Completed" + description = f"Sync process for {len(handles)} program(s) has completed." + else: + title = "HackerOne Program Import Completed" + description = f"Import process for {len(handles)} program(s) has completed." + + create_inappnotification( + title=title, + description=description, + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-all", + status='success' + ) + + +@app.task(name='sync_bookmarked_programs_task', bind=False, queue='api_queue') +def sync_bookmarked_programs_task(project_slug): + """ + Runs in the background to sync bookmarked programs from HackerOne + + Args: + project_slug (str): Slug of the project + Returns: + None + Creates in-app notifications for progress and results + """ + + def fetch_bookmarked_programs(): + url = f'https://api.hackerone.com/v1/hackers/programs?&page[size]=100' + headers = {'Accept': 'application/json'} + bookmarked_programs = [] + + credentials = get_hackerone_key_username() + if not credentials: + raise Exception("HackerOne API credentials not configured") + + username, api_key = credentials + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + programs = data['data'] + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + bookmarked_programs.extend(bookmarked) + + url = data['links'].get('next') + + return bookmarked_programs + + try: + bookmarked_programs = fetch_bookmarked_programs() + handles = [program['attributes']['handle'] for program in 
bookmarked_programs] + + if not handles: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Completed", + description="No bookmarked programs found.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-information", + status='info' + ) + return + + import_hackerone_programs_task.delay(handles, project_slug, is_sync=True) + + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Progress", + description=f"Found {len(handles)} bookmarked program(s). Starting import process.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-progress-check", + status='info' + ) + + except Exception as e: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Failed", + description=f"Failed to sync bookmarked programs: {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) diff --git a/web/api/urls.py b/web/api/urls.py index cfdd8f265..7c1c12802 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -19,6 +19,8 @@ router.register(r'listIps', IpAddressViewSet) router.register(r'listActivityLogs', ListActivityLogsViewSet) router.register(r'listScanLogs', ListScanLogsViewSet) +router.register(r'notifications', InAppNotificationManagerViewSet, basename='notification') +router.register(r'hackerone-programs', HackerOneProgramViewSet, basename='hackerone_program') urlpatterns = [ url('^', include(router.urls)), @@ -239,6 +241,11 @@ 'action/create/project', CreateProjectApi.as_view(), name='create_project'), + path( + 'toggle-bug-bounty-mode/', + ToggleBugBountyModeView.as_view(), + name='toggle_bug_bounty_mode' + ), ] urlpatterns += router.urls diff --git a/web/api/views.py b/web/api/views.py index e2c7805a0..fcea8abd9 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1,24 +1,30 @@ -import logging import re import socket -from ipaddress import IPv4Network - +import logging 
import requests import validators -from dashboard.models import * +import requests + +from ipaddress import IPv4Network from django.db.models import CharField, Count, F, Q, Value -from django.shortcuts import get_object_or_404 from django.utils import timezone from packaging import version from django.template.defaultfilters import slugify -from rest_framework import viewsets +from datetime import datetime +from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.views import APIView -from rest_framework.status import HTTP_400_BAD_REQUEST +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED +from rest_framework.decorators import action +from django.core.exceptions import ObjectDoesNotExist +from django.core.cache import cache + +from dashboard.models import * from recon_note.models import * from reNgine.celery import app from reNgine.common_func import * +from reNgine.database_utils import * from reNgine.definitions import ABORTED_TASK from reNgine.tasks import * from reNgine.llm import * @@ -27,12 +33,305 @@ from startScan.models import * from startScan.models import EndPoint from targetApp.models import * - +from api.shared_api_tasks import import_hackerone_programs_task, sync_bookmarked_programs_task from .serializers import * + logger = logging.getLogger(__name__) +class ToggleBugBountyModeView(APIView): + """ + This class manages the user bug bounty mode + """ + def post(self, request, *args, **kwargs): + user_preferences = get_object_or_404(UserPreferences, user=request.user) + user_preferences.bug_bounty_mode = not user_preferences.bug_bounty_mode + user_preferences.save() + return Response({ + 'bug_bounty_mode': user_preferences.bug_bounty_mode + }, status=status.HTTP_200_OK) + + +class HackerOneProgramViewSet(viewsets.ViewSet): + """ + This class manages the HackerOne Program model, + provides basic fetching of programs and caching + """ + CACHE_KEY = 
'hackerone_programs' + CACHE_TIMEOUT = 60 * 30 # 30 minutes + PROGRAM_CACHE_KEY = 'hackerone_program_{}' + + API_BASE = 'https://api.hackerone.com/v1/hackers' + + ALLOWED_ASSET_TYPES = ["WILDCARD", "DOMAIN", "IP_ADDRESS", "CIDR", "URL"] + + def list(self, request): + try: + sort_by = request.query_params.get('sort_by', 'age') + sort_order = request.query_params.get('sort_order', 'desc') + + programs = self.get_cached_programs() + + if sort_by == 'name': + programs = sorted(programs, key=lambda x: x['attributes']['name'].lower(), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'reports': + programs = sorted(programs, key=lambda x: x['attributes'].get('number_of_reports_for_user', 0), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'age': + programs = sorted(programs, + key=lambda x: datetime.strptime(x['attributes'].get('started_accepting_at', '1970-01-01T00:00:00.000Z'), '%Y-%m-%dT%H:%M:%S.%fZ'), + reverse=(sort_order.lower() == 'desc') + ) + + serializer = HackerOneProgramSerializer(programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_api_credentials(self): + try: + api_key = HackerOneAPIKey.objects.first() + if not api_key: + raise ObjectDoesNotExist("HackerOne API credentials not found") + return api_key.username, api_key.key + except ObjectDoesNotExist: + raise Exception("HackerOne API credentials not configured") + + @action(detail=False, methods=['get']) + def bookmarked_programs(self, request): + try: + # do not cache bookmarked programs due to the user specific nature + programs = self.fetch_programs_from_hackerone() + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + serializer = HackerOneProgramSerializer(bookmarked, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def bounty_programs(self, request): + try: + programs = 
self.get_cached_programs() + bounty_programs = [p for p in programs if p['attributes']['offers_bounties']] + serializer = HackerOneProgramSerializer(bounty_programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_cached_programs(self): + programs = cache.get(self.CACHE_KEY) + if programs is None: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return programs + + def fetch_programs_from_hackerone(self): + url = f'{self.API_BASE}/programs?page[size]=100' + headers = {'Accept': 'application/json'} + all_programs = [] + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + all_programs.extend(data['data']) + + url = data['links'].get('next') + + return all_programs + + @action(detail=False, methods=['post']) + def refresh_cache(self, request): + try: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return Response({"status": "Cache refreshed successfully"}) + except Exception as e: + return self.handle_exception(e) + + @action(detail=True, methods=['get']) + def program_details(self, request, pk=None): + try: + program_handle = pk + cache_key = self.PROGRAM_CACHE_KEY.format(program_handle) + program_details = cache.get(cache_key) + + if program_details is None: + program_details = self.fetch_program_details_from_hackerone(program_handle) + if program_details: + cache.set(cache_key, program_details, self.CACHE_TIMEOUT) + + if program_details: + filtered_scopes = [ + scope 
for scope in program_details.get('relationships', {}).get('structured_scopes', {}).get('data', []) + if scope.get('attributes', {}).get('asset_type') in self.ALLOWED_ASSET_TYPES + ] + + program_details['relationships']['structured_scopes']['data'] = filtered_scopes + + return Response(program_details) + else: + return Response({"error": "Program not found"}, status=status.HTTP_404_NOT_FOUND) + except Exception as e: + return self.handle_exception(e) + + def fetch_program_details_from_hackerone(self, program_handle): + url = f'{self.API_BASE}/programs/{program_handle}' + headers = {'Accept': 'application/json'} + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code == 200: + return response.json() + else: + return None + + @action(detail=False, methods=['post']) + def import_programs(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + handles = request.data.get('handles', []) + + if not handles: + return Response({"error": "No program handles provided"}, status=status.HTTP_400_BAD_REQUEST) + + import_hackerone_programs_task.delay(handles, project_slug) + + create_inappnotification( + title="HackerOne Program Import Started", + description=f"Import process for {len(handles)} program(s) has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-download", + status='info' + ) + + return Response({"message": f"Import process for {len(handles)} program(s) has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def 
sync_bookmarked(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + + sync_bookmarked_programs_task.delay(project_slug) + + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Started", + description="Sync process for bookmarked programs has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-sync", + status='info' + ) + + return Response({"message": "Sync process for bookmarked programs has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + def handle_exception(self, exc): + if isinstance(exc, ObjectDoesNotExist): + return Response({"error": "HackerOne API credentials not configured"}, status=status.HTTP_503_SERVICE_UNAVAILABLE) + elif str(exc) == "Invalid API credentials": + return Response({"error": "Invalid HackerOne API credentials"}, status=status.HTTP_401_UNAUTHORIZED) + else: + return Response({"error": str(exc)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + +class InAppNotificationManagerViewSet(viewsets.ModelViewSet): + """ + This class manages the notification model, provided CRUD operation on notif model + such as read notif, clear all, fetch all notifications etc + """ + serializer_class = InAppNotificationSerializer + pagination_class = None + + def get_queryset(self): + # we will see later if user based notif is needed + # return InAppNotification.objects.filter(user=self.request.user) + project_slug = self.request.query_params.get('project_slug') + queryset = InAppNotification.objects.all() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + return queryset.order_by('-created_at') + + @action(detail=False, methods=['post']) + def mark_all_read(self, request): + # marks all notification read + project_slug = 
self.request.query_params.get('project_slug') + queryset = self.get_queryset() + + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + queryset.update(is_read=True) + return Response(status=HTTP_204_NO_CONTENT) + + @action(detail=True, methods=['post']) + def mark_read(self, request, pk=None): + # mark individual notification read when cliked + notification = self.get_object() + notification.is_read = True + notification.save() + return Response(status=HTTP_204_NO_CONTENT) + + @action(detail=False, methods=['get']) + def unread_count(self, request): + # this fetches the count for unread notif mainly for the badge + project_slug = self.request.query_params.get('project_slug') + queryset = self.get_queryset() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + count = queryset.filter(is_read=False).count() + return Response({'count': count}) + + @action(detail=False, methods=['post']) + def clear_all(self, request): + # when clicked on the clear button this must be called to clear all notif + project_slug = self.request.query_params.get('project_slug') + queryset = self.get_queryset() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + queryset.delete() + return Response(status=HTTP_204_NO_CONTENT) + + class OllamaManager(APIView): def get(self, request): """ @@ -622,6 +921,11 @@ def post(self, request): h1_team_handle = data.get('h1_team_handle') description = data.get('description') domain_name = data.get('domain_name') + # remove wild card from domain + domain_name = domain_name.replace('*', '') + # if domain_name begins with . 
remove that + if domain_name.startswith('.'): + domain_name = domain_name[1:] organization_name = data.get('organization') slug = data.get('slug') @@ -629,35 +933,26 @@ def post(self, request): if not validators.domain(domain_name): return Response({'status': False, 'message': 'Invalid domain or IP'}) - project = Project.objects.get(slug=slug) - - # Create domain object in DB - domain, _ = Domain.objects.get_or_create(name=domain_name) - domain.project = project - domain.h1_team_handle = h1_team_handle - domain.description = description - if not domain.insert_date: - domain.insert_date = timezone.now() - domain.save() - - # Create org object in DB - if organization_name: - organization_obj = None - organization_query = Organization.objects.filter(name=organization_name) - if organization_query.exists(): - organization_obj = organization_query[0] - else: - organization_obj = Organization.objects.create( - name=organization_name, - project=project, - insert_date=timezone.now()) - organization_obj.domains.add(domain) + status = bulk_import_targets( + targets=[{ + 'name': domain_name, + 'description': description, + }], + organization_name=organization_name, + h1_team_handle=h1_team_handle, + project_slug=slug + ) + if status: + return Response({ + 'status': True, + 'message': 'Domain successfully added as target !', + 'domain_name': domain_name, + # 'domain_id': domain.id + }) return Response({ - 'status': True, - 'message': 'Domain successfully added as target !', - 'domain_name': domain_name, - 'domain_id': domain.id + 'status': False, + 'message': 'Failed to add as target !' 
}) @@ -763,6 +1058,9 @@ def post(self, request): if data['type'] == 'subscan': for row in data['rows']: SubScan.objects.get(id=row).delete() + elif data['type'] == 'organization': + for row in data['rows']: + Organization.objects.get(id=row).delete() response = True except Exception as e: response = False @@ -774,63 +1072,95 @@ class StopScan(APIView): def post(self, request): req = self.request data = req.data - scan_id = data.get('scan_id') - subscan_id = data.get('subscan_id') - response = {} - task_ids = [] - scan = None - subscan = None - if subscan_id: - try: - subscan = get_object_or_404(SubScan, id=subscan_id) - scan = subscan.scan_history - task_ids = subscan.celery_ids - subscan.status = ABORTED_TASK - subscan.stop_scan_date = timezone.now() - subscan.save() - create_scan_activity( - subscan.scan_history.id, - f'Subscan {subscan_id} aborted', - SUCCESS_TASK) - response['status'] = True - except Exception as e: - logging.error(e) - response = {'status': False, 'message': str(e)} - elif scan_id: + scan_ids = data.get('scan_ids', []) + subscan_ids = data.get('subscan_ids', []) + + scan_ids = [int(id) for id in scan_ids] + subscan_ids = [int(id) for id in subscan_ids] + + response = {'status': False} + + def abort_scan(scan): + response = {} + logger.info(f'Aborting scan History') try: - scan = get_object_or_404(ScanHistory, id=scan_id) + logger.info(f"Setting scan {scan} status to ABORTED_TASK") task_ids = scan.celery_ids scan.scan_status = ABORTED_TASK scan.stop_scan_date = timezone.now() scan.aborted_by = request.user scan.save() + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + tasks = ( + ScanActivity.objects + .filter(scan_of=scan) + .filter(status=RUNNING_TASK) + .order_by('-pk') + ) + for task in tasks: + task.status = ABORTED_TASK + task.time = timezone.now() + task.save() + create_scan_activity( scan.id, "Scan aborted", - SUCCESS_TASK) + ABORTED_TASK + ) response['status'] = True except Exception as e: - 
logging.error(e) + logger.error(e) response = {'status': False, 'message': str(e)} - logger.warning(f'Revoking tasks {task_ids}') - for task_id in task_ids: - app.control.revoke(task_id, terminate=True, signal='SIGKILL') + return response - # Abort running tasks - tasks = ( - ScanActivity.objects - .filter(scan_of=scan) - .filter(status=RUNNING_TASK) - .order_by('-pk') - ) - if tasks.exists(): - for task in tasks: - if subscan_id and task.id not in subscan.celery_ids: + def abort_subscan(subscan): + response = {} + logger.info(f'Aborting subscan') + try: + logger.info(f"Setting scan {subscan} status to ABORTED_TASK") + task_ids = subscan.celery_ids + + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + subscan.status = ABORTED_TASK + subscan.stop_scan_date = timezone.now() + subscan.save() + create_scan_activity( + subscan.scan_history.id, + f'Subscan aborted', + ABORTED_TASK + ) + response['status'] = True + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + return response + + for scan_id in scan_ids: + try: + scan = ScanHistory.objects.get(id=scan_id) + # if scan is already successful or aborted then do nothing + if scan.scan_status == SUCCESS_TASK or scan.scan_status == ABORTED_TASK: continue - task.status = ABORTED_TASK - task.time = timezone.now() - task.save() + response = abort_scan(scan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + for subscan_id in subscan_ids: + try: + subscan = SubScan.objects.get(id=subscan_id) + if subscan.scan_status == SUCCESS_TASK or subscan.scan_status == ABORTED_TASK: + continue + response = abort_subscan(subscan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} return Response(response) @@ -890,10 +1220,7 @@ def get(self, request): # get current version_number # remove quotes from current_version - current_version = 
((os.environ['RENGINE_CURRENT_VERSION' - ])[1:] if os.environ['RENGINE_CURRENT_VERSION' - ][0] == 'v' - else os.environ['RENGINE_CURRENT_VERSION']).replace("'", "") + current_version = RENGINE_CURRENT_VERSION # for consistency remove v from both if exists latest_version = re.search(r'v(\d+\.)?(\d+\.)?(\*|\d+)', @@ -914,8 +1241,21 @@ def get(self, request): return_response['status'] = True return_response['latest_version'] = latest_version return_response['current_version'] = current_version - return_response['update_available'] = version.parse(current_version) < version.parse(latest_version) - if version.parse(current_version) < version.parse(latest_version): + is_version_update_available = version.parse(current_version) < version.parse(latest_version) + + # if is_version_update_available then we should create inapp notification + create_inappnotification( + title='reNgine Update Available', + description=f'Update to version {latest_version} is available', + notification_type=SYSTEM_LEVEL_NOTIFICATION, + project_slug=None, + icon='mdi-update', + redirect_link='https://github.com/yogeshojha/rengine/releases', + open_in_new_tab=True + ) + + return_response['update_available'] = is_version_update_available + if is_version_update_available: return_response['changelog'] = response[0]['body'] return Response(return_response) @@ -1015,7 +1355,11 @@ def get(self, request): version_number = None _, stdout = run_command(tool.version_lookup_command) - version_number = re.search(re.compile(tool.version_match_regex), str(stdout)) + if tool.version_match_regex: + version_number = re.search(re.compile(tool.version_match_regex), str(stdout)) + else: + version_match_regex = r'(?i:v)?(\d+(?:\.\d+){2,})' + version_number = re.search(version_match_regex, str(stdout)) if not version_number: return Response({'status': False, 'message': 'Invalid version lookup command.'}) @@ -1126,13 +1470,15 @@ def get(self, request): class Whois(APIView): def get(self, request): req = 
self.request - ip_domain = req.query_params.get('ip_domain') - if not (validators.domain(ip_domain) or validators.ipv4(ip_domain) or validators.ipv6(ip_domain)): - print(f'Ip address or domain "{ip_domain}" did not pass validator.') + target = req.query_params.get('target') + if not target: + return Response({'status': False, 'message': 'Target IP/Domain required!'}) + if not (validators.domain(target) or validators.ipv4(target) or validators.ipv6(target)): + print(f'Ip address or domain "{target}" did not pass validator.') return Response({'status': False, 'message': 'Invalid domain or IP'}) is_force_update = req.query_params.get('is_reload') is_force_update = True if is_force_update and 'true' == is_force_update.lower() else False - task = query_whois.apply_async(args=(ip_domain,is_force_update)) + task = query_whois.apply_async(args=(target,is_force_update)) response = task.wait() return Response(response) diff --git a/web/art/reNgine.txt b/web/art/reNgine.txt index cf0082bd3..a94a0ea1d 100644 --- a/web/art/reNgine.txt +++ b/web/art/reNgine.txt @@ -3,6 +3,6 @@ _ __ ___| \| | __ _ _ _ __ ___ | '__/ _ \ . ` |/ _` | | '_ \ / _ \ | | | __/ |\ | (_| | | | | | __/ - |_| \___|_| \_|\__, |_|_| |_|\___| v2.1.1 + |_| \___|_| \_|\__, |_|_| |_|\___| __/ | |___/ diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh index 70f6ab74e..4a6d228dc 100755 --- a/web/celery-entrypoint.sh +++ b/web/celery-entrypoint.sh @@ -1,7 +1,40 @@ #!/bin/bash -python3 manage.py makemigrations +# apply existing migrations python3 manage.py migrate + +# make migrations for specific apps +apps=( + "targetApp" + "scanEngine" + "startScan" + "dashboard" + "recon_note" +) + +create_migrations() { + local app=$1 + echo "Creating migrations for $app..." + python3 manage.py makemigrations $app + echo "Finished creating migrations for $app" + echo "----------------------------------------" +} + +echo "Starting migration creation process..." 
+ +for app in "${apps[@]}" +do + create_migrations $app +done + +echo "Migration creation process completed." + +# apply migrations again +echo "Applying migrations..." +python3 manage.py migrate +echo "Migration process completed." + + python3 manage.py collectstatic --no-input --clear # Load default engines, keywords, and external tools @@ -151,13 +184,11 @@ then chmod +x /usr/src/github/goofuzz/GooFuzz fi -exec "$@" - # httpx seems to have issue, use alias instead!!! echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc # TEMPORARY FIX, httpcore is causing issues with celery, removing it as temp fix -python3 -m pip uninstall -y httpcore +#python3 -m pip uninstall -y httpcore # TEMPORARY FIX FOR langchain pip install tenacity==8.2.2 @@ -167,28 +198,70 @@ if [ "$DEBUG" == "1" ]; then loglevel='debug' fi -# watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --autoscale=10,0 -l INFO -Q scan_queue & -echo "Starting Workers..." 
-echo "Starting Main Scan Worker with Concurrency: $MAX_CONCURRENCY,$MIN_CONCURRENCY" -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=$loglevel --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q initiate_scan_queue -n initiate_scan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q subscan_queue -n subscan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q report_queue -n report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_notif_queue -n send_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_scan_notif_queue -n send_scan_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_task_notif_queue -n send_task_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_file_to_discord_queue -n send_file_to_discord_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent 
--concurrency=5 --loglevel=$loglevel -Q send_hackerone_report_queue -n send_hackerone_report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q parse_nmap_results_queue -n parse_nmap_results_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q geo_localize_queue -n geo_localize_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_whois_queue -n query_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$loglevel -Q run_command_queue -n run_command_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_reverse_whois_queue -n query_reverse_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_ip_history_queue -n query_ip_history_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q llm_queue -n llm_worker & -watchmedo auto-restart --recursive --pattern="*.py" 
--directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q dorking_queue -n dorking_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q osint_discovery_queue -n osint_discovery_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q h8mail_queue -n h8mail_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q theHarvester_queue -n theHarvester_worker -exec "$@" +generate_worker_command() { + local queue=$1 + local concurrency=$2 + local worker_name=$3 + local app=${4:-"reNgine.tasks"} + local directory=${5:-"/usr/src/app/reNgine/"} + + local base_command="celery -A $app worker --pool=gevent --optimization=fair --autoscale=$concurrency,1 --loglevel=$loglevel -Q $queue -n $worker_name" + + if [ "$DEBUG" == "1" ]; then + echo "watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"$directory\" -- $base_command &" + else + echo "$base_command &" + fi +} + +echo "Starting Celery Workers..." 
+ +commands="" + +# Main scan worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/reNgine/\" -- celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +else + commands+="celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +fi + +# API shared task worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/api/\" -- celery -A api.shared_api_tasks worker --pool=gevent --optimization=fair --concurrency=30 --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +else + commands+="celery -A api.shared_api_tasks worker --pool=gevent --concurrency=30 --optimization=fair --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +fi + +# worker format: "queue_name:concurrency:worker_name" +workers=( + "initiate_scan_queue:30:initiate_scan_worker" + "subscan_queue:30:subscan_worker" + "report_queue:20:report_worker" + "send_notif_queue:10:send_notif_worker" + "send_task_notif_queue:10:send_task_notif_worker" + "send_file_to_discord_queue:5:send_file_to_discord_worker" + "send_hackerone_report_queue:5:send_hackerone_report_worker" + "parse_nmap_results_queue:10:parse_nmap_results_worker" + "geo_localize_queue:20:geo_localize_worker" + "query_whois_queue:10:query_whois_worker" + "remove_duplicate_endpoints_queue:30:remove_duplicate_endpoints_worker" + "run_command_queue:50:run_command_worker" + "query_reverse_whois_queue:10:query_reverse_whois_worker" + "query_ip_history_queue:10:query_ip_history_worker" + "llm_queue:30:llm_worker" + "dorking_queue:10:dorking_worker" + "osint_discovery_queue:10:osint_discovery_worker" + "h8mail_queue:10:h8mail_worker" + "theHarvester_queue:10:theHarvester_worker" + "send_scan_notif_queue:10:send_scan_notif_worker" +) + 
+for worker in "${workers[@]}"; do + IFS=':' read -r queue concurrency worker_name <<< "$worker" + commands+="$(generate_worker_command "$queue" "$concurrency" "$worker_name")"$'\n' +done +commands="${commands%&}" + +eval "$commands" + +wait \ No newline at end of file diff --git a/web/dashboard/admin.py b/web/dashboard/admin.py index be2a79a67..0c44dd932 100644 --- a/web/dashboard/admin.py +++ b/web/dashboard/admin.py @@ -5,3 +5,7 @@ admin.site.register(Project) admin.site.register(OpenAiAPIKey) admin.site.register(NetlasAPIKey) +admin.site.register(ChaosAPIKey) +admin.site.register(HackerOneAPIKey) +admin.site.register(InAppNotification) +admin.site.register(UserPreferences) \ No newline at end of file diff --git a/web/dashboard/migrations/0002_chaosapikey_hackeroneapikey_inappnotification_userpreferences.py b/web/dashboard/migrations/0002_chaosapikey_hackeroneapikey_inappnotification_userpreferences.py new file mode 100644 index 000000000..9823c3b14 --- /dev/null +++ b/web/dashboard/migrations/0002_chaosapikey_hackeroneapikey_inappnotification_userpreferences.py @@ -0,0 +1,58 @@ +# Generated by Django 3.2.23 on 2024-09-11 01:46 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('dashboard', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='ChaosAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('key', models.CharField(max_length=500)), + ], + ), + migrations.CreateModel( + name='HackerOneAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('username', models.CharField(max_length=500)), + ('key', models.CharField(max_length=500)), + ], + ), + migrations.CreateModel( + name='UserPreferences', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('bug_bounty_mode', models.BooleanField(default=True)), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.CreateModel( + name='InAppNotification', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_type', models.CharField(choices=[('system', 'system'), ('project', 'project')], default='system', max_length=10)), + ('status', models.CharField(choices=[('success', 'Success'), ('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=10)), + ('title', models.CharField(max_length=255)), + ('description', models.TextField()), + ('icon', models.CharField(max_length=50)), + ('is_read', models.BooleanField(default=False)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('redirect_link', models.URLField(blank=True, max_length=255, null=True)), + ('open_in_new_tab', models.BooleanField(default=False)), + ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dashboard.project')), + ], + options={ + 'ordering': ['-created_at'], + }, + ), + ] diff --git a/web/dashboard/models.py b/web/dashboard/models.py index 8ed77dd43..6628b16eb 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -1,4 +1,6 @@ from django.db import models +from reNgine.definitions import * +from django.contrib.auth.models import User class SearchHistory(models.Model): @@ -41,3 +43,55 @@ class NetlasAPIKey(models.Model): def __str__(self): return self.key + + +class ChaosAPIKey(models.Model): + id = models.AutoField(primary_key=True) + key = models.CharField(max_length=500) + + def __str__(self): + return self.key + + +class HackerOneAPIKey(models.Model): + id = models.AutoField(primary_key=True) + username = models.CharField(max_length=500) + key = models.CharField(max_length=500) + + def __str__(self): + 
return self.username + + +class InAppNotification(models.Model): + project = models.ForeignKey(Project, on_delete=models.CASCADE, null=True, blank=True) + notification_type = models.CharField(max_length=10, choices=NOTIFICATION_TYPES, default='system') + status = models.CharField(max_length=10, choices=NOTIFICATION_STATUS_TYPES, default='info') + title = models.CharField(max_length=255) + description = models.TextField() + icon = models.CharField(max_length=50) # mdi icon class name + is_read = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True) + redirect_link = models.URLField(max_length=255, blank=True, null=True) + open_in_new_tab = models.BooleanField(default=False) + + class Meta: + ordering = ['-created_at'] + + def __str__(self): + if self.notification_type == 'system': + return f"System wide notif: {self.title}" + else: + return f"Project wide notif: {self.project.name}: {self.title}" + + @property + def is_system_wide(self): + # property to determine if the notification is system wide or project specific + return self.notification_type == 'system' + + +class UserPreferences(models.Model): + user = models.OneToOneField(User, on_delete=models.CASCADE) + bug_bounty_mode = models.BooleanField(default=True) + + def __str__(self): + return f"{self.user.username}'s preferences" \ No newline at end of file diff --git a/web/dashboard/templates/dashboard/bountyhub_programs.html b/web/dashboard/templates/dashboard/bountyhub_programs.html new file mode 100644 index 000000000..130d1e054 --- /dev/null +++ b/web/dashboard/templates/dashboard/bountyhub_programs.html @@ -0,0 +1,92 @@ +{% extends 'base/base.html' %} +{% load humanize %} +{% load static %} + +{% block title %} +{{platform}} Programs +{% endblock title %} + +{% block custom_js_css_link %} +{% endblock custom_js_css_link %} + +{% block page_title %} +{{platform}} Programs +{% endblock page_title %} + +{% block breadcrumb_title %} +