diff --git a/.env b/.env
index fca522eee..f5d282d0c 100644
--- a/.env
+++ b/.env
@@ -28,3 +28,10 @@ POSTGRES_HOST=db
#
MAX_CONCURRENCY=80
MIN_CONCURRENCY=10
+
+#
+# Rengine web interface super user (for non-interactive install)
+#
+DJANGO_SUPERUSER_USERNAME=rengine
+DJANGO_SUPERUSER_EMAIL=rengine@example.com
+DJANGO_SUPERUSER_PASSWORD=Sm7IJG.IfHAFw9snSKv
diff --git a/.github/workflows/build-pr.yml b/.github/workflows/build-pr.yml
index b1c75fe0b..eb9feec1d 100644
--- a/.github/workflows/build-pr.yml
+++ b/.github/workflows/build-pr.yml
@@ -2,12 +2,19 @@ name: 🌄 Build Docker image for pull request
on:
pull_request:
- branches: [ master, stable, dev ]
+ branches:
+ - master
+ - release/*
jobs:
build:
name: Build Docker image
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/arm64
steps:
- name: Checkout the git repo
uses: actions/checkout@v4
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 439af90e8..fd438d55d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -10,6 +10,11 @@ jobs:
build:
name: Build Docker image
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/arm64
steps:
- name: Checkout the git repo
uses: actions/checkout@v4
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index cc50ad1f6..bd8a831d2 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -24,12 +24,12 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
- name: Autobuild
- uses: github/codeql-action/autobuild@v2
+ uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 00cf16f06..297b91930 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -8,6 +8,7 @@ Thanks to these individuals for making reNgine awesome by fixing bugs, resolving
* [Suprita-25](https://github.com/Suprita-25)
* [TheBinitGhimire](https://github.com/TheBinitGhimire)
* [Vinay Leo](https://github.com/vinaynm)
+* [Erdem Ozgen](https://github.com/ErdemOzgen)
*If you have created a Pull request, feel free to add your name here, because we know you are awesome and deserve thanks from the community!*
diff --git a/Makefile b/Makefile
index f1b8d1d1d..37ce9ad0d 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,4 @@
+include .env
.DEFAULT_GOAL:=help
# Credits: https://github.com/sherifabdlnaby/elastdocker/
@@ -6,55 +7,65 @@
COMPOSE_PREFIX_CMD := COMPOSE_DOCKER_CLI_BUILD=1
COMPOSE_ALL_FILES := -f docker-compose.yml
-SERVICES := db web proxy redis celery celery-beat
+SERVICES := db web proxy redis celery celery-beat ollama
# --------------------------
.PHONY: setup certs up build username pull down stop restart rm logs
certs: ## Generate certificates.
- @${COMPOSE_PREFIX_CMD} docker-compose -f docker-compose.setup.yml run --rm certs
+ @${COMPOSE_PREFIX_CMD} docker compose -f docker-compose.setup.yml run --rm certs
setup: ## Generate certificates.
@make certs
up: ## Build and start all services.
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} up -d --build ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} up -d --build ${SERVICES}
build: ## Build all services.
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} build ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} build ${SERVICES}
username: ## Generate Username (Use only after make up).
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser
+ifeq ($(isNonInteractive), true)
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser --username ${DJANGO_SUPERUSER_USERNAME} --email ${DJANGO_SUPERUSER_EMAIL} --noinput
+else
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser
+endif
+
+changepassword: ## Change password for user
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} exec web python3 manage.py changepassword
+
+migrate: ## Apply migrations
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate
pull: ## Pull Docker images.
docker login docker.pkg.github.com
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} pull
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} pull
down: ## Down all services.
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} down
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} down
stop: ## Stop all services.
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} stop ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} stop ${SERVICES}
restart: ## Restart all services.
- ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} restart ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose ${COMPOSE_ALL_FILES} restart ${SERVICES}
rm: ## Remove all services containers.
- ${COMPOSE_PREFIX_CMD} docker-compose $(COMPOSE_ALL_FILES) rm -f ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose $(COMPOSE_ALL_FILES) rm -f ${SERVICES}
test:
- ${COMPOSE_PREFIX_CMD} docker-compose $(COMPOSE_ALL_FILES) exec celery python3 -m unittest tests/test_scan.py
+ ${COMPOSE_PREFIX_CMD} docker compose $(COMPOSE_ALL_FILES) exec celery python3 -m unittest tests/test_scan.py
logs: ## Tail all logs with -n 1000.
- ${COMPOSE_PREFIX_CMD} docker-compose $(COMPOSE_ALL_FILES) logs --follow --tail=1000 ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose $(COMPOSE_ALL_FILES) logs --follow --tail=1000 ${SERVICES}
images: ## Show all Docker images.
- ${COMPOSE_PREFIX_CMD} docker-compose $(COMPOSE_ALL_FILES) images ${SERVICES}
+ ${COMPOSE_PREFIX_CMD} docker compose $(COMPOSE_ALL_FILES) images ${SERVICES}
prune: ## Remove containers and delete volume data.
@make stop && make rm && docker volume prune -f
help: ## Show this help.
- @echo "Make application docker images and manage containers using docker-compose files."
+ @echo "Make application Docker images and manage containers using Docker Compose files."
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m (default: help)\n\nTargets:\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-12s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST)
diff --git a/README.md b/README.md
index 5cb963784..067ac34b8 100644
--- a/README.md
+++ b/README.md
@@ -27,23 +27,20 @@
+
reNgine 2.1.0 is released!
+
Unleash the power of LLM toolkit! Now you can use local LLM models to generate attack surface and vulnerability reports!, Checkout the release-notes!
+
reNgine 2.0-jasper Redefining the future of reconnaissance!
What is reNgine?
reNgine is your go-to web application reconnaissance suite that's designed to simplify and streamline the reconnaissance process for security professionals, penetration testers, and bug bounty hunters. With its highly configurable engines, data correlation capabilities, continuous monitoring, database-backed reconnaissance data, and an intuitive user interface, reNgine redefines how you gather critical information about your target web applications.
-Traditional reconnaissance tools often fall short in terms of configurability and efficiency. reNgine addresses these shortcomings and emerges as a excellent alternative to existing commercial tools.
+Traditional reconnaissance tools often fall short in terms of configurability and efficiency. reNgine addresses these shortcomings and emerges as an excellent alternative to existing commercial tools.
reNgine was created to address the limitations of traditional reconnaissance tools and provide a better alternative, even surpassing some commercial offerings. Whether you're a bug bounty hunter, a penetration tester, or a corporate security team, reNgine is your go-to solution for automating and enhancing your information-gathering efforts.
-reNgine 2.0-jasper is out now, you can [watch reNgine 2.0-jasper release trailer here!](https://youtu.be/VwkOWqiWW5g)
-
-reNgine 2.0-Jasper would not have been possible without [@ocervell](https://github.com/ocervell) valuable contributions. [@ocervell](https://github.com/ocervell) did majority of the refactoring if not all and also added a ton of features. Together, we wish to shape the future of web application reconnaissance, and it's developers like [@ocervell](https://github.com/ocervell) and a [ton of other developers and hackers from our community](https://github.com/yogeshojha/rengine/graphs/contributors) who inspire and drive us forward.
-
-Thank you, [@ocervell](https://github.com/ocervell), for your outstanding work and unwavering commitment to reNgine.
-
-Checkout our contributers here: [Contributers](https://github.com/yogeshojha/rengine/graphs/contributors)
+[Watch reNgine 2.0-jasper release trailer here!](https://youtu.be/VwkOWqiWW5g)
![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
@@ -75,7 +72,7 @@ You can find detailed documentation at [https://rengine.wiki](https://rengine.wi
reNgine is not an ordinary reconnaissance suite; it's a game-changer! We've turbocharged the traditional workflow with groundbreaking features that is sure to ease your reconnaissance game. reNgine redefines the art of reconnaissance with highly configurable scan engines, recon data correlation, continuous monitoring, GPT powered Vulnerability Report, Project Management and role based access control etc.
-🦾 reNgine has advanced reconnaissance capabilities, harnessing a range of open-source tools to deliver a comprehensive web application reconnaissance experience. With it's intuitive User Interface, it excels in subdomain discovery, pinpointing IP addresses and open ports, collecting endpoints, conducting directory and file fuzzing, capturing screenshots, and performing vulnerability scans. To summarize, it does end-to-end reconnaissance. With WHOIS identification and WAF detection, it offers deep insights into target domains. Additionally, reNgine also identifies misconfigured S3 buckets and find interesting subdomains and URLS, based on specific keywords to helps you identify your next target, making it an go to tool for efficient reconnaissance.
+🦾 reNgine has advanced reconnaissance capabilities, harnessing a range of open-source tools to deliver a comprehensive web application reconnaissance experience. With its intuitive User Interface, it excels in subdomain discovery, pinpointing IP addresses and open ports, collecting endpoints, conducting directory and file fuzzing, capturing screenshots, and performing vulnerability scans. To summarize, it does end-to-end reconnaissance. With WHOIS identification and WAF detection, it offers deep insights into target domains. Additionally, reNgine also identifies misconfigured S3 buckets and finds interesting subdomains and URLs, based on specific keywords, to help you identify your next target, making it a go-to tool for efficient reconnaissance.
🗃️ Say goodbye to recon data chaos! reNgine seamlessly integrates with a database, providing you with unmatched data correlation and organization. Forgot the hassle of grepping through json, txt or csv files. Plus, our custom query language lets you filter reconnaissance data effortlessly using natural language like operators such as filtering all alive subdomains with `http_status=200` and also filter all subdomains that are alive and has admin in name `http_status=200&name=admin`
@@ -85,17 +82,17 @@ reNgine is not an ordinary reconnaissance suite; it's a game-changer! We've turb
📃 PDF Reports: In addition to its robust reconnaissance capabilities, reNgine goes the extra mile by simplifying the report generation process, recognizing the crucial role that PDF reports play in the realm of end-to-end reconnaissance. Users can effortlessly generate and customize PDF reports to suit their exact needs. Whether it's a Full Scan Report, Vulnerability Report, or a concise reconnaissance report, reNgine provides the flexibility to choose the report type that best communicates your findings. Moreover, the level of customization is unparalleled, allowing users to select report colors, fine-tune executive summaries, and even add personalized touches like company names and footers. With GPT integration, your reports aren't just a report, with remediation steps, and impacts, you get 360-degree view of the vulnerabilities you've uncovered.
-🔖 Say Hello to Projects! reNgine 2.0 introduces a powerful addition that enables you to efficiently organize your web application reconnaissance efforts. With this feature, you can create distinct project spaces, each tailored to a specific purpose, such as personal bug bounty hunting, client engagements, or any other specialized recon task. Each projects will have separate dashboard and all the scan results will be separated from each projects, while scan engines and configuration will be shared across all the projects.
+🔖 Say Hello to Projects! reNgine 2.0 introduces a powerful addition that enables you to efficiently organize your web application reconnaissance efforts. With this feature, you can create distinct project spaces, each tailored to a specific purpose, such as personal bug bounty hunting, client engagements, or any other specialized recon task. Each project will have a separate dashboard and all the scan results will be separated from each project, while scan engines and configuration will be shared across all the projects.
-⚙️ Roles and Permissions! Begining reNgine 2.0, we've taken your web application reconnaissance to a whole new level of control and security. Now, you can assign distinct roles to your team members—Sys Admin, Penetration Tester, and Auditor—each with precisely defined permissions to tailor their access and actions within the reNgine ecosystem.
+⚙️ Roles and Permissions! In reNgine 2.0, we've taken your web application reconnaissance to a whole new level of control and security. Now, you can assign distinct roles to your team members—Sys Admin, Penetration Tester, and Auditor—each with precisely defined permissions to tailor their access and actions within the reNgine ecosystem.
- - 🔐 Sys Admin: Sys Admin is a super user that has permission to modify system and scan related configurations, scan engines, create new users, add new tools etc. Super user can initiate scans and subscans effortlessly.
+ - 🔐 Sys Admin: Sys Admin is a superuser that has permission to modify system and scan related configurations, scan engines, create new users, add new tools etc. Superuser can initiate scans and subscans effortlessly.
- 🔍 Penetration Tester: Penetration Tester will be allowed to modify and initiate scans and subscans, add or update targets, etc. A penetration tester will not be allowed to modify system configurations.
- 📊 Auditor: Auditor can only view and download the report. An auditor can not change any system or scan related configurations nor can initiate any scans or subscans.
🚀 GPT Vulnerability Report Generation: Get ready for the future of penetration testing reports with reNgine's groundbreaking feature: "GPT-Powered Report Generation"! With the power of OpenAI's GPT, reNgine now provides you with detailed vulnerability descriptions, remediation strategies, and impact assessments that read like they were written by a human security expert! **But that's not all!** Our GPT-driven reports go the extra mile by scouring the web for related news articles, blogs, and references, so you have a 360-degree view of the vulnerabilities you've uncovered. With reNgine 2.0 revolutionize your penetration testing game and impress your clients with reports that are not just informative but engaging and comprehensive with detailed analysis on impact assessment and remediation strategies.
-🥷 GPT-Powered Attack Surface Generation: With reNgine 2.0, reNgine seamlessly integrates with GPT to identify the attacks that you can likely perform on a subdomain. By making use of reconnaissance data such as page title, open ports, subdomain name etc, reNgine can advice you the attacks you could perform on a target. reNgine will also provide you the rationale on why the specific attack is likely to be successful.
+🥷 GPT-Powered Attack Surface Generation: With reNgine 2.0, reNgine seamlessly integrates with GPT to identify the attacks that you can likely perform on a subdomain. By making use of reconnaissance data such as page title, open ports, subdomain name, etc., reNgine can advise you on the attacks you could perform on a target. reNgine will also provide you the rationale on why the specific attack is likely to be successful.
🧭 Continuous monitoring: Continuous monitoring is at the core of reNgine's mission, and it's robust continuous monitoring feature ensures that their targets are under constant scrutiny. With the flexibility to schedule scans at regular intervals, penetration testers can effortlessly stay informed about their targets. What sets reNgine apart is its seamless integration with popular notification channels such as Discord, Slack, and Telegram, delivering real-time alerts for newly discovered subdomains, vulnerabilities, or any changes in reconnaissance data.
@@ -150,13 +147,14 @@ reNgine is not an ordinary reconnaissance suite; it's a game-changer! We've turb
* Identify Interesting Subdomains
* Custom GF patterns and custom Nuclei Templates
* Edit tool-related configuration files (Nuclei, Subfinder, Naabu, amass)
-* Add external tools from Github/Go
+* Add external tools from GitHub/Go
* Interoperable with other tools, Import/Export Subdomains/Endpoints
* Import Targets via IP and/or CIDRs
* Report Generation
* Toolbox: Comes bundled with most commonly used tools during penetration testing such as whois lookup, CMS detector, CVE lookup, etc.
* Identification of related domains and related TLDs for targets
* Find actionable insights such as Most Common Vulnerability, Most Common CVE ID, Most Vulnerable Target/Subdomain, etc.
+* You can now use local LLMs for Attack surface identification and vulnerability description (NEW: reNgine 2.1.0)
![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
@@ -318,12 +316,27 @@ screenshot: {
git clone https://github.com/yogeshojha/rengine && cd rengine
```
-1. Edit the dotenv file, **please make sure to change the password for postgresql `POSTGRES_PASSWORD`!**
+1. Edit the `.env` file, **please make sure to change the password for postgresql `POSTGRES_PASSWORD`!**
```bash
nano .env
```
+1. **Optional, only for non-interactive install**: In the `.env` file, **please make sure to change the super admin values!**
+
+ ```bash
+ DJANGO_SUPERUSER_USERNAME=yourUsername
+ DJANGO_SUPERUSER_EMAIL=YourMail@example.com
+ DJANGO_SUPERUSER_PASSWORD=yourStrongPassword
+ ```
+ If you need to carry out a non-interactive installation, you can set up the login, email and password of the web interface admin directly from the .env file (instead of manually setting them from prompts during the installation process). This option can be useful for automated installation (via ansible, vagrant, etc.).
+
+ `DJANGO_SUPERUSER_USERNAME`: web interface admin username (used to login to the web interface).
+
+ `DJANGO_SUPERUSER_EMAIL`: web interface admin email.
+
+ `DJANGO_SUPERUSER_PASSWORD`: web interface admin password (used to login to the web interface).
+
1. In the dotenv file, you may also modify the Scaling Configurations
```bash
@@ -331,11 +344,20 @@ screenshot: {
MIN_CONCURRENCY=10
```
- MAX_CONCURRENCY: This parameter specifies the maximum number of reNgine's concurrent Celery worker processes that can be spawned. In this case, it's set to 80, meaning that the application can utilize up to 80 concurrent worker processes to execute tasks concurrently. This is useful for handling a high volume of scans or when you want to scale up processing power during periods of high demand. If you have more CPU cores, you will need to increase this for maximised performance.
+ `MAX_CONCURRENCY`: This parameter specifies the maximum number of reNgine's concurrent Celery worker processes that can be spawned. In this case, it's set to 80, meaning that the application can utilize up to 80 concurrent worker processes to execute tasks concurrently. This is useful for handling a high volume of scans or when you want to scale up processing power during periods of high demand. If you have more CPU cores, you will need to increase this for maximised performance.
+
+ `MIN_CONCURRENCY`: On the other hand, MIN_CONCURRENCY specifies the minimum number of concurrent worker processes that should be maintained, even during periods of lower demand. In this example, it's set to 10, which means that even when there are fewer tasks to process, at least 10 worker processes will be kept running. This helps ensure that the application can respond promptly to incoming tasks without the overhead of repeatedly starting and stopping worker processes.
+
+ These settings allow for dynamic scaling of Celery workers, ensuring that the application efficiently manages its workload by adjusting the number of concurrent workers based on the workload's size and complexity.
- MIN_CONCURRENCY: On the other hand, MIN_CONCURRENCY specifies the minimum number of concurrent worker processes that should be maintained, even during periods of lower demand. In this example, it's set to 10, which means that even when there are fewer tasks to process, at least 10 worker processes will be kept running. This helps ensure that the application can respond promptly to incoming tasks without the overhead of repeatedly starting and stopping worker processes.
+ Here is the ideal value for `MIN_CONCURRENCY` and `MAX_CONCURRENCY` depending on the amount of RAM your machine has:
- These settings allow for dynamic scaling of Celery workers, ensuring that the application efficiently manages its workload by adjusting the number of concurrent workers based on the workload's size and complexity
+ * 4GB: `MAX_CONCURRENCY=10`
+ * 8GB: `MAX_CONCURRENCY=30`
+ * 16GB: `MAX_CONCURRENCY=50`
+
+ This is just an ideal value which developers have tested and tried out, and it works! But feel free to play around with the values.
+ The maximum number of scans is determined by various factors: your network bandwidth, RAM, number of CPUs available, etc.
1. Run the installation script, Please keep an eye for any prompt, you will also be asked for username and password for reNgine.
@@ -343,6 +365,12 @@ screenshot: {
sudo ./install.sh
```
+ Or for a non-interactive installation, use `-n` argument (make sure you've modified the `.env` file before launching the installation).
+
+ ```bash
+ sudo ./install.sh -n
+ ```
+
If `install.sh` does not have install permission, please change it, `chmod +x install.sh`
**reNgine can now be accessed from or if you're on the VPS **
@@ -358,12 +386,12 @@ Installation instructions can be found at [https://reNgine.wiki/install/detailed
1. Updating is as simple as running the following command:
```bash
- cd rengine && sudo ./update.sh
+ cd rengine && sudo ./update.sh
```
If `update.sh` does not have execution permissions, please change it, `sudo chmod +x update.sh`
-
- **NOTE:** if you're updating from 1.3.6 and you're getting a 'password authentication failed' error, consider uninstalling 1.3.6 first, then install 2.x.x as you'd normally do.
+
+ **NOTE:** if you're updating from 1.3.6, and you're getting a 'password authentication failed' error, consider uninstalling 1.3.6 first, then install 2.x.x as you'd normally do.
### Changelog
@@ -405,7 +433,7 @@ Installation instructions can be found at [https://reNgine.wiki/install/detailed
### Contributing
-Contributions are what make the open-source community such an amazing place to learn, inspire and create. Every contributions you make is **greatly appreciated**. Your contributions can be as simple as fixing the indentation or UI, or as complex as adding new modules and features.
+Contributions are what make the open-source community such an amazing place to learn, inspire and create. Every contribution you make is **greatly appreciated**. Your contributions can be as simple as fixing the indentation or UI, or as complex as adding new modules and features.
See the [Contributing Guide](.github/CONTRIBUTING.md) to get started.
@@ -413,67 +441,80 @@ You can also [join our Discord channel #development](https://discord.gg/JuhHdHTt
![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
-### First-time Open Source contributors
+### Submitting issues
-Please note that reNgine is beginner friendly. If you have never done open-source before, we encourage you to do so. **We will be happy and proud of your first PR ever.**
+You can submit issues related to this project, but you should do it in a way that helps developers to resolve it as quickly as possible.
-You can start by resolving any [open issues](https://github.com/yogeshojha/rengine/issues).
+For that, you need to add as much valuable information as possible.
-![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
+You can have this valuable information by following these steps:
-### reNgine Support
+- Go to the root of the git cloned project
+- Edit `web/entrypoint.sh` and add `export DEBUG=1` at the top
+This should give you this result
-Please do not use GitHub for support requests. Instead, [join our Discord channel #support](https://discord.gg/azv6fzhNCE).
+ ```bash
+ #!/bin/bash
-![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
+ export DEBUG=1
-### Support and Sponsoring
+ python3 manage.py migrate
+ python3 manage.py runserver 0.0.0.0:8000
-Over the past few years, I have been working hard on reNgine to add new features with the sole aim of making it the de facto standard for reconnaissance. I spend most of my free time and weekends working on reNgine. I do this in addition to my day job. I am happy to have received such overwhelming support from the community. But to keep this project alive, I am looking for financial support.
+ exec "$@"
+ ```
+- Restart the web container: `docker compose restart web`
+- To deactivate, set **DEBUG** to **0** and restart the web container again
-| Paypal | Bitcoin | Ethereum |
-| :-------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
-|[https://www.paypal.com/paypalme/yogeshojha11](https://www.paypal.com/paypalme/yogeshojha11) | `35AiKyNswNZ4TZUSdriHopSCjNMPi63BCX` | `0xe7A337Da6ff98A28513C26A7Fec8C9b42A63d346`
+Then, with **DEBUG** set to **1**, in the `make logs` output you could see the full stack trace to debug reNgine.
-OR
+Example with the tool arsenal version check API bug.
-* Add a [GitHub Star](https://github.com/yogeshojha/rengine) to the project.
-* Tweet about this project, or maybe blogs?
-* Maybe nominate me for [GitHub Stars?](https://stars.github.com/nominate/)
-* Join DigitalOcean using my [referral link](https://m.do.co/c/e353502d19fc) your profit is **$100** and I get $25 DO credit. This will help me test reNgine on VPS before I release any major features.
+```
+web_1 | File "/usr/local/lib/python3.10/dist-packages/celery/app/task.py", line 411, in __call__
+web_1 | return self.run(*args, **kwargs)
+web_1 | TypeError: run_command() got an unexpected keyword argument 'echo'
+```
+Now you know the real error is `TypeError: run_command() got an unexpected keyword argument 'echo'`
-It takes a considerable amount of time to add new features and make sure everything works. Donating is your way of saying: **reNgine is awesome**.
+And you can post the full stack trace to your newly created issue to help developers track the root cause of the bug and fix it easily.
-Any support is greatly appreciated! Thank you!
+**Activating debug like this also gives you the full stack trace in the browser** instead of an error 500 without any details.
+So don't forget to open the developer console and check for any XHR request with error 500.
+If there's any, check the response of this request to get your detailed error.
+
+
+
+Happy issuing ;)
![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
-### reNgine Bug Bounty Program
+### First-time Open Source contributors
-[![huntr](https://cdn.huntr.dev/huntr_security_badge_mono.svg)](https://huntr.dev/bounties/disclose/?target=https%3A%2F%2Fgithub.com%2Fyogeshojha%2Frengine)
+Please note that reNgine is beginner-friendly. If you have never done open-source before, we encourage you to do so. **We will be happy and proud of your first PR ever.**
-Security researchers, welcome aboard! I'm excited to announce the reNgine bug bounty programme in collaboration with [huntr.dev](https://huntr.dev), which means that you will be rewarded for any vulnerabilities you find in reNgine.
+You can start by resolving any [open issues](https://github.com/yogeshojha/rengine/issues).
-Thank you for your interest in reporting reNgine vulnerabilities! If you are aware of any potential security vulnerabilities in reNgine, we encourage you to report them immediately via [huntr.dev](https://huntr.dev/bounties/disclose/?target=https%3A%2F%2Fgithub.com%2Fyogeshojha%2Frengine).
+![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
+
+### reNgine Support
-**Please do not disclose vulnerabilities via Github issues/blogs/tweets after/before reporting to huntr.dev as this is explicitly against the disclosure policy of huntr.dev and reNgine and will not be considered for monetary rewards.**
+Please do not use GitHub for support requests. Instead, [join our Discord channel #support](https://discord.gg/azv6fzhNCE).
-Please note that the reNgine maintainer does not set the bounty amount.
-The bounty reward is determined by an industry-first equation developed by huntr.dev to understand the popularity, impact and value of repositories to the open-source community.
+![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
-**What do I expect from security researchers?**
+### Support and Sponsoring
-* Patience: Please note that I am currently the only maintainer in reNgine and it will take some time to validate your report. I ask for your patience during this process.
-* Respect for privacy and security reports: Please do not publicly disclose any vulnerabilities (including GitHub issues) before or after reporting them on huntr.dev! This is against the disclosure policy and will not be rewarded.
-* Respect the rules
+Over the past few years, I have been working hard on reNgine to add new features with the sole aim of making it the de facto standard for reconnaissance. I spend most of my free time and weekends working on reNgine. I do this in addition to my day job. I am happy to have received such overwhelming support from the community. To keep this project alive, you may
-**What do you get in return?**
+* Add a [GitHub Star](https://github.com/yogeshojha/rengine) to the project.
+* Tweet about this project, or maybe blogs?
+* Maybe nominate me for [GitHub Stars?](https://stars.github.com/nominate/)
+* Join DigitalOcean using my [referral link](https://m.do.co/c/e353502d19fc) your profit is **$100** and I get $25 DO credit. This will help me test reNgine on VPS before I release any major features.
-* Thanks from the maintainer
-* Monetary rewards
-* CVE ID(s)
+It takes a considerable amount of time to add new features and make sure everything works. Donating is your way of saying: **reNgine is awesome**.
-Please find the [FAQ](https://www.huntr.dev/faq) and [Responsible disclosure policy](https://www.huntr.dev/policy/) from huntr.dev.
+Any support is greatly appreciated! Thank you!
![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png)
diff --git a/certs/entrypoint.sh b/certs/entrypoint.sh
index fded161ba..6d699b8d6 100755
--- a/certs/entrypoint.sh
+++ b/certs/entrypoint.sh
@@ -19,16 +19,21 @@ cert() {
-out ${FILENAME}.csr \
-subj "/C=${COUNTRY_CODE}/ST=${STATE}/L=${CITY}/O=${COMPANY}/CN=${COMMON_NAME}"
+ # Creating SAN extension which is needed by modern browsers
+ echo "subjectAltName=DNS:${COMMON_NAME}" > client-ext.cnf
+
# Create a new certificate using our own CA
openssl x509 -req -sha256 -passin pass:${AUTHORITY_PASSWORD} -days 3650 \
-in ${FILENAME}.csr -CA ca.crt -CAkey ca.key \
- -out ${FILENAME}.crt
+ -out ${FILENAME}.crt \
+ -extfile client-ext.cnf
# Rename files and remove useless ones
mv ${FILENAME}.crt ${FILENAME}.pem
cp ca.crt ${FILENAME}_chain.pem
mv ${FILENAME}.key ${FILENAME}_rsa.key
rm ${FILENAME}.csr
+ rm client-ext.cnf
}
# Create /certs folder if it does not exist
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index ffd733e31..840236685 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
db:
restart: always
@@ -98,7 +96,7 @@ services:
- POSTGRES_HOST=${POSTGRES_HOST}
# THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO
# MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/
- - RENGINE_CURRENT_VERSION='2.0.6'
+ - RENGINE_CURRENT_VERSION='2.1.0'
volumes:
- ./web:/usr/src/app
- github_repos:/usr/src/github
@@ -115,6 +113,17 @@ services:
- celery-beat
networks:
- rengine_network
+ ollama:
+ image: ollama/ollama
+ container_name: ollama
+ volumes:
+ - ollama_data:/root/.ollama
+ # ports:
+ # - "11434:11434"
+ networks:
+ - rengine_network
+ restart: always
+ # command: ["ollama", "run", "llama2-uncensored"]
networks:
rengine_network:
@@ -126,3 +135,4 @@ volumes:
github_repos:
wordlist:
scan_results:
+ ollama_data:
diff --git a/docker-compose.setup.yml b/docker-compose.setup.yml
index 6046b1136..bdc8ba5dd 100644
--- a/docker-compose.setup.yml
+++ b/docker-compose.setup.yml
@@ -1,4 +1,3 @@
-version: '3.8'
services:
certs:
build: ./certs
diff --git a/docker-compose.yml b/docker-compose.yml
index bd623618c..b6c4a07f8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
db:
restart: always
@@ -9,12 +7,15 @@ services:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_PORT=${POSTGRES_PORT}
+ ports:
+ - "127.0.0.1:5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data/
networks:
- rengine_network
redis:
+ restart: always
image: "redis:alpine"
hostname: redis
networks:
@@ -94,9 +95,10 @@ services:
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_PORT=${POSTGRES_PORT}
- POSTGRES_HOST=${POSTGRES_HOST}
+ - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD}
# THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO
# MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/
- - RENGINE_CURRENT_VERSION='2.0.6'
+ - RENGINE_CURRENT_VERSION='2.1.0'
volumes:
- ./web:/usr/src/app
- github_repos:/usr/src/github
@@ -107,7 +109,7 @@ services:
- tool_config:/root/.config
- static_volume:/usr/src/app/staticfiles/
ports:
- - "8000:8000"
+ - "127.0.0.1:8000:8000"
depends_on:
- db
- celery
@@ -140,7 +142,15 @@ services:
- scan_results:/usr/src/scan_results
networks:
- rengine_network
-
+ ollama:
+ image: ollama/ollama
+ container_name: ollama
+ volumes:
+ - ollama_data:/root/.ollama
+ ports:
+ - "11434:11434"
+ networks:
+ - rengine_network
networks:
rengine_network:
@@ -154,6 +164,7 @@ volumes:
wordlist:
scan_results:
static_volume:
+ ollama_data:
secrets:
proxy.ca:
diff --git a/install.sh b/install.sh
index 1e2ae5bd4..0f27a4b3c 100755
--- a/install.sh
+++ b/install.sh
@@ -1,5 +1,15 @@
#!/bin/bash
+usageFunction()
+{
+ echo " "
+ tput setaf 2;
+ echo "Usage: $0 (-n) (-h)"
+ echo -e "\t-n Non-interactive installation (Optional)"
+ echo -e "\t-h Show usage"
+ exit 1
+}
+
tput setaf 2;
cat web/art/reNgine.txt
@@ -7,21 +17,29 @@ tput setaf 1; echo "Before running this script, please make sure Docker is runni
tput setaf 2; echo "Changing the postgres username & password from .env is highly recommended."
tput setaf 4;
-read -p "Are you sure, you made changes to .env file (y/n)? " answer
-case ${answer:0:1} in
- y|Y|yes|YES|Yes )
- echo "Continiuing Installation!"
- ;;
- * )
- if [ -x "$(command -v nano)" ]; then
- tput setaf 2; echo "nano already installed, skipping."
- else
- sudo apt update && sudo apt install nano -y
- tput setaf 2; echo "nano installed!!!"
- fi
- nano .env
- ;;
-esac
+
+isNonInteractive=false
+while getopts nh opt; do
+ case $opt in
+ n) isNonInteractive=true ;;
+ h) usageFunction ;;
+ ?) usageFunction ;;
+ esac
+done
+
+if [ $isNonInteractive = false ]; then
+ read -p "Are you sure, you made changes to .env file (y/n)? " answer
+ case ${answer:0:1} in
+ y|Y|yes|YES|Yes )
+ echo "Continuing Installation!"
+ ;;
+ * )
+ nano .env
+ ;;
+ esac
+else
+ echo "Non-interactive installation parameter set. Installation begins."
+fi
echo " "
tput setaf 3;
@@ -32,7 +50,7 @@ echo "#########################################################################"
echo " "
tput setaf 4;
-echo "Installing reNgine and it's dependencies"
+echo "Installing reNgine and its dependencies"
echo " "
if [ "$EUID" -ne 0 ]
@@ -66,20 +84,22 @@ else
tput setaf 2; echo "Docker installed!!!"
fi
+
echo " "
tput setaf 4;
echo "#########################################################################"
-echo "Installing docker compose"
+echo "Installing Docker Compose"
echo "#########################################################################"
if [ -x "$(command -v docker compose)" ]; then
- tput setaf 2; echo "docker compose already installed, skipping."
+ tput setaf 2; echo "Docker Compose already installed, skipping."
else
- curl -L "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+ curl -L "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
- tput setaf 2; echo "docker compose installed!!!"
+ tput setaf 2; echo "Docker Compose installed!!!"
fi
+
echo " "
tput setaf 4;
echo "#########################################################################"
@@ -106,6 +126,8 @@ else
exit 1
fi
+
+
echo " "
tput setaf 4;
echo "#########################################################################"
@@ -121,9 +143,11 @@ if [ "${failed}" -eq 0 ]; then
echo "#########################################################################"
echo "Creating an account"
echo "#########################################################################"
- make username
+ make username isNonInteractive=$isNonInteractive
+ make migrate
tput setaf 2 && printf "\n%s\n" "Thank you for installing reNgine, happy recon!!"
+ echo "In case you have unapplied migrations (see above in red), run 'make migrate'"
else
tput setaf 1 && printf "\n%s\n" "reNgine installation failed!!"
fi
diff --git a/make.bat b/make.bat
index 0a99cb6cb..6b052927d 100644
--- a/make.bat
+++ b/make.bat
@@ -6,30 +6,32 @@ set COMPOSE_ALL_FILES = -f docker-compose.yml
set SERVICES = db web proxy redis celery celery-beat
:: Generate certificates.
-if "%1" == "certs" docker-compose -f docker-compose.setup.yml run --rm certs
+if "%1" == "certs" docker compose -f docker-compose.setup.yml run --rm certs
:: Generate certificates.
-if "%1" == "setup" docker-compose -f docker-compose.setup.yml run --rm certs
+if "%1" == "setup" docker compose -f docker-compose.setup.yml run --rm certs
:: Build and start all services.
-if "%1" == "up" docker-compose %COMPOSE_ALL_FILES% up -d --build %SERVICES%
+if "%1" == "up" docker compose %COMPOSE_ALL_FILES% up -d --build %SERVICES%
:: Build all services.
-if "%1" == "build" docker-compose %COMPOSE_ALL_FILES% build %SERVICES%
+if "%1" == "build" docker compose %COMPOSE_ALL_FILES% build %SERVICES%
:: Generate Username (Use only after make up).
if "%1" == "username" docker-compose %COMPOSE_ALL_FILES% exec web python3 manage.py createsuperuser
+:: Apply migrations
+if "%1" == "migrate" docker compose %COMPOSE_ALL_FILES% exec web python3 manage.py migrate
:: Pull Docker images.
if "%1" == "pull" docker login docker.pkg.github.com & docker-compose %COMPOSE_ALL_FILES% pull
:: Down all services.
-if "%1" == "down" docker-compose %COMPOSE_ALL_FILES% down
+if "%1" == "down" docker compose %COMPOSE_ALL_FILES% down
:: Stop all services.
-if "%1" == "stop" docker-compose %COMPOSE_ALL_FILES% stop %SERVICES%
+if "%1" == "stop" docker compose %COMPOSE_ALL_FILES% stop %SERVICES%
:: Restart all services.
-if "%1" == "restart" docker-compose %COMPOSE_ALL_FILES% restart %SERVICES%
+if "%1" == "restart" docker compose %COMPOSE_ALL_FILES% restart %SERVICES%
:: Remove all services containers.
-if "%1" == "rm" docker-compose %COMPOSE_ALL_FILES% rm -f %SERVICES%
+if "%1" == "rm" docker compose %COMPOSE_ALL_FILES% rm -f %SERVICES%
:: Tail all logs with -n 1000.
-if "%1" == "logs" docker-compose %COMPOSE_ALL_FILES% logs --follow --tail=1000 %SERVICES%
+if "%1" == "logs" docker compose %COMPOSE_ALL_FILES% logs --follow --tail=1000 %SERVICES%
:: Show all Docker images.
-if "%1" == "images" docker-compose %COMPOSE_ALL_FILES% images %SERVICES%
+if "%1" == "images" docker compose %COMPOSE_ALL_FILES% images %SERVICES%
:: Remove containers and delete volume data.
-if "%1" == "prune" docker-compose %COMPOSE_ALL_FILES% stop %SERVICES% & docker-compose %COMPOSE_ALL_FILES% rm -f %SERVICES% & docker volume prune -f
+if "%1" == "prune" docker compose %COMPOSE_ALL_FILES% stop %SERVICES% & docker compose %COMPOSE_ALL_FILES% rm -f %SERVICES% & docker volume prune -f
:: Show this help.
-if "%1" == "help" @echo Make application docker images and manage containers using docker-compose files only for windows.
+if "%1" == "help" @echo Make application Docker images and manage containers using Docker Compose files only for windows.
diff --git a/update.sh b/update.sh
old mode 100644
new mode 100755
index b29c9e770..6b3adc751
--- a/update.sh
+++ b/update.sh
@@ -2,6 +2,7 @@
echo "Do you want to apply your local changes after updating? (y/n)"
read answer
+answer=$(echo $answer | tr '[:upper:]' '[:lower:]')
if [[ $answer == "y" ]]; then
make down && git stash save && git pull && git stash apply && make build && make up
diff --git a/web/Dockerfile b/web/Dockerfile
index 72b13433a..b9082d594 100644
--- a/web/Dockerfile
+++ b/web/Dockerfile
@@ -1,5 +1,14 @@
# Base image
-FROM --platform=linux/amd64 ubuntu:22.04
+FROM ubuntu:22.04
+
+ARG GECKOVERSION=0.33.0
+ARG GOVERSION=1.21.5
+
+RUN ARCH=$(dpkg --print-architecture) \
+ && if [ "${ARCH}" != "arm64" ] && [ "${ARCH}" != "amd64" ]; then \
+ echo "reNgine not supported, encountered unknown architecture: ${ARCH}" \
+ && exit 1; \
+ fi
# Labels and Credits
LABEL \
@@ -18,14 +27,13 @@ ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin"
# Install Python
RUN apt update -y && \
- apt update -y && \
apt install -y \
python3.10 \
python3-dev \
- python3-pip
+ python3-pip
# Install essential packages
-RUN apt install -y --no-install-recommends \
+RUN apt install -y --no-install-recommends \
build-essential \
cmake \
geoip-bin \
@@ -47,17 +55,21 @@ RUN apt install -y --no-install-recommends \
RUN add-apt-repository ppa:mozillateam/ppa
-# Download and install go 1.20
-RUN wget https://golang.org/dl/go1.21.4.linux-amd64.tar.gz
-RUN tar -xvf go1.21.4.linux-amd64.tar.gz
-RUN rm go1.21.4.linux-amd64.tar.gz
-RUN mv go /usr/local
-
-# Download geckodriver
-RUN wget https://github.com/mozilla/geckodriver/releases/download/v0.32.0/geckodriver-v0.32.0-linux64.tar.gz
-RUN tar -xvf geckodriver-v0.32.0-linux64.tar.gz
-RUN rm geckodriver-v0.32.0-linux64.tar.gz
-RUN mv geckodriver /usr/bin
+RUN ARCH=$(dpkg --print-architecture) \
+ && wget https://go.dev/dl/go${GOVERSION}.linux-${ARCH}.tar.gz \
+ && tar -xvf go${GOVERSION}.linux-${ARCH}.tar.gz -C /usr/local \
+ && rm go${GOVERSION}.linux-${ARCH}.tar.gz
+
+RUN ARCH=$(dpkg --print-architecture) \
+ && if [ "${ARCH}" = "arm64" ]; then \
+ GECKOPATH="geckodriver-v${GECKOVERSION}-linux-aarch64.tar.gz"; \
+ elif [ "${ARCH}" = "amd64" ]; then \
+ GECKOPATH="geckodriver-v${GECKOVERSION}-linux64.tar.gz"; \
+ fi \
+ && wget https://github.com/mozilla/geckodriver/releases/download/v${GECKOVERSION}/${GECKOPATH} \
+ && tar -xvf ${GECKOPATH} \
+ && rm ${GECKOPATH} \
+ && mv geckodriver /usr/bin
# Make directory for app
WORKDIR /usr/src/app
@@ -66,52 +78,43 @@ WORKDIR /usr/src/app
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
-# Download Go packages
-RUN go install -v github.com/jaeles-project/gospider@latest
-RUN go install -v github.com/tomnomnom/gf@latest
-RUN go install -v github.com/tomnomnom/unfurl@latest
-RUN go install -v github.com/tomnomnom/waybackurls@latest
-RUN go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest
-RUN go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest
-RUN go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest
-RUN go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest
-RUN go install -v github.com/hakluke/hakrawler@latest
-RUN go install -v github.com/lc/gau/v2/cmd/gau@latest
-RUN go install -v github.com/owasp-amass/amass/v3/...@latest
-RUN go install -v github.com/ffuf/ffuf@latest
-RUN go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest
-RUN go install -v github.com/hahwul/dalfox/v2@latest
-RUN go install -v github.com/projectdiscovery/katana/cmd/katana@latest
-RUN go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest
-RUN go install -v github.com/sa7mon/s3scanner@latest
+ENV GO111MODULE=on
+RUN printf "\
+ github.com/jaeles-project/gospider@latest\n\
+ github.com/tomnomnom/gf@latest\n\
+ github.com/tomnomnom/unfurl@latest\n\
+ github.com/tomnomnom/waybackurls@latest\n\
+ github.com/projectdiscovery/httpx/cmd/httpx@latest\n\
+ github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest\n\
+ github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest\n\
+ github.com/projectdiscovery/naabu/v2/cmd/naabu@latest\n\
+ github.com/hakluke/hakrawler@latest\n\
+ github.com/lc/gau/v2/cmd/gau@latest\n\
+ github.com/owasp-amass/amass/v3/...@latest\n\
+ github.com/ffuf/ffuf@latest\n\
+ github.com/projectdiscovery/tlsx/cmd/tlsx@latest\n\
+ github.com/hahwul/dalfox/v2@latest\n\
+ github.com/projectdiscovery/katana/cmd/katana@latest\n\
+ github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest\n\
+ github.com/sa7mon/s3scanner@latest\n" | \
+ xargs -L1 go install -ldflags="-s -w" -v && \
+ rm -rf /go/pkg/* && rm -rf /root/.cache/go-build
+
# Update Nuclei and Nuclei-Templates
-RUN nuclei -update
RUN nuclei -update-templates
-# Update project discovery tools
-RUN httpx -up
-RUN naabu -up
-RUN subfinder -up
-RUN tlsx -up
-RUN katana -up
-
# Copy requirements
COPY ./requirements.txt /tmp/requirements.txt
RUN pip3 install --upgrade setuptools pip && \
- pip3 install -r /tmp/requirements.txt
-
+ pip3 install -r /tmp/requirements.txt --no-cache-dir
# install eyewitness
-
-RUN python3 -m pip install fuzzywuzzy \
+RUN python3 -m pip install --no-cache-dir fuzzywuzzy \
selenium==4.9.1 \
python-Levenshtein \
pyvirtualdisplay \
netaddr
# Copy source code
-COPY . /usr/src/app/
-
-# httpx seems to have issue, use alias instead!!!
-RUN echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc
+COPY . /usr/src/app/
\ No newline at end of file
diff --git a/web/api/urls.py b/web/api/urls.py
index f3a09b54d..f4ee867e7 100644
--- a/web/api/urls.py
+++ b/web/api/urls.py
@@ -178,6 +178,10 @@
'tool/uninstall/',
UninstallTool.as_view(),
name='uninstall_tool'),
+ path(
+ 'tool/ollama/',
+ OllamaManager.as_view(),
+ name='ollama_manager'),
path(
'rengine/update/',
RengineUpdateCheck.as_view(),
diff --git a/web/api/views.py b/web/api/views.py
index 5aa778692..b04c48145 100644
--- a/web/api/views.py
+++ b/web/api/views.py
@@ -2,6 +2,7 @@
import re
import socket
import subprocess
+from ipaddress import IPv4Network
import requests
import validators
@@ -33,6 +34,82 @@
logger = logging.getLogger(__name__)
+class OllamaManager(APIView):
+ def get(self, request):
+ """
+ API to download Ollama Models
+ sends a POST request to download the model
+ """
+ req = self.request
+ model_name = req.query_params.get('model')
+ response = {
+ 'status': False
+ }
+ try:
+ pull_model_api = f'{OLLAMA_INSTANCE}/api/pull'
+ _response = requests.post(
+ pull_model_api,
+ json={
+ 'name': model_name,
+ 'stream': False
+ }
+ ).json()
+ if _response.get('error'):
+ response['status'] = False
+ response['error'] = _response.get('error')
+ else:
+ response['status'] = True
+ except Exception as e:
+ response['error'] = str(e)
+ return Response(response)
+
+ def delete(self, request):
+ req = self.request
+ model_name = req.query_params.get('model')
+ delete_model_api = f'{OLLAMA_INSTANCE}/api/delete'
+ response = {
+ 'status': False
+ }
+ try:
+ _response = requests.delete(
+ delete_model_api,
+ json={
+ 'name': model_name
+ }
+ ).json()
+ if _response.get('error'):
+ response['status'] = False
+ response['error'] = _response.get('error')
+ else:
+ response['status'] = True
+ except Exception as e:
+ response['error'] = str(e)
+ return Response(response)
+
+ def put(self, request):
+ req = self.request
+ model_name = req.query_params.get('model')
+ # check if model_name is in DEFAULT_GPT_MODELS
+ response = {
+ 'status': False
+ }
+ use_ollama = True
+ if any(model['name'] == model_name for model in DEFAULT_GPT_MODELS):
+ use_ollama = False
+ try:
+ OllamaSettings.objects.update_or_create(
+ defaults={
+ 'selected_model': model_name,
+ 'use_ollama': use_ollama
+ },
+ id=1
+ )
+ response['status'] = True
+ except Exception as e:
+ response['error'] = str(e)
+ return Response(response)
+
+
class GPTAttackSuggestion(APIView):
def get(self, request):
req = self.request
@@ -171,7 +248,7 @@ def filter_queryset(self, qs):
if _order_direction == 'desc':
- order_col = '-{}'.format(order_col)
+ order_col = f'-{order_col}'
qs = self.queryset.filter(
Q(name__icontains=search_value) |
@@ -547,6 +624,7 @@ def post(self, request):
h1_team_handle = data.get('h1_team_handle')
description = data.get('description')
domain_name = data.get('domain_name')
+ organization_name = data.get('organization')
slug = data.get('slug')
# Validate domain name
@@ -563,6 +641,20 @@ def post(self, request):
if not domain.insert_date:
domain.insert_date = timezone.now()
domain.save()
+
+ # Create org object in DB
+ if organization_name:
+ organization_obj = None
+ organization_query = Organization.objects.filter(name=organization_name)
+ if organization_query.exists():
+ organization_obj = organization_query[0]
+ else:
+ organization_obj = Organization.objects.create(
+ name=organization_name,
+ project=project,
+ insert_date=timezone.now())
+ organization_obj.domains.add(domain)
+
return Response({
'status': True,
'message': 'Domain successfully added as target !',
@@ -712,6 +804,7 @@ def post(self, request):
task_ids = scan.celery_ids
scan.scan_status = ABORTED_TASK
scan.stop_scan_date = timezone.now()
+ scan.aborted_by = request.user
scan.save()
create_scan_activity(
scan.id,
@@ -949,7 +1042,7 @@ def get(self, request):
# if tool_github_url has https://github.com/ remove and also remove trailing /
tool_github_url = tool.github_url.replace('http://github.com/', '').replace('https://github.com/', '')
tool_github_url = remove_lead_and_trail_slash(tool_github_url)
- github_api = 'https://api.github.com/repos/{}/releases'.format(tool_github_url)
+ github_api = f'https://api.github.com/repos/{tool_github_url}/releases'
response = requests.get(github_api).json()
# check if api rate limit exceeded
if 'message' in response and response['message'] == 'RateLimited':
@@ -958,7 +1051,7 @@ def get(self, request):
return Response({'status': False, 'message': 'Not Found'})
elif not response:
return Response({'status': False, 'message': 'Not Found'})
-
+
# only send latest release
response = response[0]
@@ -1122,27 +1215,29 @@ def get(self, request):
})
try:
logger.info(f'Resolving IP address {ip_address} ...')
- domain, domains, ips = socket.gethostbyaddr(ip_address)
- response = {
- 'status': True,
- 'ip_address': ip_address,
- 'domains': domains or [domain],
- 'resolves_to': domain
- }
- except socket.herror: # ip does not have a PTR record
- logger.info(f'No PTR record for {ip_address}')
+ resolved_ips = []
+ for ip in IPv4Network(ip_address, False):
+ domains = []
+ ips = []
+ try:
+ (domain, domains, ips) = socket.gethostbyaddr(str(ip))
+ except socket.herror:
+ logger.info(f'No PTR record for {ip_address}')
+ domain = str(ip)
+ if domain not in domains:
+ domains.append(domain)
+ resolved_ips.append({'ip': str(ip),'domain': domain, 'domains': domains, 'ips': ips})
response = {
'status': True,
- 'ip_address': ip_address,
- 'domains': [ip_address],
- 'resolves_to': ip_address
+ 'orig': ip_address,
+ 'ip_address': resolved_ips,
}
except Exception as e:
logger.exception(e)
response = {
'status': False,
'ip_address': ip_address,
- 'message': 'Exception {}'.format(e)
+ 'message': f'Exception {e}'
}
finally:
return Response(response)
@@ -1788,7 +1883,7 @@ def filter_queryset(self, qs):
order_col = 'content_length'
if _order_direction == 'desc':
- order_col = '-{}'.format(order_col)
+ order_col = f'-{order_col}'
if search_value:
qs = self.queryset.filter(
@@ -1844,6 +1939,9 @@ def get_queryset(self):
subdomains = Subdomain.objects.filter(target_domain__project__slug=project)
+ if 'is_important' in req.query_params:
+ subdomains = subdomains.filter(is_important=True)
+
if target_id:
self.queryset = (
subdomains
@@ -1895,7 +1993,7 @@ def filter_queryset(self, qs):
elif _order_col == '10':
order_col = 'response_time'
if _order_direction == 'desc':
- order_col = '-{}'.format(order_col)
+ order_col = f'-{order_col}'
# if the search query is separated by = means, it is a specific lookup
# divide the search query into two half and lookup
if search_value:
@@ -2225,7 +2323,7 @@ def filter_queryset(self, qs):
elif _order_col == '9':
order_col = 'response_time'
if _order_direction == 'desc':
- order_col = '-{}'.format(order_col)
+ order_col = f'-{order_col}'
# if the search query is separated by = means, it is a specific lookup
# divide the search query into two half and lookup
if '=' in search_value or '&' in search_value or '|' in search_value or '>' in search_value or '<' in search_value or '!' in search_value:
diff --git a/web/art/reNgine.txt b/web/art/reNgine.txt
index 99971eea6..3b3a89199 100644
--- a/web/art/reNgine.txt
+++ b/web/art/reNgine.txt
@@ -3,6 +3,6 @@
_ __ ___| \| | __ _ _ _ __ ___
| '__/ _ \ . ` |/ _` | | '_ \ / _ \
| | | __/ |\ | (_| | | | | | __/
- |_| \___|_| \_|\__, |_|_| |_|\___| v2.0.4-jasper
+ |_| \___|_| \_|\__, |_|_| |_|\___| v2.1.0
__/ |
|___/
diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh
index 3e681abf0..74923206a 100755
--- a/web/celery-entrypoint.sh
+++ b/web/celery-entrypoint.sh
@@ -159,29 +159,33 @@ echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc
# TEMPORARY FIX, httpcore is causing issues with celery, removing it as temp fix
python3 -m pip uninstall -y httpcore
+loglevel='info'
+if [ "$DEBUG" == "1" ]; then
+ loglevel='debug'
+fi
# watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --autoscale=10,0 -l INFO -Q scan_queue &
echo "Starting Workers..."
echo "Starting Main Scan Worker with Concurrency: $MAX_CONCURRENCY,$MIN_CONCURRENCY"
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=info --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=info -Q initiate_scan_queue -n initiate_scan_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=info -Q subscan_queue -n subscan_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=info -Q report_queue -n report_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q send_notif_queue -n send_notif_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q send_scan_notif_queue -n send_scan_notif_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q send_task_notif_queue -n send_task_notif_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=info -Q send_file_to_discord_queue -n send_file_to_discord_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=info -Q send_hackerone_report_queue -n send_hackerone_report_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q parse_nmap_results_queue -n parse_nmap_results_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=info -Q geo_localize_queue -n geo_localize_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q query_whois_queue -n query_whois_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=info -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=info -Q run_command_queue -n run_command_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q query_reverse_whois_queue -n query_reverse_whois_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q query_ip_history_queue -n query_ip_history_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=info -Q gpt_queue -n gpt_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q dorking_queue -n dorking_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q osint_discovery_queue -n osint_discovery_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q h8mail_queue -n h8mail_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=info -Q theHarvester_queue -n theHarvester_worker
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=$loglevel --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q initiate_scan_queue -n initiate_scan_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q subscan_queue -n subscan_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q report_queue -n report_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_notif_queue -n send_notif_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_scan_notif_queue -n send_scan_notif_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_task_notif_queue -n send_task_notif_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_file_to_discord_queue -n send_file_to_discord_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_hackerone_report_queue -n send_hackerone_report_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q parse_nmap_results_queue -n parse_nmap_results_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q geo_localize_queue -n geo_localize_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_whois_queue -n query_whois_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$loglevel -Q run_command_queue -n run_command_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_reverse_whois_queue -n query_reverse_whois_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_ip_history_queue -n query_ip_history_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q gpt_queue -n gpt_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q dorking_queue -n dorking_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q osint_discovery_queue -n osint_discovery_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q h8mail_queue -n h8mail_worker &
+watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q theHarvester_queue -n theHarvester_worker
exec "$@"
diff --git a/web/dashboard/migrations/0001_initial.py b/web/dashboard/migrations/0001_initial.py
index 86394badd..44e9ac9a7 100644
--- a/web/dashboard/migrations/0001_initial.py
+++ b/web/dashboard/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.2.4 on 2022-04-23 17:58
+# Generated by Django 3.2.23 on 2024-06-19 02:43
from django.db import migrations, models
@@ -11,11 +11,42 @@ class Migration(migrations.Migration):
]
operations = [
+ migrations.CreateModel(
+ name='NetlasAPIKey',
+ fields=[
+ ('id', models.AutoField(primary_key=True, serialize=False)),
+ ('key', models.CharField(max_length=500)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='OllamaSettings',
+ fields=[
+ ('id', models.AutoField(primary_key=True, serialize=False)),
+ ('selected_model', models.CharField(max_length=500)),
+ ('use_ollama', models.BooleanField(default=True)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='OpenAiAPIKey',
+ fields=[
+ ('id', models.AutoField(primary_key=True, serialize=False)),
+ ('key', models.CharField(max_length=500)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Project',
+ fields=[
+ ('id', models.AutoField(primary_key=True, serialize=False)),
+ ('name', models.CharField(max_length=500)),
+ ('slug', models.SlugField(unique=True)),
+ ('insert_date', models.DateTimeField()),
+ ],
+ ),
migrations.CreateModel(
name='SearchHistory',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('name', models.CharField(max_length=1000)),
+ ('query', models.CharField(max_length=1000)),
],
),
]
diff --git a/web/dashboard/migrations/0002_rename_name_searchhistory_query.py b/web/dashboard/migrations/0002_rename_name_searchhistory_query.py
deleted file mode 100644
index 20fbfd414..000000000
--- a/web/dashboard/migrations/0002_rename_name_searchhistory_query.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.4 on 2022-04-23 18:07
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0001_initial'),
- ]
-
- operations = [
- migrations.RenameField(
- model_name='searchhistory',
- old_name='name',
- new_name='query',
- ),
- ]
diff --git a/web/dashboard/migrations/0003_projects.py b/web/dashboard/migrations/0003_projects.py
deleted file mode 100644
index 2c63d6b51..000000000
--- a/web/dashboard/migrations/0003_projects.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Generated by Django 3.2.4 on 2023-06-26 03:54
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0002_rename_name_searchhistory_query'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Projects',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('name', models.CharField(max_length=500)),
- ('slug', models.SlugField(max_length=200)),
- ],
- ),
- ]
diff --git a/web/dashboard/migrations/0004_rename_projects_project.py b/web/dashboard/migrations/0004_rename_projects_project.py
deleted file mode 100644
index 050e610db..000000000
--- a/web/dashboard/migrations/0004_rename_projects_project.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Generated by Django 3.2.4 on 2023-06-26 03:57
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0003_projects'),
- ]
-
- operations = [
- migrations.RenameModel(
- old_name='Projects',
- new_name='Project',
- ),
- ]
diff --git a/web/dashboard/migrations/0005_alter_project_slug.py b/web/dashboard/migrations/0005_alter_project_slug.py
deleted file mode 100644
index 99b7d68be..000000000
--- a/web/dashboard/migrations/0005_alter_project_slug.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.4 on 2023-06-26 05:28
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0004_rename_projects_project'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='project',
- name='slug',
- field=models.SlugField(unique=True),
- ),
- ]
diff --git a/web/dashboard/migrations/0006_project_insert_date.py b/web/dashboard/migrations/0006_project_insert_date.py
deleted file mode 100644
index 6d51f4e22..000000000
--- a/web/dashboard/migrations/0006_project_insert_date.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Generated by Django 3.2.4 on 2023-07-06 09:08
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0005_alter_project_slug'),
- ]
-
- operations = [
- migrations.AddField(
- model_name='project',
- name='insert_date',
- field=models.DateTimeField(default='2023-06-06'),
- preserve_default=False,
- ),
- ]
diff --git a/web/dashboard/migrations/0007_netlasapikey_openaiapikey.py b/web/dashboard/migrations/0007_netlasapikey_openaiapikey.py
deleted file mode 100644
index e6d6c596a..000000000
--- a/web/dashboard/migrations/0007_netlasapikey_openaiapikey.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Generated by Django 3.2.4 on 2023-07-22 18:59
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0006_project_insert_date'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='NetlasAPIKey',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('key', models.CharField(max_length=500)),
- ],
- ),
- migrations.CreateModel(
- name='OpenAiAPIKey',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('key', models.CharField(max_length=500)),
- ],
- ),
- ]
diff --git a/web/dashboard/migrations/0007_openaikeys.py b/web/dashboard/migrations/0007_openaikeys.py
deleted file mode 100644
index 629e5e39c..000000000
--- a/web/dashboard/migrations/0007_openaikeys.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Generated by Django 3.2.4 on 2023-07-21 09:50
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0006_project_insert_date'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='OpenAIKeys',
- fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('key', models.CharField(max_length=500)),
- ],
- ),
- ]
diff --git a/web/dashboard/migrations/0008_merge_0007_netlasapikey_openaiapikey_0007_openaikeys.py b/web/dashboard/migrations/0008_merge_0007_netlasapikey_openaiapikey_0007_openaikeys.py
deleted file mode 100644
index 7554e1592..000000000
--- a/web/dashboard/migrations/0008_merge_0007_netlasapikey_openaiapikey_0007_openaikeys.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Generated by Django 3.2.4 on 2023-07-27 10:00
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0007_netlasapikey_openaiapikey'),
- ('dashboard', '0007_openaikeys'),
- ]
-
- operations = [
- ]
diff --git a/web/dashboard/migrations/0009_delete_openaikeys.py b/web/dashboard/migrations/0009_delete_openaikeys.py
deleted file mode 100644
index b206e0121..000000000
--- a/web/dashboard/migrations/0009_delete_openaikeys.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Generated by Django 3.2.4 on 2023-07-28 04:22
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('dashboard', '0008_merge_0007_netlasapikey_openaiapikey_0007_openaikeys'),
- ]
-
- operations = [
- migrations.DeleteModel(
- name='OpenAIKeys',
- ),
- ]
diff --git a/web/dashboard/models.py b/web/dashboard/models.py
index 3d6001745..8ed77dd43 100644
--- a/web/dashboard/models.py
+++ b/web/dashboard/models.py
@@ -24,6 +24,15 @@ class OpenAiAPIKey(models.Model):
def __str__(self):
return self.key
+
+
+class OllamaSettings(models.Model):
+ id = models.AutoField(primary_key=True)
+ selected_model = models.CharField(max_length=500)
+ use_ollama = models.BooleanField(default=True)
+
+ def __str__(self):
+ return self.selected_model
class NetlasAPIKey(models.Model):
diff --git a/web/dashboard/templates/dashboard/admin.html b/web/dashboard/templates/dashboard/admin.html
index 31c754a88..a9008afd2 100644
--- a/web/dashboard/templates/dashboard/admin.html
+++ b/web/dashboard/templates/dashboard/admin.html
@@ -216,7 +216,7 @@