diff --git a/.gitattributes b/.gitattributes
index 28ac2a71..229d16fa 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,2 +1,4 @@
* linguist-vendored
-*.cr linguist-vendored=false
\ No newline at end of file
+*.cr linguist-vendored=false
+*.rb linguist-vendored=false
+Rakefile linguist-vendored=false
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index b4039749..12b53740 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -23,7 +23,7 @@ If applicable, add screenshots to help explain your problem.
**Versions**
- OS: [e.g. macos, linux]
- - Version [e.g. v0.17.0]
+ - Version [e.g. v0.18.0]
**Additional context**
Add any other context about the problem here.
diff --git a/.github/labeler.yml b/.github/labeler.yml
index bced18d8..a75ec91b 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -22,6 +22,9 @@
🏷️ tagger:
- changed-files:
- any-glob-to-any-file: [src/tagger/**, src/models/tag.cr]
+🚔 passive-scan:
+ - changed-files:
+ - any-glob-to-any-file: [src/passive_scan/**, src/models/passive_scan.cr]
💊 spec:
- changed-files:
- any-glob-to-any-file: spec/**
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fb00a42e..b15c8b6b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -3,12 +3,13 @@ name: CI
on:
pull_request_target:
branches: [main, dev]
+ paths: ['**/*.cr', shard.yml, Dockerfile]
jobs:
build-crystal:
runs-on: ubuntu-latest
strategy:
matrix:
- crystal-version: [1.10.1, 1.11.2, 1.12.2, 1.13.1]
+ crystal-version: [1.10.1, 1.11.2, 1.12.2, 1.13.3, 1.14.0]
steps:
- uses: actions/checkout@v4
- uses: MeilCli/setup-crystal-action@v4
diff --git a/.github/workflows/contributors.yml b/.github/workflows/contributors.yml
index 7a962a0a..abf4057d 100644
--- a/.github/workflows/contributors.yml
+++ b/.github/workflows/contributors.yml
@@ -16,4 +16,6 @@ jobs:
- uses: wow-actions/contributors-list@v1
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- round: true
+ round: false
+ includeBots: true
+
diff --git a/.github/workflows/deadlinks.yml b/.github/workflows/deadlinks.yml
new file mode 100644
index 00000000..b43ccebb
--- /dev/null
+++ b/.github/workflows/deadlinks.yml
@@ -0,0 +1,30 @@
+---
+ name: DeadLink
+ # Controls when the workflow will run
+ on:
+ # Allows you to run this workflow manually from the Actions tab
+ workflow_dispatch:
+ # A workflow run is made up of one or more jobs that can run sequentially or in parallel
+ jobs:
+ # This workflow contains a single job called "build"
+ build:
+ # The type of runner that the job will run on
+ runs-on: ubuntu-latest
+
+ # Steps represent a sequence of tasks that will be executed as part of the job
+ steps:
+ - name: Find Broken Link
+ uses: hahwul/deadfinder@1.4.4
+ id: broken-link
+ with:
+ command: sitemap
+ target: https://owasp-noir.github.io/noir/sitemap.xml
+ - name: Create an issue
+ uses: dacbd/create-issue-action@main
+ with:
+ token: ${{ github.token }}
+ title: DeadLink Issue
+ body: |
+ ```json
+ ${{ steps.broken-link.outputs.output }}
+ ```
\ No newline at end of file
diff --git a/.github/workflows/ghcr_publish.yml b/.github/workflows/ghcr_publish.yml
index 44becddb..f0d25828 100644
--- a/.github/workflows/ghcr_publish.yml
+++ b/.github/workflows/ghcr_publish.yml
@@ -7,7 +7,6 @@ name: GHCR Publish
on:
push:
branches: [main, dev]
- tags: [v*.*.*]
release:
types: [published]
workflow_dispatch:
diff --git a/.github/workflows/jekyll.yml b/.github/workflows/jekyll.yml
index be4a0eeb..d01b2806 100644
--- a/.github/workflows/jekyll.yml
+++ b/.github/workflows/jekyll.yml
@@ -1,3 +1,4 @@
+---
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
@@ -5,27 +6,25 @@
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll site to Pages
-
on:
# Runs on pushes targeting the default branch
push:
- branches: ["dev"]
+ branches: [dev]
+ paths:
+ - docs/**/*
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
-
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
-
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
- group: "pages"
+ group: pages
cancel-in-progress: false
-
jobs:
# Build job
build:
@@ -34,12 +33,12 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Ruby
- uses: ruby/setup-ruby@8575951200e472d5f2d95c625da0c7bec8217c42 # v1.161.0
+ uses: ruby/setup-ruby@8575951200e472d5f2d95c625da0c7bec8217c42 # v1.161.0
with:
- ruby-version: '3.2' # Not needed with a .ruby-version file
- bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- cache-version: 0 # Increment this number if you need to re-download cached gems
- working-directory: docs # Needed if your Gemfile is not in the root directory
+ ruby-version: '3.2' # Not needed with a .ruby-version file
+ bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+ cache-version: 0 # Increment this number if you need to re-download cached gems
+ working-directory: docs # Needed if your Gemfile is not in the root directory
- name: Setup Pages
id: pages
uses: actions/configure-pages@v4
@@ -65,4 +64,4 @@ jobs:
steps:
- name: Deploy to GitHub Pages
id: deployment
- uses: actions/deploy-pages@v4
\ No newline at end of file
+ uses: actions/deploy-pages@v4
diff --git a/.gitignore b/.gitignore
index 0a8daee2..2cc5bd55 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,8 @@
*.dwarf
.DS_Store
.vscode
+ameba.sh
# Ignore the public directory for Jekyll
/docs/_site/
-/docs/.jekyll-cache/
\ No newline at end of file
+/docs/.jekyll-cache/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e840e47c..b2394326 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -65,6 +65,12 @@ ameba --fix
# https://github.com/crystal-ameba/ameba#installation
```
+or
+
+```bash
+rake lint:all
+```
+
## 🧭 Code structure
- spec:
@@ -89,3 +95,25 @@ To ensure a smooth integration of your contributions, please follow these steps:
* Submit your PR to the dev branch for review.
By doing so, you'll help us keep our project up-to-date and well-organized. Your efforts are greatly appreciated, and we're excited to see what you'll bring to the project!
+
+### Setting up the Documentation Site
+
+To set up the documentation site locally, follow these steps:
+
+#### Install Dependencies
+
+We use Rake tasks to manage dependencies. Run the following command to install the necessary dependencies:
+
+```sh
+rake docs:install
+```
+
+#### Serve the Documentation Site
+
+After installing the dependencies, you can serve the documentation site locally using the following Rake task:
+
+```sh
+rake docs:serve
+```
+
+This will start a local server, and you can view the documentation by navigating to http://localhost:4000 in your web browser.
\ No newline at end of file
diff --git a/README.md b/README.md
index 9a7ccbbf..93c46301 100644
--- a/README.md
+++ b/README.md
@@ -19,13 +19,15 @@
- Installation •
Documentation •
- Available Support Scope •
+ Installation •
+ Available Support Scope •
Usage •
Contributing
+OWASP Noir is an open-source project specializing in identifying attack surfaces for enhanced whitebox security testing and security pipelines. This includes the capability to discover API endpoints, web endpoints, and other potential entry points within source code for thorough security analysis.
+
## Key Features
- Identify API endpoints and parameters from source code.
@@ -33,110 +35,9 @@
- Provide analysts with technical information and security issues identified during source code analysis.
- Friendly pipeline & DevOps integration, offering multiple output formats (JSON, YAML, OAS spec) and compatibility with tools like curl and httpie.
- Friendly Offensive Security Tools integration, allowing usage with tools such as ZAP and Caido, Burpsuite.
+- Identify security issues within the source code through rule-based passive scanning.
- Generate elegant and clear output results.
-## Available Support Scope
-
-
- Endpoint's Entities
-
-- Path
-- Method
-- Param
-- Header
-- Cookie
-- Protocol (e.g ws)
-- Details (e.g The origin of the endpoint)
-
-
-
-
- Languages and Frameworks
-
-| Language | Framework | URL | Method | Param | Header | Cookie | WS |
-|----------|-------------|-----|--------|-------|--------|--------|----|
-| Crystal | Kemal | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
-| Crystal | Lucky | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Go | Beego | ✅ | ✅ | X | X | X | X |
-| Go | Echo | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Go | Gin | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Go | Fiber | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
-| Python | Django | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Python | Flask | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Python | FastAPI | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
-| Ruby | Rails | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Ruby | Sinatra | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Ruby | Hanami | ✅ | ✅ | X | X | X | X |
-| Php | | ✅ | ✅ | ✅ | ✅ | X | X |
-| Java | Jsp | ✅ | ✅ | ✅ | X | X | X |
-| Java | Armeria | ✅ | ✅ | X | X | X | X |
-| Java | Spring | ✅ | ✅ | ✅ | ✅ | X | X |
-| Kotlin | Spring | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| JS | Express | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| JS | Restify | ✅ | ✅ | ✅ | ✅ | ✅ | X |
-| Rust | Axum | ✅ | ✅ | X | X | X | X |
-| Rust | Rocket | ✅ | ✅ | X | X | X | X |
-| Elixir | Phoenix | ✅ | ✅ | X | X | X | ✅ |
-| C# | ASP.NET MVC | ✅ | X | X | X | X | X |
-| JS | Next | X | X | X | X | X | X |
-
-
-
-
- Specification
-
-| Specification | Format | URL | Method | Param | Header | WS |
-|------------------------|---------|-----|--------|-------|--------|----|
-| OAS 2.0 (Swagger 2.0) | JSON | ✅ | ✅ | ✅ | ✅ | X |
-| OAS 2.0 (Swagger 2.0) | YAML | ✅ | ✅ | ✅ | ✅ | X |
-| OAS 3.0 | JSON | ✅ | ✅ | ✅ | ✅ | X |
-| OAS 3.0 | YAML | ✅ | ✅ | ✅ | ✅ | X |
-| RAML | YAML | ✅ | ✅ | ✅ | ✅ | X |
-| HAR | JSON | ✅ | ✅ | ✅ | ✅ | X |
-
-
-
-## Installation
-### Homebrew
-
-```bash
-brew install noir
-
-# https://formulae.brew.sh/formula/noir
-```
-
-### Snapcraft
-
-```bash
-sudo snap install noir
-
-# https://snapcraft.io/noir
-```
-
-### From Sources
-```bash
-# Install Crystal-lang
-# https://crystal-lang.org/install/
-
-# Clone this repo
-git clone https://github.com/owasp-noir/noir
-cd noir
-
-# Install Dependencies
-shards install
-
-# Build
-shards build --release --no-debug
-
-# Copy binary
-cp ./bin/noir /usr/bin/
-```
-
-### Docker (GHCR)
-```bash
-docker pull ghcr.io/owasp-noir/noir:main
-```
-
## Usage
```bash
@@ -157,45 +58,50 @@ noir -b . -u https://testapp.internal.domains -f json -T
```json
{
- "url": "https://testapp.internal.domains/query",
- "method": "POST",
- "params": [
- {
- "name": "my_auth",
- "value": "",
- "param_type": "cookie",
- "tags": []
- },
- {
- "name": "query",
- "value": "",
- "param_type": "form",
- "tags": [
+ "endpoints": [
+ {
+ "url": "https://testapp.internal.domains/query",
+ "method": "POST",
+ "params": [
+ {
+ "name": "my_auth",
+ "value": "",
+ "param_type": "cookie",
+ "tags": []
+ },
+ {
+ "name": "query",
+ "value": "",
+ "param_type": "form",
+ "tags": [
+ {
+ "name": "sqli",
+ "description": "This parameter may be vulnerable to SQL Injection attacks.",
+ "tagger": "Hunt"
+ }
+ ]
+ }
+ ],
+ "details": {
+ "code_paths": [
{
- "name": "sqli",
- "description": "This parameter may be vulnerable to SQL Injection attacks.",
- "tagger": "Hunt"
+ "path": "spec/functional_test/fixtures/crystal_kemal/src/testapp.cr",
+ "line": 8
}
]
- }
- ],
- "details": {
- "code_paths": [
- {
- "path": "spec/functional_test/fixtures/crystal_kemal/src/testapp.cr",
- "line": 8
- }
- ]
- },
- "protocol": "http",
- "tags": []
- }
+ },
+ "protocol": "http",
+ "tags": []
+ }
+ ]
+}
```
For more details, please visit our [documentation](https://owasp-noir.github.io/noir/) page.
## Contributing
+
Noir is open-source project and made it with ❤️
if you want contribute this project, please see [CONTRIBUTING.md](./CONTRIBUTING.md) and Pull-Request with cool your contents.
-![](./CONTRIBUTORS.svg)
+[![](./CONTRIBUTORS.svg)](https://github.com/owasp-noir/noir/graphs/contributors)
diff --git a/Rakefile b/Rakefile
new file mode 100644
index 00000000..e51d8f8d
--- /dev/null
+++ b/Rakefile
@@ -0,0 +1,54 @@
+namespace :docs do
+ desc "Serve the documentation site"
+ task :serve do
+ within_docs_directory do
+ unless system('bundle check')
+ puts "Bundler is not installed or dependencies are not met. Please run 'rake docs:install'."
+ exit 1
+ end
+
+ sh 'bundle exec jekyll s'
+ end
+ end
+
+ desc "Install dependencies for the documentation site"
+ task :install do
+ within_docs_directory do
+ sh 'bundle install'
+ end
+ end
+
+ desc "Generate usage documentation"
+ task :generate_usage do
+ output = `./bin/noir -h`
+ cleaned_output = output.gsub(/\e\[[0-9;]*m/, '') # Remove ANSI color codes
+ File.write('docs/_includes/usage.md', cleaned_output)
+ end
+
+ def within_docs_directory
+ Dir.chdir('docs') do
+ yield
+ end
+ rescue Errno::ENOENT => e
+ puts "Directory 'docs' not found: #{e.message}"
+ exit 1
+ rescue => e
+ puts "An error occurred: #{e.message}"
+ exit 1
+ end
+end
+
+namespace :lint do
+ desc "Format the code using crystal tool format"
+ task :format do
+ sh 'crystal tool format'
+ end
+
+ desc "Lint the code using ameba"
+ task :ameba do
+ sh 'ameba --fix'
+ end
+
+ desc "Run all linting tasks"
+ task :all => [:format, :ameba]
+end
diff --git a/docs/Gemfile b/docs/Gemfile
index 97ceb515..94d2a252 100644
--- a/docs/Gemfile
+++ b/docs/Gemfile
@@ -9,4 +9,5 @@ gem "just-the-docs"
# Plugins
group :jekyll_plugins do
gem "jekyll-securitytxt"
+ gem "jekyll-sitemap"
end
diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock
index da89ed78..760f83f7 100644
--- a/docs/Gemfile.lock
+++ b/docs/Gemfile.lock
@@ -3,8 +3,9 @@ GEM
specs:
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
+ bigdecimal (3.1.8)
colorator (1.1.0)
- concurrent-ruby (1.3.3)
+ concurrent-ruby (1.3.4)
em-websocket (0.5.3)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
@@ -12,14 +13,16 @@ GEM
ffi (1.17.0-arm64-darwin)
ffi (1.17.0-x86_64-linux-gnu)
forwardable-extended (2.6.0)
- google-protobuf (4.26.1-arm64-darwin)
+ google-protobuf (4.28.2-arm64-darwin)
+ bigdecimal
rake (>= 13)
- google-protobuf (4.26.1-x86_64-linux)
+ google-protobuf (4.28.2-x86_64-linux)
+ bigdecimal
rake (>= 13)
http_parser.rb (0.8.0)
- i18n (1.14.5)
+ i18n (1.14.6)
concurrent-ruby (~> 1.0)
- jekyll (4.3.3)
+ jekyll (4.3.4)
addressable (~> 2.4)
colorator (~> 1.0)
em-websocket (~> 0.5)
@@ -39,13 +42,15 @@ GEM
jekyll (>= 3.7, < 5.0)
jekyll-sass-converter (3.0.0)
sass-embedded (~> 1.54)
- jekyll-securitytxt (1.0.1)
+ jekyll-securitytxt (1.0.2)
jekyll
jekyll-seo-tag (2.8.0)
jekyll (>= 3.8, < 5.0)
+ jekyll-sitemap (1.4.0)
+ jekyll (>= 3.7, < 5.0)
jekyll-watch (2.2.1)
listen (~> 3.0)
- just-the-docs (0.8.2)
+ just-the-docs (0.10.0)
jekyll (>= 3.8.5)
jekyll-include-cache
jekyll-seo-tag (>= 2.0)
@@ -61,24 +66,22 @@ GEM
mercenary (0.4.0)
pathutil (0.16.2)
forwardable-extended (~> 2.6)
- public_suffix (6.0.0)
+ public_suffix (6.0.1)
rake (13.2.1)
rb-fsevent (0.11.2)
rb-inotify (0.11.1)
ffi (~> 1.0)
- rexml (3.3.2)
- strscan
- rouge (4.2.1)
+ rexml (3.3.9)
+ rouge (4.4.0)
safe_yaml (1.0.5)
- sass-embedded (1.77.1-arm64-darwin)
- google-protobuf (>= 3.25, < 5.0)
- sass-embedded (1.77.1-x86_64-linux-gnu)
- google-protobuf (>= 3.25, < 5.0)
- strscan (3.1.0)
+ sass-embedded (1.79.4-arm64-darwin)
+ google-protobuf (~> 4.27)
+ sass-embedded (1.79.4-x86_64-linux-gnu)
+ google-protobuf (~> 4.27)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
- unicode-display_width (2.5.0)
- webrick (1.8.1)
+ unicode-display_width (2.6.0)
+ webrick (1.8.2)
PLATFORMS
arm64-darwin
@@ -87,6 +90,7 @@ PLATFORMS
DEPENDENCIES
jekyll (~> 4.3.3)
jekyll-securitytxt
+ jekyll-sitemap
just-the-docs
BUNDLED WITH
diff --git a/docs/_advanced/configuration.md b/docs/_advanced/configuration.md
index 810ef7fe..0ad5c4cd 100644
--- a/docs/_advanced/configuration.md
+++ b/docs/_advanced/configuration.md
@@ -25,13 +25,15 @@ layout: page
# Noir configuration file
# This file is used to store the configuration options for Noir.
# You can edit this file to change the configuration options.
+
+# Config values are defaults; CLI options take precedence.
# **************************************************************
# Base directory for the application
base: ""
# Whether to use color in the output
-color: "yes"
+color: "true"
# The configuration file to use
config_file: ""
@@ -40,7 +42,7 @@ config_file: ""
concurrency: "100"
# Whether to enable debug mode
-debug: "no"
+debug: "false"
# Technologies to exclude
exclude_techs: ""
@@ -48,29 +50,44 @@ exclude_techs: ""
# The format to use for the output
format: "plain"
+# Whether to display HTTP status codes in the output
+status_codes: "false"
+
+# Whether to exclude HTTP status codes from the output
+exclude_codes: ""
+
# Whether to include the path in the output
-include_path: "no"
+include_path: "false"
# Whether to disable logging
-nolog: "no"
+nolog: "false"
# The output file to write to
output: ""
# The Elasticsearch server to send data to
+# e.g. http://localhost:9200
send_es: ""
# The proxy server to use
+# e.g. http://localhost:8080
send_proxy: ""
# Whether to send a request
-send_req: "no"
+send_req: "false"
-# Whether to send headers with the request
-send_with_headers: ""
+# Whether to send headers with the request (Array of strings)
+# e.g. "Authorization: Bearer token"
+send_with_headers:
-# The value to set for pvalue
-set_pvalue: ""
+# The value to set for pvalue (Array of strings)
+set_pvalue:
+set_pvalue_header:
+set_pvalue_cookie:
+set_pvalue_query:
+set_pvalue_form:
+set_pvalue_json:
+set_pvalue_path:
# The technologies to use
techs: ""
@@ -78,16 +95,18 @@ techs: ""
# The URL to use
url: ""
-# Whether to use filters
-use_filters: ""
+# Whether to use filters (Array of strings)
+use_filters:
-# Whether to use matchers
-use_matchers: ""
+# Whether to use matchers (Array of strings)
+use_matchers:
# Whether to use all taggers
-all_taggers: "no"
+all_taggers: "false"
# The taggers to use
+# e.g. "tagger1,tagger2"
+# To see the list of all taggers, please use the noir command with --list-taggers
use_taggers: ""
# The diff file to use
diff --git a/docs/_advanced/diff.md b/docs/_advanced/diff.md
index 9259e393..05ceb95b 100644
--- a/docs/_advanced/diff.md
+++ b/docs/_advanced/diff.md
@@ -1,7 +1,7 @@
---
title: Diff Mode
has_children: false
-nav_order: 4
+nav_order: 5
layout: page
---
diff --git a/docs/_advanced/passive_scan.md b/docs/_advanced/passive_scan.md
new file mode 100644
index 00000000..8eaceaf5
--- /dev/null
+++ b/docs/_advanced/passive_scan.md
@@ -0,0 +1,46 @@
+---
+title: Passive Scan
+has_children: true
+nav_order: 4
+layout: page
+---
+
+A Passive Scan is a feature where additional actions are performed by the Detector to identify security issues according to scan rules. This functionality typically includes:
+
+* Regular Expression Matching: It uses regular expressions to match patterns that could indicate security vulnerabilities.
+* String Matching: Besides regex, it looks for specific strings within the code that could be indicative of security concerns.
+* Default Rule Set: Comes with a predefined set of rules to check against common security issues.
+
+```bash
+noir -b -P
+
+# You can check the format list with the -h flag.
+# PASSIVE SCAN:
+# -P, --passive-scan Perform a passive scan for security issues using rules from the specified path
+# --passive-scan-path PATH Specify the path for the rules used in the passive security scan
+```
+
+Usage Example:
+
+When you run a command like:
+
+```bash
+noir -b ./your_app -P
+```
+
+The passive scan might produce results like:
+
+```
+★ Passive Results:
+[critical][hahwul-test][secret] use x-api-key
+ ├── extract: env.request.headers["x-api-key"].as(String)
+ └── file: ./spec/functional_test/fixtures/crystal_kemal/src/testapp.cr:4
+```
+
+Explanation of Output:
+
+* Label: `[critical][hahwul-test][secret]` - This line indicates the severity, test context, and type of issue found. Here, it's critical, related to a test named hahwul-test, and concerns a secret.
+* Extract: This shows where or how the sensitive information is being accessed or used. In this case, it's extracting an x-api-key from the request headers.
+* File: Indicates the location of the potential security issue within the codebase, pointing to the exact file and line number where the issue was detected.
+
+This output helps developers immediately identify where and what kind of security issues exist in their code, focusing on passive analysis without actively exploiting the vulnerabilities.
\ No newline at end of file
diff --git a/docs/_advanced/passive_scan/community_rules.md b/docs/_advanced/passive_scan/community_rules.md
new file mode 100644
index 00000000..1dc77270
--- /dev/null
+++ b/docs/_advanced/passive_scan/community_rules.md
@@ -0,0 +1,15 @@
+---
+title: Community Rules
+parent: Passive Scan
+has_children: false
+nav_order: 3
+layout: page
+---
+
+Community rules are managed in the repositories listed below. You can clone these repositories into the default rule path for use.
+
+To clone the `noir-passive-rules` repository to the default rule path, use the following command:
+
+```bash
+git clone https://github.com/owasp-noir/noir-passive-rules ~/.config/noir/passive_rules/
+```
\ No newline at end of file
diff --git a/docs/_advanced/passive_scan/default_rules.md b/docs/_advanced/passive_scan/default_rules.md
new file mode 100644
index 00000000..8620bf4c
--- /dev/null
+++ b/docs/_advanced/passive_scan/default_rules.md
@@ -0,0 +1,19 @@
+---
+title: Default Rules
+parent: Passive Scan
+has_children: false
+nav_order: 2
+layout: page
+---
+
+The default rules are stored in the following paths based on your operating system:
+
+| OS | Path |
+|---|---|
+| macOS | `~/.config/noir/passive_rules/` |
+| Linux | `~/.config/noir/passive_rules/` |
+| Windows | `%APPDATA%\noir\passive_rules\` |
+
+When using the `-P` (`--passive-scan`) flag, Noir references the rules stored in these paths. These rules are managed by the Noir team, ensuring they are up-to-date and effective.
+
+However, if you wish to add your own custom rules, you can place them in the respective directory for your operating system. This allows you to extend the functionality of the passive scan to meet your specific needs.
\ No newline at end of file
diff --git a/docs/_advanced/passive_scan/rule.md b/docs/_advanced/passive_scan/rule.md
new file mode 100644
index 00000000..b7a06d5d
--- /dev/null
+++ b/docs/_advanced/passive_scan/rule.md
@@ -0,0 +1,72 @@
+---
+title: Passive Scan Rule
+parent: Passive Scan
+has_children: false
+nav_order: 1
+layout: page
+---
+
+```yaml
+id: rule-id
+info:
+ name: "The name of the rule"
+ author:
+ - "List of authors"
+ - "Another author"
+ severity: "The severity level of the rule (e.g., critical, high, medium, low)"
+ description: "A brief description of the rule"
+ reference:
+ - "URLs or references related to the rule"
+
+matchers-condition: "The condition to apply between matchers (and/or)"
+matchers:
+ - type: "The type of matcher (e.g., word, regex)"
+ patterns:
+ - "Patterns to match"
+ condition: "The condition to apply within the matcher (and/or)"
+
+ - type: "The type of matcher (e.g., word, regex)"
+ patterns:
+ - "Patterns to match"
+ - "Another pattern"
+ condition: "The condition to apply within the matcher (and/or)"
+
+category: "The category of the rule (e.g., secret, vulnerability)"
+techs:
+ - "Technologies or frameworks the rule applies to"
+ - "Another technology"
+```
+
+### Example Rule: Detecting PRIVATE_KEY
+
+```yaml
+id: detect-private-key
+info:
+ name: "Detect PRIVATE_KEY"
+ author:
+ - "security-team"
+ severity: critical
+ description: "Detects the presence of PRIVATE_KEY in the code"
+ reference:
+ - "https://example.com/security-guidelines"
+
+matchers-condition: or
+matchers:
+ - type: word
+ patterns:
+ - "PRIVATE_KEY"
+ - "-----BEGIN PRIVATE KEY-----"
+ condition: or
+
+ - type: regex
+ patterns:
+ - "PRIVATE_KEY\\s*=\\s*['\"]?[^'\"]+['\"]?"
+ - "-----BEGIN PRIVATE KEY-----[\\s\\S]*?-----END PRIVATE KEY-----"
+ condition: or
+
+category: secret
+techs:
+ - '*'
+```
+
+![](../../../images/advanced/passive_private_key.png)
\ No newline at end of file
diff --git a/docs/_advanced/tips/pipeline.md b/docs/_advanced/tips/pipeline.md
index bcbb15e7..ec993243 100644
--- a/docs/_advanced/tips/pipeline.md
+++ b/docs/_advanced/tips/pipeline.md
@@ -6,4 +6,4 @@ nav_order: 3
layout: page
---
-Pipeline
\ No newline at end of file
+Coming Soon!
\ No newline at end of file
diff --git a/docs/_advanced/tips/shell-completion.md b/docs/_advanced/tips/shell-completion.md
index fcc791b0..b564fed3 100644
--- a/docs/_advanced/tips/shell-completion.md
+++ b/docs/_advanced/tips/shell-completion.md
@@ -12,16 +12,46 @@ To enable auto-completion for Zsh, run the following command to generate the com
```bash
noir --generate-completion zsh
+# #compdef noir
+# _arguments \
+# ....
```
Then, move the generated script to your Zsh completions directory, typically `~/.zsh/completion/`. If this directory does not exist, you may need to create it. Ensure the script is named `_noir` to follow Zsh's naming convention for completion scripts.
+```bash
+noir --generate-completion zsh > ~/.zsh/completion/_noir
+```
+
## Bash completion
For Bash, generate the completion script by running:
```bash
noir --generate-completion bash
+# _noir_completions() {
+# local cur prev opts
+# ....
+```
+
+After generating the script, move it to the appropriate directory for Bash completions. This location can vary depending on your operating system and Bash configuration, but a common path is `/etc/bash_completion.d/` for system-wide availability, or `~/.local/share/bash-completion/completions/` for a single user. Ensure the script is executable and sourced in your Bash profile.
+
+```bash
+noir --generate-completion bash > ~/.local/share/bash-completion/completions/noir
```
-After generating the script, move it to the appropriate directory for Bash completions. This location can vary depending on your operating system and Bash configuration, but a common path is `/etc/bash_completion.d/` for system-wide availability, or `~/.local/share/bash-completion/completions/` for a single user. Ensure the script is executable and sourced in your Bash profile.
\ No newline at end of file
+## Fish completion
+
+For Fish, generate the completion script by running:
+
+```bash
+noir --generate-completion fish
+# function __fish_noir_needs_command
+# ....
+```
+
+After generating the script, move it to the Fish completions directory, typically `~/.config/fish/completions/`. If this directory does not exist, you may need to create it. Ensure the script is named `noir.fish` to follow Fish's naming convention for completion scripts.
+
+```bash
+noir --generate-completion fish > ~/.config/fish/completions/noir.fish
+```
\ No newline at end of file
diff --git a/docs/_config.yml b/docs/_config.yml
index 80d9cd40..27e4e8ca 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -3,12 +3,15 @@ description: Attack surface detector that identifies endpoints by static analysi
theme: just-the-docs
logo: "/images/owasp-noir-black.png"
favicon_ico: "/images/favicon.ico"
-url: https://owasp-noir.github.io/noir/
+url: https://owasp-noir.github.io
aux_links:
Github: https://github.com/owasp-noir/noir
OWASP: https://owasp.org/www-project-noir/
+plugins:
+ - jekyll-sitemap
+
# Footer "Edit this page on GitHub" link text
gh_edit_link: true # show or hide edit this page link
gh_edit_link_text: "Edit this page on GitHub."
diff --git a/docs/_get_started/basic.md b/docs/_get_started/basic.md
index 01c0465c..a15cef10 100644
--- a/docs/_get_started/basic.md
+++ b/docs/_get_started/basic.md
@@ -24,62 +24,12 @@ noir -b
## Outputs
-The output will display endpoints (such as paths, methods, parameters, headers, etc.), and you can specify the output format using flags like `-f`.
+The output will display endpoints (such as paths, methods, parameters, headers, etc.), and you can specify the output format using flags `-f` or `--format`. If you're curious about the supported formats, please refer to [this](/get_started/output/) document.
![](../../images/get_started/basic.png)
## Usage
```
-USAGE: noir
-
-FLAGS:
- BASE:
- -b PATH, --base-path ./app (Required) Set base path
- -u URL, --url http://.. Set base url for endpoints
-
- OUTPUT:
- -f FORMAT, --format json Set output format
- * plain yaml json jsonl markdown-table
- * curl httpie oas2 oas3
- * only-url only-param only-header only-cookie only-tag
- -o PATH, --output out.txt Write result to file
- --set-pvalue VALUE Specifies the value of the identified parameter
- --include-path Include file path in the plain result
- --no-color Disable color output
- --no-log Displaying only the results
-
- TAGGER:
- -T, --use-all-taggers Activates all taggers for full analysis coverage
- --use-taggers VALUES Activates specific taggers (e.g., --use-taggers hunt,oauth)
- --list-taggers Lists all available taggers
-
- DELIVER:
- --send-req Send results to a web request
- --send-proxy http://proxy.. Send results to a web request via an HTTP proxy
- --send-es http://es.. Send results to Elasticsearch
- --with-headers X-Header:Value Add custom headers to be included in the delivery
- --use-matchers string Send URLs that match specific conditions to the Deliver
- --use-filters string Exclude URLs that match specified conditions and send the rest to Deliver
-
- DIFF:
- --diff-path ./app2 Specify the path to the old version of the source code for comparison
-
- TECHNOLOGIES:
- -t TECHS, --techs rails,php Specify the technologies to use
- --exclude-techs rails,php Specify the technologies to be excluded
- --list-techs Show all technologies
-
- CONFIG:
- --config-file ./config.yaml Specify the path to a configuration file in YAML format
- --concurrency 100 Set concurrency
- --generate-completion zsh Generate Zsh/Bash completion script
-
- DEBUG:
- -d, --debug Show debug messages
- -v, --version Show version
- --build-info Show version and Build info
-
- OTHERS:
- -h, --help Show help
+{% include usage.md %}
```
\ No newline at end of file
diff --git a/docs/_get_started/installation.md b/docs/_get_started/installation.md
index 48d86f78..f6a941e9 100644
--- a/docs/_get_started/installation.md
+++ b/docs/_get_started/installation.md
@@ -10,20 +10,28 @@ layout: page
You can install the tool using various package managers. Each package manager has its own dedicated page with detailed instructions. Please refer to the specific pages for comprehensive installation steps.
+> You can use Homebrew on macOS, and Snapcraft or Homebrew on Linux. On all operating systems, including Windows, you can use Docker or a directly built binary.
+
## Homebrew
```shell
brew install noir
```
+[Learn more](/noir/get_started/installation/homebrew/)
+
## Snapcraft
```shell
sudo snap install noir
```
+[Learn more](/noir/get_started/installation/snapcraft/)
+
## Docker (GHCR)
```bash
docker pull ghcr.io/owasp-noir/noir:latest
```
+
+[Learn more](/noir/get_started/installation/docker/)
diff --git a/docs/_get_started/installation/docker.md b/docs/_get_started/installation/docker.md
index a43df2a7..31d24dbf 100644
--- a/docs/_get_started/installation/docker.md
+++ b/docs/_get_started/installation/docker.md
@@ -23,4 +23,6 @@ or Replace `` with the specific version tag you need.
```dockerfile
FROM ghcr.io/owasp-noir/noir:
-```
\ No newline at end of file
+```
+
+If you want to see packages by Docker tag, visit [this page](https://github.com/owasp-noir/noir/pkgs/container/noir).
\ No newline at end of file
diff --git a/docs/_get_started/installation/snapcraft.md b/docs/_get_started/installation/snapcraft.md
index 0de937ff..31cf7461 100644
--- a/docs/_get_started/installation/snapcraft.md
+++ b/docs/_get_started/installation/snapcraft.md
@@ -11,12 +11,20 @@ Snapcraft is a powerful package manager for Linux that enables you to easily ins
Ensure you have Snap installed on your system. You can install Snap using your distribution's package manager. For example, on Ubuntu, you can run:
+## Install snapcraft
+### Ubuntu
```bash
sudo apt update
sudo apt install snapd
```
-Install noir with Snapcraft:
+### Other Linux
+
+Snapcraft provides installation for various operating systems. Please refer to the document below.
+
+[https://snapcraft.io/docs/installing-snapd](https://snapcraft.io/docs/installing-snapd)
+
+## Install noir with Snapcraft
```shell
sudo snap install noir
diff --git a/docs/_get_started/output.md b/docs/_get_started/output.md
index 25311563..e02ce2e6 100644
--- a/docs/_get_started/output.md
+++ b/docs/_get_started/output.md
@@ -17,3 +17,18 @@ noir -b -f
```
+| Format | Description |
+|-----------------|-----------------------------------------------------------------------------|
+| plain | Outputs the results in plain text format. |
+| yaml | Outputs the results in YAML format. |
+| json | Outputs the results in JSON format. |
+| jsonl | Outputs the results in JSON Lines format, where each line is a JSON object. |
+| markdown-table | Outputs the results in a Markdown table format. |
+| curl | Outputs the results as curl commands. |
+| httpie | Outputs the results as httpie commands. |
+| oas2 | Outputs the results in OpenAPI Specification v2 format. |
+| oas3 | Outputs the results in OpenAPI Specification v3 format. |
+| only-url | Outputs only the URLs found in the analysis. |
+| only-param | Outputs only the parameters found in the analysis. |
+| only-header | Outputs only the headers found in the analysis. |
+| only-cookie | Outputs only the cookies found in the analysis. |
\ No newline at end of file
diff --git a/docs/_get_started/output/curl.md b/docs/_get_started/output/curl.md
index 1e6a16f3..4a38e111 100644
--- a/docs/_get_started/output/curl.md
+++ b/docs/_get_started/output/curl.md
@@ -1,11 +1,13 @@
---
-title: CURL
+title: Curl and HTTPie
parent: Output Formatting
has_children: false
nav_order: 3
layout: page
---
+## Curl
+
```bash
noir -b . -f curl -u https://www.hahwul.com
@@ -15,4 +17,17 @@ noir -b . -f curl -u https://www.hahwul.com
# curl -i -X GET https://www.hahwul.com/socket
# curl -i -X GET https://www.hahwul.com/1.html
# curl -i -X GET https://www.hahwul.com/2.html
+```
+
+## HTTPie
+
+```bash
+noir -b . -f httpie -u https://www.hahwul.com
+
+# http GET https://www.hahwul.com/ "x-api-key: "
+# http POST https://www.hahwul.com/query "query=" "Cookie: my_auth="
+# http GET https://www.hahwul.com/token "client_id=&redirect_url=&grant_type="
+# http GET https://www.hahwul.com/socket
+# http GET https://www.hahwul.com/1.html
+# http GET https://www.hahwul.com/2.html
```
\ No newline at end of file
diff --git a/docs/_get_started/output/json.md b/docs/_get_started/output/json.md
index 003a81e4..c7257884 100644
--- a/docs/_get_started/output/json.md
+++ b/docs/_get_started/output/json.md
@@ -1,48 +1,68 @@
---
-title: JSON
+title: JSON & JSONL
parent: Output Formatting
has_children: false
nav_order: 1
layout: page
---
+## JSON
```bash
noir -b . -f json --no-log
```
```json
{
- "url": "https://testapp.internal.domains/query",
- "method": "POST",
- "params": [
- {
- "name": "my_auth",
- "value": "",
- "param_type": "cookie",
- "tags": []
- },
- {
- "name": "query",
- "value": "",
- "param_type": "form",
- "tags": [
+ "endpoints": [
+ {
+ "url": "https://testapp.internal.domains/query",
+ "method": "POST",
+ "params": [
+ {
+ "name": "my_auth",
+ "value": "",
+ "param_type": "cookie",
+ "tags": []
+ },
+ {
+ "name": "query",
+ "value": "",
+ "param_type": "form",
+ "tags": [
+ {
+ "name": "sqli",
+ "description": "This parameter may be vulnerable to SQL Injection attacks.",
+ "tagger": "Hunt"
+ }
+ ]
+ }
+ ],
+ "details": {
+ "code_paths": [
{
- "name": "sqli",
- "description": "This parameter may be vulnerable to SQL Injection attacks.",
- "tagger": "Hunt"
+ "path": "spec/functional_test/fixtures/crystal_kemal/src/testapp.cr",
+ "line": 8
}
]
- }
- ],
- "details": {
- "code_paths": [
- {
- "path": "spec/functional_test/fixtures/crystal_kemal/src/testapp.cr",
- "line": 8
- }
- ]
- },
- "protocol": "http",
- "tags": []
+ },
+ "protocol": "http",
+ "tags": []
+ }
+ ]
}
+```
+
+## JSONL
+
+```bash
+noir -b . -f jsonl --no-log
+```
+
+```json
+{"url":"/","method":"GET","params":[{"name":"x-api-key","value":"","param_type":"header","tags":[]}],"details":{"code_paths":[{"path":"./spec/functional_test/fixtures/crystal_kemal/src/testapp.cr","line":3}]},"protocol":"http","tags":[]}
+{"url":"/query","method":"POST","params":[{"name":"my_auth","value":"","param_type":"cookie","tags":[]},{"name":"query","value":"","param_type":"form","tags":[]}],"details":{"code_paths":[{"path":"./spec/functional_test/fixtures/crystal_kemal/src/testapp.cr","line":8}]},"protocol":"http","tags":[]}
+{"url":"/token","method":"GET","params":[{"name":"client_id","value":"","param_type":"form","tags":[]},{"name":"redirect_url","value":"","param_type":"form","tags":[]},{"name":"grant_type","value":"","param_type":"form","tags":[]}],"details":{"code_paths":[{"path":"./spec/functional_test/fixtures/crystal_kemal/src/testapp.cr","line":13}]},"protocol":"http","tags":[]}
+{"url":"/socket","method":"GET","params":[],"details":{"code_paths":[{"path":"./spec/functional_test/fixtures/crystal_kemal/src/testapp.cr","line":19}]},"protocol":"ws","tags":[]}
+{"url":"/1.html","method":"GET","params":[],"details":{"code_paths":[]},"protocol":"http","tags":[]}
+{"url":"/2.html","method":"GET","params":[],"details":{"code_paths":[]},"protocol":"http","tags":[]}
```
\ No newline at end of file
diff --git a/docs/_get_started/output/more.md b/docs/_get_started/output/more.md
index 2d300321..8a2ae878 100644
--- a/docs/_get_started/output/more.md
+++ b/docs/_get_started/output/more.md
@@ -6,17 +6,15 @@ nav_order: 4
layout: page
---
-## httpie
+## Open API Spec
```bash
-noir -b . -f httpie -u https://www.hahwul.com
+# noir -b . -f oas3
+# noir -b . -f oas2
+```
-# http GET https://www.hahwul.com/ "x-api-key: "
-# http POST https://www.hahwul.com/query "query=" "Cookie: my_auth="
-# http GET https://www.hahwul.com/token "client_id=&redirect_url=&grant_type="
-# http GET https://www.hahwul.com/socket
-# http GET https://www.hahwul.com/1.html
-# http GET https://www.hahwul.com/2.html
+```json
+{"openapi":"3.0.0","info":{"title":"Generated by Noir","version":""},"paths":{"/":{"get":{"responses":{"200":{"description":"Successful response"}},"parameters":[{"name":"x-api-key","in":"header"}]}},"/query":{"post":{"responses":{"200":{"description":"Successful response"}},"parameters":[{"name":"my_auth","in":"query"},{"name":"query","in":"formData"}]}},"/token":{"get":{"responses":{"200":{"description":"Successful response"}},"parameters":[{"name":"client_id","in":"formData"},{"name":"redirect_url","in":"formData"},{"name":"grant_type","in":"formData"}]}},"/socket":{"get":{"responses":{"200":{"description":"Successful response"}},"parameters":[]}},"/1.html":{"get":{"responses":{"200":{"description":"Successful response"}},"parameters":[]}},"/2.html":{"get":{"responses":{"200":{"description":"Successful response"}},"parameters":[]}}}}
```
## Only-x
diff --git a/docs/_get_started/output/yaml.md b/docs/_get_started/output/yaml.md
index 5af22788..0b9ee3e2 100644
--- a/docs/_get_started/output/yaml.md
+++ b/docs/_get_started/output/yaml.md
@@ -11,6 +11,7 @@ noir -b . -f yaml --no-log
```
```yaml
+endpoints:
- url: /
method: GET
params:
diff --git a/docs/_get_started/supported/language_and_frameworks.md b/docs/_get_started/supported/language_and_frameworks.md
index 35067ee7..5ed6b8c1 100644
--- a/docs/_get_started/supported/language_and_frameworks.md
+++ b/docs/_get_started/supported/language_and_frameworks.md
@@ -158,6 +158,12 @@ layout: page
|-----|--------|-------|--------|--------|----|
| ✅ | ✅ | ❌ | ❌ | ❌ | ❌ |
+### Actix Web
+
+| URL | Method | Param | Header | Cookie | WS |
+|-----|--------|-------|--------|--------|----|
+| ✅ | ✅ | ❌ | ❌ | ❌ | ❌ |
+
## Elixir
### Phoenix
diff --git a/docs/_includes/usage.md b/docs/_includes/usage.md
new file mode 100644
index 00000000..86025b83
--- /dev/null
+++ b/docs/_includes/usage.md
@@ -0,0 +1,75 @@
+USAGE: noir
+
+FLAGS:
+ BASE:
+ -b PATH, --base-path ./app (Required) Set base path
+ -u URL, --url http://.. Set base url for endpoints
+
+ OUTPUT:
+ -f FORMAT, --format json Set output format
+ * plain yaml json jsonl markdown-table
+ * curl httpie oas2 oas3
+ * only-url only-param only-header only-cookie only-tag
+ -o PATH, --output out.txt Write result to file
+ --set-pvalue VALUE Specifies the value of the identified parameter for all types
+ --set-pvalue-header VALUE Specifies the value of the identified parameter for headers
+ --set-pvalue-cookie VALUE Specifies the value of the identified parameter for cookies
+ --set-pvalue-query VALUE Specifies the value of the identified parameter for query parameters
+ --set-pvalue-form VALUE Specifies the value of the identified parameter for form data
+ --set-pvalue-json VALUE Specifies the value of the identified parameter for JSON data
+ --set-pvalue-path VALUE Specifies the value of the identified parameter for path parameters
+ --status-codes Display HTTP status codes for discovered endpoints
+ --exclude-codes 404,500 Exclude specific HTTP response codes (comma-separated)
+ --include-path Include file path in the plain result
+ --no-color Disable color output
+ --no-log Displaying only the results
+
+ PASSIVE SCAN:
+ -P, --passive-scan Perform a passive scan for security issues using rules from the specified path
+ --passive-scan-path PATH Specify the path for the rules used in the passive security scan
+
+ TAGGER:
+ -T, --use-all-taggers Activates all taggers for full analysis coverage
+ --use-taggers VALUES Activates specific taggers (e.g., --use-taggers hunt,oauth)
+ --list-taggers Lists all available taggers
+
+ DELIVER:
+ --send-req Send results to a web request
+ --send-proxy http://proxy.. Send results to a web request via an HTTP proxy
+ --send-es http://es.. Send results to Elasticsearch
+ --with-headers X-Header:Value Add custom headers to be included in the delivery
+ --use-matchers string Send URLs that match specific conditions to the Deliver
+ --use-filters string Exclude URLs that match specified conditions and send the rest to Deliver
+
+ DIFF:
+ --diff-path ./app2 Specify the path to the old version of the source code for comparison
+
+ TECHNOLOGIES:
+ -t TECHS, --techs rails,php Specify the technologies to use
+ --exclude-techs rails,php Specify the technologies to be excluded
+ --list-techs Show all technologies
+
+ CONFIG:
+ --config-file ./config.yaml Specify the path to a configuration file in YAML format
+ --concurrency 100 Set concurrency
+ --generate-completion zsh Generate Zsh/Bash/Fish completion script
+
+ DEBUG:
+ -d, --debug Show debug messages
+ -v, --version Show version
+ --build-info Show version and Build info
+
+ OTHERS:
+ -h, --help Show help
+
+EXAMPLES:
+ Basic run of noir:
+ $ noir -b .
+ Running noir targeting a specific URL and forwarding results through a proxy:
+ $ noir -b . -u http://example.com
+ $ noir -b . -u http://example.com --send-proxy http://localhost:8090
+ Running noir for detailed analysis:
+ $ noir -b . -T --include-path
+ Running noir with output limited to JSON or YAML format, without logs:
+ $ noir -b . -f json --no-log
+ $ noir -b . -f yaml --no-log
diff --git a/docs/images/advanced/passive_private_key.png b/docs/images/advanced/passive_private_key.png
new file mode 100644
index 00000000..26795d8b
Binary files /dev/null and b/docs/images/advanced/passive_private_key.png differ
diff --git a/docs/images/get_started/basic.png b/docs/images/get_started/basic.png
index 2767987c..64208ab8 100644
Binary files a/docs/images/get_started/basic.png and b/docs/images/get_started/basic.png differ
diff --git a/docs/images/noir-usage.jpg b/docs/images/noir-usage.jpg
index 118405bd..4cd69bcf 100644
Binary files a/docs/images/noir-usage.jpg and b/docs/images/noir-usage.jpg differ
diff --git a/docs/index.md b/docs/index.md
index e278b64b..bc12a903 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -4,10 +4,89 @@ layout: home
nav_order: 1
---
-## Noir documentation
+# Secure Every Endpoint.
+{: .fs-9 }
OWASP Noir is an open-source project, specializing in identifying attack surfaces for enhanced whitebox security testing and security pipeline.
+{: .fs-6 .fw-300 }
[Get started now](./get_started/installation/){: .btn .btn-blue }
-[View it on Github](https://github.com/owasp-noir/noir){: .btn }
-[View it on OWASP Project](https://owasp.org/www-project-noir){: .btn }
+[Github](https://github.com/owasp-noir/noir){: .btn }
+[OWASP](https://owasp.org/www-project-noir){: .btn }
+
+---
+
+## What is noir?
+
+Noir is an open-source project dedicated to identifying attack surfaces, enhancing whitebox security testing, and optimizing security pipelines. It excels at discovering API endpoints, web endpoints, and other potential entry points within source code for comprehensive security analysis.
+
+![](images/noir-usage.jpg)
+
+This tool was developed using [Crystal](https://crystal-lang.org). Initially started as hahwul's personal project[^1] in August 2023, it became an OWASP project[^2] in June 2024, co-led by [hahwul](https://github.com/hahwul) and [ksg](https://github.com/ksg97031).
+
+[^1]: [Hello Noir 👋🏼](https://www.hahwul.com/2023/08/03/hello-noir/)
+[^2]: [Welcome to OWASP Noir](https://github.com/orgs/owasp-noir/discussions/336)
+
+## How it works?
+
+Noir is composed of several key components: detector, analyzer, deliver, minilexer/miniparser[^3], output-builder, and passive-scan & tagger[^4]. These components interact and work together to effectively analyze source code. Through this process, they help identify endpoints, parameters, headers, and more within the source code.
+
+[^3]: The minilexer and miniparser are a tokenizer and parser used during code analysis to identify various elements within the source code.
+[^4]: The tagger assigns relevant tags to the identified issues for easier categorization and management.
+
+```mermaid
+flowchart LR
+ SourceCode:::highlight --> Detectors
+
+ subgraph Detectors
+ direction LR
+ Detector1 & Detector2 & Detector3 --> |Condition| PassiveScan
+ end
+
+ PassiveScan --> |Results| OutputBuilder
+
+ Detectors --> |Techs| Analyzers
+
+ subgraph Analyzers
+ direction LR
+ Analyzer1 & Analyzer2 & Analyzer3
+ Analyzer2 --> |Condition| Minilexer
+ Analyzer3 --> |Condition| Miniparser
+ end
+
+ Analyzers --> |Condition| Deliver
+ Analyzers --> |Condition| Tagger
+ Deliver --> 3rdParty
+ Tagger --> |Tags| OutputBuilder
+ Analyzers --> |Endpoints| OutputBuilder
+ OutputBuilder --> Report:::highlight
+
+ classDef highlight fill:#f9f,stroke:#333,stroke-width:4px;
+```
+
+## About the project
+### License
+OWASP Noir is distributed by an [MIT license](https://github.com/owasp-noir/noir/blob/main/LICENSE).
+
+### Contributing
+
+Open-source projects thrive on the strength of the community. From small contributions to major ones, we want to express our gratitude to all contributors. If you're interested in contributing, please check out this document.
+
+We believe every contribution counts and appreciate the time and effort you put into making this project better. Whether you're fixing a typo, adding a new feature, or improving documentation, your help is invaluable. Thank you for being part of our community!
+
+To get started, simply follow the guidelines in the [Contribute Guide](https://github.com/owasp-noir/noir/blob/main/CONTRIBUTING.md). It's full of helpful tips and instructions to make your first contribution smooth and enjoyable.
+
+Happy contributing!
+
+#### Thank you to the contributors of Noir ♥️
+
+![](https://raw.githubusercontent.com/owasp-noir/noir/refs/heads/main/CONTRIBUTORS.svg)
+
+### Code of Conduct
+OWASP Noir is committed to fostering a welcoming community.
+
+View our [Code of Conduct](https://github.com/owasp-noir/noir/blob/main/CODE_OF_CONDUCT.md) on our GitHub repository.
+
+## Help and feedback
+
+We always welcome feedback. Please share your thoughts, suggestions, or report any issues via the GitHub [discussions](https://github.com/orgs/owasp-noir/discussions) or [issues](https://github.com/owasp-noir/noir/issues) page.
\ No newline at end of file
diff --git a/docs/introdution.md b/docs/introdution.md
deleted file mode 100644
index ecf29fa2..00000000
--- a/docs/introdution.md
+++ /dev/null
@@ -1,52 +0,0 @@
----
-title: Introduction
-layout: page
-permalink: /introduction/
-nav_order: 1
----
-
-## What is noir?
-Noir is an open-source project specializing in identifying attack surfaces for enhanced whitebox security testing and security pipeline. This includes the capability to discover API endpoints, web endpoints, and other potential entry points within source code for thorough security analysis.
-
-![](../images/noir-usage.jpg)
-
-## How it works?
-
-Noir is composed of several key components: detector, analyzer, deliver, minilexer, output-builder, and tagger. These components interact and work together to effectively analyze source code. Through this process, they help identify endpoints, parameters, headers, and more within the source code.
-
-```mermaid
-flowchart LR
- SourceCode --> Detectors
-
- subgraph Detectors
- direction LR
- Detector1 & Detector2 & Detector3
- end
-
- Detectors --> Analyzers
-
- subgraph Analyzers
- direction LR
- Analyzer1 & Analyzer2 & Analyzer3
- Analyzer2 --> |Condition| Minilexer
- Analyzer3 --> |Condition| Miniparser
- end
-
- Analyzers --> |Condition| Deliver
- Analyzers --> |Condition| Tagger
- Deliver --> OutputBuilder
- Tagger --> OutputBuilder
- Analyzers --> OutputBuilder
- OutputBuilder --> Endpoints
-
-```
-
-## How to contribute?
-
-Open-source projects thrive on the strength of the community. From small contributions to major ones, we want to express our gratitude to all contributors. If you're interested in contributing, please check out this document.
-
-We believe every contribution counts and appreciate the time and effort you put into making this project better. Whether you're fixing a typo, adding a new feature, or improving documentation, your help is invaluable. Thank you for being part of our community!
-
-To get started, simply follow the guidelines in the [Contribute Guide](https://github.com/owasp-noir/noir/blob/main/CONTRIBUTING.md). It's full of helpful tips and instructions to make your first contribution smooth and enjoyable.
-
-Happy contributing!
\ No newline at end of file
diff --git a/shard.yml b/shard.yml
index 0ad394ab..ae13032e 100644
--- a/shard.yml
+++ b/shard.yml
@@ -1,6 +1,6 @@
# Project Metadata
name: noir
-version: 0.17.0
+version: 0.18.0
authors:
- hahwul
- ksg97031
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
index 58219f0a..419ed1cf 100644
--- a/snap/snapcraft.yaml
+++ b/snap/snapcraft.yaml
@@ -1,6 +1,6 @@
name: noir
base: core20
-version: 0.17.0
+version: 0.18.0
summary: Attack surface detector that identifies endpoints by static analysis.
description: |
Noir is an open-source project specializing in identifying attack surfaces for enhanced whitebox security testing and security pipeline.
diff --git a/spec/functional_test/fixtures/crystal_kemal/custom_public/2.html b/spec/functional_test/fixtures/crystal/kemal/custom_public/2.html
similarity index 100%
rename from spec/functional_test/fixtures/crystal_kemal/custom_public/2.html
rename to spec/functional_test/fixtures/crystal/kemal/custom_public/2.html
diff --git a/spec/functional_test/fixtures/crystal_kemal/public/1.html b/spec/functional_test/fixtures/crystal/kemal/public/1.html
similarity index 100%
rename from spec/functional_test/fixtures/crystal_kemal/public/1.html
rename to spec/functional_test/fixtures/crystal/kemal/public/1.html
diff --git a/spec/functional_test/fixtures/crystal_kemal/shard.yml b/spec/functional_test/fixtures/crystal/kemal/shard.yml
similarity index 100%
rename from spec/functional_test/fixtures/crystal_kemal/shard.yml
rename to spec/functional_test/fixtures/crystal/kemal/shard.yml
diff --git a/spec/functional_test/fixtures/crystal_kemal/src/testapp.cr b/spec/functional_test/fixtures/crystal/kemal/src/testapp.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_kemal/src/testapp.cr
rename to spec/functional_test/fixtures/crystal/kemal/src/testapp.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/config/authentic.cr b/spec/functional_test/fixtures/crystal/lucky/config/authentic.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/config/authentic.cr
rename to spec/functional_test/fixtures/crystal/lucky/config/authentic.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/config/cookies.cr b/spec/functional_test/fixtures/crystal/lucky/config/cookies.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/config/cookies.cr
rename to spec/functional_test/fixtures/crystal/lucky/config/cookies.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/public/secret.html b/spec/functional_test/fixtures/crystal/lucky/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/public/secret.html
rename to spec/functional_test/fixtures/crystal/lucky/public/secret.html
diff --git a/spec/functional_test/fixtures/crystal_lucky/shard.yml b/spec/functional_test/fixtures/crystal/lucky/shard.yml
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/shard.yml
rename to spec/functional_test/fixtures/crystal/lucky/shard.yml
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/api/me/show.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/api/me/show.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/api/me/show.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/api/me/show.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/api/sign_ins/create.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/api/sign_ins/create.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/api/sign_ins/create.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/api/sign_ins/create.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/api/sign_ups/create.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/api/sign_ups/create.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/api/sign_ups/create.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/api/sign_ups/create.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/api_action.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/api_action.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/api_action.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/api_action.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/errors/show.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/errors/show.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/errors/show.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/errors/show.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/home/index.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/home/index.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/home/index.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/home/index.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/.keep b/spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/.keep
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/.keep
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/.keep
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/helpers.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/helpers.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/helpers.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/helpers.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/require_auth_token.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/require_auth_token.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/require_auth_token.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/require_auth_token.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/skip_require_auth_token.cr b/spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/skip_require_auth_token.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/src/actions/mixins/api/auth/skip_require_auth_token.cr
rename to spec/functional_test/fixtures/crystal/lucky/src/actions/mixins/api/auth/skip_require_auth_token.cr
diff --git a/spec/functional_test/fixtures/crystal_lucky/tasks.cr b/spec/functional_test/fixtures/crystal/lucky/tasks.cr
similarity index 100%
rename from spec/functional_test/fixtures/crystal_lucky/tasks.cr
rename to spec/functional_test/fixtures/crystal/lucky/tasks.cr
diff --git a/spec/functional_test/fixtures/aspnet_mvc/App_Start/RouteConfig.cs b/spec/functional_test/fixtures/csharp/aspnet_mvc/App_Start/RouteConfig.cs
similarity index 100%
rename from spec/functional_test/fixtures/aspnet_mvc/App_Start/RouteConfig.cs
rename to spec/functional_test/fixtures/csharp/aspnet_mvc/App_Start/RouteConfig.cs
diff --git a/spec/functional_test/fixtures/aspnet_mvc/packages.config b/spec/functional_test/fixtures/csharp/aspnet_mvc/packages.config
similarity index 100%
rename from spec/functional_test/fixtures/aspnet_mvc/packages.config
rename to spec/functional_test/fixtures/csharp/aspnet_mvc/packages.config
diff --git a/spec/functional_test/fixtures/elixir_phoenix/config/config.exs b/spec/functional_test/fixtures/elixir/phoenix/config/config.exs
similarity index 100%
rename from spec/functional_test/fixtures/elixir_phoenix/config/config.exs
rename to spec/functional_test/fixtures/elixir/phoenix/config/config.exs
diff --git a/spec/functional_test/fixtures/elixir_phoenix/lib/elixir_phoenix_web/endpoint.ex b/spec/functional_test/fixtures/elixir/phoenix/lib/elixir_phoenix_web/endpoint.ex
similarity index 100%
rename from spec/functional_test/fixtures/elixir_phoenix/lib/elixir_phoenix_web/endpoint.ex
rename to spec/functional_test/fixtures/elixir/phoenix/lib/elixir_phoenix_web/endpoint.ex
diff --git a/spec/functional_test/fixtures/elixir_phoenix/lib/elixir_phoenix_web/router.ex b/spec/functional_test/fixtures/elixir/phoenix/lib/elixir_phoenix_web/router.ex
similarity index 100%
rename from spec/functional_test/fixtures/elixir_phoenix/lib/elixir_phoenix_web/router.ex
rename to spec/functional_test/fixtures/elixir/phoenix/lib/elixir_phoenix_web/router.ex
diff --git a/spec/functional_test/fixtures/elixir_phoenix/mix.exs b/spec/functional_test/fixtures/elixir/phoenix/mix.exs
similarity index 100%
rename from spec/functional_test/fixtures/elixir_phoenix/mix.exs
rename to spec/functional_test/fixtures/elixir/phoenix/mix.exs
diff --git a/spec/functional_test/fixtures/file_based/base64.txt b/spec/functional_test/fixtures/etc/file_based/base64.txt
similarity index 100%
rename from spec/functional_test/fixtures/file_based/base64.txt
rename to spec/functional_test/fixtures/etc/file_based/base64.txt
diff --git a/spec/functional_test/fixtures/file_based/urls.json b/spec/functional_test/fixtures/etc/file_based/urls.json
similarity index 100%
rename from spec/functional_test/fixtures/file_based/urls.json
rename to spec/functional_test/fixtures/etc/file_based/urls.json
diff --git a/spec/functional_test/fixtures/file_based/urls.txt b/spec/functional_test/fixtures/etc/file_based/urls.txt
similarity index 100%
rename from spec/functional_test/fixtures/file_based/urls.txt
rename to spec/functional_test/fixtures/etc/file_based/urls.txt
diff --git a/spec/functional_test/fixtures/multi_techs/doc.yml b/spec/functional_test/fixtures/etc/multi_techs/doc.yml
similarity index 100%
rename from spec/functional_test/fixtures/multi_techs/doc.yml
rename to spec/functional_test/fixtures/etc/multi_techs/doc.yml
diff --git a/spec/functional_test/fixtures/multi_techs/kemal/shard.yml b/spec/functional_test/fixtures/etc/multi_techs/kemal/shard.yml
similarity index 100%
rename from spec/functional_test/fixtures/multi_techs/kemal/shard.yml
rename to spec/functional_test/fixtures/etc/multi_techs/kemal/shard.yml
diff --git a/spec/functional_test/fixtures/multi_techs/kemal/src/testapp.cr b/spec/functional_test/fixtures/etc/multi_techs/kemal/src/testapp.cr
similarity index 100%
rename from spec/functional_test/fixtures/multi_techs/kemal/src/testapp.cr
rename to spec/functional_test/fixtures/etc/multi_techs/kemal/src/testapp.cr
diff --git a/spec/functional_test/fixtures/multi_techs/sinatra/Gemfile b/spec/functional_test/fixtures/etc/multi_techs/sinatra/Gemfile
similarity index 100%
rename from spec/functional_test/fixtures/multi_techs/sinatra/Gemfile
rename to spec/functional_test/fixtures/etc/multi_techs/sinatra/Gemfile
diff --git a/spec/functional_test/fixtures/multi_techs/sinatra/app.rb b/spec/functional_test/fixtures/etc/multi_techs/sinatra/app.rb
similarity index 100%
rename from spec/functional_test/fixtures/multi_techs/sinatra/app.rb
rename to spec/functional_test/fixtures/etc/multi_techs/sinatra/app.rb
diff --git a/spec/functional_test/fixtures/etc/passive_scan/private_key.pem b/spec/functional_test/fixtures/etc/passive_scan/private_key.pem
new file mode 100644
index 00000000..871650a0
--- /dev/null
+++ b/spec/functional_test/fixtures/etc/passive_scan/private_key.pem
@@ -0,0 +1,16 @@
+-----BEGIN PRIVATE KEY-----
+MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAM6OKnP4enU6h29u
+hzkgwA8i8yU7AVBrD62H16tsyZ1ugi13owZJROhyUMxn7JCl3cIqjXv0vRbWZPYV
+TFkQjYkIAp7dItN0TCIps+rKCGfZzAsj/fxwltMIrnIvcc6V8RFxATqGaCSPwbpA
+Gr1SgdIJ6DmD8wRm4KjNkZf5SIOVAgMBAAECgYEAgf/p6ill8SgQG2rmAmeBWUV2
+4GS9k/591Iqdi0+hqMrndXRJj2hHzUA9aBi0Q+mQo2aiu3QyGhB/ZH3NGnUXjhgV
+UXC1lwsBw+MHueHwOablckZJpP4ODVce8FqL0L1PqkwpiUSLnhAx0hPux+MxLoIG
+lJvvMdULfWKn1RJScQECQQD0aRffq3aroIf83PLaCIr+PlE80R9XCx/WEMo2p9sa
+pCiGxPVnd6NELBItLFn/LLQSfVLDaQp+uT5V28OkolTBAkEA2FmMHdOqzvVt6vQr
+shhU0pJ6tCoYBSNvMR/iamEgpt+z1O30HOq3gQxXi8XauGlL2ZUTrx5WjmGH72uN
+M/y/1QJBAO+74cHv8n/X/2tE4iSBOqy1PYP079O2Qqz7XQ2NSfnwyRugN8IcdJWa
+0N1kYoZMN/jm2nZ4IuY4F2+qF1X2MEECQEPpeA40zz0/1U0XcE/rS1T+Hf5H6FpU
+kSa+d3glHPbE6dJBD4u0Mf4UPbbmWI3in6ihHxitUUZIXz9MZ3qWMDECQQCEUTFm
+vFfshD59digt+GmbK0z/rn/EBsSKbvo7S6WDD4bfAFjMonSC9XiBb9tDKMB1UzMe
+D8yFRkrlRHzX7Gd6
+-----END PRIVATE KEY-----
diff --git a/spec/functional_test/fixtures/go_beego/go.mod b/spec/functional_test/fixtures/go/beego/go.mod
similarity index 100%
rename from spec/functional_test/fixtures/go_beego/go.mod
rename to spec/functional_test/fixtures/go/beego/go.mod
diff --git a/spec/functional_test/fixtures/go_beego/server.go b/spec/functional_test/fixtures/go/beego/server.go
similarity index 100%
rename from spec/functional_test/fixtures/go_beego/server.go
rename to spec/functional_test/fixtures/go/beego/server.go
diff --git a/spec/functional_test/fixtures/go_echo/go.mod b/spec/functional_test/fixtures/go/echo/go.mod
similarity index 100%
rename from spec/functional_test/fixtures/go_echo/go.mod
rename to spec/functional_test/fixtures/go/echo/go.mod
diff --git a/spec/functional_test/fixtures/go_echo/public/secret.html b/spec/functional_test/fixtures/go/echo/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/go_echo/public/secret.html
rename to spec/functional_test/fixtures/go/echo/public/secret.html
diff --git a/spec/functional_test/fixtures/go_echo/public2/mob.txt b/spec/functional_test/fixtures/go/echo/public2/mob.txt
similarity index 100%
rename from spec/functional_test/fixtures/go_echo/public2/mob.txt
rename to spec/functional_test/fixtures/go/echo/public2/mob.txt
diff --git a/spec/functional_test/fixtures/go_echo/public3/coffee.txt b/spec/functional_test/fixtures/go/echo/public3/coffee.txt
similarity index 100%
rename from spec/functional_test/fixtures/go_echo/public3/coffee.txt
rename to spec/functional_test/fixtures/go/echo/public3/coffee.txt
diff --git a/spec/functional_test/fixtures/go_echo/server.go b/spec/functional_test/fixtures/go/echo/server.go
similarity index 100%
rename from spec/functional_test/fixtures/go_echo/server.go
rename to spec/functional_test/fixtures/go/echo/server.go
diff --git a/spec/functional_test/fixtures/go_fiber/go.mod b/spec/functional_test/fixtures/go/fiber/go.mod
similarity index 100%
rename from spec/functional_test/fixtures/go_fiber/go.mod
rename to spec/functional_test/fixtures/go/fiber/go.mod
diff --git a/spec/functional_test/fixtures/go_fiber/public/secret.html b/spec/functional_test/fixtures/go/fiber/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/go_fiber/public/secret.html
rename to spec/functional_test/fixtures/go/fiber/public/secret.html
diff --git a/spec/functional_test/fixtures/go_fiber/server.go b/spec/functional_test/fixtures/go/fiber/server.go
similarity index 100%
rename from spec/functional_test/fixtures/go_fiber/server.go
rename to spec/functional_test/fixtures/go/fiber/server.go
diff --git a/spec/functional_test/fixtures/go_gin/go.mod b/spec/functional_test/fixtures/go/gin/go.mod
similarity index 100%
rename from spec/functional_test/fixtures/go_gin/go.mod
rename to spec/functional_test/fixtures/go/gin/go.mod
diff --git a/spec/functional_test/fixtures/go_gin/public/secret.html b/spec/functional_test/fixtures/go/gin/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/go_gin/public/secret.html
rename to spec/functional_test/fixtures/go/gin/public/secret.html
diff --git a/spec/functional_test/fixtures/go_gin/server.go b/spec/functional_test/fixtures/go/gin/server.go
similarity index 100%
rename from spec/functional_test/fixtures/go_gin/server.go
rename to spec/functional_test/fixtures/go/gin/server.go
diff --git a/spec/functional_test/fixtures/java_armeria/pom.xml b/spec/functional_test/fixtures/java/armeria/pom.xml
similarity index 100%
rename from spec/functional_test/fixtures/java_armeria/pom.xml
rename to spec/functional_test/fixtures/java/armeria/pom.xml
diff --git a/spec/functional_test/fixtures/java_armeria/src/ApplicationStartUp.java b/spec/functional_test/fixtures/java/armeria/src/ApplicationStartUp.java
similarity index 100%
rename from spec/functional_test/fixtures/java_armeria/src/ApplicationStartUp.java
rename to spec/functional_test/fixtures/java/armeria/src/ApplicationStartUp.java
diff --git a/spec/functional_test/fixtures/jsp/el.jsp b/spec/functional_test/fixtures/java/jsp/el.jsp
similarity index 100%
rename from spec/functional_test/fixtures/jsp/el.jsp
rename to spec/functional_test/fixtures/java/jsp/el.jsp
diff --git a/spec/functional_test/fixtures/jsp/get_param.jsp b/spec/functional_test/fixtures/java/jsp/get_param.jsp
similarity index 100%
rename from spec/functional_test/fixtures/jsp/get_param.jsp
rename to spec/functional_test/fixtures/java/jsp/get_param.jsp
diff --git a/spec/functional_test/fixtures/java_spring/.gitignore b/spec/functional_test/fixtures/java/spring/.gitignore
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/.gitignore
rename to spec/functional_test/fixtures/java/spring/.gitignore
diff --git a/spec/functional_test/fixtures/java_spring/build.gradle b/spec/functional_test/fixtures/java/spring/build.gradle
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/build.gradle
rename to spec/functional_test/fixtures/java/spring/build.gradle
diff --git a/spec/functional_test/fixtures/java_spring/src/HttpServletRequest.java b/spec/functional_test/fixtures/java/spring/src/HttpServletRequest.java
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/src/HttpServletRequest.java
rename to spec/functional_test/fixtures/java/spring/src/HttpServletRequest.java
diff --git a/spec/functional_test/fixtures/java_spring/src/ItemController.java b/spec/functional_test/fixtures/java/spring/src/ItemController.java
similarity index 75%
rename from spec/functional_test/fixtures/java_spring/src/ItemController.java
rename to spec/functional_test/fixtures/java/spring/src/ItemController.java
index 0ae7c25b..2e9b027c 100644
--- a/spec/functional_test/fixtures/java_spring/src/ItemController.java
+++ b/spec/functional_test/fixtures/java/spring/src/ItemController.java
@@ -27,9 +27,21 @@ public void deleteItem(@PathVariable Long id) {
public void getItemJson(){
}
+ @RequestMapping("/requestmap/put", method = RequestMethod.PUT)
+ public void requestGet(){
+ }
+
+ @RequestMapping("/requestmap/delete",method={RequestMethod.DELETE})
+ public void requestDelete(){
+ }
+
@RequestMapping("/multiple/methods", method = {RequestMethod.GET, RequestMethod.POST})
public void multipleMethods(){
}
+
+ @RequestMapping("/multiple/methods2", method = [RequestMethod.GET, RequestMethod.POST])
+ public void multipleMethods2(){
+ }
}
class Item {
diff --git a/spec/functional_test/fixtures/java_spring/src/MyRoutingConfiguration.java b/spec/functional_test/fixtures/java/spring/src/MyRoutingConfiguration.java
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/src/MyRoutingConfiguration.java
rename to spec/functional_test/fixtures/java/spring/src/MyRoutingConfiguration.java
diff --git a/spec/functional_test/fixtures/java_spring/src/QuoteRouter.java b/spec/functional_test/fixtures/java/spring/src/QuoteRouter.java
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/src/QuoteRouter.java
rename to spec/functional_test/fixtures/java/spring/src/QuoteRouter.java
diff --git a/spec/functional_test/fixtures/java_spring/src/RequestParam.java b/spec/functional_test/fixtures/java/spring/src/RequestParam.java
similarity index 100%
rename from spec/functional_test/fixtures/java_spring/src/RequestParam.java
rename to spec/functional_test/fixtures/java/spring/src/RequestParam.java
diff --git a/spec/functional_test/fixtures/js_express/app.js b/spec/functional_test/fixtures/javascript/express/app.js
similarity index 100%
rename from spec/functional_test/fixtures/js_express/app.js
rename to spec/functional_test/fixtures/javascript/express/app.js
diff --git a/spec/functional_test/fixtures/js_restify/app.js b/spec/functional_test/fixtures/javascript/restify/app.js
similarity index 100%
rename from spec/functional_test/fixtures/js_restify/app.js
rename to spec/functional_test/fixtures/javascript/restify/app.js
diff --git a/spec/functional_test/fixtures/kotlin_spring/.gitignore b/spec/functional_test/fixtures/kotlin/spring/.gitignore
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/.gitignore
rename to spec/functional_test/fixtures/kotlin/spring/.gitignore
diff --git a/spec/functional_test/fixtures/kotlin_spring/build.gradle.kts b/spec/functional_test/fixtures/kotlin/spring/build.gradle.kts
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/build.gradle.kts
rename to spec/functional_test/fixtures/kotlin/spring/build.gradle.kts
diff --git a/spec/functional_test/fixtures/kotlin_spring/settings.gradle.kts b/spec/functional_test/fixtures/kotlin/spring/settings.gradle.kts
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/settings.gradle.kts
rename to spec/functional_test/fixtures/kotlin/spring/settings.gradle.kts
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogApplication.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogApplication.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogApplication.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogApplication.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogConfiguration.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogConfiguration.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogConfiguration.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogConfiguration.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogProperties.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogProperties.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/BlogProperties.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/BlogProperties.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Entities.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Entities.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Entities.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Entities.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Extensions.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Extensions.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Extensions.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Extensions.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/HtmlController.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/HtmlController.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/HtmlController.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/HtmlController.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/HttpControllers.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/HttpControllers.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/HttpControllers.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/HttpControllers.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Repositories.kt b/spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Repositories.kt
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/kotlin/com/example/blog/Repositories.kt
rename to spec/functional_test/fixtures/kotlin/spring/src/main/kotlin/com/example/blog/Repositories.kt
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/application.properties b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/application.properties
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/application.properties
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/application.properties
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/article.mustache b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/article.mustache
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/article.mustache
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/article.mustache
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/blog.mustache b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/blog.mustache
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/blog.mustache
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/blog.mustache
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/footer.mustache b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/footer.mustache
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/footer.mustache
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/footer.mustache
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/header.mustache b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/header.mustache
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/header.mustache
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/header.mustache
diff --git a/spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/response.mustache b/spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/response.mustache
similarity index 100%
rename from spec/functional_test/fixtures/kotlin_spring/src/main/resources/templates/response.mustache
rename to spec/functional_test/fixtures/kotlin/spring/src/main/resources/templates/response.mustache
diff --git a/spec/functional_test/fixtures/php_pure/get.php b/spec/functional_test/fixtures/php/php/get.php
similarity index 100%
rename from spec/functional_test/fixtures/php_pure/get.php
rename to spec/functional_test/fixtures/php/php/get.php
diff --git a/spec/functional_test/fixtures/php_pure/header.php b/spec/functional_test/fixtures/php/php/header.php
similarity index 100%
rename from spec/functional_test/fixtures/php_pure/header.php
rename to spec/functional_test/fixtures/php/php/header.php
diff --git a/spec/functional_test/fixtures/php_pure/post.php b/spec/functional_test/fixtures/php/php/post.php
similarity index 100%
rename from spec/functional_test/fixtures/php_pure/post.php
rename to spec/functional_test/fixtures/php/php/post.php
diff --git a/spec/functional_test/fixtures/php_pure/request.php b/spec/functional_test/fixtures/php/php/request.php
similarity index 100%
rename from spec/functional_test/fixtures/php_pure/request.php
rename to spec/functional_test/fixtures/php/php/request.php
diff --git a/spec/functional_test/fixtures/python_django/README.md b/spec/functional_test/fixtures/python/django/README.md
similarity index 100%
rename from spec/functional_test/fixtures/python_django/README.md
rename to spec/functional_test/fixtures/python/django/README.md
diff --git a/spec/functional_test/fixtures/python_django/blog/__init__.py b/spec/functional_test/fixtures/python/django/blog/__init__.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/blog/__init__.py
rename to spec/functional_test/fixtures/python/django/blog/__init__.py
diff --git a/spec/functional_test/fixtures/python_django/blog/urls.py b/spec/functional_test/fixtures/python/django/blog/urls.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/blog/urls.py
rename to spec/functional_test/fixtures/python/django/blog/urls.py
diff --git a/spec/functional_test/fixtures/python_django/blog/views.py b/spec/functional_test/fixtures/python/django/blog/views.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/blog/views.py
rename to spec/functional_test/fixtures/python/django/blog/views.py
diff --git a/spec/functional_test/fixtures/python_django/djangoblog/__init__.py b/spec/functional_test/fixtures/python/django/djangoblog/__init__.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/djangoblog/__init__.py
rename to spec/functional_test/fixtures/python/django/djangoblog/__init__.py
diff --git a/spec/functional_test/fixtures/python_django/djangoblog/settings.py b/spec/functional_test/fixtures/python/django/djangoblog/settings.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/djangoblog/settings.py
rename to spec/functional_test/fixtures/python/django/djangoblog/settings.py
diff --git a/spec/functional_test/fixtures/python_django/djangoblog/urls.py b/spec/functional_test/fixtures/python/django/djangoblog/urls.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/djangoblog/urls.py
rename to spec/functional_test/fixtures/python/django/djangoblog/urls.py
diff --git a/spec/functional_test/fixtures/python_django/manage.py b/spec/functional_test/fixtures/python/django/manage.py
similarity index 100%
rename from spec/functional_test/fixtures/python_django/manage.py
rename to spec/functional_test/fixtures/python/django/manage.py
diff --git a/spec/functional_test/fixtures/python_fastapi/__init__.py b/spec/functional_test/fixtures/python/fastapi/__init__.py
similarity index 100%
rename from spec/functional_test/fixtures/python_fastapi/__init__.py
rename to spec/functional_test/fixtures/python/fastapi/__init__.py
diff --git a/spec/functional_test/fixtures/python_fastapi/api.py b/spec/functional_test/fixtures/python/fastapi/api.py
similarity index 96%
rename from spec/functional_test/fixtures/python_fastapi/api.py
rename to spec/functional_test/fixtures/python/fastapi/api.py
index ac9d04dd..36e36abe 100644
--- a/spec/functional_test/fixtures/python_fastapi/api.py
+++ b/spec/functional_test/fixtures/python/fastapi/api.py
@@ -4,7 +4,7 @@
from fastapi.responses import JSONResponse
from fastapi import FastAPI, Path, Query, status, Body, Header, Cookie, Depends, Request, Response, APIRouter
-api = APIRouter()
+api : APIRouter = APIRouter()
@api.get("/query/param-required/int")
def get_query_param_required_type(query: int = Query()):
@@ -56,4 +56,4 @@ def cookie_examples(
@api.post("/dummypath")
async def get_body(request: Request):
jj = request.json()
- return await jj["dummy"]
\ No newline at end of file
+ return await jj["dummy"]
diff --git a/spec/functional_test/fixtures/python_fastapi/main.py b/spec/functional_test/fixtures/python/fastapi/main.py
similarity index 78%
rename from spec/functional_test/fixtures/python_fastapi/main.py
rename to spec/functional_test/fixtures/python/fastapi/main.py
index bf2b6b59..edd22e0b 100644
--- a/spec/functional_test/fixtures/python_fastapi/main.py
+++ b/spec/functional_test/fixtures/python/fastapi/main.py
@@ -4,9 +4,9 @@
from fastapi import FastAPI, Path, Query
from api import api
-app = FastAPI()
+app : FastAPI = FastAPI()
app.include_router(api, prefix="/api")
@app.get("/main")
def main():
- return "Hello World"
\ No newline at end of file
+ return "Hello World"
diff --git a/spec/functional_test/fixtures/python_fastapi/requirements.txt b/spec/functional_test/fixtures/python/fastapi/requirements.txt
similarity index 100%
rename from spec/functional_test/fixtures/python_fastapi/requirements.txt
rename to spec/functional_test/fixtures/python/fastapi/requirements.txt
diff --git a/spec/functional_test/fixtures/python_flask/__init__.py b/spec/functional_test/fixtures/python/flask/__init__.py
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/__init__.py
rename to spec/functional_test/fixtures/python/flask/__init__.py
diff --git a/spec/functional_test/fixtures/python_flask/app.py b/spec/functional_test/fixtures/python/flask/app.py
similarity index 99%
rename from spec/functional_test/fixtures/python_flask/app.py
rename to spec/functional_test/fixtures/python/flask/app.py
index f6c90f90..f7aac9e7 100644
--- a/spec/functional_test/fixtures/python_flask/app.py
+++ b/spec/functional_test/fixtures/python/flask/app.py
@@ -6,7 +6,7 @@
from models import User
from utils import get_hash
-app = Flask(__name__)
+app:Flask = Flask(__name__)
app.secret_key = "dd2e7b987b357908fac0118ecdf0d3d2cae7b5a635f802d6" # random generate
@app.teardown_appcontext
diff --git a/spec/functional_test/fixtures/python_flask/requirements.txt b/spec/functional_test/fixtures/python/flask/requirements.txt
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/requirements.txt
rename to spec/functional_test/fixtures/python/flask/requirements.txt
diff --git a/spec/functional_test/fixtures/python_flask/static/css/example.css b/spec/functional_test/fixtures/python/flask/static/css/example.css
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/static/css/example.css
rename to spec/functional_test/fixtures/python/flask/static/css/example.css
diff --git a/spec/functional_test/fixtures/python_flask/static/js/example.js b/spec/functional_test/fixtures/python/flask/static/js/example.js
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/static/js/example.js
rename to spec/functional_test/fixtures/python/flask/static/js/example.js
diff --git a/spec/functional_test/fixtures/python_flask/templates/__init__.py b/spec/functional_test/fixtures/python/flask/templates/__init__.py
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/templates/__init__.py
rename to spec/functional_test/fixtures/python/flask/templates/__init__.py
diff --git a/spec/functional_test/fixtures/python_flask/templates/error.html b/spec/functional_test/fixtures/python/flask/templates/error.html
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/templates/error.html
rename to spec/functional_test/fixtures/python/flask/templates/error.html
diff --git a/spec/functional_test/fixtures/python_flask/templates/index.html b/spec/functional_test/fixtures/python/flask/templates/index.html
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/templates/index.html
rename to spec/functional_test/fixtures/python/flask/templates/index.html
diff --git a/spec/functional_test/fixtures/python_flask/templates/login.html b/spec/functional_test/fixtures/python/flask/templates/login.html
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/templates/login.html
rename to spec/functional_test/fixtures/python/flask/templates/login.html
diff --git a/spec/functional_test/fixtures/python_flask/templates/sign.html b/spec/functional_test/fixtures/python/flask/templates/sign.html
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/templates/sign.html
rename to spec/functional_test/fixtures/python/flask/templates/sign.html
diff --git a/spec/functional_test/fixtures/python_flask/utils.py b/spec/functional_test/fixtures/python/flask/utils.py
similarity index 100%
rename from spec/functional_test/fixtures/python_flask/utils.py
rename to spec/functional_test/fixtures/python/flask/utils.py
diff --git a/spec/functional_test/fixtures/ruby_hanami/Gemfile b/spec/functional_test/fixtures/ruby/hanami/Gemfile
similarity index 100%
rename from spec/functional_test/fixtures/ruby_hanami/Gemfile
rename to spec/functional_test/fixtures/ruby/hanami/Gemfile
diff --git a/spec/functional_test/fixtures/ruby_hanami/app/action.rb b/spec/functional_test/fixtures/ruby/hanami/app/action.rb
similarity index 100%
rename from spec/functional_test/fixtures/ruby_hanami/app/action.rb
rename to spec/functional_test/fixtures/ruby/hanami/app/action.rb
diff --git a/spec/functional_test/fixtures/ruby_hanami/config/routes.rb b/spec/functional_test/fixtures/ruby/hanami/config/routes.rb
similarity index 100%
rename from spec/functional_test/fixtures/ruby_hanami/config/routes.rb
rename to spec/functional_test/fixtures/ruby/hanami/config/routes.rb
diff --git a/spec/functional_test/fixtures/ruby_hanami/public/secret.html b/spec/functional_test/fixtures/ruby/hanami/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/ruby_hanami/public/secret.html
rename to spec/functional_test/fixtures/ruby/hanami/public/secret.html
diff --git a/spec/functional_test/fixtures/ruby_rails/Gemfile b/spec/functional_test/fixtures/ruby/rails/Gemfile
similarity index 100%
rename from spec/functional_test/fixtures/ruby_rails/Gemfile
rename to spec/functional_test/fixtures/ruby/rails/Gemfile
diff --git a/spec/functional_test/fixtures/ruby_rails/app/controllers/posts_controller.rb b/spec/functional_test/fixtures/ruby/rails/app/controllers/posts_controller.rb
similarity index 100%
rename from spec/functional_test/fixtures/ruby_rails/app/controllers/posts_controller.rb
rename to spec/functional_test/fixtures/ruby/rails/app/controllers/posts_controller.rb
diff --git a/spec/functional_test/fixtures/ruby_rails/config/routes.rb b/spec/functional_test/fixtures/ruby/rails/config/routes.rb
similarity index 100%
rename from spec/functional_test/fixtures/ruby_rails/config/routes.rb
rename to spec/functional_test/fixtures/ruby/rails/config/routes.rb
diff --git a/spec/functional_test/fixtures/ruby_rails/public/secret.html b/spec/functional_test/fixtures/ruby/rails/public/secret.html
similarity index 100%
rename from spec/functional_test/fixtures/ruby_rails/public/secret.html
rename to spec/functional_test/fixtures/ruby/rails/public/secret.html
diff --git a/spec/functional_test/fixtures/ruby_sinatra/Gemfile b/spec/functional_test/fixtures/ruby/sinatra/Gemfile
similarity index 100%
rename from spec/functional_test/fixtures/ruby_sinatra/Gemfile
rename to spec/functional_test/fixtures/ruby/sinatra/Gemfile
diff --git a/spec/functional_test/fixtures/ruby_sinatra/app.rb b/spec/functional_test/fixtures/ruby/sinatra/app.rb
similarity index 100%
rename from spec/functional_test/fixtures/ruby_sinatra/app.rb
rename to spec/functional_test/fixtures/ruby/sinatra/app.rb
diff --git a/spec/functional_test/fixtures/rust/actix_web/Cargo.toml b/spec/functional_test/fixtures/rust/actix_web/Cargo.toml
new file mode 100644
index 00000000..2201edd8
--- /dev/null
+++ b/spec/functional_test/fixtures/rust/actix_web/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "example-hello-world"
+
+[dependencies]
+actix-web = "4.9"
+actix-web-actors = "4.1"
+actix-web-lab = "0.22"
+actix-ws = "0.3"
\ No newline at end of file
diff --git a/spec/functional_test/fixtures/rust/actix_web/src/main.rs b/spec/functional_test/fixtures/rust/actix_web/src/main.rs
new file mode 100644
index 00000000..3152b62a
--- /dev/null
+++ b/spec/functional_test/fixtures/rust/actix_web/src/main.rs
@@ -0,0 +1,15 @@
+use actix_web::{get, post, web, App, HttpResponse, HttpServer, Responder};
+
+#[get("/")]
+async fn hello() -> impl Responder {
+ HttpResponse::Ok().body("Hello world!")
+}
+
+#[post("/echo")]
+async fn echo(req_body: String) -> impl Responder {
+ HttpResponse::Ok().body(req_body)
+}
+
+async fn manual_hello() -> impl Responder {
+ HttpResponse::Ok().body("Hey there!")
+}
\ No newline at end of file
diff --git a/spec/functional_test/fixtures/rust_axum/Cargo.toml b/spec/functional_test/fixtures/rust/axum/Cargo.toml
similarity index 100%
rename from spec/functional_test/fixtures/rust_axum/Cargo.toml
rename to spec/functional_test/fixtures/rust/axum/Cargo.toml
diff --git a/spec/functional_test/fixtures/rust_axum/src/main.rs b/spec/functional_test/fixtures/rust/axum/src/main.rs
similarity index 100%
rename from spec/functional_test/fixtures/rust_axum/src/main.rs
rename to spec/functional_test/fixtures/rust/axum/src/main.rs
diff --git a/spec/functional_test/fixtures/rust_rocket/Cargo.toml b/spec/functional_test/fixtures/rust/rocket/Cargo.toml
similarity index 100%
rename from spec/functional_test/fixtures/rust_rocket/Cargo.toml
rename to spec/functional_test/fixtures/rust/rocket/Cargo.toml
diff --git a/spec/functional_test/fixtures/rust_rocket/src/main.rs b/spec/functional_test/fixtures/rust/rocket/src/main.rs
similarity index 100%
rename from spec/functional_test/fixtures/rust_rocket/src/main.rs
rename to spec/functional_test/fixtures/rust/rocket/src/main.rs
diff --git a/spec/functional_test/fixtures/har/example.har b/spec/functional_test/fixtures/specification/har/example.har
similarity index 100%
rename from spec/functional_test/fixtures/har/example.har
rename to spec/functional_test/fixtures/specification/har/example.har
diff --git a/spec/functional_test/fixtures/oas2/doc.yml b/spec/functional_test/fixtures/specification/oas2/doc.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas2/doc.yml
rename to spec/functional_test/fixtures/specification/oas2/doc.yml
diff --git a/spec/functional_test/fixtures/oas3/common/doc.yml b/spec/functional_test/fixtures/specification/oas3/common/doc.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/common/doc.yml
rename to spec/functional_test/fixtures/specification/oas3/common/doc.yml
diff --git a/spec/functional_test/fixtures/oas3/multiple_docs/first.yml b/spec/functional_test/fixtures/specification/oas3/multiple_docs/first.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/multiple_docs/first.yml
rename to spec/functional_test/fixtures/specification/oas3/multiple_docs/first.yml
diff --git a/spec/functional_test/fixtures/oas3/multiple_docs/second.yml b/spec/functional_test/fixtures/specification/oas3/multiple_docs/second.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/multiple_docs/second.yml
rename to spec/functional_test/fixtures/specification/oas3/multiple_docs/second.yml
diff --git a/spec/functional_test/fixtures/oas3/nil_cast/doc_nil_cast.yml b/spec/functional_test/fixtures/specification/oas3/nil_cast/doc_nil_cast.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/nil_cast/doc_nil_cast.yml
rename to spec/functional_test/fixtures/specification/oas3/nil_cast/doc_nil_cast.yml
diff --git a/spec/functional_test/fixtures/oas3/nil_cast/doc_nil_cast_no_paths.yml b/spec/functional_test/fixtures/specification/oas3/nil_cast/doc_nil_cast_no_paths.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/nil_cast/doc_nil_cast_no_paths.yml
rename to spec/functional_test/fixtures/specification/oas3/nil_cast/doc_nil_cast_no_paths.yml
diff --git a/spec/functional_test/fixtures/oas3/no_servers/doc_no_servers.yml b/spec/functional_test/fixtures/specification/oas3/no_servers/doc_no_servers.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/no_servers/doc_no_servers.yml
rename to spec/functional_test/fixtures/specification/oas3/no_servers/doc_no_servers.yml
diff --git a/spec/functional_test/fixtures/oas3/param_in_path/doc_param_in_json.json b/spec/functional_test/fixtures/specification/oas3/param_in_path/doc_param_in_json.json
similarity index 100%
rename from spec/functional_test/fixtures/oas3/param_in_path/doc_param_in_json.json
rename to spec/functional_test/fixtures/specification/oas3/param_in_path/doc_param_in_json.json
diff --git a/spec/functional_test/fixtures/oas3/param_in_path/doc_param_in_path.yml b/spec/functional_test/fixtures/specification/oas3/param_in_path/doc_param_in_path.yml
similarity index 100%
rename from spec/functional_test/fixtures/oas3/param_in_path/doc_param_in_path.yml
rename to spec/functional_test/fixtures/specification/oas3/param_in_path/doc_param_in_path.yml
diff --git a/spec/functional_test/fixtures/raml/docs.yaml b/spec/functional_test/fixtures/specification/raml/docs.yaml
similarity index 100%
rename from spec/functional_test/fixtures/raml/docs.yaml
rename to spec/functional_test/fixtures/specification/raml/docs.yaml
diff --git a/spec/functional_test/func_spec.cr b/spec/functional_test/func_spec.cr
index e1eb8183..acadd6af 100644
--- a/spec/functional_test/func_spec.cr
+++ b/spec/functional_test/func_spec.cr
@@ -17,8 +17,8 @@ class FunctionalTester
def initialize(@path, expected_count, expected_endpoints)
config_init = ConfigInitializer.new
noir_options = config_init.default_options
- noir_options["base"] = "./spec/functional_test/#{@path}"
- noir_options["nolog"] = "yes"
+ noir_options["base"] = YAML::Any.new("./spec/functional_test/#{@path}")
+ noir_options["nolog"] = YAML::Any.new(true)
if !expected_count.nil?
@expected_count = expected_count
@@ -95,17 +95,17 @@ class FunctionalTester
if endpoint.params.size > 0
describe "check - params" do
endpoint.params.each do |param|
- found_param = found_endpoint.params.find { |p| p.name == param.name }
- if found_param.nil?
+ found_params = found_endpoint.params.select { |found_p| found_p.name == param.name }
+ if found_params.size == 0
it "params nil" do
false.should eq true
end
else
it "check '#{param.name}' name " do
- param.name.should eq found_param.name
+ param.name.should eq found_params[0].name
end
- it "check '#{param.name}' param_type " do
- param.param_type.should eq found_param.param_type
+ it "check '#{param.name}' param_type '#{param.param_type}'" do
+ (found_params.any? { |found_p| found_p.param_type == param.param_type }).should be_true
end
end
end
@@ -130,7 +130,7 @@ class FunctionalTester
@app
end
- def set_url(url)
- @app.options["url"] = url
+ def url=(url)
+ @app.options["url"] = YAML::Any.new(url)
end
end
diff --git a/spec/functional_test/testers/crystal_kemal_spec.cr b/spec/functional_test/testers/crystal/kemal_spec.cr
similarity index 81%
rename from spec/functional_test/testers/crystal_kemal_spec.cr
rename to spec/functional_test/testers/crystal/kemal_spec.cr
index 7da85e4c..38d79965 100644
--- a/spec/functional_test/testers/crystal_kemal_spec.cr
+++ b/spec/functional_test/testers/crystal/kemal_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [Param.new("x-api-key", "", "header")]),
@@ -16,7 +16,7 @@ extected_endpoints = [
Endpoint.new("/2.html", "GET"),
]
-FunctionalTester.new("fixtures/crystal_kemal/", {
+FunctionalTester.new("fixtures/crystal/kemal/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/crystal_lucky_spec.cr b/spec/functional_test/testers/crystal/lucky_spec.cr
similarity index 82%
rename from spec/functional_test/testers/crystal_lucky_spec.cr
rename to spec/functional_test/testers/crystal/lucky_spec.cr
index 6a00f7eb..a2a85e7c 100644
--- a/spec/functional_test/testers/crystal_lucky_spec.cr
+++ b/spec/functional_test/testers/crystal/lucky_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET"),
@@ -17,7 +17,7 @@ extected_endpoints = [
]),
]
-FunctionalTester.new("fixtures/crystal_lucky/", {
+FunctionalTester.new("fixtures/crystal/lucky/", {
:techs => 1,
- :endpoints => 5,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/cs_aspnet_mvc_spec.cr b/spec/functional_test/testers/cs_aspnet_mvc_spec.cr
deleted file mode 100644
index c36f3f07..00000000
--- a/spec/functional_test/testers/cs_aspnet_mvc_spec.cr
+++ /dev/null
@@ -1,11 +0,0 @@
-require "../func_spec.cr"
-
-extected_endpoints = [
- Endpoint.new("/Open/Callback/{appId}", "GET"),
- Endpoint.new("/data/default", "GET"),
-]
-
-FunctionalTester.new("fixtures/aspnet_mvc/", {
- :techs => 1,
- :endpoints => 2,
-}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/csharp/aspnet_mvc_spec.cr b/spec/functional_test/testers/csharp/aspnet_mvc_spec.cr
new file mode 100644
index 00000000..b58fbdd0
--- /dev/null
+++ b/spec/functional_test/testers/csharp/aspnet_mvc_spec.cr
@@ -0,0 +1,13 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ Endpoint.new("/Open/Callback/{appId}", "GET", [
+ Param.new("appId", "", "path"),
+ ]),
+ Endpoint.new("/data/default", "GET"),
+]
+
+FunctionalTester.new("fixtures/csharp/aspnet_mvc/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/elixir_phoenix_spec.cr b/spec/functional_test/testers/elixir/phoenix_spec.cr
similarity index 75%
rename from spec/functional_test/testers/elixir_phoenix_spec.cr
rename to spec/functional_test/testers/elixir/phoenix_spec.cr
index 34138804..c284037b 100644
--- a/spec/functional_test/testers/elixir_phoenix_spec.cr
+++ b/spec/functional_test/testers/elixir/phoenix_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/page", "GET"),
@@ -11,7 +11,7 @@ extected_endpoints = [
Endpoint.new("/phoenix/live_reload/socket", "GET"),
]
-FunctionalTester.new("fixtures/elixir_phoenix/", {
+FunctionalTester.new("fixtures/elixir/phoenix/", {
:techs => 1,
- :endpoints => 8,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/file_based_spec.cr b/spec/functional_test/testers/etc/file_based_spec.cr
similarity index 71%
rename from spec/functional_test/testers/file_based_spec.cr
rename to spec/functional_test/testers/etc/file_based_spec.cr
index d2fc72d8..b9f4111a 100644
--- a/spec/functional_test/testers/file_based_spec.cr
+++ b/spec/functional_test/testers/etc/file_based_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("https://www.hahwul.com/", "GET"),
@@ -10,10 +10,10 @@ extected_endpoints = [
Endpoint.new("https://www.hahwul.com/tag/zap/", "GET"),
]
-tester = FunctionalTester.new("fixtures/file_based/", {
+tester = FunctionalTester.new("fixtures/etc/file_based/", {
:techs => 0,
- :endpoints => 7,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints)
-tester.app.options["url"] = "https://www.hahwul.com"
+tester.app.options["url"] = YAML::Any.new("https://www.hahwul.com")
tester.test_all
diff --git a/spec/functional_test/testers/multi_techs_spec.cr b/spec/functional_test/testers/etc/multi_techs_spec.cr
similarity index 77%
rename from spec/functional_test/testers/multi_techs_spec.cr
rename to spec/functional_test/testers/etc/multi_techs_spec.cr
index 692d9763..7dcd3e46 100644
--- a/spec/functional_test/testers/multi_techs_spec.cr
+++ b/spec/functional_test/testers/etc/multi_techs_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [
@@ -15,14 +15,15 @@ extected_endpoints = [
Endpoint.new("/pets", "POST", [
Param.new("name", "", "json"),
]),
- Endpoint.new("/pets/{petId}", "GET"),
+ Endpoint.new("/pets/{petId}", "GET", [Param.new("petId", "", "path")]),
Endpoint.new("/pets/{petId}", "PUT", [
+ Param.new("petId", "", "path"),
Param.new("breed", "", "json"),
Param.new("name", "", "json"),
]),
]
-FunctionalTester.new("fixtures/multi_techs/", {
+FunctionalTester.new("fixtures/etc/multi_techs/", {
:techs => 3,
:endpoints => 8,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/go_beego_spec.cr b/spec/functional_test/testers/go/beego_spec.cr
similarity index 65%
rename from spec/functional_test/testers/go_beego_spec.cr
rename to spec/functional_test/testers/go/beego_spec.cr
index a22c20f6..4cba9c7f 100644
--- a/spec/functional_test/testers/go_beego_spec.cr
+++ b/spec/functional_test/testers/go/beego_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/alice", "POST", [
@@ -8,7 +8,7 @@ extected_endpoints = [
Endpoint.new("/", "GET"),
]
-FunctionalTester.new("fixtures/go_beego/", {
+FunctionalTester.new("fixtures/go/beego/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/go_echo_spec.cr b/spec/functional_test/testers/go/echo_spec.cr
similarity index 84%
rename from spec/functional_test/testers/go_echo_spec.cr
rename to spec/functional_test/testers/go/echo_spec.cr
index 681db7c3..d6d40206 100644
--- a/spec/functional_test/testers/go_echo_spec.cr
+++ b/spec/functional_test/testers/go/echo_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [
@@ -21,7 +21,7 @@ extected_endpoints = [
Endpoint.new("/admin/v1/migration", "GET"),
]
-FunctionalTester.new("fixtures/go_echo/", {
+FunctionalTester.new("fixtures/go/echo/", {
:techs => 1,
- :endpoints => 9,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/go_fiber_spec.cr b/spec/functional_test/testers/go/fiber_spec.cr
similarity index 81%
rename from spec/functional_test/testers/go_fiber_spec.cr
rename to spec/functional_test/testers/go/fiber_spec.cr
index 02a26720..4561e217 100644
--- a/spec/functional_test/testers/go_fiber_spec.cr
+++ b/spec/functional_test/testers/go/fiber_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/info", "GET", [
@@ -16,7 +16,7 @@ extected_endpoints = [
Endpoint.new("/admin/v1/migration", "GET"),
]
-FunctionalTester.new("fixtures/go_fiber/", {
+FunctionalTester.new("fixtures/go/fiber/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/go_gin_spec.cr b/spec/functional_test/testers/go/gin_spec.cr
similarity index 83%
rename from spec/functional_test/testers/go_gin_spec.cr
rename to spec/functional_test/testers/go/gin_spec.cr
index 7487a32f..035f7079 100644
--- a/spec/functional_test/testers/go_gin_spec.cr
+++ b/spec/functional_test/testers/go/gin_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/ping", "GET", [
@@ -18,7 +18,7 @@ extected_endpoints = [
Endpoint.new("/group/v1/migration", "GET"),
]
-FunctionalTester.new("fixtures/go_gin/", {
+FunctionalTester.new("fixtures/go/gin/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/java_armeria_spec.cr b/spec/functional_test/testers/java/armeria_spec.cr
similarity index 71%
rename from spec/functional_test/testers/java_armeria_spec.cr
rename to spec/functional_test/testers/java/armeria_spec.cr
index 4638f715..2709410c 100644
--- a/spec/functional_test/testers/java_armeria_spec.cr
+++ b/spec/functional_test/testers/java/armeria_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/graphql", "GET"),
@@ -9,7 +9,7 @@ extected_endpoints = [
Endpoint.new("/", "GET"),
]
-FunctionalTester.new("fixtures/java_armeria/", {
+FunctionalTester.new("fixtures/java/armeria/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/jsp_spec.cr b/spec/functional_test/testers/java/jsp_spec.cr
similarity index 70%
rename from spec/functional_test/testers/jsp_spec.cr
rename to spec/functional_test/testers/java/jsp_spec.cr
index 43b7d0ba..d0a0d9ce 100644
--- a/spec/functional_test/testers/jsp_spec.cr
+++ b/spec/functional_test/testers/java/jsp_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/get_param.jsp", "GET", [
@@ -8,7 +8,7 @@ extected_endpoints = [
Endpoint.new("/el.jsp", "GET", [Param.new("username", "", "query")]),
]
-FunctionalTester.new("fixtures/jsp/", {
+FunctionalTester.new("fixtures/java/jsp/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/java/spring_spec.cr b/spec/functional_test/testers/java/spring_spec.cr
new file mode 100644
index 00000000..83306a03
--- /dev/null
+++ b/spec/functional_test/testers/java/spring_spec.cr
@@ -0,0 +1,42 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ # MyRoutingConfiguration.java
+ Endpoint.new("/{user}", "GET", [Param.new("user", "", "path")]),
+ Endpoint.new("/{user}/customers", "GET", [Param.new("user", "", "path")]),
+ Endpoint.new("/{user}/0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_.~", "GET", [Param.new("user", "", "path")]),
+ Endpoint.new("/{user}", "DELETE", [Param.new("user", "", "path")]),
+ Endpoint.new("/{user}", "POST", [Param.new("user", "", "path")]),
+ Endpoint.new("/{user}", "PUT", [Param.new("user", "", "path")]),
+ # QuoteRouter.java
+ Endpoint.new("/hello", "GET"),
+ Endpoint.new("/echo", "POST"),
+ Endpoint.new("/quotes", "GET"),
+ Endpoint.new("/quotes/0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_.~", "GET"),
+ # ItemController.java
+ Endpoint.new("/items/{id}", "GET", [Param.new("id", "", "path")]),
+ Endpoint.new("/items/json/{id}", "GET", [Param.new("id", "", "path")]),
+ Endpoint.new("/items", "POST", [Param.new("id", "", "form"), Param.new("name", "", "form")]),
+ Endpoint.new("/items/update/{id}", "PUT", [Param.new("id", "", "path"), Param.new("id", "", "json"), Param.new("name", "", "json")]),
+ Endpoint.new("/items/delete/{id}", "DELETE", [Param.new("id", "", "path")]),
+ Endpoint.new("/items/requestmap/put", "PUT"),
+ Endpoint.new("/items/requestmap/delete", "DELETE"),
+ Endpoint.new("/items/multiple/methods", "GET"),
+ Endpoint.new("/items/multiple/methods", "POST"),
+ Endpoint.new("/items/multiple/methods2", "GET"),
+ Endpoint.new("/items/multiple/methods2", "POST"),
+ Endpoint.new("/greet", "GET", [
+ Param.new("name", "", "query"),
+ Param.new("header", "", "header"),
+ ]),
+ Endpoint.new("/greet2", "GET", [
+ Param.new("myname", "", "query"),
+ Param.new("b", "", "query"),
+ Param.new("name", "", "query"),
+ ]),
+]
+
+FunctionalTester.new("fixtures/java/spring/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/java_spring_spec.cr b/spec/functional_test/testers/java_spring_spec.cr
deleted file mode 100644
index 1e0c23e4..00000000
--- a/spec/functional_test/testers/java_spring_spec.cr
+++ /dev/null
@@ -1,38 +0,0 @@
-require "../func_spec.cr"
-
-extected_endpoints = [
- # MyRoutingConfiguration.java
- Endpoint.new("/{user}", "GET"),
- Endpoint.new("/{user}/customers", "GET"),
- Endpoint.new("/{user}/0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_.~", "GET"),
- Endpoint.new("/{user}", "DELETE"),
- Endpoint.new("/{user}", "POST"),
- Endpoint.new("/{user}", "PUT"),
- # QuoteRouter.java
- Endpoint.new("/hello", "GET"),
- Endpoint.new("/echo", "POST"),
- Endpoint.new("/quotes", "GET"),
- Endpoint.new("/quotes/0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_.~", "GET"),
- # ItemController.java
- Endpoint.new("/items/{id}", "GET"),
- Endpoint.new("/items/json/{id}", "GET"),
- Endpoint.new("/items", "POST", [Param.new("id", "", "form"), Param.new("name", "", "form")]),
- Endpoint.new("/items/update/{id}", "PUT", [Param.new("id", "", "json"), Param.new("name", "", "json")]),
- Endpoint.new("/items/delete/{id}", "DELETE"),
- Endpoint.new("/items/multiple/methods", "GET"),
- Endpoint.new("/items/multiple/methods", "POST"),
- Endpoint.new("/greet", "GET", [
- Param.new("name", "", "query"),
- Param.new("header", "", "header"),
- ]),
- Endpoint.new("/greet2", "GET", [
- Param.new("myname", "", "query"),
- Param.new("b", "", "query"),
- Param.new("name", "", "query"),
- ]),
-]
-
-FunctionalTester.new("fixtures/java_spring/", {
- :techs => 1,
- :endpoints => 19,
-}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/js_restify_spec.cr b/spec/functional_test/testers/javascript/express_spec.cr
similarity index 70%
rename from spec/functional_test/testers/js_restify_spec.cr
rename to spec/functional_test/testers/javascript/express_spec.cr
index 8af6a52d..3ec42637 100644
--- a/spec/functional_test/testers/js_restify_spec.cr
+++ b/spec/functional_test/testers/javascript/express_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [
@@ -11,7 +11,7 @@ extected_endpoints = [
]),
]
-FunctionalTester.new("fixtures/js_restify/", {
+FunctionalTester.new("fixtures/javascript/express/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/js_express_spec.cr b/spec/functional_test/testers/javascript/restify_spec.cr
similarity index 70%
rename from spec/functional_test/testers/js_express_spec.cr
rename to spec/functional_test/testers/javascript/restify_spec.cr
index 99f21225..bfdfb0ae 100644
--- a/spec/functional_test/testers/js_express_spec.cr
+++ b/spec/functional_test/testers/javascript/restify_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [
@@ -11,7 +11,7 @@ extected_endpoints = [
]),
]
-FunctionalTester.new("fixtures/js_express/", {
+FunctionalTester.new("fixtures/javascript/restify/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/kotlin_spring_spec.cr b/spec/functional_test/testers/kotlin/spring_spec.cr
similarity index 59%
rename from spec/functional_test/testers/kotlin_spring_spec.cr
rename to spec/functional_test/testers/kotlin/spring_spec.cr
index 0f05e975..28c69ba4 100644
--- a/spec/functional_test/testers/kotlin_spring_spec.cr
+++ b/spec/functional_test/testers/kotlin/spring_spec.cr
@@ -1,10 +1,10 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/api/article/", "GET"),
- Endpoint.new("/api/article/{slug}", "GET"),
+ Endpoint.new("/api/article/{slug}", "GET", [Param.new("slug", "", "path")]),
Endpoint.new("/api/user/", "GET"),
- Endpoint.new("/api/user/{login}", "GET", [Param.new("lorem", "ipsum", "cookie")]),
+ Endpoint.new("/api/user/{login}", "GET", [Param.new("login", "", "path"), Param.new("lorem", "ipsum", "cookie")]),
Endpoint.new("/v1", "GET", [Param.new("version", "1", "query")]),
Endpoint.new("/v2", "GET", [Param.new("version", "2", "query")]),
Endpoint.new("/version2", "GET", [Param.new("version", "2", "query")]),
@@ -14,28 +14,25 @@ extected_endpoints = [
Param.new("title", "", "json"),
Param.new("headline", "", "json"),
Param.new("content", "", "json"),
- Param.new("login", "", "json"),
- Param.new("firstname", "", "json"),
- Param.new("lastname", "", "json"),
- Param.new("description", "", "json"),
+ Param.new("author", "", "json"),
Param.new("id", "", "json"),
Param.new("slug", "", "json"),
Param.new("addedAt", "", "json"),
Param.new("deleted", "", "json"),
]),
Endpoint.new("/article2", "POST", [Param.new("title", "", "query"), Param.new("content", "", "query")]),
- Endpoint.new("/article/{slug}", "GET", [Param.new("preview", "false", "query")]),
- Endpoint.new("/article/{id}", "PUT", [Param.new("title", "", "json"), Param.new("content", "", "json")]),
- Endpoint.new("/article/{id}", "DELETE", [Param.new("soft", "", "form"), Param.new("X-Custom-Header", "soft-delete", "header")]),
- Endpoint.new("/article2/{id}", "DELETE"),
- Endpoint.new("/article/{id}", "PATCH", [Param.new("title", "", "json"), Param.new("content", "", "json")]),
+ Endpoint.new("/article/{slug}", "GET", [Param.new("slug", "", "path"), Param.new("preview", "false", "query")]),
+ Endpoint.new("/article/{id}", "PUT", [Param.new("id", "", "path"), Param.new("title", "", "json"), Param.new("content", "", "json")]),
+ Endpoint.new("/article/{id}", "DELETE", [Param.new("id", "", "path"), Param.new("soft", "", "form"), Param.new("X-Custom-Header", "soft-delete", "header")]),
+ Endpoint.new("/article2/{id}", "DELETE", [Param.new("id", "", "path")]),
+ Endpoint.new("/article/{id}", "PATCH", [Param.new("id", "", "path"), Param.new("title", "", "json"), Param.new("content", "", "json")]),
Endpoint.new("/request", "GET", [Param.new("type", "basic", "query"), Param.new("X-Custom-Header", "basic", "header")]),
Endpoint.new("/request", "POST", [Param.new("type", "basic", "query"), Param.new("X-Custom-Header", "basic", "header")]),
Endpoint.new("/request2", "GET", [Param.new("type", "advanced", "query"), Param.new("X-Custom-Header", "advanced", "header")]),
Endpoint.new("/request2", "POST", [Param.new("type", "advanced", "query"), Param.new("X-Custom-Header", "advanced", "header")]),
]
-FunctionalTester.new("fixtures/kotlin_spring/", {
+FunctionalTester.new("fixtures/kotlin/spring/", {
:techs => 1,
- :endpoints => 20,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/oas2_spec.cr b/spec/functional_test/testers/oas2_spec.cr
deleted file mode 100644
index 91899c2c..00000000
--- a/spec/functional_test/testers/oas2_spec.cr
+++ /dev/null
@@ -1,13 +0,0 @@
-require "../func_spec.cr"
-
-extected_endpoints = [
- Endpoint.new("/v1/pets", "GET"),
- Endpoint.new("/v1/pets", "POST", [Param.new("pet", "", "json")]),
- Endpoint.new("/v1/pets/{petId}", "GET"),
- Endpoint.new("/v1/pets/{petId}", "PUT", [Param.new("pet", "", "json")]),
-]
-
-FunctionalTester.new("fixtures/oas2/", {
- :techs => 1,
- :endpoints => 4,
-}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/php_pure_spec.cr b/spec/functional_test/testers/php/php_spec.cr
similarity index 83%
rename from spec/functional_test/testers/php_pure_spec.cr
rename to spec/functional_test/testers/php/php_spec.cr
index e1da60e8..230de1d4 100644
--- a/spec/functional_test/testers/php_pure_spec.cr
+++ b/spec/functional_test/testers/php/php_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/get.php", "GET", [Param.new("param1", "", "query")]),
@@ -15,7 +15,7 @@ extected_endpoints = [
Endpoint.new("/request.php", "POST", [Param.new("param1", "", "form")]),
]
-FunctionalTester.new("fixtures/php_pure/", {
+FunctionalTester.new("fixtures/php/php/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/python/django_spec.cr b/spec/functional_test/testers/python/django_spec.cr
new file mode 100644
index 00000000..355236ee
--- /dev/null
+++ b/spec/functional_test/testers/python/django_spec.cr
@@ -0,0 +1,52 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ Endpoint.new("/", "GET"),
+ Endpoint.new("/page//", "GET", [
+ Param.new("page", "", "path"),
+ ]),
+ Endpoint.new("/article////.html", "GET", [
+ Param.new("year", "", "path"),
+ Param.new("month", "", "path"),
+ Param.new("day", "", "path"),
+ Param.new("article_id", "", "path"),
+ Param.new("comment_page", "", "query"),
+ ]),
+ Endpoint.new("/category/.html", "GET", [
+ Param.new("category_name", "", "path"),
+ ]),
+ Endpoint.new("/category//.html", "GET", [
+ Param.new("category_name", "", "path"),
+ Param.new("page", "", "path"),
+ ]),
+ Endpoint.new("/author/.html", "GET", [
+ Param.new("author_name", "", "path"),
+ ]),
+ Endpoint.new("/author//.html", "GET", [
+ Param.new("author_name", "", "path"),
+ Param.new("page", "", "path"),
+ ]),
+ Endpoint.new("/tag/.html", "GET", [
+ Param.new("tag_name", "", "path"),
+ ]),
+ Endpoint.new("/tag//.html", "GET", [
+ Param.new("tag_name", "", "path"),
+ Param.new("page", "", "path"),
+ ]),
+ Endpoint.new("/archives.html", "GET"),
+ Endpoint.new("/links.html", "GET"),
+ Endpoint.new("/upload", "GET", [Param.new("sign", "", "query"), Param.new("sign", "", "query"), Param.new("X_FORWARDED_FOR", "", "header"), Param.new("X_REAL_IP", "", "header")]),
+ Endpoint.new("/upload", "POST", [Param.new("sign", "", "query"), Param.new("X_FORWARDED_FOR", "", "header"), Param.new("X_REAL_IP", "", "header")]),
+ Endpoint.new("/not_found", "GET", [Param.new("app_type", "", "cookie")]),
+ Endpoint.new("/test", "GET", [Param.new("test_param", "", "form")]),
+ Endpoint.new("/test", "POST", [Param.new("test_param", "", "form")]),
+ Endpoint.new("/test", "PUT", [Param.new("test_param", "", "form")]),
+ Endpoint.new("/test", "PATCH", [Param.new("test_param", "", "form")]),
+ Endpoint.new("/delete_test", "GET"),
+ Endpoint.new("/delete_test", "DELETE"),
+]
+
+FunctionalTester.new("fixtures/python/django/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/python_fastapi_spec.cr b/spec/functional_test/testers/python/fastapi_spec.cr
similarity index 62%
rename from spec/functional_test/testers/python_fastapi_spec.cr
rename to spec/functional_test/testers/python/fastapi_spec.cr
index c03a28dd..4e89a82c 100644
--- a/spec/functional_test/testers/python_fastapi_spec.cr
+++ b/spec/functional_test/testers/python/fastapi_spec.cr
@@ -1,15 +1,15 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/query/param-required/int", "GET", [Param.new("query", "", "query")]),
- Endpoint.new("/items/{item_id}", "PUT", [Param.new("name", "", "form"), Param.new("size", "", "form")]),
+ Endpoint.new("/items/{item_id}", "PUT", [Param.new("item_id", "", "path"), Param.new("name", "", "form"), Param.new("size", "", "form")]),
Endpoint.new("/hidden_header", "GET", [Param.new("hidden_header", "", "header")]),
Endpoint.new("/cookie_examples/", "GET", [Param.new("data", "", "cookie")]),
Endpoint.new("/dummypath", "POST", [Param.new("dummy", "", "json")]),
Endpoint.new("/main", "GET"),
]
-FunctionalTester.new("fixtures/python_fastapi/", {
+FunctionalTester.new("fixtures/python/fastapi/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/python_flask_spec.cr b/spec/functional_test/testers/python/flask_spec.cr
similarity index 74%
rename from spec/functional_test/testers/python_flask_spec.cr
rename to spec/functional_test/testers/python/flask_spec.cr
index 7aa9e2bb..3ee214de 100644
--- a/spec/functional_test/testers/python_flask_spec.cr
+++ b/spec/functional_test/testers/python/flask_spec.cr
@@ -1,17 +1,17 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/sign", "GET"),
Endpoint.new("/sign", "POST", [Param.new("username", "", "form"), Param.new("password", "", "form")]),
Endpoint.new("/cookie", "GET", [Param.new("test", "", "cookie")]),
Endpoint.new("/login", "POST", [Param.new("username", "", "form"), Param.new("password", "", "form")]),
- Endpoint.new("/create_record", "PUT"),
+ Endpoint.new("/create_record", "PUT", [Param.new("name", "", "form")]),
Endpoint.new("/delete_record", "DELETE", [Param.new("name", "", "json")]),
Endpoint.new("/get_ip", "GET", [Param.new("X-Forwarded-For", "", "header")]),
Endpoint.new("/", "GET"),
]
-FunctionalTester.new("fixtures/python_flask/", {
+FunctionalTester.new("fixtures/python/flask/", {
:techs => 1,
- :endpoints => 8,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/python_django_spec.cr b/spec/functional_test/testers/python_django_spec.cr
deleted file mode 100644
index 16490d86..00000000
--- a/spec/functional_test/testers/python_django_spec.cr
+++ /dev/null
@@ -1,29 +0,0 @@
-require "../func_spec.cr"
-
-extected_endpoints = [
- Endpoint.new("/", "GET"),
- Endpoint.new("/page//", "GET"),
- Endpoint.new("/article////.html", "GET", [Param.new("comment_page", "", "query")]),
- Endpoint.new("/category/.html", "GET"),
- Endpoint.new("/category//.html", "GET"),
- Endpoint.new("/author/.html", "GET"),
- Endpoint.new("/author//.html", "GET"),
- Endpoint.new("/tag/.html", "GET"),
- Endpoint.new("/tag//.html", "GET"),
- Endpoint.new("/archives.html", "GET"),
- Endpoint.new("/links.html", "GET"),
- Endpoint.new("/upload", "GET", [Param.new("sign", "", "query"), Param.new("sign", "", "query"), Param.new("X_FORWARDED_FOR", "", "header"), Param.new("X_REAL_IP", "", "header")]),
- Endpoint.new("/upload", "POST", [Param.new("sign", "", "query"), Param.new("X_FORWARDED_FOR", "", "header"), Param.new("X_REAL_IP", "", "header")]),
- Endpoint.new("/not_found", "GET", [Param.new("app_type", "", "cookie")]),
- Endpoint.new("/test", "GET", [Param.new("test_param", "", "form")]),
- Endpoint.new("/test", "POST", [Param.new("test_param", "", "form")]),
- Endpoint.new("/test", "PUT", [Param.new("test_param", "", "form")]),
- Endpoint.new("/test", "PATCH", [Param.new("test_param", "", "form")]),
- Endpoint.new("/delete_test", "GET"),
- Endpoint.new("/delete_test", "DELETE"),
-]
-
-FunctionalTester.new("fixtures/python_django/", {
- :techs => 1,
- :endpoints => 20,
-}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/ruby/hanami_spec.cr b/spec/functional_test/testers/ruby/hanami_spec.cr
new file mode 100644
index 00000000..8df27da6
--- /dev/null
+++ b/spec/functional_test/testers/ruby/hanami_spec.cr
@@ -0,0 +1,15 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ Endpoint.new("/books", "GET"),
+ Endpoint.new("/books/:id", "GET", [Param.new("id", "", "path")]),
+ Endpoint.new("/books/new", "GET"),
+ Endpoint.new("/books", "POST"),
+ Endpoint.new("/books/:id", "PATCH", [Param.new("id", "", "path")]),
+ Endpoint.new("/books/:id", "DELETE", [Param.new("id", "", "path")]),
+]
+
+FunctionalTester.new("fixtures/ruby/hanami/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/ruby_rails_spec.cr b/spec/functional_test/testers/ruby/rails_spec.cr
similarity index 86%
rename from spec/functional_test/testers/ruby_rails_spec.cr
rename to spec/functional_test/testers/ruby/rails_spec.cr
index e7014369..59d67c47 100644
--- a/spec/functional_test/testers/ruby_rails_spec.cr
+++ b/spec/functional_test/testers/ruby/rails_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/secret.html", "GET"),
@@ -24,7 +24,7 @@ extected_endpoints = [
Endpoint.new("/posts/1", "DELETE"),
]
-FunctionalTester.new("fixtures/ruby_rails/", {
+FunctionalTester.new("fixtures/ruby/rails/", {
:techs => 1,
- :endpoints => 6,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/ruby_sinatra_spec.cr b/spec/functional_test/testers/ruby/sinatra_spec.cr
similarity index 81%
rename from spec/functional_test/testers/ruby_sinatra_spec.cr
rename to spec/functional_test/testers/ruby/sinatra_spec.cr
index 29a02cbf..3bbb3f28 100644
--- a/spec/functional_test/testers/ruby_sinatra_spec.cr
+++ b/spec/functional_test/testers/ruby/sinatra_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET", [
@@ -10,7 +10,7 @@ extected_endpoints = [
Endpoint.new("/query", "POST", [Param.new("query", "", "form")]),
]
-FunctionalTester.new("fixtures/ruby_sinatra/", {
+FunctionalTester.new("fixtures/ruby/sinatra/", {
:techs => 1,
:endpoints => 2,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/ruby_hanami_spec.cr b/spec/functional_test/testers/ruby_hanami_spec.cr
deleted file mode 100644
index 545f35b9..00000000
--- a/spec/functional_test/testers/ruby_hanami_spec.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../func_spec.cr"
-
-extected_endpoints = [
- Endpoint.new("/books", "GET"),
- Endpoint.new("/books/:id", "GET"),
- Endpoint.new("/books/new", "GET"),
- Endpoint.new("/books", "POST"),
- Endpoint.new("/books/:id", "PATCH"),
- Endpoint.new("/books/:id", "DELETE"),
-]
-
-FunctionalTester.new("fixtures/ruby_hanami/", {
- :techs => 1,
- :endpoints => 6,
-}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/rust/actix_web_spec.cr b/spec/functional_test/testers/rust/actix_web_spec.cr
new file mode 100644
index 00000000..dadb729b
--- /dev/null
+++ b/spec/functional_test/testers/rust/actix_web_spec.cr
@@ -0,0 +1,11 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ Endpoint.new("/", "GET"),
+ Endpoint.new("/echo", "POST"),
+]
+
+FunctionalTester.new("fixtures/rust/actix_web/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/rust_axum_spec.cr b/spec/functional_test/testers/rust/axum_spec.cr
similarity index 59%
rename from spec/functional_test/testers/rust_axum_spec.cr
rename to spec/functional_test/testers/rust/axum_spec.cr
index f40ec78e..84916e52 100644
--- a/spec/functional_test/testers/rust_axum_spec.cr
+++ b/spec/functional_test/testers/rust/axum_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET"),
@@ -6,7 +6,7 @@ extected_endpoints = [
Endpoint.new("/bar", "POST"),
]
-FunctionalTester.new("fixtures/rust_axum/", {
+FunctionalTester.new("fixtures/rust/axum/", {
:techs => 1,
- :endpoints => 3,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/rust_rocket_spec.cr b/spec/functional_test/testers/rust/rocket_spec.cr
similarity index 54%
rename from spec/functional_test/testers/rust_rocket_spec.cr
rename to spec/functional_test/testers/rust/rocket_spec.cr
index fb0f1efd..d62e753c 100644
--- a/spec/functional_test/testers/rust_rocket_spec.cr
+++ b/spec/functional_test/testers/rust/rocket_spec.cr
@@ -1,11 +1,11 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/", "GET"),
Endpoint.new("/customer", "POST"),
]
-FunctionalTester.new("fixtures/rust_rocket/", {
+FunctionalTester.new("fixtures/rust/rocket/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/har_spec.cr b/spec/functional_test/testers/specification/har_spec.cr
similarity index 77%
rename from spec/functional_test/testers/har_spec.cr
rename to spec/functional_test/testers/specification/har_spec.cr
index 2e71940e..231a71be 100644
--- a/spec/functional_test/testers/har_spec.cr
+++ b/spec/functional_test/testers/specification/har_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("https://www.hahwul.com/", "GET", [
@@ -10,10 +10,10 @@ extected_endpoints = [
]),
]
-instance = FunctionalTester.new("fixtures/har/", {
+instance = FunctionalTester.new("fixtures/specification/har/", {
:techs => 1,
- :endpoints => 1,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints)
-instance.set_url "https://www.hahwul.com"
+instance.url = "https://www.hahwul.com"
instance.test_all
diff --git a/spec/functional_test/testers/specification/oas2_spec.cr b/spec/functional_test/testers/specification/oas2_spec.cr
new file mode 100644
index 00000000..57491f24
--- /dev/null
+++ b/spec/functional_test/testers/specification/oas2_spec.cr
@@ -0,0 +1,13 @@
+require "../../func_spec.cr"
+
+extected_endpoints = [
+ Endpoint.new("/v1/pets", "GET"),
+ Endpoint.new("/v1/pets", "POST", [Param.new("pet", "", "json")]),
+ Endpoint.new("/v1/pets/{petId}", "GET", [Param.new("petId", "", "path")]),
+ Endpoint.new("/v1/pets/{petId}", "PUT", [Param.new("petId", "", "path"), Param.new("pet", "", "json")]),
+]
+
+FunctionalTester.new("fixtures/specification/oas2/", {
+ :techs => 1,
+ :endpoints => extected_endpoints.size,
+}, extected_endpoints).test_all
diff --git a/spec/functional_test/testers/oas3_spec.cr b/spec/functional_test/testers/specification/oas3_spec.cr
similarity index 71%
rename from spec/functional_test/testers/oas3_spec.cr
rename to spec/functional_test/testers/specification/oas3_spec.cr
index 7eb3b84b..de74d96d 100644
--- a/spec/functional_test/testers/oas3_spec.cr
+++ b/spec/functional_test/testers/specification/oas3_spec.cr
@@ -1,4 +1,4 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/pets", "GET", [
@@ -9,34 +9,35 @@ extected_endpoints = [
Endpoint.new("/pets", "POST", [
Param.new("name", "", "json"),
]),
- Endpoint.new("/pets/{petId}", "GET"),
+ Endpoint.new("/pets/{petId}", "GET", [Param.new("petId", "", "path")]),
Endpoint.new("/pets/{petId}", "PUT", [
+ Param.new("petId", "", "path"),
Param.new("breed", "", "json"),
Param.new("name", "", "json"),
]),
]
-FunctionalTester.new("fixtures/oas3/common/", {
+FunctionalTester.new("fixtures/specification/oas3/common/", {
:techs => 1,
- :endpoints => 4,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
-FunctionalTester.new("fixtures/oas3/no_servers/", {
+FunctionalTester.new("fixtures/specification/oas3/no_servers/", {
:techs => 1,
:endpoints => 1,
}, nil).test_all
-FunctionalTester.new("fixtures/oas3/multiple_docs/", {
+FunctionalTester.new("fixtures/specification/oas3/multiple_docs/", {
:techs => 1,
:endpoints => 2,
}, nil).test_all
-FunctionalTester.new("fixtures/oas3/nil_cast/", {
+FunctionalTester.new("fixtures/specification/oas3/nil_cast/", {
:techs => 1,
:endpoints => 0,
}, nil).test_all
-FunctionalTester.new("fixtures/oas3/param_in_path/", {
+FunctionalTester.new("fixtures/specification/oas3/param_in_path/", {
:techs => 1,
:endpoints => 4,
}, [
diff --git a/spec/functional_test/testers/raml_spec.cr b/spec/functional_test/testers/specification/raml_spec.cr
similarity index 66%
rename from spec/functional_test/testers/raml_spec.cr
rename to spec/functional_test/testers/specification/raml_spec.cr
index 59d8d377..772596b7 100644
--- a/spec/functional_test/testers/raml_spec.cr
+++ b/spec/functional_test/testers/specification/raml_spec.cr
@@ -1,7 +1,8 @@
-require "../func_spec.cr"
+require "../../func_spec.cr"
extected_endpoints = [
Endpoint.new("/users/{userId}", "GET", [
+ Param.new("userId", "", "path"),
Param.new("userId", "", "query"),
Param.new("Authorization", "", "header"),
]),
@@ -11,7 +12,7 @@ extected_endpoints = [
]),
]
-FunctionalTester.new("fixtures/raml/", {
+FunctionalTester.new("fixtures/specification/raml/", {
:techs => 1,
- :endpoints => 2,
+ :endpoints => extected_endpoints.size,
}, extected_endpoints).test_all
diff --git a/spec/unit_test/analyzer/analyzer_go_echo_spec.cr b/spec/unit_test/analyzer/analyzer_go_echo_spec.cr
index d22e5118..e1133d81 100644
--- a/spec/unit_test/analyzer/analyzer_go_echo_spec.cr
+++ b/spec/unit_test/analyzer/analyzer_go_echo_spec.cr
@@ -1,10 +1,10 @@
-require "../../../src/analyzer/analyzers/analyzer_go_echo.cr"
+require "../../../src/analyzer/analyzers/go/echo.cr"
require "../../../src/options"
describe "analyzer_go_echo" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = AnalyzerGoEcho.new(options)
+ instance = Analyzer::Go::Echo.new(options)
groups = [] of Hash(String, String)
it "instance.get_route_path - GET" do
diff --git a/spec/unit_test/analyzer/analyzer_kemal_spec.cr b/spec/unit_test/analyzer/analyzer_kemal_spec.cr
index 6c807128..5319da7d 100644
--- a/spec/unit_test/analyzer/analyzer_kemal_spec.cr
+++ b/spec/unit_test/analyzer/analyzer_kemal_spec.cr
@@ -1,10 +1,10 @@
-require "../../../src/analyzer/analyzers/analyzer_crystal_kemal.cr"
+require "../../../src/analyzer/analyzers/crystal/kemal.cr"
require "../../../src/options"
describe "mapping_to_path" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = AnalyzerCrystalKemal.new(options)
+ instance = Analyzer::Crystal::Kemal.new(options)
it "line_to_param - env.params.query" do
line = "env.params.query[\"id\"]"
diff --git a/spec/unit_test/analyzer/analyzer_sinatra_spec.cr b/spec/unit_test/analyzer/analyzer_sinatra_spec.cr
index 95c8da0d..ae539e4f 100644
--- a/spec/unit_test/analyzer/analyzer_sinatra_spec.cr
+++ b/spec/unit_test/analyzer/analyzer_sinatra_spec.cr
@@ -1,10 +1,10 @@
-require "../../../src/analyzer/analyzers/analyzer_ruby_sinatra.cr"
+require "../../../src/analyzer/analyzers/ruby/sinatra.cr"
require "../../../src/options"
describe "mapping_to_path" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = AnalyzerRubySinatra.new(options)
+ instance = Analyzer::Ruby::Sinatra.new(options)
it "line_to_param - param[]" do
line = "param['id']"
diff --git a/spec/unit_test/detector/detect_crystal_kemal_spec.cr b/spec/unit_test/detector/crystal/kemal_spec.cr
similarity index 66%
rename from spec/unit_test/detector/detect_crystal_kemal_spec.cr
rename to spec/unit_test/detector/crystal/kemal_spec.cr
index 7ffafc68..611f2d19 100644
--- a/spec/unit_test/detector/detect_crystal_kemal_spec.cr
+++ b/spec/unit_test/detector/crystal/kemal_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/crystal/*"
describe "Detect Crystal Kemal" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorCrystalKemal.new options
+ instance = Detector::Crystal::Kemal.new options
it "shard.yml" do
instance.detect("shard.yml", "kemalcr/kemal").should eq(true)
diff --git a/spec/unit_test/detector/detect_crystal_lucky_spec.cr b/spec/unit_test/detector/crystal/lucky_spec.cr
similarity index 67%
rename from spec/unit_test/detector/detect_crystal_lucky_spec.cr
rename to spec/unit_test/detector/crystal/lucky_spec.cr
index a5271a7f..c4e4b331 100644
--- a/spec/unit_test/detector/detect_crystal_lucky_spec.cr
+++ b/spec/unit_test/detector/crystal/lucky_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/crystal/*"
describe "Detect Crystal Lucky" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorCrystalLucky.new options
+ instance = Detector::Crystal::Lucky.new options
it "shard.yml" do
instance.detect("shard.yml", "luckyframework/lucky").should eq(true)
diff --git a/spec/unit_test/detector/detect_cs_aspnet_mvc_spec.cr b/spec/unit_test/detector/csharp/aspnet_mvc_spec.cr
similarity index 67%
rename from spec/unit_test/detector/detect_cs_aspnet_mvc_spec.cr
rename to spec/unit_test/detector/csharp/aspnet_mvc_spec.cr
index 0191efa1..0b7c92e1 100644
--- a/spec/unit_test/detector/detect_cs_aspnet_mvc_spec.cr
+++ b/spec/unit_test/detector/csharp/aspnet_mvc_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/csharp/*"
describe "Detect C# ASP.Net MVC" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorCSharpAspNetMvc.new options
+ instance = Detector::CSharp::AspNetMvc.new options
it "packages" do
instance.detect("packages.config", "Microsoft.AspNet.Mvc").should eq(true)
diff --git a/spec/unit_test/detector/detect_elixir_phoenix_spec.cr b/spec/unit_test/detector/elixir/phoenix_spec.cr
similarity index 66%
rename from spec/unit_test/detector/detect_elixir_phoenix_spec.cr
rename to spec/unit_test/detector/elixir/phoenix_spec.cr
index 5c62a4a6..bd0bfb7e 100644
--- a/spec/unit_test/detector/detect_elixir_phoenix_spec.cr
+++ b/spec/unit_test/detector/elixir/phoenix_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/elixir/*"
describe "Detect Elixir Phoenix" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorElixirPhoenix.new options
+ instance = Detector::Elixir::Phoenix.new options
it "mix" do
instance.detect("mix.exs", "ElixirPhoenix").should eq(true)
diff --git a/spec/unit_test/detector/detect_go_beego_spec.cr b/spec/unit_test/detector/go/beego_spec.cr
similarity index 69%
rename from spec/unit_test/detector/detect_go_beego_spec.cr
rename to spec/unit_test/detector/go/beego_spec.cr
index ac72d2d8..60cbc61b 100644
--- a/spec/unit_test/detector/detect_go_beego_spec.cr
+++ b/spec/unit_test/detector/go/beego_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/go/*"
describe "Detect Go BeegoEcho" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorGoBeego.new options
+ instance = Detector::Go::Beego.new options
it "go.mod" do
instance.detect("go.mod", "github.com/beego/beego").should eq(true)
diff --git a/spec/unit_test/detector/detect_go_echo_spec.cr b/spec/unit_test/detector/go/echo_spec.cr
similarity index 69%
rename from spec/unit_test/detector/detect_go_echo_spec.cr
rename to spec/unit_test/detector/go/echo_spec.cr
index 9567bdcf..854e0f13 100644
--- a/spec/unit_test/detector/detect_go_echo_spec.cr
+++ b/spec/unit_test/detector/go/echo_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/go/*"
describe "Detect Go Echo" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorGoEcho.new options
+ instance = Detector::Go::Echo.new options
it "go.mod" do
instance.detect("go.mod", "github.com/labstack/echo").should eq(true)
diff --git a/spec/unit_test/detector/detect_go_fiber_spec.cr b/spec/unit_test/detector/go/fiber_spec.cr
similarity index 68%
rename from spec/unit_test/detector/detect_go_fiber_spec.cr
rename to spec/unit_test/detector/go/fiber_spec.cr
index f11064cf..26e1eaae 100644
--- a/spec/unit_test/detector/detect_go_fiber_spec.cr
+++ b/spec/unit_test/detector/go/fiber_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/go/*"
describe "Detect Go Fiber" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorGoFiber.new options
+ instance = Detector::Go::Fiber.new options
it "go.mod" do
instance.detect("go.mod", "github.com/gofiber/fiber").should eq(true)
diff --git a/spec/unit_test/detector/detect_go_gin_spec.cr b/spec/unit_test/detector/go/gin_spec.cr
similarity index 69%
rename from spec/unit_test/detector/detect_go_gin_spec.cr
rename to spec/unit_test/detector/go/gin_spec.cr
index 2fe4f96d..868893f8 100644
--- a/spec/unit_test/detector/detect_go_gin_spec.cr
+++ b/spec/unit_test/detector/go/gin_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/go/*"
describe "Detect Go Gin" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorGoGin.new options
+ instance = Detector::Go::Gin.new options
it "go.mod" do
instance.detect("go.mod", "github.com/gin-gonic/gin").should eq(true)
diff --git a/spec/unit_test/detector/detect_java_armeria_spec.cr b/spec/unit_test/detector/java/armeria_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_java_armeria_spec.cr
rename to spec/unit_test/detector/java/armeria_spec.cr
index 8b559335..9519bb35 100644
--- a/spec/unit_test/detector/detect_java_armeria_spec.cr
+++ b/spec/unit_test/detector/java/armeria_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Java Armeria" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorJavaArmeria.new options
+ instance = Detector::Java::Armeria.new options
it "pom.xml" do
instance.detect("pom.xml", "com.linecorp.armeria").should eq(true)
diff --git a/spec/unit_test/detector/detect_java_jsp_spec.cr b/spec/unit_test/detector/java/jsp_spec.cr
similarity index 68%
rename from spec/unit_test/detector/detect_java_jsp_spec.cr
rename to spec/unit_test/detector/java/jsp_spec.cr
index d376b583..26d02bae 100644
--- a/spec/unit_test/detector/detect_java_jsp_spec.cr
+++ b/spec/unit_test/detector/java/jsp_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Java JSP" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorJavaJsp.new options
+ instance = Detector::Java::Jsp.new options
it "case1" do
instance.detect("1.jsp", "<% info(); %>").should eq(true)
diff --git a/spec/unit_test/detector/detect_java_spring_spec.cr b/spec/unit_test/detector/java/spring_spec.cr
similarity index 72%
rename from spec/unit_test/detector/detect_java_spring_spec.cr
rename to spec/unit_test/detector/java/spring_spec.cr
index 33aba869..62a3086a 100644
--- a/spec/unit_test/detector/detect_java_spring_spec.cr
+++ b/spec/unit_test/detector/java/spring_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Java Spring" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorJavaSpring.new options
+ instance = Detector::Java::Spring.new options
it "test.java" do
instance.detect("test.java", "import org.springframework.boot.SpringApplication;").should eq(true)
diff --git a/spec/unit_test/detector/detect_js_express_spec.cr b/spec/unit_test/detector/javascript/express_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_js_express_spec.cr
rename to spec/unit_test/detector/javascript/express_spec.cr
index e01d3570..5db01eae 100644
--- a/spec/unit_test/detector/detect_js_express_spec.cr
+++ b/spec/unit_test/detector/javascript/express_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect JS Express" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorJsExpress.new options
+ instance = Detector::Javascript::Express.new options
it "require_single_quot" do
instance.detect("index.js", "require('express')").should eq(true)
diff --git a/spec/unit_test/detector/detect_js_restify_spec.cr b/spec/unit_test/detector/javascript/restify_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_js_restify_spec.cr
rename to spec/unit_test/detector/javascript/restify_spec.cr
index 39bc825a..c38275f7 100644
--- a/spec/unit_test/detector/detect_js_restify_spec.cr
+++ b/spec/unit_test/detector/javascript/restify_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect JS Restify" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorJsRestify.new options
+ instance = Detector::Javascript::Restify.new options
it "require_single_quot" do
instance.detect("index.js", "require('restify')").should eq(true)
diff --git a/spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr b/spec/unit_test/detector/kotlin/spring_spec.cr
similarity index 71%
rename from spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr
rename to spec/unit_test/detector/kotlin/spring_spec.cr
index d1757a32..fcace85e 100644
--- a/spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr
+++ b/spec/unit_test/detector/kotlin/spring_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Kotlin Spring" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorKotlinSpring.new options
+ instance = Detector::Kotlin::Spring.new options
it "test.kt" do
instance.detect("test.kt", "import org.springframework.boot.SpringApplication").should eq(true)
diff --git a/spec/unit_test/detector/detect_php_pure_spec.cr b/spec/unit_test/detector/php/php_spec.cr
similarity index 82%
rename from spec/unit_test/detector/detect_php_pure_spec.cr
rename to spec/unit_test/detector/php/php_spec.cr
index 4f59fe90..dc4430ff 100644
--- a/spec/unit_test/detector/detect_php_pure_spec.cr
+++ b/spec/unit_test/detector/php/php_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Php Pure" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorPhpPure.new options
+ instance = Detector::Php::Php.new options
it "detect_php 1" do
instance.detect("1.php", " phpinfo(); ?>").should eq(true)
diff --git a/spec/unit_test/detector/detect_python_django_spec.cr b/spec/unit_test/detector/python/django_spec.cr
similarity index 70%
rename from spec/unit_test/detector/detect_python_django_spec.cr
rename to spec/unit_test/detector/python/django_spec.cr
index 5c1afac0..e42506d7 100644
--- a/spec/unit_test/detector/detect_python_django_spec.cr
+++ b/spec/unit_test/detector/python/django_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Python Django" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorPythonDjango.new options
+ instance = Detector::Python::Django.new options
it "settings.py" do
instance.detect("settings.py", "from django.apps import AppConfig").should eq(true)
diff --git a/spec/unit_test/detector/detect_python_fastapi_spec.cr b/spec/unit_test/detector/python/fastapi_spec.cr
similarity index 68%
rename from spec/unit_test/detector/detect_python_fastapi_spec.cr
rename to spec/unit_test/detector/python/fastapi_spec.cr
index 3e0abbed..1ba19f26 100644
--- a/spec/unit_test/detector/detect_python_fastapi_spec.cr
+++ b/spec/unit_test/detector/python/fastapi_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Python FastAPI" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorPythonFastAPI.new options
+ instance = Detector::Python::FastAPI.new options
it "settings.py" do
instance.detect("settings.py", "from fastapi").should eq(true)
diff --git a/spec/unit_test/detector/detect_python_flask_spec.cr b/spec/unit_test/detector/python/flask_spec.cr
similarity index 70%
rename from spec/unit_test/detector/detect_python_flask_spec.cr
rename to spec/unit_test/detector/python/flask_spec.cr
index b98f2b56..ae9ecf86 100644
--- a/spec/unit_test/detector/detect_python_flask_spec.cr
+++ b/spec/unit_test/detector/python/flask_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Python Flask" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorPythonFlask.new options
+ instance = Detector::Python::Flask.new options
it "detect_flask - app.py" do
instance.detect("app.py", "from flask import Flask").should eq(true)
diff --git a/spec/unit_test/detector/detect_ruby_hanami_spec.cr b/spec/unit_test/detector/ruby/hanami_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_ruby_hanami_spec.cr
rename to spec/unit_test/detector/ruby/hanami_spec.cr
index 0f0237f5..7f6de1ab 100644
--- a/spec/unit_test/detector/detect_ruby_hanami_spec.cr
+++ b/spec/unit_test/detector/ruby/hanami_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Ruby Hanami" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRubyHanami.new options
+ instance = Detector::Ruby::Hanami.new options
it "gemfile/single_quot" do
instance.detect("Gemfile", "gem 'hanami'").should eq(true)
diff --git a/spec/unit_test/detector/detect_ruby_rails_spec.cr b/spec/unit_test/detector/ruby/rails_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_ruby_rails_spec.cr
rename to spec/unit_test/detector/ruby/rails_spec.cr
index 8963656a..cf9a0a3f 100644
--- a/spec/unit_test/detector/detect_ruby_rails_spec.cr
+++ b/spec/unit_test/detector/ruby/rails_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Ruby Rails" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRubyRails.new options
+ instance = Detector::Ruby::Rails.new options
it "gemfile/single_quot" do
instance.detect("Gemfile", "gem 'rails'").should eq(true)
diff --git a/spec/unit_test/detector/detect_ruby_sinatra_spec.cr b/spec/unit_test/detector/ruby/sinatra_spec.cr
similarity index 76%
rename from spec/unit_test/detector/detect_ruby_sinatra_spec.cr
rename to spec/unit_test/detector/ruby/sinatra_spec.cr
index 473578e6..a257cc00 100644
--- a/spec/unit_test/detector/detect_ruby_sinatra_spec.cr
+++ b/spec/unit_test/detector/ruby/sinatra_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Ruby Sinatra" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRubySinatra.new options
+ instance = Detector::Ruby::Sinatra.new options
it "gemfile/single_quot" do
instance.detect("Gemfile", "gem 'sinatra'").should eq(true)
diff --git a/spec/unit_test/detector/detect_rust_axum_spec.cr b/spec/unit_test/detector/rust/axum_spec.cr
similarity index 70%
rename from spec/unit_test/detector/detect_rust_axum_spec.cr
rename to spec/unit_test/detector/rust/axum_spec.cr
index 5048242a..dca4db69 100644
--- a/spec/unit_test/detector/detect_rust_axum_spec.cr
+++ b/spec/unit_test/detector/rust/axum_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Rust Axum" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRustAxum.new options
+ instance = Detector::Rust::Axum.new options
it "Gargo.toml" do
instance.detect("Cargo.toml", "[dependencies]\naxum = {}").should eq(true)
diff --git a/spec/unit_test/detector/detect_rust_rocket_spec.cr b/spec/unit_test/detector/rust/rocket_spec.cr
similarity index 70%
rename from spec/unit_test/detector/detect_rust_rocket_spec.cr
rename to spec/unit_test/detector/rust/rocket_spec.cr
index a4621c33..a1cf53b3 100644
--- a/spec/unit_test/detector/detect_rust_rocket_spec.cr
+++ b/spec/unit_test/detector/rust/rocket_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect Rust Rocket" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRustRocket.new options
+ instance = Detector::Rust::Rocket.new options
it "Gargo.toml" do
instance.detect("Cargo.toml", "[dependencies]\nrocket = {}").should eq(true)
diff --git a/spec/unit_test/detector/detect_oas2_spec.cr b/spec/unit_test/detector/specification/oas2_spec.cr
similarity index 83%
rename from spec/unit_test/detector/detect_oas2_spec.cr
rename to spec/unit_test/detector/specification/oas2_spec.cr
index 031223bb..cbbaa056 100644
--- a/spec/unit_test/detector/detect_oas2_spec.cr
+++ b/spec/unit_test/detector/specification/oas2_spec.cr
@@ -1,10 +1,10 @@
-require "../../../src/detector/detectors/*"
-require "../../../src/models/code_locator"
+require "../../../../src/detector/detectors/*"
+require "../../../../src/models/code_locator"
describe "Detect OAS 2.0(Swagger) Docs" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorOas2.new options
+ instance = Detector::Specification::Oas2.new options
it "json format" do
content = <<-EOS
diff --git a/spec/unit_test/detector/detect_oas3_spec.cr b/spec/unit_test/detector/specification/oas3_spec.cr
similarity index 83%
rename from spec/unit_test/detector/detect_oas3_spec.cr
rename to spec/unit_test/detector/specification/oas3_spec.cr
index 6570c71b..01f039fa 100644
--- a/spec/unit_test/detector/detect_oas3_spec.cr
+++ b/spec/unit_test/detector/specification/oas3_spec.cr
@@ -1,10 +1,10 @@
-require "../../../src/detector/detectors/*"
-require "../../../src/models/code_locator"
+require "../../../../src/detector/detectors/*"
+require "../../../../src/models/code_locator"
describe "Detect OAS 3.0 Docs" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorOas3.new options
+ instance = Detector::Specification::Oas3.new options
it "json format" do
content = <<-EOS
diff --git a/spec/unit_test/detector/detect_raml_spec.cr b/spec/unit_test/detector/specification/raml_spec.cr
similarity index 79%
rename from spec/unit_test/detector/detect_raml_spec.cr
rename to spec/unit_test/detector/specification/raml_spec.cr
index 1149cac0..e72bb492 100644
--- a/spec/unit_test/detector/detect_raml_spec.cr
+++ b/spec/unit_test/detector/specification/raml_spec.cr
@@ -1,9 +1,9 @@
-require "../../../src/detector/detectors/*"
+require "../../../../src/detector/detectors/*"
describe "Detect RAML" do
config_init = ConfigInitializer.new
options = config_init.default_options
- instance = DetectorRAML.new options
+ instance = Detector::Specification::RAML.new options
it "raml" do
instance.detect("app.yaml", "#%RAML\nApp: 1").should eq(true)
diff --git a/spec/unit_test/models/analyzer_spec.cr b/spec/unit_test/models/analyzer_spec.cr
index 72a8fa6b..10b95b0c 100644
--- a/spec/unit_test/models/analyzer_spec.cr
+++ b/spec/unit_test/models/analyzer_spec.cr
@@ -4,7 +4,7 @@ require "../../../src/options.cr"
describe "Initialize Analyzer" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
+ options["base"] = YAML::Any.new("noir")
object = Analyzer.new(options)
it "getter - url" do
@@ -24,7 +24,7 @@ end
describe "Initialize FileAnalyzer" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
+ options["base"] = YAML::Any.new("noir")
object = FileAnalyzer.new(options)
it "getter - url" do
diff --git a/spec/unit_test/models/deliver_spec.cr b/spec/unit_test/models/deliver_spec.cr
index 42053ddc..be7fe7fd 100644
--- a/spec/unit_test/models/deliver_spec.cr
+++ b/spec/unit_test/models/deliver_spec.cr
@@ -4,9 +4,9 @@ require "../../../src/options.cr"
describe "Initialize" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
- options["send_proxy"] = "http://localhost:8090"
- options["nolog"] = "yes"
+ options["base"] = YAML::Any.new("noir")
+ options["send_proxy"] = YAML::Any.new("http://localhost:8090")
+ options["nolog"] = YAML::Any.new(true)
it "Deliver" do
object = Deliver.new options
@@ -14,26 +14,26 @@ describe "Initialize" do
end
it "Deliver with headers" do
- options["send_with_headers"] = "X-API-Key: abcdssss"
+ options["send_with_headers"] = YAML::Any.new([YAML::Any.new("X-API-Key: abcdssss")])
object = Deliver.new options
object.headers["X-API-Key"].should eq("abcdssss")
end
it "Deliver with headers (bearer case)" do
- options["send_with_headers"] = "Authorization: Bearer gAAAAABl3qwaQqol243Np"
+ options["send_with_headers"] = YAML::Any.new([YAML::Any.new("Authorization: Bearer gAAAAABl3qwaQqol243Np")])
object = Deliver.new options
object.headers["Authorization"].should eq("Bearer gAAAAABl3qwaQqol243Np")
end
it "Deliver with matchers" do
- options["use_matchers"] = "/admin"
+ options["use_matchers"] = YAML::Any.new([YAML::Any.new("/admin")])
object = Deliver.new options
- object.matchers.should eq(["/admin"])
+ object.matchers[0].to_s.should eq("/admin")
end
it "Deliver with filters" do
- options["use_filters"] = "/admin"
+ options["use_filters"] = YAML::Any.new([YAML::Any.new("/admin")])
object = Deliver.new options
- object.filters.should eq(["/admin"])
+ object.filters[0].to_s.should eq("/admin")
end
end
diff --git a/spec/unit_test/models/detector_spec.cr b/spec/unit_test/models/detector_spec.cr
index ba3192cf..f6fa0489 100644
--- a/spec/unit_test/models/detector_spec.cr
+++ b/spec/unit_test/models/detector_spec.cr
@@ -4,7 +4,7 @@ require "../../../src/config_initializer.cr"
describe "Initialize" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
+ options["base"] = YAML::Any.new("noir")
object = Detector.new(options)
it "getter - name" do
diff --git a/spec/unit_test/models/noir_spec.cr b/spec/unit_test/models/noir_spec.cr
index 7df2235a..653866ec 100644
--- a/spec/unit_test/models/noir_spec.cr
+++ b/spec/unit_test/models/noir_spec.cr
@@ -5,7 +5,7 @@ require "../../../src/models/endpoint.cr"
describe "Initialize" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
+ options["base"] = YAML::Any.new("noir")
runner = NoirRunner.new(options)
it "getter - options" do
@@ -17,9 +17,9 @@ end
describe "Methods" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
- options["url"] = "https://www.hahwul.com"
- options["nolog"] = "yes"
+ options["base"] = YAML::Any.new("noir")
+ options["url"] = YAML::Any.new("https://www.hahwul.com")
+ options["nolog"] = YAML::Any.new(true)
runner = NoirRunner.new(options)
runner.endpoints << Endpoint.new("/abcd", "GET")
@@ -31,3 +31,38 @@ describe "Methods" do
runner.endpoints[1].url.should eq("https://www.hahwul.com/abcd")
end
end
+
+describe "set-pvalue" do
+ config_init = ConfigInitializer.new
+ options = config_init.default_options
+ options["base"] = YAML::Any.new("noir")
+ options["set_pvalue_query"] = YAML::Any.new([YAML::Any.new("FUZZ")])
+ options["set_pvalue_header"] = YAML::Any.new([YAML::Any.new("name=FUZZ")])
+ options["set_pvalue_cookie"] = YAML::Any.new([YAML::Any.new("name:FUZZ")])
+ options["set_pvalue_json"] = YAML::Any.new([YAML::Any.new("name:FUZZ=FUZZ")])
+ runner = NoirRunner.new(options)
+
+ it "applies pvalue to query parameter" do
+ runner.apply_pvalue("query", "name", "value").should eq("FUZZ")
+ end
+
+ it "applies pvalue to header parameter with '=' delimiter" do
+ runner.apply_pvalue("header", "name", "value").should eq("FUZZ")
+ end
+
+ it "does not apply pvalue to header parameter when name does not match" do
+ runner.apply_pvalue("header", "name2", "value").should eq("value")
+ end
+
+ it "applies pvalue to cookie parameter with ':' delimiter" do
+ runner.apply_pvalue("cookie", "name", "value").should eq("FUZZ")
+ end
+
+ it "does not apply pvalue to cookie parameter when name does not match" do
+ runner.apply_pvalue("cookie", "name2", "value").should eq("value")
+ end
+
+ it "includes '=' in the pvalue for JSON parameter" do
+ runner.apply_pvalue("json", "name", "value").should eq("FUZZ=FUZZ")
+ end
+end
diff --git a/spec/unit_test/models/output_builder_spec.cr b/spec/unit_test/models/output_builder_spec.cr
index 49c022db..77815a20 100644
--- a/spec/unit_test/models/output_builder_spec.cr
+++ b/spec/unit_test/models/output_builder_spec.cr
@@ -4,9 +4,9 @@ require "../../../src/options.cr"
describe "Initialize" do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
- options["format"] = "json"
- options["output"] = "output.json"
+ options["base"] = YAML::Any.new("noir")
+ options["format"] = YAML::Any.new("json")
+ options["output"] = YAML::Any.new("output.json")
it "OutputBuilder" do
object = OutputBuilder.new options
@@ -72,8 +72,8 @@ end
describe OutputBuilderDiff do
config_init = ConfigInitializer.new
options = config_init.default_options
- options["base"] = "noir"
- options["format"] = "json"
+ options["base"] = YAML::Any.new("noir")
+ options["format"] = YAML::Any.new("json")
it "calculates the diff correctly" do
old_endpoints = [Endpoint.new("GET", "/old")]
diff --git a/spec/unit_test/passive_scan/passive_scan_spec.cr b/spec/unit_test/passive_scan/passive_scan_spec.cr
new file mode 100644
index 00000000..063a5119
--- /dev/null
+++ b/spec/unit_test/passive_scan/passive_scan_spec.cr
@@ -0,0 +1,107 @@
+require "../../../src/passive_scan/detect.cr"
+require "../../../src/models/logger.cr"
+require "../../../src/models/passive_scan.cr"
+
+describe NoirPassiveScan do
+ it "detects matches with 'and' condition" do
+ logger = NoirLogger.new(false, false, true)
+ rules = [
+ PassiveScan.new(YAML.parse(%(
+ id: hahwul-test
+ info:
+ name: use x-api-key
+ author:
+ - abcd
+ - aaaa
+ severity: critical
+ description: ....
+ reference:
+ - https://google.com
+ matchers-condition: and
+ matchers:
+ - type: word
+ patterns:
+ - test
+ - content
+ condition: and
+ category: secret
+ techs:
+ - '*'
+ - ruby-rails
+ ))),
+ ]
+ file_content = "This is a test content.\nAnother test line."
+ results = NoirPassiveScan.detect("test_file.txt", file_content, rules, logger)
+
+ results.size.should eq(1)
+ results[0].line_number.should eq(1)
+ end
+
+ it "detects matches with 'or' condition" do
+ logger = NoirLogger.new(false, false, true)
+ rules = [
+ PassiveScan.new(YAML.parse(%(
+ id: hahwul-test
+ info:
+ name: use x-api-key
+ author:
+ - abcd
+ - aaaa
+ severity: critical
+ description: ....
+ reference:
+ - https://google.com
+ matchers-condition: and
+ matchers:
+ - type: word
+ patterns:
+ - test
+ - content
+ condition: or
+ category: secret
+ techs:
+ - '*'
+ - ruby-rails
+ ))),
+ ]
+ file_content = "This is a test content.\nAnother test line."
+ results = NoirPassiveScan.detect("test_file.txt", file_content, rules, logger)
+
+ results.size.should eq(2)
+ results[0].line_number.should eq(1)
+ results[1].line_number.should eq(2)
+ end
+
+ it "detects regex matches" do
+ logger = NoirLogger.new(false, false, true)
+ rules = [
+ PassiveScan.new(YAML.parse(%(
+ id: hahwul-test
+ info:
+ name: use x-api-key
+ author:
+ - abcd
+ - aaaa
+ severity: critical
+ description: ....
+ reference:
+ - https://google.com
+ matchers-condition: and
+ matchers:
+ - type: regex
+ patterns:
+ - ^This
+ condition: or
+ category: secret
+ techs:
+ - '*'
+ - ruby-rails
+ ))),
+ ]
+ file_content = "This is a test content.\nAnother test line."
+ results = NoirPassiveScan.detect("test_file.txt", file_content, rules, logger)
+
+ results.size.should eq(1)
+ results[0].line_number.should eq(1)
+ end
+end
diff --git a/spec/unit_test/tagger/tagger_spec.cr b/spec/unit_test/tagger/tagger_spec.cr
index 3472f288..397cbaf1 100644
--- a/spec/unit_test/tagger/tagger_spec.cr
+++ b/spec/unit_test/tagger/tagger_spec.cr
@@ -117,7 +117,7 @@ describe "Tagger" do
config_init = ConfigInitializer.new
noir_options = config_init.default_options
e = Endpoint.new("/ws", "GET")
- e.set_protocol("ws")
+ e.protocol = "ws"
extected_endpoints = [e]
diff --git a/spec/unit_test/techs/techs_spec.cr b/spec/unit_test/techs/techs_spec.cr
index 55c7ea58..5b6ead85 100644
--- a/spec/unit_test/techs/techs_spec.cr
+++ b/spec/unit_test/techs/techs_spec.cr
@@ -19,7 +19,7 @@ describe "Similar to tech" do
end
describe "Get Techs" do
- techs = NoirTechs.get_techs
+ techs = NoirTechs.techs
techs.each do |k, v|
it "#{k} in techs" do
v.should_not be_empty
diff --git a/spec/unit_test/utils/utils_spec.cr b/spec/unit_test/utils/utils_spec.cr
index c79852ca..263d6c4b 100644
--- a/spec/unit_test/utils/utils_spec.cr
+++ b/spec/unit_test/utils/utils_spec.cr
@@ -53,15 +53,15 @@ describe "get_symbol" do
end
end
-describe "str_to_bool" do
- it "yes" do
- str_to_bool("yes").should eq(true)
+describe "any_to_bool" do
+ it true do
+ any_to_bool(true).should eq(true)
end
- it "no" do
- str_to_bool("no").should eq(false)
+ it false do
+ any_to_bool(false).should eq(false)
end
it "any string" do
- str_to_bool("hahwul").should eq(false)
+ any_to_bool("hahwul").should eq(false)
end
end
diff --git a/src/analyzer/analyzer.cr b/src/analyzer/analyzer.cr
index 86211c26..a86cf608 100644
--- a/src/analyzer/analyzer.cr
+++ b/src/analyzer/analyzer.cr
@@ -1,38 +1,50 @@
-require "./analyzers/*"
+require "./analyzers/**"
require "./analyzers/file_analyzers/*"
+macro define_analyzers(analyzers)
+ {% for analyzer in analyzers %}
+ analyzers[{{analyzer[0].id.stringify}}] = ->(options : Hash(String, YAML::Any)) do
+ instance = Analyzer::{{analyzer[1].id}}.new(options)
+ instance.analyze
+ end
+ {% end %}
+end
+
def initialize_analyzers(logger : NoirLogger)
# Initializing analyzers
- analyzers = {} of String => Proc(Hash(String, String), Array(Endpoint))
+ analyzers = {} of String => Proc(Hash(String, YAML::Any), Array(Endpoint))
# Mapping analyzers to their respective functions
- analyzers["c#-aspnet-mvc"] = ->analyzer_cs_aspnet_mvc(Hash(String, String))
- analyzers["crystal_kemal"] = ->analyzer_crystal_kemal(Hash(String, String))
- analyzers["crystal_lucky"] = ->analyzer_crystal_lucky(Hash(String, String))
- analyzers["elixir_phoenix"] = ->analyzer_elixir_phoenix(Hash(String, String))
- analyzers["go_beego"] = ->analyzer_go_beego(Hash(String, String))
- analyzers["go_echo"] = ->analyzer_go_echo(Hash(String, String))
- analyzers["go_fiber"] = ->analyzer_go_fiber(Hash(String, String))
- analyzers["go_gin"] = ->analyzer_go_gin(Hash(String, String))
- analyzers["har"] = ->analyzer_har(Hash(String, String))
- analyzers["java_armeria"] = ->analyzer_armeria(Hash(String, String))
- analyzers["java_jsp"] = ->analyzer_jsp(Hash(String, String))
- analyzers["java_spring"] = ->analyzer_java_spring(Hash(String, String))
- analyzers["js_express"] = ->analyzer_express(Hash(String, String))
- analyzers["js_restify"] = ->analyzer_restify(Hash(String, String))
- analyzers["kotlin_spring"] = ->analyzer_kotlin_spring(Hash(String, String))
- analyzers["oas2"] = ->analyzer_oas2(Hash(String, String))
- analyzers["oas3"] = ->analyzer_oas3(Hash(String, String))
- analyzers["php_pure"] = ->analyzer_php_pure(Hash(String, String))
- analyzers["python_django"] = ->analyzer_python_django(Hash(String, String))
- analyzers["python_fastapi"] = ->analyzer_python_fastapi(Hash(String, String))
- analyzers["python_flask"] = ->analyzer_python_flask(Hash(String, String))
- analyzers["raml"] = ->analyzer_raml(Hash(String, String))
- analyzers["ruby_hanami"] = ->analyzer_ruby_hanami(Hash(String, String))
- analyzers["ruby_rails"] = ->analyzer_ruby_rails(Hash(String, String))
- analyzers["ruby_sinatra"] = ->analyzer_ruby_sinatra(Hash(String, String))
- analyzers["rust_axum"] = ->analyzer_rust_axum(Hash(String, String))
- analyzers["rust_rocket"] = ->analyzer_rust_rocket(Hash(String, String))
+ define_analyzers([
+ {"c#-aspnet-mvc", CSharp::AspNetMvc},
+ {"crystal_kemal", Crystal::Kemal},
+ {"crystal_lucky", Crystal::Lucky},
+ {"elixir_phoenix", Elixir::Phoenix},
+ {"go_beego", Go::Beego},
+ {"go_echo", Go::Echo},
+ {"go_fiber", Go::Fiber},
+ {"go_gin", Go::Gin},
+ {"har", Specification::Har},
+ {"java_armeria", Java::Armeria},
+ {"java_jsp", Java::Jsp},
+ {"java_spring", Java::Spring},
+ {"js_express", Javascript::Express},
+ {"js_restify", Javascript::Restify},
+ {"kotlin_spring", Kotlin::Spring},
+ {"oas2", Specification::Oas2},
+ {"oas3", Specification::Oas3},
+ {"php_pure", Php::Php},
+ {"python_django", Python::Django},
+ {"python_fastapi", Python::FastAPI},
+ {"python_flask", Python::Flask},
+ {"raml", Specification::RAML},
+ {"ruby_hanami", Ruby::Hanami},
+ {"ruby_rails", Ruby::Rails},
+ {"ruby_sinatra", Ruby::Sinatra},
+ {"rust_axum", Rust::Axum},
+ {"rust_rocket", Rust::Rocket},
+ {"rust_actix_web", Rust::ActixWeb},
+ ])
logger.success "#{analyzers.size} Analyzers initialized"
logger.debug "Analyzers:"
@@ -42,7 +54,7 @@ def initialize_analyzers(logger : NoirLogger)
analyzers
end
-def analysis_endpoints(options : Hash(String, String), techs, logger : NoirLogger)
+def analysis_endpoints(options : Hash(String, YAML::Any), techs, logger : NoirLogger)
result = [] of Endpoint
file_analyzer = FileAnalyzer.new options
logger.info "Initializing analyzers"
@@ -57,7 +69,7 @@ def analysis_endpoints(options : Hash(String, String), techs, logger : NoirLogge
techs.each do |tech|
if analyzer.has_key?(tech)
- if NoirTechs.similar_to_tech(options["exclude_techs"]).includes?(tech)
+ if NoirTechs.similar_to_tech(options["exclude_techs"].to_s).includes?(tech)
logger.sub "➔ Skipping #{tech} analysis"
next
end
diff --git a/src/analyzer/analyzers/analyzer_armeria.cr b/src/analyzer/analyzers/analyzer_armeria.cr
deleted file mode 100644
index 27eba695..00000000
--- a/src/analyzer/analyzers/analyzer_armeria.cr
+++ /dev/null
@@ -1,69 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerArmeria < Analyzer
- REGEX_SERVER_CODE_BLOCK = /Server\s*\.builder\(\s*\)\s*\.[^;]*?build\(\)\s*\./
- REGEX_SERVICE_CODE = /\.service(If|Under|)?\([^;]+?\)/
- REGEX_ROUTE_CODE = /\.route\(\)\s*\.\s*(\w+)\s*\(([^\.]*)\)\./
-
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{@base_path}/**/*") do |path|
- next if File.directory?(path)
-
- if File.exists?(path) && (path.ends_with?(".java") || path.ends_with?(".kt"))
- details = Details.new(PathInfo.new(path))
-
- content = File.read(path, encoding: "utf-8", invalid: :skip)
- content.scan(REGEX_SERVER_CODE_BLOCK) do |server_codeblcok_match|
- server_codeblock = server_codeblcok_match[0]
-
- server_codeblock.scan(REGEX_SERVICE_CODE) do |service_code_match|
- next if service_code_match.size != 2
- endpoint_param_index = 0
- if service_code_match[1] == "If"
- endpoint_param_index = 1
- end
-
- service_code = service_code_match[0]
- parameter_code = service_code.split("(")[1]
- split_params = parameter_code.split(",")
- next if split_params.size <= endpoint_param_index
- endpoint = split_params[endpoint_param_index].strip
-
- endpoint = endpoint[1..-2]
- @result << Endpoint.new("#{endpoint}", "GET", details)
- end
-
- server_codeblock.scan(REGEX_ROUTE_CODE) do |route_code_match|
- next if route_code_match.size != 3
- method = route_code_match[1].upcase
- if method == "PATH"
- method = "GET"
- end
-
- next if !["GET", "POST", "DELETE", "PUT", "PATCH", "HEAD", "OPTIONS"].includes?(method)
-
- endpoint = route_code_match[2].split(")")[0].strip
- next if endpoint[0] != endpoint[-1]
- next if endpoint[0] != '"'
-
- endpoint = endpoint[1..-2]
- @result << Endpoint.new("#{endpoint}", method, details)
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
- Fiber.yield
-
- @result
- end
-end
-
-def analyzer_armeria(options : Hash(String, String))
- instance = AnalyzerArmeria.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_crystal_kemal.cr b/src/analyzer/analyzers/analyzer_crystal_kemal.cr
deleted file mode 100644
index 51d8155c..00000000
--- a/src/analyzer/analyzers/analyzer_crystal_kemal.cr
+++ /dev/null
@@ -1,176 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerCrystalKemal < Analyzer
- def analyze
- # Variables
- is_public = true
- public_folders = [] of String
-
- # Source Analysis
- begin
- Dir.glob("#{@base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".cr" && !path.includes?("lib")
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- endpoint = line_to_endpoint(line)
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- result << endpoint
- last_endpoint = endpoint
- end
-
- param = line_to_param(line)
- if param.name != ""
- if last_endpoint.method != ""
- last_endpoint.push_param(param)
- end
- end
-
- if line.includes? "serve_static false" || "serve_static(false)"
- is_public = false
- end
-
- if line.includes? "public_folder"
- begin
- splited = line.split("public_folder")
- public_folder = ""
-
- if splited.size > 1
- public_folder = splited[1].gsub("(", "").gsub(")", "").gsub(" ", "").gsub("\"", "").gsub("'", "")
- if public_folder != ""
- public_folders << public_folder
- end
- end
- rescue
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- # Public Dir Analysis
- if is_public
- begin
- Dir.glob("#{@base_path}/public/**/*") do |file|
- next if File.directory?(file)
- real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
- relative_path = file.sub(real_path, "")
- @result << Endpoint.new("/#{relative_path}", "GET")
- end
-
- public_folders.each do |folder|
- Dir.glob("#{@base_path}/#{folder}/**/*") do |file|
- next if File.directory?(file)
- relative_path = get_relative_path(@base_path, file)
- relative_path = get_relative_path(folder, relative_path)
- @result << Endpoint.new("/#{relative_path}", "GET")
- end
- end
- rescue e
- logger.debug e
- end
- end
-
- result
- end
-
- def line_to_param(content : String) : Param
- if content.includes? "env.params.query["
- param = content.split("env.params.query[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "query")
- end
-
- if content.includes? "env.params.json["
- param = content.split("env.params.json[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "json")
- end
-
- if content.includes? "env.params.body["
- param = content.split("env.params.body[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "form")
- end
-
- if content.includes? "env.request.headers["
- param = content.split("env.request.headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "header")
- end
-
- if content.includes? "env.request.cookies["
- param = content.split("env.request.cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "cookie")
- end
-
- if content.includes? "cookies.get_raw("
- param = content.split("cookies.get_raw(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "cookie")
- end
-
- Param.new("", "", "")
- end
-
- def line_to_endpoint(content : String) : Endpoint
- content.scan(/get\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "GET")
- end
- end
-
- content.scan(/post\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "POST")
- end
- end
-
- content.scan(/put\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PUT")
- end
- end
-
- content.scan(/delete\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "DELETE")
- end
- end
-
- content.scan(/patch\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PATCH")
- end
- end
-
- content.scan(/head\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "HEAD")
- end
- end
-
- content.scan(/options\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "OPTIONS")
- end
- end
-
- content.scan(/ws\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- endpoint = Endpoint.new("#{match[1]}", "GET")
- endpoint.set_protocol("ws")
- return endpoint
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_crystal_kemal(options : Hash(String, String))
- instance = AnalyzerCrystalKemal.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_crystal_lucky.cr b/src/analyzer/analyzers/analyzer_crystal_lucky.cr
deleted file mode 100644
index 6a599f64..00000000
--- a/src/analyzer/analyzers/analyzer_crystal_lucky.cr
+++ /dev/null
@@ -1,141 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerCrystalLucky < Analyzer
- def analyze
- # Public Dir Analysis
- begin
- Dir.glob("#{@base_path}/public/**/*") do |file|
- next if File.directory?(file)
- real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
- relative_path = file.sub(real_path, "")
- @result << Endpoint.new("/#{relative_path}", "GET")
- end
- rescue e
- logger.debug e
- end
-
- # Source Analysis
- begin
- Dir.glob("#{@base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".cr" && !path.includes?("lib")
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- endpoint = line_to_endpoint(line)
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- result << endpoint
- last_endpoint = endpoint
- end
-
- param = line_to_param(line)
- if param.name != ""
- if last_endpoint.method != ""
- last_endpoint.push_param(param)
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- result
- end
-
- def line_to_param(content : String) : Param
- if content.includes? "params.from_query[\""
- param = content.split("params.from_query[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "query")
- end
-
- if content.includes? "params.from_json[\""
- param = content.split("params.from_json[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "json")
- end
-
- if content.includes? "params.from_form_data[\""
- param = content.split("params.from_form_data[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "form")
- end
-
- if content.includes? "params.get("
- param = content.split("params.get(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param.gsub(":", ""), "", "query")
- end
-
- if content.includes? "request.headers["
- param = content.split("request.headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "header")
- end
-
- if content.includes? "cookies.get("
- param = content.split("cookies.get(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "cookie")
- end
-
- if content.includes? "cookies["
- param = content.split("cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "cookie")
- end
-
- Param.new("", "", "")
- end
-
- def line_to_endpoint(content : String) : Endpoint
- content.scan(/get\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "GET")
- end
- end
-
- content.scan(/post\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "POST")
- end
- end
-
- content.scan(/put\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PUT")
- end
- end
-
- content.scan(/delete\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "DELETE")
- end
- end
-
- content.scan(/patch\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PATCH")
- end
- end
-
- content.scan(/trace\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "TRACE")
- end
- end
-
- content.scan(/ws\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- endpoint = Endpoint.new("#{match[1]}", "GET")
- endpoint.set_protocol("ws")
- return endpoint
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_crystal_lucky(options : Hash(String, String))
- instance = AnalyzerCrystalLucky.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_cs_aspnet_mvc.cr b/src/analyzer/analyzers/analyzer_cs_aspnet_mvc.cr
deleted file mode 100644
index 8bfc5876..00000000
--- a/src/analyzer/analyzers/analyzer_cs_aspnet_mvc.cr
+++ /dev/null
@@ -1,51 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerCsAspNetMvc < Analyzer
- def analyze
- # Static Analysis
- locator = CodeLocator.instance
- route_config_file = locator.get("cs-apinet-mvc-routeconfig")
-
- if File.exists?("#{route_config_file}")
- File.open("#{route_config_file}", "r", encoding: "utf-8", invalid: :skip) do |file|
- maproute_check = false
- maproute_buffer = ""
-
- file.each_line.with_index do |line, index|
- if line.includes? ".MapRoute("
- maproute_check = true
- maproute_buffer = line
- end
-
- if line.includes? ");"
- maproute_check = false
- if maproute_buffer != ""
- buffer = maproute_buffer.gsub(/[\r\n]/, "")
- buffer = buffer.gsub(/\s+/, "")
- buffer.split(",").each do |item|
- if item.includes? "url:"
- url = item.gsub(/url:/, "").gsub(/"/, "")
- details = Details.new(PathInfo.new(route_config_file, index + 1))
- @result << Endpoint.new("/#{url}", "GET", details)
- end
- end
-
- maproute_buffer = ""
- end
- end
-
- if maproute_check
- maproute_buffer += line
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_cs_aspnet_mvc(options : Hash(String, String))
- instance = AnalyzerCsAspNetMvc.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_elixir_phoenix.cr b/src/analyzer/analyzers/analyzer_elixir_phoenix.cr
deleted file mode 100644
index 2f86950f..00000000
--- a/src/analyzer/analyzers/analyzer_elixir_phoenix.cr
+++ /dev/null
@@ -1,67 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerElixirPhoenix < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{@base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".ex"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line.with_index do |line, index|
- endpoints = line_to_endpoint(line)
- endpoints.each do |endpoint|
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- @result << endpoint
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- @result
- end
-
- def line_to_endpoint(line : String) : Array(Endpoint)
- endpoints = Array(Endpoint).new
-
- line.scan(/get\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- endpoints << Endpoint.new("#{match[1]}", "GET")
- end
-
- line.scan(/post\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- endpoints << Endpoint.new("#{match[1]}", "POST")
- end
-
- line.scan(/patch\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- endpoints << Endpoint.new("#{match[1]}", "PATCH")
- end
-
- line.scan(/put\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- endpoints << Endpoint.new("#{match[1]}", "PUT")
- end
-
- line.scan(/delete\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- endpoints << Endpoint.new("#{match[1]}", "DELETE")
- end
-
- line.scan(/socket\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
- tmp = Endpoint.new("#{match[1]}", "GET")
- tmp.set_protocol("ws")
- endpoints << tmp
- end
-
- endpoints
- end
-end
-
-def analyzer_elixir_phoenix(options : Hash(String, String))
- instance = AnalyzerElixirPhoenix.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_express.cr b/src/analyzer/analyzers/analyzer_express.cr
deleted file mode 100644
index 4304a0a2..00000000
--- a/src/analyzer/analyzers/analyzer_express.cr
+++ /dev/null
@@ -1,111 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerExpress < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- endpoint = line_to_endpoint(line)
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- result << endpoint
- last_endpoint = endpoint
- end
-
- param = line_to_param(line)
- if param.name != ""
- if last_endpoint.method != ""
- last_endpoint.push_param(param)
- end
- end
- end
- end
- end
- end
- rescue e
- # TODO
- end
-
- result
- end
-
- def express_get_endpoint(line : String)
- api_path = ""
- splited = line.split("(")
- if splited.size > 0
- api_path = splited[1].split(",")[0].gsub(/['"]/, "")
- end
-
- api_path
- end
-
- def line_to_param(line : String) : Param
- if line.includes? "req.body."
- param = line.split("req.body.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "json")
- end
-
- if line.includes? "req.query."
- param = line.split("req.query.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "query")
- end
-
- if line.includes? "req.cookies."
- param = line.split("req.cookies.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "cookie")
- end
-
- if line.includes? "req.header("
- param = line.split("req.header(")[1].split(")")[0].gsub(/['"]/, "")
- return Param.new(param, "", "header")
- end
-
- Param.new("", "", "")
- end
-
- def line_to_endpoint(line : String) : Endpoint
- if line.includes? ".get('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "GET")
- end
- end
- if line.includes? ".post('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "POST")
- end
- end
- if line.includes? ".put('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "PUT")
- end
- end
- if line.includes? ".delete('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "DELETE")
- end
- end
- if line.includes? ".patch('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "PATCH")
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_express(options : Hash(String, String))
- instance = AnalyzerExpress.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_go_beego.cr b/src/analyzer/analyzers/analyzer_go_beego.cr
deleted file mode 100644
index d01d7b5e..00000000
--- a/src/analyzer/analyzers/analyzer_go_beego.cr
+++ /dev/null
@@ -1,150 +0,0 @@
-require "../../models/analyzer"
-require "../../minilexers/golang"
-
-class AnalyzerGoBeego < Analyzer
- def analyze
- # Source Analysis
- public_dirs = [] of (Hash(String, String))
- groups = [] of Hash(String, String)
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".go"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- details = Details.new(PathInfo.new(path, index + 1))
- lexer = GolangLexer.new
-
- if line.includes?(".Group(")
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- group_name = ""
- group_path = ""
- map.each do |token|
- if token.type == :assign
- group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
- end
-
- if token.type == :string
- group_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- group_path = value + group_path
- end
- end
- end
- end
-
- before = token
- end
-
- if group_name.size > 0 && group_path.size > 0
- groups << {
- group_name => group_path,
- }
- end
- end
-
- if line.includes?(".Get(") || line.includes?(".Post(") || line.includes?(".Put(") || line.includes?(".Delete(")
- get_route_path(line, groups).tap do |route_path|
- if route_path.size > 0
- new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0].to_s.upcase, details)
- result << new_endpoint
- last_endpoint = new_endpoint
- end
- end
- end
-
- if line.includes?(".Any(") || line.includes?(".Handler(") || line.includes?(".Router(")
- get_route_path(line, groups).tap do |route_path|
- if route_path.size > 0
- new_endpoint = Endpoint.new("#{route_path}", "GET", details)
- result << new_endpoint
- last_endpoint = new_endpoint
- end
- end
- end
-
- ["GetString", "GetStrings", "GetInt", "GetInt8", "GetUint8", "GetInt16", "GetUint16", "GetInt32", "GetUint32",
- "GetInt64", "GetUint64", "GetBool", "GetFloat"].each do |pattern|
- match = line.match(/#{pattern}\(\"(.*)\"\)/)
- if match
- param_name = match[1]
- last_endpoint.params << Param.new(param_name, "", "query")
- end
- end
-
- if line.includes?("GetCookie(")
- match = line.match(/GetCookie\(\"(.*)\"\)/)
- if match
- cookie_name = match[1]
- last_endpoint.params << Param.new(cookie_name, "", "cookie")
- end
- end
-
- if line.includes?("GetSecureCookie(")
- match = line.match(/GetSecureCookie\(\"(.*)\"\)/)
- if match
- cookie_name = match[1]
- last_endpoint.params << Param.new(cookie_name, "", "cookie")
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- public_dirs.each do |p_dir|
- full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
- Dir.glob("#{full_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- if p_dir["static_path"].ends_with?("/")
- p_dir["static_path"] = p_dir["static_path"][0..-2]
- end
-
- details = Details.new(PathInfo.new(path))
- result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
- end
- end
- end
-
- Fiber.yield
-
- result
- end
-
- def get_route_path(line : String, groups : Array(Hash(String, String))) : String
- lexer = GolangLexer.new
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- map.each do |token|
- if token.type == :string
- final_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- final_path = value + final_path
- end
- end
- end
-
- return final_path
- end
-
- before = token
- end
-
- ""
- end
-end
-
-def analyzer_go_beego(options : Hash(String, String))
- instance = AnalyzerGoBeego.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_go_echo.cr b/src/analyzer/analyzers/analyzer_go_echo.cr
deleted file mode 100644
index d68f1260..00000000
--- a/src/analyzer/analyzers/analyzer_go_echo.cr
+++ /dev/null
@@ -1,192 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerGoEcho < Analyzer
- def analyze
- # Source Analysis
- public_dirs = [] of (Hash(String, String))
- groups = [] of Hash(String, String)
-
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".go"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- details = Details.new(PathInfo.new(path, index + 1))
- lexer = GolangLexer.new
-
- if line.includes?(".Group(")
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- group_name = ""
- group_path = ""
- map.each do |token|
- if token.type == :assign
- group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
- end
-
- if token.type == :string
- group_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- group_path = value + group_path
- end
- end
- end
- end
-
- before = token
- end
-
- if group_name.size > 0 && group_path.size > 0
- groups << {
- group_name => group_path,
- }
- end
- end
-
- if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(")
- get_route_path(line, groups).tap do |route_path|
- if route_path.size > 0
- new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details)
- result << new_endpoint
- last_endpoint = new_endpoint
- end
- end
- end
-
- if line.includes?("Param(") || line.includes?("FormValue(")
- get_param(line).tap do |param|
- if param.name.size > 0 && last_endpoint.method != ""
- last_endpoint.params << param
- end
- end
- end
-
- if line.includes?("Static(")
- get_static_path(line).tap do |static_path|
- if static_path.size > 0
- public_dirs << static_path
- end
- end
- end
-
- if line.includes?("Request().Header.Get(")
- match = line.match(/Request\(\)\.Header\.Get\(\"(.*)\"\)/)
- if match
- header_name = match[1]
- last_endpoint.params << Param.new(header_name, "", "header")
- end
- end
-
- if line.includes?("Cookie(")
- match = line.match(/Cookie\(\"(.*)\"\)/)
- if match
- cookie_name = match[1]
- last_endpoint.params << Param.new(cookie_name, "", "cookie")
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- public_dirs.each do |p_dir|
- full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
- Dir.glob("#{full_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- if p_dir["static_path"].ends_with?("/")
- p_dir["static_path"] = p_dir["static_path"][0..-2]
- end
-
- details = Details.new(PathInfo.new(path))
- result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
- end
- end
- end
-
- Fiber.yield
-
- result
- end
-
- def get_param(line : String) : Param
- param_type = "json"
- if line.includes?("QueryParam")
- param_type = "query"
- end
- if line.includes?("FormValue")
- param_type = "form"
- end
-
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(")")
- if second.size > 1
- param_name = second[0].gsub("\"", "")
- rtn = Param.new(param_name, "", param_type)
-
- return rtn
- end
- end
-
- Param.new("", "", "")
- end
-
- def get_static_path(line : String) : Hash(String, String)
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(",")
- if second.size > 1
- static_path = second[0].gsub("\"", "")
- file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "")
- rtn = {
- "static_path" => static_path,
- "file_path" => file_path,
- }
-
- return rtn
- end
- end
-
- {
- "static_path" => "",
- "file_path" => "",
- }
- end
-
- def get_route_path(line : String, groups : Array(Hash(String, String))) : String
- lexer = GolangLexer.new
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- map.each do |token|
- if token.type == :string
- final_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- final_path = value + final_path
- end
- end
- end
-
- return final_path
- end
-
- before = token
- end
-
- ""
- end
-end
-
-def analyzer_go_echo(options : Hash(String, String))
- instance = AnalyzerGoEcho.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_go_fiber.cr b/src/analyzer/analyzers/analyzer_go_fiber.cr
deleted file mode 100644
index fdcdd55b..00000000
--- a/src/analyzer/analyzers/analyzer_go_fiber.cr
+++ /dev/null
@@ -1,203 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerGoFiber < Analyzer
- def analyze
- # Source Analysis
- public_dirs = [] of (Hash(String, String))
- groups = [] of Hash(String, String)
-
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".go"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- details = Details.new(PathInfo.new(path, index + 1))
- lexer = GolangLexer.new
-
- if line.includes?(".Group(")
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- group_name = ""
- group_path = ""
- map.each do |token|
- if token.type == :assign
- group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
- end
-
- if token.type == :string
- group_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- group_path = value + group_path
- end
- end
- end
- end
-
- before = token
- end
-
- if group_name.size > 0 && group_path.size > 0
- groups << {
- group_name => group_path,
- }
- end
- end
-
- if line.includes?(".Get(") || line.includes?(".Post(") || line.includes?(".Put(") || line.includes?(".Delete(")
- get_route_path(line, groups).tap do |route_path|
- if route_path.size > 0
- new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0].upcase, details)
- if line.includes?("websocket.New(")
- new_endpoint.set_protocol("ws")
- end
- result << new_endpoint
- last_endpoint = new_endpoint
- end
- end
- end
-
- if line.includes?(".Query(") || line.includes?(".FormValue(")
- get_param(line).tap do |param|
- if param.name.size > 0 && last_endpoint.method != ""
- last_endpoint.params << param
- end
- end
- end
-
- if line.includes?("Static(")
- get_static_path(line).tap do |static_path|
- if static_path.size > 0
- public_dirs << static_path
- end
- end
- end
-
- if line.includes?("GetRespHeader(")
- match = line.match(/GetRespHeader\(\"(.*)\"\)/)
- if match
- header_name = match[1]
- last_endpoint.params << Param.new(header_name, "", "header")
- end
- end
-
- if line.includes?("Vary(")
- match = line.match(/Vary\(\"(.*)\"\)/)
- if match
- header_value = match[1]
- last_endpoint.params << Param.new("Vary", header_value, "header")
- end
- end
-
- if line.includes?("Cookies(")
- match = line.match(/Cookies\(\"(.*)\"\)/)
- if match
- cookie_name = match[1]
- last_endpoint.params << Param.new(cookie_name, "", "cookie")
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- public_dirs.each do |p_dir|
- full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
- Dir.glob("#{full_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- if p_dir["static_path"].ends_with?("/")
- p_dir["static_path"] = p_dir["static_path"][0..-2]
- end
-
- details = Details.new(PathInfo.new(path))
- result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
- end
- end
- end
-
- Fiber.yield
-
- result
- end
-
- def get_param(line : String) : Param
- param_type = "json"
- if line.includes?("Query")
- param_type = "query"
- end
- if line.includes?("FormValue")
- param_type = "form"
- end
-
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(")")
- if second.size > 1
- param_name = second[0].gsub("\"", "")
- rtn = Param.new(param_name, "", param_type)
-
- return rtn
- end
- end
-
- Param.new("", "", "")
- end
-
- def get_static_path(line : String) : Hash(String, String)
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(",")
- if second.size > 1
- static_path = second[0].gsub("\"", "")
- file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "").gsub_repeatedly("//", "/")
- rtn = {
- "static_path" => static_path,
- "file_path" => file_path,
- }
-
- return rtn
- end
- end
-
- {
- "static_path" => "",
- "file_path" => "",
- }
- end
-
- def get_route_path(line : String, groups : Array(Hash(String, String))) : String
- lexer = GolangLexer.new
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- map.each do |token|
- if token.type == :string
- final_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- final_path = value + final_path
- end
- end
- end
-
- return final_path
- end
-
- before = token
- end
-
- ""
- end
-end
-
-def analyzer_go_fiber(options : Hash(String, String))
- instance = AnalyzerGoFiber.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_go_gin.cr b/src/analyzer/analyzers/analyzer_go_gin.cr
deleted file mode 100644
index b64b2a28..00000000
--- a/src/analyzer/analyzers/analyzer_go_gin.cr
+++ /dev/null
@@ -1,194 +0,0 @@
-require "../../models/analyzer"
-require "../../minilexers/golang"
-
-class AnalyzerGoGin < Analyzer
- def analyze
- # Source Analysis
- public_dirs = [] of (Hash(String, String))
- groups = [] of Hash(String, String)
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path) && File.extname(path) == ".go"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- details = Details.new(PathInfo.new(path, index + 1))
- lexer = GolangLexer.new
-
- if line.includes?(".Group(")
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- group_name = ""
- group_path = ""
- map.each do |token|
- if token.type == :assign
- group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
- end
-
- if token.type == :string
- group_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- group_path = value + group_path
- end
- end
- end
- end
-
- before = token
- end
-
- if group_name.size > 0 && group_path.size > 0
- groups << {
- group_name => group_path,
- }
- end
- end
-
- if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(")
- get_route_path(line, groups).tap do |route_path|
- if route_path.size > 0
- new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details)
- result << new_endpoint
- last_endpoint = new_endpoint
- end
- end
- end
-
- ["Query", "PostForm", "GetHeader"].each do |pattern|
- if line.includes?("#{pattern}(")
- get_param(line).tap do |param|
- if param.name.size > 0 && last_endpoint.method != ""
- last_endpoint.params << param
- end
- end
- end
- end
-
- if line.includes?("Static(")
- get_static_path(line).tap do |static_path|
- if static_path["static_path"].size > 0 && static_path["file_path"].size > 0
- public_dirs << static_path
- end
- end
- end
-
- if line.includes?("Cookie(")
- match = line.match(/Cookie\(\"(.*)\"\)/)
- if match
- cookie_name = match[1]
- last_endpoint.params << Param.new(cookie_name, "", "cookie")
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- public_dirs.each do |p_dir|
- full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
- Dir.glob("#{full_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- if p_dir["static_path"].ends_with?("/")
- p_dir["static_path"] = p_dir["static_path"][0..-2]
- end
-
- details = Details.new(PathInfo.new(path))
- result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
- end
- end
- end
-
- Fiber.yield
-
- result
- end
-
- def get_param(line : String) : Param
- param_type = "json"
- if line.includes?("Query(")
- param_type = "query"
- end
- if line.includes?("PostForm(")
- param_type = "form"
- end
- if line.includes?("GetHeader(")
- param_type = "header"
- end
-
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(")")
- if second.size > 1
- if line.includes?("DefaultQuery") || line.includes?("DefaultPostForm")
- param_name = second[0].split(",")[0].gsub("\"", "")
- rtn = Param.new(param_name, "", param_type)
- else
- param_name = second[0].gsub("\"", "")
- rtn = Param.new(param_name, "", param_type)
- end
-
- return rtn
- end
- end
-
- Param.new("", "", "")
- end
-
- def get_static_path(line : String) : Hash(String, String)
- first = line.strip.split("(")
- if first.size > 1
- second = first[1].split(",")
- if second.size > 1
- static_path = second[0].gsub("\"", "")
- file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "")
- rtn = {
- "static_path" => static_path,
- "file_path" => file_path,
- }
-
- return rtn
- end
- end
-
- {
- "static_path" => "",
- "file_path" => "",
- }
- end
-
- def get_route_path(line : String, groups : Array(Hash(String, String))) : String
- lexer = GolangLexer.new
- map = lexer.tokenize(line)
- before = Token.new(:unknown, "", 0)
- map.each do |token|
- if token.type == :string
- final_path = token.value.to_s
- groups.each do |group|
- group.each do |key, value|
- if before.value.to_s.includes? key
- final_path = value + final_path
- end
- end
- end
-
- return final_path
- end
-
- before = token
- end
-
- ""
- end
-end
-
-def analyzer_go_gin(options : Hash(String, String))
- instance = AnalyzerGoGin.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_har.cr b/src/analyzer/analyzers/analyzer_har.cr
deleted file mode 100644
index 7766a293..00000000
--- a/src/analyzer/analyzers/analyzer_har.cr
+++ /dev/null
@@ -1,68 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerHar < Analyzer
- def analyze
- locator = CodeLocator.instance
- har_files = locator.all("har-path")
-
- if har_files.is_a?(Array(String)) && @url != ""
- har_files.each do |har_file|
- if File.exists?(har_file)
- data = HAR.from_file(har_file)
- logger.debug "Open #{har_file} file"
- data.entries.each do |entry|
- if entry.request.url.includes? @url
- path = entry.request.url.to_s.gsub(@url, "")
- endpoint = Endpoint.new(path, entry.request.method)
-
- entry.request.query_string.each do |query|
- endpoint.params << Param.new(query.name, query.value, "query")
- end
-
- is_websocket = false
- entry.request.headers.each do |header|
- endpoint.params << Param.new(header.name, header.value, "header")
- if header.name == "Upgrade" && header.value == "websocket"
- is_websocket = true
- end
- end
-
- entry.request.cookies.each do |cookie|
- endpoint.params << Param.new(cookie.name, cookie.value, "cookie")
- end
-
- post_data = entry.request.post_data
- if post_data
- params = post_data.params
- mime_type = post_data.mime_type
- param_type = "body"
- if mime_type == "application/json"
- param_type = "json"
- end
- if params
- params.each do |param|
- endpoint.params << Param.new(param.name, param.value.to_s, param_type)
- end
- end
- end
-
- details = Details.new(PathInfo.new(har_file, 0))
- endpoint.set_details(details)
- if is_websocket
- endpoint.set_protocol "ws"
- end
- @result << endpoint
- end
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_har(options : Hash(String, String))
- instance = AnalyzerHar.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_java_spring.cr b/src/analyzer/analyzers/analyzer_java_spring.cr
deleted file mode 100644
index 6f3e2ccf..00000000
--- a/src/analyzer/analyzers/analyzer_java_spring.cr
+++ /dev/null
@@ -1,454 +0,0 @@
-require "../../models/analyzer"
-require "../../minilexers/java"
-require "../../miniparsers/java"
-
-class AnalyzerJavaSpring < Analyzer
- REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m
- REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/
- FILE_CONTENT_CACHE = Hash(String, String).new
-
- def analyze
- parser_map = Hash(String, JavaParser).new
- package_map = Hash(String, Hash(String, ClassModel)).new
- webflux_base_path_map = Hash(String, String).new
-
- Dir.glob("#{@base_path}/**/*") do |path|
- url = ""
-
- # Extract the Webflux base path from 'application.yml' in specified directories
- if File.directory?(path)
- if path.ends_with?("/src")
- application_yml_path = File.join(path, "main/resources/application.yml")
- if File.exists?(application_yml_path)
- begin
- config = YAML.parse(File.read(application_yml_path))
- spring = config["spring"]
- webflux = spring["webflux"]
- webflux_base_path = webflux["base-path"]
-
- if webflux_base_path
- webflux_base_path_map[path] = webflux_base_path.as_s
- end
- rescue e
- # Handle parsing errors if necessary
- end
- end
-
- application_properties_path = File.join(path, "main/resources/application.properties")
- if File.exists?(application_properties_path)
- begin
- properties = File.read(application_properties_path)
- base_path = properties.match(/spring\.webflux\.base-path\s*=\s*(.*)/)
- if base_path
- webflux_base_path = base_path[1]
- webflux_base_path_map[path] = webflux_base_path if webflux_base_path
- end
- rescue e
- # Handle parsing errors if necessary
- end
- end
- end
- elsif File.exists?(path) && path.ends_with?(".java")
- webflux_base_path = find_base_path(path, webflux_base_path_map)
- # Load Java file content into cache for processing
- content = File.read(path, encoding: "utf-8", invalid: :skip)
- FILE_CONTENT_CACHE[path] = content
-
- # Process files that include Spring MVC bindings for routing
- spring_web_bind_package = "org.springframework.web.bind.annotation."
- has_spring_bindings = content.includes?(spring_web_bind_package)
- if has_spring_bindings
- if parser_map.has_key?(path)
- parser = parser_map[path]
- tokens = parser.tokens
- else
- parser = create_parser(Path.new(path), content)
- tokens = parser.tokens
- parser_map[path] = parser
- end
-
- package_name = parser.get_package_name(tokens)
- next if package_name == ""
- root_source_directory : Path = parser.get_root_source_directory(path, package_name)
- package_directory = Path.new(path).dirname
-
- # Import packages
- import_map = Hash(String, ClassModel).new
- parser.import_statements.each do |import_statement|
- import_path = import_statement.gsub(".", "/")
- if import_path.ends_with?("/*")
- import_directory = root_source_directory.join(import_path[..-3])
- if Dir.exists?(import_directory)
- Dir.glob("#{import_directory}/*.java") do |_path|
- next if path == _path
- if !parser_map.has_key?(_path)
- _parser = create_parser(Path.new(_path))
- parser_map[_path] = _parser
- else
- _parser = parser_map[_path]
- end
-
- _parser.classes.each do |package_class|
- import_map[package_class.name] = package_class
- end
- end
- end
- else
- source_path = root_source_directory.join(import_path + ".java")
- next if source_path.dirname == package_directory || !File.exists?(source_path)
- if !parser_map.has_key?(source_path.to_s)
- _parser = create_parser(source_path)
- parser_map[source_path.to_s] = _parser
- _parser.classes.each do |package_class|
- import_map[package_class.name] = package_class
- end
- else
- _parser = parser_map[source_path.to_s]
- _parser.classes.each do |package_class|
- import_map[package_class.name] = package_class
- end
- end
- end
- end
-
- # Import packages from the same directory
- package_class_map = package_map[package_directory]?
- if package_class_map.nil?
- package_class_map = Hash(String, ClassModel).new
- Dir.glob("#{package_directory}/*.java") do |_path|
- next if path == _path
- if !parser_map.has_key?(_path)
- _parser = create_parser(Path.new(_path))
- parser_map[_path] = _parser
- else
- _parser = parser_map[_path]
- end
-
- _parser.classes.each do |package_class|
- package_class_map[package_class.name] = package_class
- end
-
- parser.classes.each do |package_class|
- package_class_map[package_class.name] = package_class
- end
-
- package_map[package_directory] = package_class_map
- end
- end
-
- # Extract URL mappings and methods from Spring MVC annotated classes
- class_map = package_class_map.merge(import_map)
- parser.classes.each do |class_model|
- class_annotation = class_model.annotations["RequestMapping"]?
- if !class_annotation.nil?
- next if class_annotation.params.size == 0
- class_path_token = class_annotation.params[0][-1]
- if class_path_token.type == :STRING_LITERAL
- url = class_path_token.value[1..-2]
- if url.ends_with? "*"
- url = url[0..-2]
- end
- end
- end
-
- class_model.methods.values.each do |method|
- method.annotations.values.each do |method_annotation|
- url_paths = Array(String).new
-
- # Spring MVC method mappings
- request_methods = Array(String).new
- if method_annotation.name.ends_with? "Mapping"
- parameter_format = nil
- annotation_parameters = method_annotation.params
- annotation_parameters.each do |annotation_parameter_tokens|
- if annotation_parameter_tokens.size > 2
- annotation_parameter_key = annotation_parameter_tokens[0].value
- annotation_parameter_value = annotation_parameter_tokens[-1].value
- if annotation_parameter_key == "method"
- if annotation_parameter_value == "}"
- # Handle methods declared with multiple HTTP verbs
- annotation_parameter_tokens.reverse_each do |token|
- break if token.value == "method"
- next if token.type == :LBRACE || token.type == :RBRACE
- next if token.type == :DOT
- http_methods = ["GET", "POST", "PUT", "DELETE", "PATCH"]
- if http_methods.includes?(token.value)
- request_methods.push(token.value)
- end
- end
- else
- request_methods.push(annotation_parameter_value)
- end
- elsif annotation_parameter_key == "consumes"
- # Set parameter format based on the 'consumes' attribute of the annotation.
- if annotation_parameter_value.ends_with? "APPLICATION_FORM_URLENCODED_VALUE"
- parameter_format = "form"
- elsif annotation_parameter_value.ends_with? "APPLICATION_JSON_VALUE"
- parameter_format = "json"
- end
- end
- end
- end
-
- if webflux_base_path.ends_with?("/") && url.starts_with?("/")
- webflux_base_path = webflux_base_path[..-2]
- end
-
- # Parse and construct endpoints for methods annotated with 'RequestMapping' or specific HTTP methods
- if method_annotation.name == "RequestMapping"
- url_paths = [""]
- if method_annotation.params.size > 0
- url_paths = get_mapping_path(parser, tokens, method_annotation.params)
- end
-
- line = method_annotation.tokens[0].line
- details = Details.new(PathInfo.new(path, line))
-
- if request_methods.empty?
- # Handle default HTTP methods if no specific method is annotated
- ["GET", "POST", "PUT", "DELETE", "PATCH"].each do |_request_method|
- parameters = get_endpoint_parameters(parser, _request_method, method, parameter_format, class_map)
- url_paths.each do |url_path|
- @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", _request_method, parameters, details)
- end
- end
- else
- # Create endpoints for annotated HTTP methods
- url_paths.each do |url_path|
- request_methods.each do |request_method|
- parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map)
- @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", request_method, parameters, details)
- end
- end
- end
- break
- else
- # Handle other specific mapping annotations like 'GetMapping', 'PostMapping', etc
- mapping_annotations = ["GetMapping", "PostMapping", "PutMapping", "DeleteMapping", "PatchMapping"]
- mapping_index = mapping_annotations.index(method_annotation.name)
- if !mapping_index.nil?
- line = method_annotation.tokens[0].line
- request_method = mapping_annotations[mapping_index][0..-8].upcase
- if parameter_format.nil? && request_method == "POST"
- parameter_format = "form"
- end
- parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map)
-
- url_paths = [""]
- if method_annotation.params.size > 0
- url_paths = get_mapping_path(parser, tokens, method_annotation.params)
- end
-
- details = Details.new(PathInfo.new(path, line))
- url_paths.each do |url_path|
- @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", request_method, parameters, details)
- end
- break
- end
- end
- end
- end
- end
- end
- else
- # Extract and construct endpoints from reactive route configurations
- content.scan(REGEX_ROUTER_CODE_BLOCK) do |route_code|
- method_code = route_code[0]
- method_code.scan(REGEX_ROUTE_CODE_LINE) do |match|
- next if match.size != 4
- method = match[2]
- endpoint = match[3].gsub(/\n/, "")
- details = Details.new(PathInfo.new(path))
- @result << Endpoint.new("#{url}#{endpoint}", method, details)
- end
- end
- end
- end
- end
- Fiber.yield
-
- @result
- end
-
- def create_parser(path : Path, content : String = "")
- if content == ""
- if FILE_CONTENT_CACHE.has_key?(path.to_s)
- content = FILE_CONTENT_CACHE[path.to_s]
- else
- content = File.read(path, encoding: "utf-8", invalid: :skip)
- end
- end
-
- lexer = JavaLexer.new
- tokens = lexer.tokenize(content)
- parser = JavaParser.new(path.to_s, tokens)
- parser
- end
-
- def find_base_path(current_path : String, base_paths : Hash(String, String))
- base_paths.keys.sort_by!(&.size).reverse!.each do |path|
- if current_path.starts_with?(path)
- return base_paths[path]
- end
- end
-
- ""
- end
-
- def get_mapping_path(parser : JavaParser, tokens : Array(Token), method_params : Array(Array(Token)))
- # 1. Search for the value of the Mapping annotation.
- # 2. If the value is a string literal, return the literal.
- # 3. If the value is an array, return each element of the array.
- # 4. In other cases, return an empty array.
- url_paths = Array(String).new
- if method_params[0].size != 0
- path_argument_index = 0
- method_params.each_with_index do |mapping_parameter, index|
- if mapping_parameter[0].type == :IDENTIFIER && mapping_parameter[0].value == "value"
- path_argument_index = index
- end
- end
-
- path_parameter_tokens = method_params[path_argument_index]
- # Extract single and multiple mapping path
- if path_parameter_tokens[-1].type == :STRING_LITERAL
- url_paths << path_parameter_tokens[-1].value[1..-2]
- elsif path_parameter_tokens[-1].type == :RBRACE
- i = path_parameter_tokens.size - 2
- while i > 0
- parameter_token = path_parameter_tokens[i]
- if parameter_token.type == :LBRACE
- break
- elsif parameter_token.type == :COMMA
- i -= 1
- next
- elsif parameter_token.type == :STRING_LITERAL
- url_paths << parameter_token.value[1..-2]
- else
- break
- end
-
- i -= 1
- end
- end
- end
-
- url_paths
- end
-
- def get_endpoint_parameters(parser : JavaParser, request_method : String, method : MethodModel, parameter_format : String | Nil, package_class_map : Hash(String, ClassModel)) : Array(Param)
- endpoint_parameters = Array(Param).new
- method.params.each do |method_param_tokens|
- next if method_param_tokens.size == 0
- if method_param_tokens[-1].type == :IDENTIFIER
- if method_param_tokens[0].type == :AT
- if method_param_tokens[1].value == "PathVariable"
- next
- elsif method_param_tokens[1].value == "RequestBody"
- if parameter_format.nil?
- parameter_format = "json"
- end
- elsif method_param_tokens[1].value == "RequestParam"
- parameter_format = "query"
- elsif method_param_tokens[1].value == "RequestHeader"
- parameter_format = "header"
- end
- end
-
- if parameter_format.nil?
- parameter_format = "query"
- end
-
- default_value = nil
- # Extract parameter name directly if not an identifier
- parameter_name = method_param_tokens[-1].value
- if method_param_tokens.size > 2
- if method_param_tokens[2].type == :LPAREN
- request_parameters = parser.parse_formal_parameters(method_param_tokens, 2)
- request_parameters.each do |request_parameter_tokens|
- if request_parameter_tokens.size > 2
- request_param_name = request_parameter_tokens[0].value
- request_param_value = request_parameter_tokens[-1].value
-
- # Extract 'name' from @RequestParam(value/defaultValue/name = "name")
- if request_param_name == "value"
- parameter_name = request_param_value[1..-2]
- elsif request_param_name == "name"
- parameter_name = request_param_value[1..-2]
- elsif request_param_name == "defaultValue"
- default_value = request_param_value[1..-2]
- end
- end
- end
- # Handle direct string literal as parameter name, e.g., @RequestParam("name")
- if method_param_tokens[3].type == :STRING_LITERAL
- parameter_name_token = method_param_tokens[3]
- parameter_name = parameter_name_token.value[1..-2]
- end
- end
- end
-
- argument_name = method_param_tokens[-1].value
- parameter_type = method_param_tokens[-2].value
- if ["long", "int", "integer", "char", "boolean", "string", "multipartfile"].index(parameter_type.downcase)
- param_default_value = default_value.nil? ? "" : default_value
- endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
- elsif parameter_type == "HttpServletRequest"
- i = 0
- while i < method.body.size - 6
- if [:TAB, :NEWLINE].index(method.body[i].type)
- i += 1
- next
- end
-
- next if method.body[i].type == :NEWLINE
-
- if method.body[i].type == :IDENTIFIER && method.body[i].value == argument_name
- if method.body[i + 1].type == :DOT
- if method.body[i + 2].type == :IDENTIFIER && method.body[i + 3].type == :LPAREN
- servlet_request_method_name = method.body[i + 2].value
- if method.body[i + 4].type == :STRING_LITERAL
- parameter_name = method.body[i + 4].value[1..-2]
- if servlet_request_method_name == "getParameter"
- unless endpoint_parameters.any? { |param| param.name == parameter_name }
- endpoint_parameters << Param.new(parameter_name, "", parameter_format)
- end
- i += 6
- next
- elsif servlet_request_method_name == "getHeader"
- unless endpoint_parameters.any? { |param| param.name == parameter_name }
- endpoint_parameters << Param.new(parameter_name, "", "header")
- end
- i += 6
- next
- end
- end
- end
- end
- end
-
- i += 1
- end
- else
- # Map fields of user-defined class to parameters.
- if package_class_map.has_key?(parameter_type)
- package_class = package_class_map[parameter_type]
- package_class.fields.values.each do |field|
- if field.access_modifier == "public" || field.has_setter?
- param_default_value = default_value.nil? ? field.init_value : default_value
- endpoint_parameters << Param.new(field.name, param_default_value, parameter_format)
- end
- end
- end
- end
- end
- end
-
- endpoint_parameters
- end
-end
-
-def analyzer_java_spring(options : Hash(String, String))
- instance = AnalyzerJavaSpring.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_jsp.cr b/src/analyzer/analyzers/analyzer_jsp.cr
deleted file mode 100644
index 915c6023..00000000
--- a/src/analyzer/analyzers/analyzer_jsp.cr
+++ /dev/null
@@ -1,57 +0,0 @@
-require "../../utils/utils.cr"
-require "../../models/analyzer"
-
-class AnalyzerJsp < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
-
- relative_path = get_relative_path(base_path, path)
-
- if File.exists?(path) && File.extname(path) == ".jsp"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- params_query = [] of Param
-
- file.each_line do |line|
- if line.includes? "request.getParameter"
- match = line.strip.match(/request.getParameter\("(.*?)"\)/)
- if match
- param_name = match[1]
- params_query << Param.new(param_name, "", "query")
- end
- end
-
- if line.includes? "${param."
- match = line.strip.match(/\$\{param\.(.*?)\}/)
- if match
- param_name = match[1]
- params_query << Param.new(param_name, "", "query")
- end
- end
- rescue
- next
- end
- details = Details.new(PathInfo.new(path))
- result << Endpoint.new("/#{relative_path}", "GET", params_query, details)
- end
- end
- end
- rescue e
- logger.debug e
- end
- Fiber.yield
-
- result
- end
-
- def allow_patterns
- ["$_GET", "$_POST", "$_REQUEST", "$_SERVER"]
- end
-end
-
-def analyzer_jsp(options : Hash(String, String))
- instance = AnalyzerJsp.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_kotlin_spring.cr b/src/analyzer/analyzers/analyzer_kotlin_spring.cr
deleted file mode 100644
index 1b9c47b2..00000000
--- a/src/analyzer/analyzers/analyzer_kotlin_spring.cr
+++ /dev/null
@@ -1,495 +0,0 @@
-require "../../models/analyzer"
-require "../../minilexers/kotlin"
-require "../../miniparsers/kotlin"
-require "../../utils/utils.cr"
-
-class AnalyzerKotlinSpring < Analyzer
- REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m
- REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/
- FILE_CONTENT_CACHE = Hash(String, String).new
- KOTLIN_EXTENSION = "kt"
- HTTP_METHODS = %w[GET POST PUT DELETE PATCH]
-
- def analyze
- parser_map = Hash(String, KotlinParser).new
- package_map = Hash(String, Hash(String, KotlinParser::ClassModel)).new
- webflux_base_path_map = Hash(String, String).new
-
- Dir.glob("#{@base_path}/**/*") do |path|
- next unless File.exists?(path)
-
- if File.directory?(path)
- process_directory(path, webflux_base_path_map)
- elsif path.ends_with?(".#{KOTLIN_EXTENSION}")
- process_kotlin_file(path, parser_map, package_map, webflux_base_path_map)
- end
- end
-
- Fiber.yield
- @result
- end
-
- # Process directory to extract WebFlux base path from 'application.yml'
- private def process_directory(path : String, webflux_base_path_map : Hash(String, String))
- if path.ends_with?("/src")
- application_yml_path = File.join(path, "main/resources/application.yml")
- if File.exists?(application_yml_path)
- begin
- config = YAML.parse(File.read(application_yml_path))
- spring = config["spring"]
- if spring
- webflux = spring["webflux"]
- if webflux
- base_path = webflux["base-path"]
- if base_path
- webflux_base_path = base_path.as_s
- webflux_base_path_map[path] = webflux_base_path if webflux_base_path
- end
- end
- end
- rescue e
- # Handle parsing errors if necessary
- end
- end
-
- application_properties_path = File.join(path, "main/resources/application.properties")
- if File.exists?(application_properties_path)
- begin
- properties = File.read(application_properties_path)
- base_path = properties.match(/spring\.webflux\.base-path\s*=\s*(.*)/)
- if base_path
- webflux_base_path = base_path[1]
- webflux_base_path_map[path] = webflux_base_path if webflux_base_path
- end
- rescue e
- # Handle parsing errors if necessary
- end
- end
- end
- end
-
- # Process individual Kotlin files to analyze Spring WebFlux annotations
- private def process_kotlin_file(path : String, parser_map : Hash(String, KotlinParser), package_map : Hash(String, Hash(String, KotlinParser::ClassModel)), webflux_base_path_map : Hash(String, String))
- content = fetch_file_content(path)
- parser = parser_map[path]? || create_parser(Path.new(path), content)
- parser_map[path] ||= parser
- tokens = parser.tokens
-
- package_name = parser.get_package_name(tokens)
- return if package_name.empty?
-
- root_source_directory = parser.get_root_source_directory(path, package_name)
- package_directory = Path.new(path).parent
-
- import_map = process_imports(parser, root_source_directory, package_directory, path, parser_map)
- package_class_map = package_map[package_directory.to_s]? || process_package_classes(package_directory, path, parser_map)
- package_map[package_directory.to_s] ||= package_class_map
-
- class_map = package_class_map.merge(import_map)
- parser.classes.each { |source_class| class_map[source_class.name] = source_class }
-
- match = webflux_base_path_map.find { |base_path, _| path.starts_with?(base_path) }
- webflux_base_path = match ? match.last : ""
- process_class_annotations(path, parser, class_map, webflux_base_path)
- end
-
- # Fetch content of a file and cache it
- private def fetch_file_content(path : String) : String
- FILE_CONTENT_CACHE[path] ||= File.read(path, encoding: "utf-8", invalid: :skip)
- end
-
- # Create a Kotlin parser for a given path and content
- private def create_parser(path : Path, content : String = "") : KotlinParser
- content = fetch_file_content(path.to_s) if content.empty?
- lexer = KotlinLexer.new
- tokens = lexer.tokenize(content)
- KotlinParser.new(path.to_s, tokens)
- end
-
- # Process imports in the Kotlin file to gather class models
- private def process_imports(parser : KotlinParser, root_source_directory : Path, package_directory : Path, current_path : String, parser_map : Hash(String, KotlinParser)) : Hash(String, KotlinParser::ClassModel)
- import_map = Hash(String, KotlinParser::ClassModel).new
- parser.import_statements.each do |import_statement|
- import_path = import_statement.gsub(".", "/")
- if import_path.ends_with?("/*")
- process_wildcard_import(root_source_directory, import_path, current_path, parser_map, import_map)
- else
- process_single_import(root_source_directory, import_path, package_directory, parser_map, import_map)
- end
- end
-
- import_map
- end
-
- # Handle wildcard imports
- private def process_wildcard_import(root_source_directory : Path, import_path : String, current_path : String, parser_map : Hash(String, KotlinParser), import_map : Hash(String, KotlinParser::ClassModel))
- import_directory = root_source_directory.join(import_path[0..-3])
- return unless Dir.exists?(import_directory)
-
- # TODO: Be aware that the import file location might differ from the actual file system path.
- Dir.glob("#{import_directory}/*.#{KOTLIN_EXTENSION}") do |path|
- next if path == current_path
- parser = parser_map[path]? || create_parser(Path.new(path))
- parser_map[path] ||= parser
- parser.classes.each { |package_class| import_map[package_class.name] = package_class }
- end
- end
-
- # Handle single imports
- private def process_single_import(root_source_directory : Path, import_path : String, package_directory : Path, parser_map : Hash(String, KotlinParser), import_map : Hash(String, KotlinParser::ClassModel))
- source_path = root_source_directory.join("#{import_path}.#{KOTLIN_EXTENSION}")
- return if source_path.dirname == package_directory || !File.exists?(source_path)
- # TODO: Be aware that the import file location might differ from the actual file system path.
- parser = parser_map[source_path.to_s]? || create_parser(source_path)
- parser_map[source_path.to_s] ||= parser
- parser.classes.each { |package_class| import_map[package_class.name] = package_class }
- end
-
- # Process all classes in the same package directory
- private def process_package_classes(package_directory : Path, current_path : String, parser_map : Hash(String, KotlinParser)) : Hash(String, KotlinParser::ClassModel)
- package_class_map = Hash(String, KotlinParser::ClassModel).new
- Dir.glob("#{package_directory}/*.#{KOTLIN_EXTENSION}") do |path|
- next if path == current_path
- parser = parser_map[path]? || create_parser(Path.new(path))
- parser_map[path] ||= parser
- parser.classes.each { |package_class| package_class_map[package_class.name] = package_class }
- end
- package_class_map
- end
-
- # Process class annotations to find URL mappings and HTTP methods
- private def process_class_annotations(path : String, parser : KotlinParser, class_map : Hash(String, KotlinParser::ClassModel), webflux_base_path : String)
- parser.classes.each do |class_model|
- class_annotation = class_model.annotations["@RequestMapping"]?
-
- url = class_annotation ? extract_url_from_annotation(class_annotation) : ""
- class_model.methods.values.each do |method|
- process_method_annotations(path, parser, method, class_map, webflux_base_path, url)
- end
- end
- end
-
- # Extract URL from class annotation
- private def extract_url_from_annotation(annotation_model : KotlinParser::AnnotationModel) : String
- return "" if annotation_model.params.empty?
- url_token = annotation_model.params[0][-1]
- url = url_token.type == :STRING_LITERAL ? url_token.value[1..-2] : ""
- url.ends_with?("*") ? url[0..-2] : url
- end
-
- # Process method annotations to find specific mappings and create endpoints
- private def process_method_annotations(path : String, parser : KotlinParser, method : KotlinParser::MethodModel, class_map : Hash(String, KotlinParser::ClassModel), webflux_base_path : String, url : String)
- method.annotations.values.each do |method_annotation|
- next unless method_annotation.name.ends_with?("Mapping")
-
- request_optional, parameter_format = extract_request_methods_and_format(parser, method_annotation)
- url_paths = method_annotation.name.starts_with?("@") ? extract_mapping_paths(parser, method_annotation) : [""]
- details = Details.new(PathInfo.new(path, method_annotation.tokens[0].line))
- url_paths += request_optional["values"]
- url_paths += request_optional["paths"]
-
- create_endpoints(webflux_base_path, url, url_paths, request_optional, parser, method, parameter_format, class_map, details)
- end
- end
-
- # Extract HTTP methods and parameter format from annotation
- private def extract_request_methods_and_format(parser : KotlinParser, annotation_model : KotlinParser::AnnotationModel) : Tuple(Hash(String, Array(String)), String | Nil)
- parameter_format = nil
- request_optional = Hash(String, Array(String)).new
- request_optional["methods"] = Array(String).new
- request_optional["params"] = Array(String).new
- request_optional["headers"] = Array(String).new
- request_optional["values"] = Array(String).new
- request_optional["paths"] = Array(String).new
-
- annotation_model.params.each do |tokens|
- next if tokens.size < 3
- next if tokens[2].value != "[" && tokens[2].value != "arrayOf"
- bracket_index = tokens[2].value != "arrayOf" ? tokens[2].index : tokens[2].index + 1
-
- case tokens[0].value
- when "method"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- method_index = param_tokens[0].value != "RequestMethod" ? 0 : 2
- request_optional["methods"] << param_tokens[method_index].value
- end
- when "consumes"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- parameter_format = case param_tokens[0].value[1..-2].upcase
- when "APPLICATION/X-WWW-FORM-URLENCODED"
- "form"
- when "APPLICATION/JSON"
- "json"
- else
- nil
- end
- break
- end
- end
- end
- when "params"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- request_optional["params"] << param_tokens[0].value[1..-2]
- end
- end
- when "headers"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- request_optional["headers"] << param_tokens[0].value[1..-2]
- end
- end
- when "value"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- request_optional["values"] << param_tokens[0].value[1..-2]
- end
- end
- when "path"
- parser.parse_formal_parameters(bracket_index).each do |param_tokens|
- if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
- request_optional["paths"] << param_tokens[0].value[1..-2]
- end
- end
- end
- end
-
- if request_optional["methods"].empty?
- if annotation_model.name == "@RequestMapping"
- # Default to all HTTP methods if no method is specified
- request_optional["methods"].concat(HTTP_METHODS)
- else
- # Extract HTTP method from annotation name
- http_method = HTTP_METHODS.find { |method| annotation_model.name.upcase == "@#{method}MAPPING" }
- request_optional["methods"].push(http_method) if http_method
- end
- end
-
- {request_optional, parameter_format}
- end
-
- # Extract URL mapping paths from annotation parameters
- private def extract_mapping_paths(parser : KotlinParser, annotation_model : KotlinParser::AnnotationModel) : Array(String)
- return [""] if annotation_model.params.empty?
- get_mapping_path(parser, annotation_model.params)
- end
-
- # Create endpoints for the extracted HTTP methods and paths
- private def create_endpoints(webflux_base_path : String, url : String, url_paths : Array(String), request_optional : Hash(String, Array(String)), parser : KotlinParser, method : KotlinParser::MethodModel, parameter_format : String | Nil, class_map : Hash(String, KotlinParser::ClassModel), details : Details)
- # Iterate over each URL path to create full URLs
- url_paths.each do |url_path|
- full_url = join_path(webflux_base_path, url, url_path)
-
- # Iterate over each request method to create endpoints
- request_optional["methods"].each do |request_method|
- # Determine parameter format if not specified
- parameter_format ||= determine_parameter_format(request_method)
-
- # Get parameters for the endpoint
- parameters = get_endpoint_parameters(parser, method, parameter_format, class_map)
-
- # Add query or form parameters
- add_params(parameters, request_optional["params"], parameter_format)
-
- # Add header parameters
- add_params(parameters, request_optional["headers"], "header")
-
- # Create and store the endpoint
- @result << Endpoint.new(full_url, request_method, parameters, details)
- end
- end
- end
-
- # Determine the parameter format based on the request method
- private def determine_parameter_format(request_method)
- case request_method
- when "POST", "PUT", "DELETE", "PATCH"
- "form"
- when "GET"
- "query"
- else
- nil
- end
- end
-
- # Add parameters to the parameters array
- # params: Array of parameter strings
- # default_format: Default format for the parameters (query, form, header)
- private def add_params(parameters, params, default_format)
- params.each do |param|
- format = default_format || "query"
- param, default_value = param.includes?("=") ? param.split("=") : [param, ""]
- new_param_obj = Param.new(param, default_value, format)
-
- # Add parameter if it doesn't already exist in the parameters array
- parameters << new_param_obj unless parameters.includes?(new_param_obj)
- end
- end
-
- # Extract mapping paths from annotation parameters
- private def get_mapping_path(parser : KotlinParser, method_params : Array(Array(Token))) : Array(String)
- url_paths = Array(String).new
- path_argument_index = method_params.index { |param| param[0].value == "value" } || 0
- path_parameter_tokens = method_params[path_argument_index]
- if path_parameter_tokens[-1].type == :STRING_LITERAL
- url_paths << path_parameter_tokens[-1].value[1..-2]
- elsif path_parameter_tokens[-1].type == :RBRACE
- i = path_parameter_tokens.size - 2
- while i > 0
- parameter_token = path_parameter_tokens[i]
- case parameter_token.type
- when :LCURL
- break
- when :COMMA
- i -= 1
- next
- when :STRING_LITERAL
- url_paths << parameter_token.value[1..-2]
- else
- break
- end
- i -= 1
- end
- end
-
- url_paths
- end
-
- # Get endpoint parameters from the method's annotation and signature
- private def get_endpoint_parameters(parser : KotlinParser, method : KotlinParser::MethodModel, parameter_format : String | Nil, package_class_map : Hash(String, KotlinParser::ClassModel)) : Array(Param)
- endpoint_parameters = Array(Param).new
- method.params.each do |tokens|
- next if tokens.size < 3
-
- i = 0
- while i < tokens.size
- case tokens[i + 1].type
- when :ANNOTATION
- i += 1
- when :LPAREN
- rparen = parser.find_bracket_partner(tokens[i + 1].index)
- if rparen && tokens[i + (rparen - tokens[i + 1].index) + 2].type == :ANNOTATION
- i += rparen - tokens[i + 1].index + 2
- else
- break
- end
- else
- break
- end
- end
-
- token = tokens[i]
- parameter_index = tokens[-1].value != "?" ? -1 : -2
- if tokens[parameter_index].value == "Pageable"
- next if parameter_format.nil?
- endpoint_parameters << Param.new("page", "", parameter_format)
- endpoint_parameters << Param.new("size", "", parameter_format)
- endpoint_parameters << Param.new("sort", "", parameter_format)
- else
- name = token.value
- parameter_format = get_parameter_format(name, parameter_format)
- next if parameter_format.nil?
-
- default_value, parameter_name, parameter_type = extract_parameter_details(tokens, parser, i)
- next if parameter_name.empty? || parameter_type.nil?
-
- param_default_value = default_value.nil? ? "" : default_value
- if ["long", "int", "integer", "char", "boolean", "string", "multipartfile"].includes?(parameter_type.downcase)
- endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
- else
- add_user_defined_class_params(package_class_map, parameter_type, default_value, parameter_name, parameter_format, endpoint_parameters)
- end
- end
- end
- endpoint_parameters
- end
-
- # Get parameter format based on annotation name
- private def get_parameter_format(name : String, current_format : String | Nil) : String | Nil
- case name
- when "@RequestBody"
- current_format || "json"
- when "@RequestParam"
- "query"
- when "@RequestHeader"
- "header"
- when "@CookieValue"
- "cookie"
- when "@PathVariable"
- nil
- when "@ModelAttribute"
- current_format || "form"
- else
- current_format || "query"
- end
- end
-
- # Extract details of parameters from tokens
- private def extract_parameter_details(tokens : Array(Token), parser : KotlinParser, index : Int32) : Tuple(String, String, String?)
- default_value = ""
- parameter_name = ""
- parameter_type = nil
-
- if tokens[index + 1].type == :LPAREN
- attributes = parser.parse_formal_parameters(tokens[index + 1].index)
- attributes.each do |attribute_tokens|
- if attribute_tokens.size > 2
- attribute_name = attribute_tokens[0].value
- attribute_value = attribute_tokens[2].value
- case attribute_name
- when "value", "name"
- parameter_name = attribute_value
- when "defaultValue"
- default_value = attribute_value
- end
- else
- parameter_name = attribute_tokens[0].value
- end
- end
- end
-
- colon_index = tokens[-1].value == "?" ? -3 : -2
- if tokens[colon_index].type == :COLON
- parameter_name = tokens[-3].value if parameter_name.empty? && tokens[-3].type == :IDENTIFIER
- parameter_type = tokens[-1].type == :QUEST ? tokens[-2].value : tokens[-1].value if tokens[-1].type == :IDENTIFIER
- elsif tokens[colon_index + 1].type == :RANGLE
- parameter_type = tokens[-2].value
- parameter_name = tokens[-6].value if tokens[-5].type == :COLON
- end
-
- default_value = default_value[1..-2] if default_value.size > 1 && default_value[0] == '"' && default_value[-1] == '"'
- parameter_name = parameter_name[1..-2] if parameter_name.size > 1 && parameter_name[0] == '"' && parameter_name[-1] == '"'
-
- {default_value, parameter_name, parameter_type}
- end
-
- # Add parameters from user-defined class fields
- private def add_user_defined_class_params(package_class_map : Hash(String, KotlinParser::ClassModel), parameter_type : String, default_value : String?, parameter_name : String, parameter_format : String | Nil, endpoint_parameters : Array(Param))
- if package_class_map.has_key?(parameter_type)
- package_class = package_class_map[parameter_type]
- if package_class.enum_class?
- param_default_value = default_value.nil? ? "" : default_value
- endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
- else
- package_class.fields.values.each do |field|
- if package_class_map.has_key?(field.type) && parameter_type != field.type
- add_user_defined_class_params(package_class_map, field.type, field.init_value, field.name, parameter_format, endpoint_parameters)
- else
- if field.access_modifier == "public" || field.has_setter?
- param_default_value = default_value.nil? ? field.init_value : default_value
- endpoint_parameters << Param.new(field.name, param_default_value, parameter_format)
- end
- end
- end
- end
- end
- end
-end
-
-# Function to instantiate and run the AnalyzerKotlinSpring
-def analyzer_kotlin_spring(options : Hash(String, String))
- instance = AnalyzerKotlinSpring.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_oas2.cr b/src/analyzer/analyzers/analyzer_oas2.cr
deleted file mode 100644
index 3db8e77a..00000000
--- a/src/analyzer/analyzers/analyzer_oas2.cr
+++ /dev/null
@@ -1,134 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerOAS2 < Analyzer
- def analyze
- locator = CodeLocator.instance
- swagger_jsons = locator.all("swagger-json")
- swagger_yamls = locator.all("swagger-yaml")
-
- if swagger_jsons.is_a?(Array(String))
- swagger_jsons.each do |swagger_json|
- if File.exists?(swagger_json)
- details = Details.new(PathInfo.new(swagger_json))
- content = File.read(swagger_json, encoding: "utf-8", invalid: :skip)
- json_obj = JSON.parse(content)
- base_path = ""
- begin
- if json_obj["basePath"].to_s != ""
- base_path = json_obj["basePath"].to_s
- end
- rescue e
- @logger.debug "Exception of #{swagger_json}/basePath"
- @logger.debug_sub e
- end
-
- begin
- paths = json_obj["paths"].as_h
- paths.each do |path, path_obj|
- path_obj.as_h.each do |method, method_obj|
- params = [] of Param
-
- if method_obj.as_h.has_key?("parameters")
- method_obj["parameters"].as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params << param
- elsif param_obj["in"] == "form"
- param = Param.new(param_name, "", "json")
- params << param
- elsif param_obj["in"] == "formData"
- param = Param.new(param_name, "", "form")
- params << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params << param
- end
- end
- @result << Endpoint.new(base_path + path, method.upcase, params, details)
- else
- @result << Endpoint.new(base_path + path, method.upcase, details)
- end
- rescue e
- @logger.debug "Exception of #{swagger_json}/paths/path/method"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{swagger_json}/paths/path"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{swagger_json}/paths"
- @logger.debug_sub e
- end
- end
- end
- end
-
- if swagger_yamls.is_a?(Array(String))
- swagger_yamls.each do |swagger_yaml|
- if File.exists?(swagger_yaml)
- details = Details.new(PathInfo.new(swagger_yaml))
- content = File.read(swagger_yaml, encoding: "utf-8", invalid: :skip)
- yaml_obj = YAML.parse(content)
- base_path = ""
- begin
- if yaml_obj["basePath"].to_s != ""
- base_path = yaml_obj["basePath"].to_s
- end
- rescue e
- @logger.debug "Exception of #{swagger_yaml}/basePath"
- @logger.debug_sub e
- end
-
- begin
- paths = yaml_obj["paths"].as_h
- paths.each do |path, path_obj|
- path_obj.as_h.each do |method, method_obj|
- params = [] of Param
-
- if method_obj.as_h.has_key?("parameters")
- method_obj["parameters"].as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params << param
- elsif param_obj["in"] == "form"
- param = Param.new(param_name, "", "json")
- params << param
- elsif param_obj["in"] == "formData"
- param = Param.new(param_name, "", "form")
- params << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params << param
- end
- end
- @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, params, details)
- else
- @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, details)
- end
- rescue e
- @logger.debug "Exception of #{swagger_yaml}/paths/path/method"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{swagger_yaml}/paths/path"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{swagger_yaml}/paths"
- @logger.debug_sub e
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_oas2(options : Hash(String, String))
- instance = AnalyzerOAS2.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_oas3.cr b/src/analyzer/analyzers/analyzer_oas3.cr
deleted file mode 100644
index 20aff2d4..00000000
--- a/src/analyzer/analyzers/analyzer_oas3.cr
+++ /dev/null
@@ -1,289 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerOAS3 < Analyzer
- def get_base_path(servers)
- base_path = @url
- servers.as_a.each do |server_obj|
- if server_obj["url"].to_s.starts_with?("http")
- user_uri = URI.parse(@url)
- source_uri = URI.parse(server_obj["url"].to_s)
- if user_uri.host == source_uri.host
- base_path = @url + source_uri.path
- break
- end
- end
- end
-
- base_path
- end
-
- def analyze
- locator = CodeLocator.instance
- oas3_jsons = locator.all("oas3-json")
- oas3_yamls = locator.all("oas3-yaml")
- base_path = @url
-
- if oas3_jsons.is_a?(Array(String))
- oas3_jsons.each do |oas3_json|
- if File.exists?(oas3_json)
- details = Details.new(PathInfo.new(oas3_json))
- content = File.read(oas3_json, encoding: "utf-8", invalid: :skip)
- json_obj = JSON.parse(content)
-
- begin
- base_path = get_base_path json_obj["servers"]
- rescue e
- @logger.debug "Exception of #{oas3_json}/servers"
- @logger.debug_sub e
- end
-
- begin
- paths = json_obj["paths"].as_h
- paths.each do |path, path_obj|
- params_of_path = [] of Param
- methods_of_path = [] of String
- path_obj.as_h.each do |method, method_obj|
- params = [] of Param
-
- # Param in Path
- begin
- if method == "parameters"
- method_obj.as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params_of_path << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params_of_path << param
- elsif param_obj["in"] == "cookie"
- param = Param.new(param_name, "", "cookie")
- params_of_path << param
- end
- end
- end
-
- if method == "requestBody"
- method_obj["content"].as_h.each do |content_type, content_obj|
- if content_type == "application/json"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "json")
- params_of_path << param
- end
- elsif content_type == "application/x-www-form-urlencoded"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "form")
- params_of_path << param
- end
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_json}/paths/parameters"
- @logger.debug_sub e
- end
-
- # Param in Method
- begin
- if method_obj.is_a?(JSON::Any) || method_obj.is_a?(Hash(String, JSON::Any))
- if method_obj.as_h.has_key?("parameters")
- method_obj["parameters"].as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params << param
- elsif param_obj["in"] == "cookie"
- param = Param.new(param_name, "", "cookie")
- params << param
- end
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_json}/paths/method/parameters"
- @logger.debug_sub e
- end
-
- begin
- if method_obj.as_h.has_key?("requestBody")
- method_obj["requestBody"]["content"].as_h.each do |content_type, content_obj|
- if content_type == "application/json"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name, "", "json")
- params << param
- end
- elsif content_type == "application/x-www-form-urlencoded"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name, "", "form")
- params << param
- end
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_json}/paths/method/parameters"
- @logger.debug_sub e
- end
-
- if params.size > 0 && (method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY")
- @result << Endpoint.new(base_path + path, method.upcase, params, details)
- methods_of_path << method.to_s.upcase
- elsif method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY"
- @result << Endpoint.new(base_path + path, method.upcase, details)
- methods_of_path << method.to_s.upcase
- end
- rescue e
- @logger.debug "Exception of #{oas3_json}/paths/endpoint"
- @logger.debug_sub e
- end
- if params_of_path.size > 0
- methods_of_path.each do |method_path|
- @result << Endpoint.new(base_path + path, method_path.upcase, params_of_path, details)
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_json}/paths"
- @logger.debug_sub e
- end
- end
- end
- end
-
- if oas3_yamls.is_a?(Array(String))
- oas3_yamls.each do |oas3_yaml|
- if File.exists?(oas3_yaml)
- details = Details.new(PathInfo.new(oas3_yaml))
- content = File.read(oas3_yaml, encoding: "utf-8", invalid: :skip)
- yaml_obj = YAML.parse(content)
-
- begin
- base_path = get_base_path yaml_obj["servers"]
- rescue e
- @logger.debug "Exception of #{oas3_yaml}/servers"
- @logger.debug_sub e
- end
-
- begin
- paths = yaml_obj["paths"].as_h
- paths.each do |path, path_obj|
- params_of_path = [] of Param
- methods_of_path = [] of String
- path_obj.as_h.each do |method, method_obj|
- params = [] of Param
-
- # Param in Path
- begin
- if method == "parameters"
- method_obj.as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params_of_path << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params_of_path << param
- elsif param_obj["in"] == "cookie"
- param = Param.new(param_name, "", "cookie")
- params_of_path << param
- end
- end
- end
-
- if method == "requestBody"
- method_obj["content"].as_h.each do |content_type, content_obj|
- if content_type == "application/json"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "json")
- params_of_path << param
- end
- elsif content_type == "application/x-www-form-urlencoded"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "form")
- params_of_path << param
- end
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_yaml}/paths/parameters"
- @logger.debug_sub e
- end
-
- # Param in Method
- begin
- if method_obj.is_a?(YAML::Any) || method_obj.is_a?(Hash(String, YAML::Any))
- if method_obj.as_h.has_key?("parameters")
- method_obj["parameters"].as_a.each do |param_obj|
- param_name = param_obj["name"].to_s
- if param_obj["in"] == "query"
- param = Param.new(param_name, "", "query")
- params << param
- elsif param_obj["in"] == "header"
- param = Param.new(param_name, "", "header")
- params << param
- elsif param_obj["in"] == "cookie"
- param = Param.new(param_name, "", "cookie")
- params << param
- end
- end
- end
-
- if method_obj.as_h.has_key?("requestBody")
- method_obj["requestBody"]["content"].as_h.each do |content_type, content_obj|
- if content_type == "application/json"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "json")
- params << param
- end
- elsif content_type == "application/x-www-form-urlencoded"
- content_obj["schema"]["properties"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "form")
- params << param
- end
- end
- end
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_yaml}/paths/method/parameters"
- @logger.debug_sub e
- end
-
- if params.size > 0 && (method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY")
- @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, params, details)
- methods_of_path << method.to_s.upcase
- elsif method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY"
- @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, details)
- methods_of_path << method.to_s.upcase
- end
- end
-
- if params_of_path.size > 0
- methods_of_path.each do |method_path|
- @result << Endpoint.new(base_path + path.to_s, method_path.to_s.upcase, params_of_path, details)
- end
- end
- rescue e
- @logger.debug "Exception of #{oas3_yaml}/paths/endpoint"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{oas3_yaml}/paths"
- @logger.debug_sub e
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_oas3(options : Hash(String, String))
- instance = AnalyzerOAS3.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_php_pure.cr b/src/analyzer/analyzers/analyzer_php_pure.cr
deleted file mode 100644
index 359db01a..00000000
--- a/src/analyzer/analyzers/analyzer_php_pure.cr
+++ /dev/null
@@ -1,73 +0,0 @@
-require "../../utils/utils.cr"
-require "../../models/analyzer"
-
-class AnalyzerPhpPure < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
-
- relative_path = get_relative_path(base_path, path)
-
- if File.exists?(path) && File.extname(path) == ".php"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- params_query = [] of Param
- params_body = [] of Param
- methods = [] of String
-
- file.each_line do |line|
- if allow_patterns.any? { |pattern| line.includes? pattern }
- match = line.strip.match(/\$_(.*?)\['(.*?)'\]/)
-
- if match
- method = match[1]
- param_name = match[2]
-
- if method == "GET"
- params_query << Param.new(param_name, "", "query")
- elsif method == "POST"
- params_body << Param.new(param_name, "", "form")
- methods << "POST"
- elsif method == "REQUEST"
- params_query << Param.new(param_name, "", "query")
- params_body << Param.new(param_name, "", "form")
- methods << "POST"
- elsif method == "SERVER"
- if param_name.includes? "HTTP_"
- param_name = param_name.sub("HTTP_", "").gsub("_", "-")
- params_query << Param.new(param_name, "", "header")
- params_body << Param.new(param_name, "", "header")
- end
- end
- end
- end
- rescue
- next
- end
-
- details = Details.new(PathInfo.new(path))
- methods.each do |method|
- result << Endpoint.new("/#{relative_path}", method, params_body, details)
- end
- result << Endpoint.new("/#{relative_path}", "GET", params_query, details)
- end
- end
- end
- rescue e
- logger.debug e
- end
- Fiber.yield
-
- result
- end
-
- def allow_patterns
- ["$_GET", "$_POST", "$_REQUEST", "$_SERVER"]
- end
-end
-
-def analyzer_php_pure(options : Hash(String, String))
- instance = AnalyzerPhpPure.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_python.cr b/src/analyzer/analyzers/analyzer_python.cr
deleted file mode 100644
index 7b60ed49..00000000
--- a/src/analyzer/analyzers/analyzer_python.cr
+++ /dev/null
@@ -1,421 +0,0 @@
-require "../../models/analyzer"
-require "json"
-
-class AnalyzerPython < Analyzer
- # HTTP method names commonly used in REST APIs
- HTTP_METHODS = ["get", "post", "put", "patch", "delete", "head", "options", "trace"]
- # Indentation size in spaces; different sizes can cause analysis issues
- INDENTATION_SIZE = 4
- # Regex for valid Python variable names
- PYTHON_VAR_NAME_REGEX = /[a-zA-Z_][a-zA-Z0-9_]*/
-
- # Parses the definition of a function from the source lines starting at a given index
- def parse_function_def(source_lines : Array(String), start_index : Int32) : FunctionDefinition | Nil
- parameters = [] of FunctionParameter
- def_line = source_lines[start_index]
- return nil unless def_line.includes?("def ")
-
- # Extract the function name and parameter line
- name = def_line.split("def ", 2)[1].split("(", 2)[0].strip
- param_line = def_line.split("(", 2)[1]
-
- index = 0
- param_name = ""
- param_type = ""
- param_default = ""
-
- is_option = false
- is_default = false
- bracket_count = 0
- parentheses_count = 1
-
- line_index = start_index
- # Iterate over the parameter line to parse each parameter
- while parentheses_count != 0
- while index < param_line.size
- char = param_line[index]
- if char == '['
- bracket_count += 1
- elsif char == ']'
- bracket_count -= 1
- elsif bracket_count == 0
- if char == '('
- parentheses_count += 1
- elsif parentheses_count == 1 && char == '='
- is_default = true
- index += 1
- next
- elsif parentheses_count == 1 && char == ','
- parameters << FunctionParameter.new(param_name.strip, param_type.strip, param_default.strip)
-
- param_name = ""
- param_type = ""
- param_default = ""
- is_option = false
- is_default = false
- index += 1
- next
- elsif char == ')'
- parentheses_count -= 1
- if parentheses_count == 0
- if param_name.size != 0
- parameters << FunctionParameter.new(param_name.strip, param_type.strip, param_default.strip)
- end
- break
- end
- elsif char == ':'
- is_option = true
- index += 1
- next
- end
- end
-
- if is_default
- param_default += char
- elsif is_option
- param_type += char
- else
- param_name += char
- end
-
- index += 1
- end
-
- line_index += 1
- if line_index < source_lines.size
- param_line = source_lines[line_index]
- index = 0
- next
- end
-
- break
- end
-
- FunctionDefinition.new(name, parameters)
- end
-
- # Finds all the modules imported in a given Python file
- def find_imported_modules(app_base_path : String, file_path : String, content : String? = nil) : Hash(String, Tuple(String, Int32))
- # If content is not provided, read it from the file
- content = File.read(file_path, encoding: "utf-8", invalid: :skip) if content.nil?
-
- file_base_path = file_path
- file_base_path = File.dirname(file_path) if file_path.ends_with? ".py"
-
- import_map = Hash(String, Tuple(String, Int32)).new
- offset = 0
- content.each_line do |line|
- package_path = app_base_path
- from_import = ""
- imports = ""
-
- # Check if the line starts with "from" or "import"
- if line.starts_with?("from")
- line.scan(/from\s*([^'"\s\\]*)\s*import\s*(.*)/) do |match|
- next if match.size != 3
- from_import = match[1]
- imports = match[2]
- end
- elsif line.starts_with?("import")
- line.scan(/import\s*([^'"\s\\]*)/) do |match|
- next if match.size != 2
- imports = match[1]
- end
- end
-
- unless imports.empty?
- round_bracket_index = line.index('(')
- if !round_bracket_index.nil?
- # Parse 'import (\n a,\n b,\n c)' pattern
- index = offset + round_bracket_index + 1
- while index < content.size && content[index] != ')'
- index += 1
- end
- imports = content[(offset + round_bracket_index + 1)..(index - 1)].strip
- end
-
- # Handle relative paths
- if from_import.starts_with?("..")
- package_path = File.join(file_base_path, "..")
- from_import = from_import[2..]
- elsif from_import.starts_with?(".")
- package_path = file_base_path
- from_import = from_import[1..]
- end
-
- imports.split(",").each do |import|
- import = import.strip
- if import.starts_with?("..")
- package_path = File.join(file_base_path, "..")
- elsif import.starts_with?(".")
- package_path = file_base_path
- end
-
- dotted_as_names = import
- dotted_as_names = "#{from_import}.#{import}" unless from_import.empty?
-
- # Create package map (Hash[name => filepath, ...])
- import_package_map = find_imported_package(package_path, dotted_as_names)
- next if import_package_map.empty?
- import_package_map.each do |name, filepath, package_type|
- import_map[name] = {filepath, package_type}
- end
- end
- end
-
- offset += line.size + 1
- end
-
- import_map
- end
-
- # Finds the package path for imported modules
- def find_imported_package(package_path : String, dotted_as_names : String) : Array(Tuple(String, String, Int32))
- package_map = Array(Tuple(String, String, Int32)).new
-
- py_path = ""
- is_positive_travel = false
- dotted_as_names_split = dotted_as_names.split(".")
-
- dotted_as_names_split.each_with_index do |names, index|
- travel_package_path = File.join(package_path, names)
-
- py_guess = "#{travel_package_path}.py"
- if File.directory?(travel_package_path)
- package_path = travel_package_path
- is_positive_travel = true
- elsif dotted_as_names_split.size - 2 <= index && File.exists?(py_guess)
- py_path = py_guess
- is_positive_travel = true
- else
- break
- end
- end
-
- if is_positive_travel
- names = dotted_as_names_split[-1]
- names.split(",").each do |name|
- import = name.strip
- next if import.empty?
-
- alias_name = nil
- if import.includes?(" as ")
- import, alias_name = import.split(" as ")
- end
-
- package_type = File.exists?(File.join(package_path, "#{import}.py")) ? PackageType::FILE : PackageType::CODE
-
- if !alias_name.nil?
- package_map << {alias_name, py_path, package_type}
- else
- package_map << {import, py_path, package_type}
- end
- end
- end
-
- package_map
- end
-
- # Finds all parameters in JSON objects within a given code block
- def find_json_params(codeblock_lines : Array(String), json_var_names : Array(String)) : Array(Param)
- params = [] of Param
-
- codeblock_lines.each do |codeblock_line|
- json_var_names.each do |json_var_name|
- matches = codeblock_line.scan(/[^a-zA-Z_]#{json_var_name}\[[rf]?['"]([^'"]*)['"]\]/)
- if matches.size == 0
- matches = codeblock_line.scan(/[^a-zA-Z_]#{json_var_name}\.get\([rf]?['"]([^'"]*)['"]/)
- end
-
- if !matches.nil?
- matches.each do |match|
- if match.size > 0
- params << Param.new(match[1], "", "json")
- end
- end
- end
- end
- end
-
- params
- end
-
- # Parses a function or class definition from a string or an array of strings
- def parse_code_block(data : String | Array(String), after : Regex | Nil = nil) : String | Nil
- content = ""
- lines = [] of String
- if data.is_a?(String)
- lines = data.split("\n")
- content = data
- else
- lines = data
- content = data.join("\n")
- end
-
- # Remove lines before the "after" line if provided
- unless after.nil?
- line_size = lines.size
- lines.each_with_index do |line, index|
- if line.starts_with?(after)
- lines = lines[index..]
- content = lines.join("\n")
- break
- end
- end
-
- # If no line starts with "after", return nil
- return nil if line_size == lines.size
- end
-
- # Infer indentation size
- indent_size = 0
- if lines.size > 0
- while indent_size < lines[0].size && lines[0][indent_size] == ' '
- # Only spaces, no tabs
- indent_size += 1
- end
-
- indent_size += INDENTATION_SIZE
- end
-
- # Parse function or class code block
- if indent_size > 0
- double_quote_open, single_quote_open = [false, false]
- double_comment_open, single_comment_open = [false, false]
- end_index = lines[0].size + 1
- lines[1..].each do |line|
- line_index = 0
- clear_line = line
- while line_index < line.size
- if line_index < line.size - 2
- if !single_quote_open && !double_quote_open
- if !double_comment_open && line[line_index..line_index + 2] == "'''"
- single_comment_open = !single_comment_open
- line_index += 3
- next
- elsif !single_comment_open && line[line_index..line_index + 2] == "\"\"\""
- double_comment_open = !double_comment_open
- line_index += 3
- next
- end
- end
- end
-
- if !single_comment_open && !double_comment_open
- if !single_quote_open && line[line_index] == '"' && line[line_index - 1] != '\\'
- double_quote_open = !double_quote_open
- elsif !double_quote_open && line[line_index] == '\'' && line[line_index - 1] != '\\'
- single_quote_open = !single_quote_open
- elsif !single_quote_open && !double_quote_open && line[line_index] == '#' && line[line_index - 1] != '\\'
- clear_line = line[..(line_index - 1)]
- break
- end
- end
-
- line_index += 1
- end
-
- open_status = single_comment_open || double_comment_open || single_quote_open || double_quote_open
- if clear_line[0..(indent_size - 1)].strip == "" || open_status
- end_index += line.size + 1
- else
- break
- end
- end
-
- end_index -= 1
- return content[..end_index].strip
- end
-
- nil
- end
-
- # Returns the literal value from a string if it represents a number or a quoted string
- def return_literal_value(data : String) : String
- # Check if the data is numeric
- if data.numeric?
- data
- else
- # Check if the data is a string
- if data.size != 0
- if data[0] == data[-1] && ['"', '\''].includes? data[0]
- data = data[1..-2]
- data
- end
- end
- end
-
- ""
- end
-
- module PackageType
- FILE = 0
- CODE = 1
- end
-
- class FunctionParameter
- @name : String
- @type : String
- @default : String
-
- def initialize(name : String, type : String, default : String)
- @name = name
- @type = type
- @default = default
- end
-
- def name : String
- @name
- end
-
- def type : String
- @type
- end
-
- def default : String
- @default
- end
-
- def to_s : String
- if @type.size != 0
- if @default.size != 0
- "Name(#{@name}): Type(#{@type}) = Default(#{@default})"
- else
- "Name(#{@name}): Type(#{@type})"
- end
- else
- "Name(#{@name})"
- end
- end
-
- def name=(name : String)
- @name = name
- end
-
- def type=(type : String)
- @type = type
- end
-
- def default=(default : String)
- @default = default
- end
- end
-
- class FunctionDefinition
- @name : String
- @params : Array(FunctionParameter)
-
- def initialize(name : String, params : Array(FunctionParameter))
- @name = name
- @params = params
- end
-
- def params : Array(FunctionParameter)
- @params
- end
-
- def add_parameter(param : FunctionParameter)
- @params << param
- end
- end
-end
diff --git a/src/analyzer/analyzers/analyzer_python_django.cr b/src/analyzer/analyzers/analyzer_python_django.cr
deleted file mode 100644
index fb3b1263..00000000
--- a/src/analyzer/analyzers/analyzer_python_django.cr
+++ /dev/null
@@ -1,400 +0,0 @@
-require "../../models/analyzer"
-require "./analyzer_python"
-require "json"
-
-class AnalyzerDjango < AnalyzerPython
- # Base path for the Django project
- @django_base_path : String = ""
-
- # Regular expressions for extracting Django URL configurations
- REGEX_ROOT_URLCONF = /\s*ROOT_URLCONF\s*=\s*r?['"]([^'"\\]*)['"]/
- REGEX_ROUTE_MAPPING = /(?:url|path|register)\s*\(\s*r?['"]([^"']*)['"][^,]*,\s*([^),]*)/
- REGEX_INCLUDE_URLS = /include\s*\(\s*r?['"]([^'"\\]*)['"]/
-
- # Map request parameters to their respective fields
- REQUEST_PARAM_FIELD_MAP = {
- "GET" => {["GET"], "query"},
- "POST" => {["POST"], "form"},
- "COOKIES" => {nil, "cookie"},
- "META" => {nil, "header"},
- "data" => {["POST", "PUT", "PATCH"], "form"},
- }
-
- # Map request parameter types to HTTP methods
- REQUEST_PARAM_TYPE_MAP = {
- "query" => nil,
- "form" => ["GET", "POST", "PUT", "PATCH"],
- "cookie" => nil,
- "header" => nil,
- }
-
- def analyze
- endpoints = [] of Endpoint
-
- # Find root Django URL configurations
- root_django_urls_list = find_root_django_urls()
- root_django_urls_list.each do |root_django_urls|
- @django_base_path = root_django_urls.basepath
- extract_endpoints(root_django_urls).each do |endpoint|
- endpoints << endpoint
- end
- end
-
- # Find static files
- begin
- Dir.glob("#{@base_path}/static/**/*") do |file|
- next if File.directory?(file)
- relative_path = file.sub("#{@base_path}/static/", "")
- endpoints << Endpoint.new("/#{relative_path}", "GET")
- end
- rescue e
- logger.debug e
- end
-
- endpoints
- end
-
- # Find all root Django URLs
- def find_root_django_urls : Array(DjangoUrls)
- root_django_urls_list = [] of DjangoUrls
-
- search_dir = @base_path
- begin
- Dir.glob("#{search_dir}/**/*") do |file|
- spawn do
- begin
- next if File.directory?(file)
- if file.ends_with? ".py"
- content = File.read(file, encoding: "utf-8", invalid: :skip)
- content.scan(REGEX_ROOT_URLCONF) do |match|
- next if match.size != 2
- dotted_as_urlconf = match[1].split(".")
- relative_path = "#{dotted_as_urlconf.join("/")}.py"
-
- Dir.glob("#{search_dir}/**/#{relative_path}") do |filepath|
- basepath = filepath.split("/")[..-(dotted_as_urlconf.size + 1)].join("/")
- root_django_urls_list << DjangoUrls.new("", filepath, basepath)
- end
- end
- end
- rescue e : File::NotFoundError
- @logger.debug "File not found: #{file}"
- end
- end
- Fiber.yield
- end
- rescue e
- logger.debug e
- end
-
- root_django_urls_list.uniq
- end
-
- # Extract endpoints from a Django URL configuration file
- def extract_endpoints(django_urls : DjangoUrls) : Array(Endpoint)
- endpoints = [] of Endpoint
- url_base_path = File.dirname(django_urls.filepath)
-
- file = File.open(django_urls.filepath, encoding: "utf-8", invalid: :skip)
- content = file.gets_to_end
- package_map = find_imported_modules(@django_base_path, url_base_path, content)
-
- # Temporary fix to parse only the string after "urlpatterns = ["
- keywords = ["urlpatterns", "=", "["]
- keywords.each do |keyword|
- if !content.includes? keyword
- return endpoints
- end
-
- content = content.split(keyword, 2)[1]
- end
-
- # TODO: Parse correct urlpatterns from variable concatenation case
- content.scan(REGEX_ROUTE_MAPPING) do |route_match|
- next if route_match.size != 3
- route = route_match[1]
- route = route.gsub(/^\^/, "").gsub(/\$$/, "")
- view = route_match[2].split(",")[0]
- url = "/#{django_urls.prefix}/#{route}".gsub(/\/+/, "/")
- new_django_urls = nil
- view.scan(REGEX_INCLUDE_URLS) do |include_pattern_match|
- # Detect new URL configurations
- next if include_pattern_match.size != 2
- new_route_path = "#{@django_base_path}/#{include_pattern_match[1].gsub(".", "/")}.py"
-
- if File.exists?(new_route_path)
- new_django_urls = DjangoUrls.new("#{django_urls.prefix}#{route}", new_route_path, django_urls.basepath)
- details = Details.new(PathInfo.new(new_route_path))
- extract_endpoints(new_django_urls).each do |endpoint|
- endpoint.set_details(details)
- endpoints << endpoint
- end
- end
- end
- next if new_django_urls != nil
-
- details = Details.new(PathInfo.new(django_urls.filepath))
- if view == ""
- endpoints << Endpoint.new(url, "GET", details)
- else
- dotted_as_names_split = view.split(".")
-
- filepath = ""
- function_or_class_name = ""
- dotted_as_names_split.each_with_index do |name, index|
- if (package_map.has_key? name) && (index < dotted_as_names_split.size)
- filepath, package_type = package_map[name]
- function_or_class_name = name
- if package_type == PackageType::FILE && index + 1 < dotted_as_names_split.size
- function_or_class_name = dotted_as_names_split[index + 1]
- end
-
- break
- end
- end
-
- if filepath != ""
- extract_endpoints_from_file(url, filepath, function_or_class_name).each do |endpoint|
- endpoint.set_details(details)
- endpoints << endpoint
- end
- else
- # By default, Django allows requests with methods other than GET as well
- endpoints << Endpoint.new(url, "GET", details)
- end
- end
- end
-
- endpoints
- end
-
- # Extract endpoints from a given file
- def extract_endpoints_from_file(url : String, filepath : String, function_or_class_name : String)
- endpoints = Array(Endpoint).new
- suspicious_http_methods = ["GET"]
- suspicious_params = Array(Param).new
-
- content = File.read(filepath, encoding: "utf-8", invalid: :skip)
- content_lines = content.split "\n"
-
- # Function Based View
- function_start_index = content.index /def\s+#{function_or_class_name}\s*\(/
- if !function_start_index.nil?
- function_codeblock = parse_code_block(content[function_start_index..])
- if !function_codeblock.nil?
- lines = function_codeblock.split "\n"
- function_define_line = lines[0]
- lines = lines[1..]
-
- # Check if the decorator line contains an HTTP method
- index = content_lines.index(function_define_line)
- if !index.nil?
- while index > 0
- index -= 1
-
- preceding_definition = content_lines[index]
- if preceding_definition.size > 0 && preceding_definition[0] == '@'
- HTTP_METHODS.each do |http_method_name|
- method_name_match = preceding_definition.downcase.match /[^a-zA-Z0-9](#{http_method_name})[^a-zA-Z0-9]/
- if !method_name_match.nil?
- suspicious_http_methods << http_method_name.upcase
- end
- end
- end
-
- break
- end
- end
-
- lines.each do |line|
- # Check if line has 'request.method == "GET"' similar pattern
- if line.includes? "request.method"
- suspicious_code = line.split("request.method")[1].strip
- HTTP_METHODS.each do |http_method_name|
- method_name_match = suspicious_code.downcase.match /['"](#{http_method_name})['"]/
- if !method_name_match.nil?
- suspicious_http_methods << http_method_name.upcase
- end
- end
- end
-
- extract_params_from_line(line, suspicious_http_methods).each do |param|
- suspicious_params << param
- end
- end
-
- suspicious_http_methods.uniq.each do |http_method_name|
- endpoints << Endpoint.new(url, http_method_name, filter_params(http_method_name, suspicious_params))
- end
-
- return endpoints
- end
- end
-
- # Class Based View
- regext_http_methods = HTTP_METHODS.join "|"
- class_start_index = content.index /class\s+#{function_or_class_name}\s*[\(:]/
- if !class_start_index.nil?
- class_codeblock = parse_code_block(content[class_start_index..])
- if !class_codeblock.nil?
- lines = class_codeblock.split "\n"
- class_define_line = lines[0]
- lines = lines[1..]
-
- # Determine implicit HTTP methods based on class name
- if class_define_line.includes? "Form"
- suspicious_http_methods << "GET"
- suspicious_http_methods << "POST"
- elsif class_define_line.includes? "Delete"
- suspicious_http_methods << "DELETE"
- suspicious_http_methods << "POST"
- elsif class_define_line.includes? "Create"
- suspicious_http_methods << "POST"
- elsif class_define_line.includes? "Update"
- suspicious_http_methods << "POST"
- end
-
- # Check HTTP methods in class methods
- lines.each do |line|
- method_function_match = line.match(/\s+def\s+(#{regext_http_methods})\s*\(/)
- if !method_function_match.nil?
- suspicious_http_methods << method_function_match[1].upcase
- end
-
- extract_params_from_line(line, suspicious_http_methods).each do |param|
- suspicious_params << param
- end
- end
-
- suspicious_http_methods.uniq.each do |http_method_name|
- endpoints << Endpoint.new(url, http_method_name, filter_params(http_method_name, suspicious_params))
- end
-
- return endpoints
- end
- end
-
- # Default to GET method
- [Endpoint.new(url, "GET")]
- end
-
- # Extract parameters from a line of code
- def extract_params_from_line(line : String, endpoint_methods : Array(String))
- suspicious_params = Array(Param).new
-
- if line.includes? "request."
- REQUEST_PARAM_FIELD_MAP.each do |field_name, tuple|
- field_methods, param_type = tuple
- matches = line.scan(/request\.#{field_name}\[[rf]?['"]([^'"]*)['"]\]/)
- if matches.size == 0
- matches = line.scan(/request\.#{field_name}\.get\([rf]?['"]([^'"]*)['"]/)
- end
-
- if matches.size != 0
- matches.each do |match|
- next if match.size != 2
- param_name = match[1]
- if field_name == "META"
- if param_name.starts_with? "HTTP_"
- param_name = param_name[5..]
- end
- end
-
- # If a specific parameter is found, allow the corresponding methods
- if !field_methods.nil?
- field_methods.each do |field_method|
- if !endpoint_methods.includes? field_method
- endpoint_methods << field_method
- end
- end
- end
-
- suspicious_params << Param.new(param_name, "", param_type)
- end
- end
- end
- end
-
- if line.includes? "form.cleaned_data"
- matches = line.scan(/form\.cleaned_data\[[rf]?['"]([^'"]*)['"]\]/)
- if matches.size == 0
- matches = line.scan(/form\.cleaned_data\.get\([rf]?['"]([^'"]*)['"]/)
- end
-
- if matches.size != 0
- matches.each do |match|
- next if match.size != 2
- suspicious_params << Param.new(match[1], "", "form")
- end
- end
- end
-
- suspicious_params
- end
-
- # Filter parameters based on HTTP method
- def filter_params(method : String, params : Array(Param))
- filtered_params = Array(Param).new
- upper_method = method.upcase
-
- params.each do |param|
- is_supported_param = false
- support_methods = REQUEST_PARAM_TYPE_MAP.fetch(param.param_type, nil)
- if !support_methods.nil?
- support_methods.each do |support_method|
- if upper_method == support_method.upcase
- is_supported_param = true
- end
- end
- else
- is_supported_param = true
- end
-
- filtered_params.each do |filtered_param|
- if filtered_param.name == param.name && filtered_param.param_type == param.param_type
- is_supported_param = false
- break
- end
- end
-
- if is_supported_param
- filtered_params << param
- end
- end
-
- filtered_params
- end
-
- module PackageType
- FILE = 0
- CODE = 1
- end
-
- struct DjangoUrls
- include JSON::Serializable
- property prefix, filepath, basepath
-
- def initialize(@prefix : String, @filepath : String, @basepath : String)
- if !File.directory? @basepath
- raise "The basepath for DjangoUrls (#{@basepath}) does not exist or is not a directory."
- end
- end
- end
-
- struct DjangoView
- include JSON::Serializable
- property prefix, filepath, name
-
- def initialize(@prefix : String, @filepath : String, @name : String)
- if !File.directory? @filepath
- raise "The filepath for DjangoView (#{@filepath}) does not exist."
- end
- end
- end
-end
-
-# Main function to analyze a Django project
-def analyzer_python_django(options : Hash(String, String))
- instance = AnalyzerDjango.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_python_fastapi.cr b/src/analyzer/analyzers/analyzer_python_fastapi.cr
deleted file mode 100644
index d3b605c1..00000000
--- a/src/analyzer/analyzers/analyzer_python_fastapi.cr
+++ /dev/null
@@ -1,332 +0,0 @@
-require "../../models/analyzer"
-require "./analyzer_python"
-
-class AnalyzerFastAPI < AnalyzerPython
- @fastapi_base_path : String = ""
-
- def analyze
- include_router_map = Hash(String, Hash(String, Router)).new
- fastapi_base_file : String = ""
-
- begin
- # Iterate through all Python files in the base path
- Dir.glob("#{base_path}/**/*.py") do |path|
- next if File.directory?(path)
- source = File.read(path, encoding: "utf-8", invalid: :skip)
-
- source.each_line do |line|
- match = line.match /(#{PYTHON_VAR_NAME_REGEX})\s*=\s*FastAPI\s*\(/
- if !match.nil?
- fastapi_instance_name = match[1]
- unless include_router_map.has_key?(fastapi_instance_name)
- include_router_map[path] = {match[1] => Router.new("")}
-
- # base path
- fastapi_base_file = path
- @fastapi_base_path = Path.new(File.dirname(path)).parent.to_s
- break
- end
- end
-
- # https://fastapi.tiangolo.com/tutorial/bigger-applications/
- match = line.match /(#{PYTHON_VAR_NAME_REGEX})\s*=\s*APIRouter\s*\(/
- if !match.nil?
- prefix = ""
- router_instance_name = match[1]
- param_codes = line.split("APIRouter", 2)[1]
- prefix_match = param_codes.match /prefix\s*=\s*['"]([^'"]*)['"]/
- if !prefix_match.nil? && prefix_match.size == 2
- prefix = prefix_match[1]
- end
-
- if include_router_map.has_key?(path)
- include_router_map[path][router_instance_name] = Router.new(prefix)
- else
- include_router_map[path] = {router_instance_name => Router.new(prefix)}
- end
- end
- end
- end
- rescue e : Exception
- logger.debug e.message
- end
-
- begin
- configure_router_prefix(fastapi_base_file, include_router_map)
-
- include_router_map.each do |path, router_map|
- source = File.read(path, encoding: "utf-8", invalid: :skip)
- import_modules = find_imported_modules(@fastapi_base_path, path, source)
- codelines = source.split("\n")
- router_map.each do |instance_name, router_class|
- codelines.each_with_index do |line, index|
- line.scan(/@#{instance_name}\.([a-zA-Z]+)\([rf]?['"]([^'"]*)['"](.*)/) do |match|
- if match.size > 0
- http_method_name = match[1].downcase
- if ["websocket", "route", "api_route"].includes?(http_method_name)
- http_method_name = "GET"
- elsif !HTTP_METHODS.includes?(http_method_name)
- next
- end
-
- http_method_name = http_method_name.upcase
-
- http_route_path = match[2]
- _extra_params = match[3]
- params = [] of Param
-
- # Get path params from route path
- query_params = [] of String
- http_route_path.scan(/\{(#{PYTHON_VAR_NAME_REGEX})\}/) do |route_match|
- if route_match.size > 0
- query_params << route_match[1]
- end
- end
-
- # Parsing extra params
- function_definition = parse_function_def(codelines, index + 1)
- if !function_definition.nil?
- function_params = function_definition.params
- if function_params.size > 0
- function_params.each do |param|
- # https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/#order-the-parameters-as-you-need-tricks
- next if param.name == "*"
-
- unless query_params.includes?(param.name)
- # Default value is numeric or string only
- default_value = return_literal_value(param.default)
-
- # Get param type by default value first
- param_type = infer_parameter_type(param.default) unless param.default.empty?
-
- # Get param type by type if not found
- if param_type.nil? && !param.type.empty?
- param_type = param.type
- # https://peps.python.org/pep-0593/
- param_type = param_type.split("Annotated[", 2)[-1].split(",", 2)[-1] if param_type.includes?("Annotated[")
-
- # https://peps.python.org/pep-0484/#union-types
- param_type = param_type.split("Union[", 2)[-1] if param_type.includes?("Union[")
-
- param_type = infer_parameter_type(param_type, true)
- param_type = "query" if param_type.nil? && param.type.empty?
- else
- param_type = "query" if param_type.nil?
- end
-
- if param_type.nil?
- if /^#{PYTHON_VAR_NAME_REGEX}$/.match(param.type)
- new_params = nil
- if ["Request", "dict"].includes?(param.type)
- function_codeblock = parse_code_block(codelines[index + 1..])
- next if function_codeblock.nil?
- new_params = find_dictionary_params(function_codeblock, param)
- elsif import_modules.has_key?(param.type)
- # Parse model class from module path
- import_module_path = import_modules[param.type].first
-
- # Skip if import module path is not identified
- next if import_module_path.empty?
-
- import_module_source = File.read(import_module_path, encoding: "utf-8", invalid: :skip)
- new_params = find_base_model_params(import_module_source, param.type, param.name)
- else
- # Parse model class from current source
- new_params = find_base_model_params(source, param.type, param.name)
- end
-
- next if new_params.nil?
-
- new_params.each do |model_param|
- params << model_param
- end
- end
- else
- # Add endpoint param
- params << Param.new(param.name, default_value, param_type)
- end
- end
- end
- end
- end
-
- details = Details.new(PathInfo.new(path, index + 1))
- result << Endpoint.new(router_class.join(http_route_path), http_method_name, params, details)
- end
- end
- end
- end
- rescue e : Exception
- logger.debug e.message
- end
- end
- Fiber.yield
-
- result
- end
-
- # Configures the prefix for each router
- def configure_router_prefix(file : String, include_router_map : Hash(String, Hash(String, Router)), router_prefix : String = "")
- return if file.empty? || !File.exists?(file)
-
- # Parse the source file for router configuration
- source = File.read(file, encoding: "utf-8", invalid: :skip)
- import_modules = find_imported_modules(@fastapi_base_path, file, source)
- include_router_map[file].each do |instance_name, router_class|
- router_class.prefix = router_prefix
-
- # Parse '{app}.include_router({item}.router, prefix="{prefix}")' code
- source.scan(/#{instance_name}\.include_router\(([^\)]*)\)/).each do |match|
- if match.size > 0
- params = match[1].split(",")
- prefix = ""
- router_instance_name = params[0].strip
- if params.size != 1
- select_params = params.select(&.strip.starts_with?("prefix"))
- if select_params.size != 0
- prefix = select_params.first.split("=")[1]
- if prefix.count("\"") == 2
- prefix = prefix.split("\"")[1].split("\"")[0]
- elsif prefix.count("'") == 2
- prefix = prefix.split("'")[1].split("'")[0]
- end
- end
- end
-
- # Register router's prefix recursively
- prefix = router_class.join(prefix)
- if router_instance_name.count(".") == 0
- next unless import_modules.has_key?(router_instance_name)
- import_module_path = import_modules[router_instance_name].first
-
- next unless include_router_map.has_key?(import_module_path)
- configure_router_prefix(import_module_path, include_router_map, prefix)
- elsif router_instance_name.count(".") == 1
- module_name, _router_instance_name = router_instance_name.split(".")
- next unless import_modules.has_key?(module_name)
- import_module_path = import_modules[module_name].first
-
- next unless include_router_map.has_key?(import_module_path)
- configure_router_prefix(import_module_path, include_router_map, prefix)
- end
- end
- end
- end
- end
-
- # Infers the type of the parameter based on its default value or type annotation
- def infer_parameter_type(data : String, is_param_type = false) : String | Nil
- if data.match(/(\b)*Cookie(\b)*/)
- "cookie"
- elsif data.match(/(\b)*Header(\b)*/) != nil
- "header"
- elsif data.match(/(\b)*Body(\b)*/) || data.match(/(\b)*Form(\b)*/) ||
- data.match(/(\b)*File(\b)*/) || data.match(/(\b)*UploadFile(\b)*/)
- "form"
- elsif data.match(/(\b)*Query(\b)*/)
- "query"
- elsif data.match(/(\b)*WebSocket(\b)*/)
- "websocket"
- elsif is_param_type
- # default variable type
- ["str", "int", "float", "bool", "EmailStr"].each do |type|
- return "query" if data.includes?(type)
- end
- end
- end
-
- # Finds the parameters for a base model class
- def find_base_model_params(source : String, class_name : String, param_name : String) : Array(Param)
- params = [] of Param
- class_codeblock = parse_code_block(source, /\s*class\s*#{class_name}\s*\(/)
- return params if class_codeblock.nil?
-
- # Parse the class code block to extract parameters
- class_codeblock.split("\n").each_with_index do |line, index|
- if index == 0
- param_code = line.split("(", 2)[-1].split(")")[0]
- if param_code.match(/(\b)*str,\s*(enum\.){0,1}Enum(\b)*/)
- return [Param.new(param_name.strip, "", "query")]
- end
- return params unless /^#{PYTHON_VAR_NAME_REGEX}$/.match(param_code)
- else
- break unless line.split(":").size == 2
-
- param_name, extra = line.split(":", 2)
- param_type = ""
- param_default = ""
- param_type_and_default = extra.split("=", 2)
- if param_type_and_default.size == 2
- param_type, param_default = param_type_and_default
- else
- param_type = param_type_and_default[0]
- end
-
- if !param_name.empty? && !param_type.empty?
- default_value = return_literal_value(param_default.strip)
- params << Param.new(param_name.strip, default_value, "form")
- end
- end
- end
-
- params
- end
-
- # Finds parameters in dictionary structures
- def find_dictionary_params(source : String, param : FunctionParameter) : Array(Param)
- new_params = [] of Param
- json_variable_names = [] of String
- codelines = source.split("\n")
- if param.type == "Request"
- # Parse JSON variable names
- codelines.each do |codeline|
- match = codeline.match /(#{PYTHON_VAR_NAME_REGEX}).*=\s*(await\s*){0,1}#{param.name}.json\(\)/
- json_variable_names << match[1] if !match.nil? && !json_variable_names.includes?(match[1])
- end
-
- new_params = find_json_params(codelines, json_variable_names)
- elsif param.type == "dict"
- json_variable_names << param.name
- new_params = find_json_params(codelines, json_variable_names)
- end
-
- new_params
- end
-end
-
-# Router class for handling URL prefix joining
-class Router
- @prefix : String
-
- def initialize(prefix : String)
- @prefix = prefix
- end
-
- def prefix
- @prefix
- end
-
- def join(url : String) : String
- url = url[1..] if prefix.ends_with?("/") && url.starts_with?("/")
- url = "/#{url}" unless prefix.ends_with?("/") || url.starts_with?("/")
-
- @prefix + url
- end
-
- def prefix=(new_prefix : String)
- @prefix = new_prefix
- end
-end
-
-# Analyzer function for FastAPI
-def analyzer_python_fastapi(options : Hash(String, String))
- instance = AnalyzerFastAPI.new(options)
- instance.analyze
-end
-
-# Extend String class to check if a string is numeric
-class String
- def numeric?
- self.to_f != nil rescue false
- end
-end
diff --git a/src/analyzer/analyzers/analyzer_python_flask.cr b/src/analyzer/analyzers/analyzer_python_flask.cr
deleted file mode 100644
index ddcabf1f..00000000
--- a/src/analyzer/analyzers/analyzer_python_flask.cr
+++ /dev/null
@@ -1,256 +0,0 @@
-require "../../models/analyzer"
-require "./analyzer_python"
-
-class AnalyzerFlask < AnalyzerPython
- # Reference: https://stackoverflow.com/a/16664376
- # Reference: https://tedboy.github.io/flask/generated/generated/flask.Request.html
- REQUEST_PARAM_FIELD_MAP = {
- "data" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
- "args" => {["GET"], "query"},
- "form" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
- "files" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
- "values" => {["GET", "POST", "PUT", "PATCH", "DELETE"], "query"},
- "json" => {["POST", "PUT", "PATCH", "DELETE"], "json"},
- "cookie" => {nil, "cookie"},
- "headers" => {nil, "header"},
- }
-
- REQUEST_PARAM_TYPE_MAP = {
- "query" => nil,
- "form" => ["POST", "PUT", "PATCH", "DELETE"],
- "json" => ["POST", "PUT", "PATCH", "DELETE"],
- "cookie" => nil,
- "header" => nil,
- }
-
- def analyze
- blueprint_prefix_map = Hash(String, String).new
-
- begin
- # Iterate through all Python files in the base path
- Dir.glob("#{base_path}/**/*.py") do |path|
- next if File.directory?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line.with_index do |line, index|
- # Identify Flask instance assignments
- match = line.match /(#{PYTHON_VAR_NAME_REGEX})\s*=\s*Flask\s*\(/
- if !match.nil?
- flask_instance_name = match[1]
- blueprint_prefix_map[flask_instance_name] ||= ""
- end
-
- # Common flask instance name
- blueprint_prefix_map["app"] ||= ""
-
- # Identify Blueprint instance assignments
- match = line.match /(#{PYTHON_VAR_NAME_REGEX})\s*=\s*Blueprint\s*\(/
- if !match.nil?
- prefix = ""
- blueprint_instance_name = match[1]
- param_codes = line.split("Blueprint", 2)[1]
- prefix_match = param_codes.match /url_prefix\s*=\s*['"]([^'"]*)['"]/
- if !prefix_match.nil? && prefix_match.size == 2
- prefix = prefix_match[1]
- end
-
- blueprint_prefix_map[blueprint_instance_name] ||= prefix
- end
-
- # Temporary Addition: register_view
- blueprint_prefix_map.each do |blueprint_name, blueprint_prefix|
- register_views_match = line.match /#{blueprint_name},\s*routes\s*=\s*(.*)\)/
- if !register_views_match.nil?
- route_paths = register_views_match[1]
- route_paths.scan /['"]([^'"]*)['"]/ do |path_str_match|
- if !path_str_match.nil? && path_str_match.size == 2
- route_path = path_str_match[1]
- # Parse methods from reference views (TODO)
- route_url = "#{blueprint_prefix}#{route_path}"
- route_url = "/#{route_url}" unless route_url.starts_with?("/")
- details = Details.new(PathInfo.new(path, index + 1))
- result << Endpoint.new(route_url, "GET", details)
- end
- end
- end
- end
- end
- end
- end
- rescue e : Exception
- logger.debug e.message
- end
-
- begin
- # Process each Python file in the base path
- Dir.glob("#{base_path}/**/*.py") do |path|
- next if File.directory?(path)
- source = File.read(path, encoding: "utf-8", invalid: :skip)
- lines = source.split "\n"
-
- line_index = 0
- while line_index < lines.size
- line = lines[line_index]
- blueprint_prefix_map.each do |flask_instance_name, prefix|
- # Identify Flask route decorators
- line.scan(/@#{flask_instance_name}\.route\([rf]?['"]([^'"]*)['"](.*)/) do |match|
- if match.size > 0
- route_path = match[1]
- extra_params = match[2]
-
- # Skip decorator lines
- codeline_index = line_index
- while codeline_index < lines.size
- decorator_match = lines[codeline_index].match /\s*@/
- if !decorator_match.nil?
- codeline_index += 1
- next
- end
- break
- end
-
- codeblock = parse_code_block(lines[codeline_index..].join("\n"))
- next if codeblock.nil?
- codeblock_lines = codeblock.split("\n")[1..]
-
- get_endpoints(route_path, extra_params, codeblock_lines, prefix).each do |endpoint|
- details = Details.new(PathInfo.new(path, line_index + 1))
- endpoint.set_details(details)
- result << endpoint
- end
- end
- end
- end
- line_index += 1
- end
- end
- rescue e : Exception
- logger.debug e.message
- end
- Fiber.yield
-
- result
- end
-
- # Extracts endpoint information from the given route and code block
- def get_endpoints(route_path : String, extra_params : String, codeblock_lines : Array(String), prefix : String)
- endpoints = [] of Endpoint
- suspicious_http_methods = [] of String
- suspicious_params = [] of Param
-
- if !prefix.ends_with?("/") && !route_path.starts_with?("/")
- prefix = "#{prefix}/"
- end
-
- # Parse declared methods from route decorator
- methods_match = extra_params.match /methods\s*=\s*(.*)/
- if !methods_match.nil? && methods_match.size == 2
- declare_methods = methods_match[1].downcase
- HTTP_METHODS.each do |method_name|
- if declare_methods.includes? method_name
- suspicious_http_methods << method_name.upcase
- end
- end
- else
- suspicious_http_methods << "GET"
- end
-
- json_variable_names = [] of String
- # Parse JSON variable names
- codeblock_lines.each do |codeblock_line|
- match = codeblock_line.match /([a-zA-Z_][a-zA-Z0-9_]*).*=\s*json\.loads\(request\.data/
- if !match.nil? && match.size == 2 && !json_variable_names.includes?(match[1])
- json_variable_names << match[1]
- end
-
- match = codeblock_line.match /([a-zA-Z_][a-zA-Z0-9_]*).*=\s*request\.json/
- if !match.nil? && match.size == 2 && !json_variable_names.includes?(match[1])
- json_variable_names << match[1]
- end
- end
-
- # Parse declared parameters
- codeblock_lines.each do |codeblock_line|
- REQUEST_PARAM_FIELD_MAP.each do |field_name, tuple|
- _, noir_param_type = tuple
- matches = codeblock_line.scan(/request\.#{field_name}\[[rf]?['"]([^'"]*)['"]\]/)
- if matches.size == 0
- matches = codeblock_line.scan(/request\.#{field_name}\.get\([rf]?['"]([^'"]*)['"]/)
- end
- if matches.size == 0
- noir_param_type = "json"
- json_variable_names.each do |json_variable_name|
- matches = codeblock_line.scan(/[^a-zA-Z_]#{json_variable_name}\[[rf]?['"]([^'"]*)['"]\]/)
- if matches.size == 0
- matches = codeblock_line.scan(/[^a-zA-Z_]#{json_variable_name}\.get\([rf]?['"]([^'"]*)['"]/)
- end
-
- if matches.size > 0
- break
- end
- end
- end
-
- matches.each do |parameter_match|
- next if parameter_match.size != 2
- param_name = parameter_match[1]
-
- suspicious_params << Param.new(param_name, "", noir_param_type)
- end
- end
- end
-
- suspicious_http_methods.uniq.each do |http_method_name|
- if !prefix.ends_with?("/") && !route_path.starts_with?("/")
- prefix = "#{prefix}/"
- end
-
- route_url = "#{prefix}#{route_path}"
- route_url = "/#{route_url}" unless route_url.starts_with?("/")
-
- params = get_filtered_params(http_method_name, suspicious_params)
- endpoints << Endpoint.new(route_url, http_method_name, params)
- end
-
- endpoints
- end
-
- # Filters the parameters based on the HTTP method
- def get_filtered_params(method : String, params : Array(Param)) : Array(Param)
- # Split to other module (duplicated method with analyzer_django)
- filtered_params = Array(Param).new
- upper_method = method.upcase
-
- params.each do |param|
- is_support_param = false
- support_methods = REQUEST_PARAM_TYPE_MAP.fetch(param.param_type, nil)
- if !support_methods.nil?
- support_methods.each do |support_method|
- if upper_method == support_method.upcase
- is_support_param = true
- end
- end
- else
- is_support_param = true
- end
-
- filtered_params.each do |filtered_param|
- if filtered_param.name == param.name && filtered_param.param_type == param.param_type
- is_support_param = false
- break
- end
- end
-
- if is_support_param
- filtered_params << param
- end
- end
-
- filtered_params
- end
-end
-
-# Analyzer function for Flask
-def analyzer_python_flask(options : Hash(String, String))
- instance = AnalyzerFlask.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_raml.cr b/src/analyzer/analyzers/analyzer_raml.cr
deleted file mode 100644
index a1e543d0..00000000
--- a/src/analyzer/analyzers/analyzer_raml.cr
+++ /dev/null
@@ -1,71 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRAML < Analyzer
- def analyze
- locator = CodeLocator.instance
- raml_specs = locator.all("raml-spec")
-
- if raml_specs.is_a?(Array(String))
- raml_specs.each do |raml_spec|
- if File.exists?(raml_spec)
- details = Details.new(PathInfo.new(raml_spec))
-
- content = File.read(raml_spec, encoding: "utf-8", invalid: :skip)
- yaml_obj = YAML.parse(content)
- yaml_obj.as_h.each do |path, path_obj|
- begin
- path_obj.as_h.each do |method, method_obj|
- params = [] of Param
-
- if method_obj.as_h.has_key? "queryParameters"
- method_obj["queryParameters"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "query")
- params << param
- end
- end
-
- if method_obj.as_h.has_key? "body"
- method_obj["body"].as_h.each do |content_type, content_obj|
- if content_type == "application/json"
- content_obj["example"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "json")
- params << param
- end
- elsif content_type == "application/x-www-form-urlencoded"
- content_obj["example"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "form")
- params << param
- end
- end
- end
- end
-
- if method_obj.as_h.has_key? "headers"
- method_obj["headers"].as_h.each do |param_name, _|
- param = Param.new(param_name.to_s, "", "header")
- params << param
- end
- end
-
- @result << Endpoint.new(path.to_s, method.to_s.upcase, params, details)
- rescue e
- @logger.debug "Exception of #{raml_spec}/paths/#{path}/#{method}"
- @logger.debug_sub e
- end
- rescue e
- @logger.debug "Exception of #{raml_spec}/paths/#{path}"
- @logger.debug_sub e
- end
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_raml(options : Hash(String, String))
- instance = AnalyzerRAML.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_restify.cr b/src/analyzer/analyzers/analyzer_restify.cr
deleted file mode 100644
index 5e539007..00000000
--- a/src/analyzer/analyzers/analyzer_restify.cr
+++ /dev/null
@@ -1,111 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRestify < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- endpoint = line_to_endpoint(line)
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- result << endpoint
- last_endpoint = endpoint
- end
-
- param = line_to_param(line)
- if param.name != ""
- if last_endpoint.method != ""
- last_endpoint.push_param(param)
- end
- end
- end
- end
- end
- end
- rescue e
- # TODO
- end
-
- result
- end
-
- def express_get_endpoint(line : String)
- api_path = ""
- splited = line.split("(")
- if splited.size > 0
- api_path = splited[1].split(",")[0].gsub(/['"]/, "")
- end
-
- api_path
- end
-
- def line_to_param(line : String) : Param
- if line.includes? "req.body."
- param = line.split("req.body.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "json")
- end
-
- if line.includes? "req.query."
- param = line.split("req.query.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "query")
- end
-
- if line.includes? "req.cookies."
- param = line.split("req.cookies.")[1].split(")")[0].split("}")[0].split(";")[0]
- return Param.new(param, "", "cookie")
- end
-
- if line.includes? "req.header("
- param = line.split("req.header(")[1].split(")")[0].gsub(/['"]/, "")
- return Param.new(param, "", "header")
- end
-
- Param.new("", "", "")
- end
-
- def line_to_endpoint(line : String) : Endpoint
- if line.includes? ".get('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "GET")
- end
- end
- if line.includes? ".post('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "POST")
- end
- end
- if line.includes? ".put('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "PUT")
- end
- end
- if line.includes? ".delete('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "DELETE")
- end
- end
- if line.includes? ".patch('/"
- api_path = express_get_endpoint(line)
- if api_path != ""
- return Endpoint.new(api_path, "PATCH")
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_restify(options : Hash(String, String))
- instance = AnalyzerRestify.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_ruby_hanami.cr b/src/analyzer/analyzers/analyzer_ruby_hanami.cr
deleted file mode 100644
index ec954fff..00000000
--- a/src/analyzer/analyzers/analyzer_ruby_hanami.cr
+++ /dev/null
@@ -1,75 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRubyHanami < Analyzer
- def analyze
- # Config Analysis
- path = "#{@base_path}/config/routes.rb"
- if File.exists?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint = line_to_endpoint(line, details)
- if endpoint.method != ""
- @result << endpoint
- last_endpoint = endpoint
- _ = last_endpoint
- end
- end
- end
- end
-
- @result
- end
-
- def line_to_endpoint(content : String, details : Details) : Endpoint
- content.scan(/get\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "GET", details)
- end
- end
-
- content.scan(/post\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "POST", details)
- end
- end
-
- content.scan(/put\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PUT", details)
- end
- end
-
- content.scan(/delete\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "DELETE", details)
- end
- end
-
- content.scan(/patch\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PATCH", details)
- end
- end
-
- content.scan(/head\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "HEAD", details)
- end
- end
-
- content.scan(/options\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "OPTIONS", details)
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_ruby_hanami(options : Hash(String, String))
- instance = AnalyzerRubyHanami.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_ruby_rails.cr b/src/analyzer/analyzers/analyzer_ruby_rails.cr
deleted file mode 100644
index c39bccde..00000000
--- a/src/analyzer/analyzers/analyzer_ruby_rails.cr
+++ /dev/null
@@ -1,262 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRubyRails < Analyzer
- def analyze
- # Public Dir Analysis
- begin
- Dir.glob("#{@base_path}/public/**/*") do |file|
- next if File.directory?(file)
- real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
- relative_path = file.sub(real_path, "")
- details = Details.new(PathInfo.new(file))
- @result << Endpoint.new("/#{relative_path}", "GET", details)
- end
- rescue e
- logger.debug e
- end
-
- # Config Analysis
- if File.exists?("#{@base_path}/config/routes.rb")
- File.open("#{@base_path}/config/routes.rb", "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line do |line|
- stripped_line = line.strip
- if stripped_line.size > 0 && stripped_line[0] != '#'
- line.scan(/resources?\s+:.*/) do |match|
- splited = match[0].split(":")
- if splited.size > 1
- resource = splited[1].split(",")[0]
-
- @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}_controller.rb", @url, resource)
- @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}s_controller.rb", @url, resource)
- @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}es_controller.rb", @url, resource)
- end
- end
-
- details = Details.new(PathInfo.new("#{@base_path}/config/routes.rb"))
- line.scan(/get\s+['"](.+?)['"]/) do |match|
- @result << Endpoint.new("#{match[1]}", "GET", details)
- end
- line.scan(/post\s+['"](.+?)['"]/) do |match|
- @result << Endpoint.new("#{match[1]}", "POST", details)
- end
- line.scan(/put\s+['"](.+?)['"]/) do |match|
- @result << Endpoint.new("#{match[1]}", "PUT", details)
- end
- line.scan(/delete\s+['"](.+?)['"]/) do |match|
- @result << Endpoint.new("#{match[1]}", "DELETE", details)
- end
- line.scan(/patch\s+['"](.+?)['"]/) do |match|
- @result << Endpoint.new("#{match[1]}", "PATCH", details)
- end
- end
- end
- end
- end
-
- @result
- end
-
- def controller_to_endpoint(path : String, @url : String, resource : String)
- @result = [] of Endpoint
-
- if File.exists?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |controller_file|
- param_type = "form"
- params_query = [] of Param
- params_body = [] of Param
- params_method = Hash(String, Array(Param)).new
- methods = [] of String
- this_method = ""
-
- controller_content = controller_file.gets_to_end
- if controller_content.includes? "render json:"
- param_type = "json"
- end
-
- controller_file.rewind
- controller_file.each_line do |controller_line|
- if controller_line.includes? "def "
- func_name = controller_line.split("def ")[1].split("(")[0]
- case func_name
- when "index"
- methods << "GET/INDEX"
- this_method = func_name
- when "show"
- methods << "GET/SHOW"
- this_method = func_name
- when "create"
- methods << "POST"
- this_method = func_name
- when "update"
- methods << "PUT"
- this_method = func_name
- when "destroy"
- methods << "DELETE"
- this_method = func_name
- end
- end
-
- if controller_line.includes? "params.require"
- splited_param = controller_line.strip.split("permit")
- if splited_param.size > 1
- tparam = splited_param[1].gsub("(", "").gsub(")", "").gsub("s", "").gsub(":", "")
- tparam.split(",").each do |param|
- params_body << Param.new(param.strip, "", param_type)
- params_query << Param.new(param.strip, "", "query")
- end
- end
- end
-
- if controller_line.includes? "params[:"
- splited_param = controller_line.strip.split("params[:")[1]
- if splited_param
- param = splited_param.split("]")[0]
- params_body << Param.new(param.strip, "", param_type)
- params_query << Param.new(param.strip, "", "query")
- end
- end
-
- if controller_line.includes? "request.headers["
- splited_param = controller_line.strip.split("request.headers[")[1]
- if splited_param
- param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
- param_line = Param.new(param.strip, "", "header")
- if params_method.has_key? this_method
- params_method[this_method] << param_line
- else
- params_method[this_method] = [] of Param
- params_method[this_method] << param_line
- end
- end
- end
-
- if controller_line.includes? "cookies[:"
- splited_param = controller_line.strip.split("cookies[:")[1]
- if splited_param
- param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
- if this_method != ""
- param_line = Param.new(param.strip, "", "cookie")
- if params_method.has_key? this_method
- params_method[this_method] << param_line
- else
- params_method[this_method] = [] of Param
- params_method[this_method] << param_line
- end
- end
- end
- end
-
- if controller_line.includes? "cookies.signed[:"
- splited_param = controller_line.strip.split("cookies.signed[:")[1]
- if splited_param
- param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
- if this_method != ""
- param_line = Param.new(param.strip, "", "cookie")
- if params_method.has_key? this_method
- params_method[this_method] << param_line
- else
- params_method[this_method] = [] of Param
- params_method[this_method] << param_line
- end
- end
- end
- end
-
- if controller_line.includes? "cookies.encrypted[:"
- splited_param = controller_line.strip.split("cookies.encrypted[:")[1]
- if splited_param
- param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
- if this_method != ""
- param_line = Param.new(param.strip, "", "cookie")
- if params_method.has_key? this_method
- params_method[this_method] << param_line
- else
- params_method[this_method] = [] of Param
- params_method[this_method] << param_line
- end
- end
- end
- end
- end
-
- deduplication_params_query = [] of Param
- get_param_duplicated : Array(String) = [] of String
-
- params_query.each do |get_param|
- if get_param_duplicated.includes? get_param.name
- deduplication_params_query << get_param
- else
- get_param_duplicated << get_param.name
- end
- end
-
- details = Details.new(PathInfo.new(path))
- methods.each do |method|
- if method == "GET/INDEX"
- if params_method.has_key? "index"
- index_params = [] of Param
- params_method["index"].each do |param|
- index_params << param
- end
- end
-
- index_params ||= [] of Param
- deduplication_params_query ||= [] of Param
- last_params = index_params + deduplication_params_query
- @result << Endpoint.new("/#{resource}", "GET", last_params, details)
- elsif method == "GET/SHOW"
- if params_method.has_key? "show"
- show_params = [] of Param
- params_method["show"].each do |param|
- show_params << param
- end
- end
- show_params ||= [] of Param
- deduplication_params_query ||= [] of Param
- last_params = show_params + deduplication_params_query
- @result << Endpoint.new("/#{resource}/1", "GET", last_params, details)
- else
- if method == "POST"
- if params_method.has_key? "create"
- create_params = [] of Param
- params_method["create"].each do |param|
- create_params << param
- end
- end
- create_params ||= [] of Param
- params_body ||= [] of Param
- last_params = create_params + params_body
- @result << Endpoint.new("/#{resource}", method, last_params, details)
- elsif method == "DELETE"
- params_delete = [] of Param
- if params_method.has_key? "delete"
- params_method["delete"].each do |param|
- params_delete << param
- end
- end
- @result << Endpoint.new("/#{resource}/1", method, params_delete, details)
- else
- if params_method.has_key? "update"
- update_params = [] of Param
- params_method["update"].each do |param|
- update_params << param
- end
- end
- update_params ||= [] of Param
- params_body ||= [] of Param
- last_params = update_params + params_body
- @result << Endpoint.new("/#{resource}/1", method, last_params, details)
- end
- end
- end
- end
- end
-
- @result
- end
-end
-
-def analyzer_ruby_rails(options : Hash(String, String))
- instance = AnalyzerRubyRails.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_ruby_sinatra.cr b/src/analyzer/analyzers/analyzer_ruby_sinatra.cr
deleted file mode 100644
index 48c22719..00000000
--- a/src/analyzer/analyzers/analyzer_ruby_sinatra.cr
+++ /dev/null
@@ -1,117 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRubySinatra < Analyzer
- def analyze
- # Source Analysis
- begin
- Dir.glob("#{@base_path}/**/*") do |path|
- next if File.directory?(path)
- if File.exists?(path)
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- last_endpoint = Endpoint.new("", "")
- file.each_line.with_index do |line, index|
- endpoint = line_to_endpoint(line)
- if endpoint.method != ""
- details = Details.new(PathInfo.new(path, index + 1))
- endpoint.set_details(details)
- @result << endpoint
- last_endpoint = endpoint
- end
-
- param = line_to_param(line)
- if param.name != ""
- if last_endpoint.method != ""
- last_endpoint.push_param(param)
- end
- end
- end
- end
- end
- end
- rescue e
- logger.debug e
- end
-
- @result
- end
-
- def line_to_param(content : String) : Param
- if content.includes? "param["
- param = content.split("param[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "query")
- end
-
- if content.includes? "params["
- param = content.split("params[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "query")
- end
-
- if content.includes? "request.env["
- param = content.split("request.env[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
- return Param.new(param, "", "header")
- end
-
- if content.includes? "headers["
- param = content.split("headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "").gsub(":", "")
- return Param.new(param, "", "header")
- end
-
- if content.includes? "cookies["
- param = content.split("cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "").gsub(":", "")
- return Param.new(param, "", "cookie")
- end
-
- Param.new("", "", "")
- end
-
- def line_to_endpoint(content : String) : Endpoint
- content.scan(/get\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "GET")
- end
- end
-
- content.scan(/post\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "POST")
- end
- end
-
- content.scan(/put\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PUT")
- end
- end
-
- content.scan(/delete\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "DELETE")
- end
- end
-
- content.scan(/patch\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "PATCH")
- end
- end
-
- content.scan(/head\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "HEAD")
- end
- end
-
- content.scan(/options\s+['"](.+?)['"]/) do |match|
- if match.size > 1
- return Endpoint.new("#{match[1]}", "OPTIONS")
- end
- end
-
- Endpoint.new("", "")
- end
-end
-
-def analyzer_ruby_sinatra(options : Hash(String, String))
- instance = AnalyzerRubySinatra.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_rust_axum.cr b/src/analyzer/analyzers/analyzer_rust_axum.cr
deleted file mode 100644
index d075bf5e..00000000
--- a/src/analyzer/analyzers/analyzer_rust_axum.cr
+++ /dev/null
@@ -1,50 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRustAxum < Analyzer
- def analyze
- # Source Analysis
- pattern = /\.route\("([^"]+)",\s*([^)]+)\)/
-
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
-
- if File.exists?(path) && File.extname(path) == ".rs"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line.with_index do |line, index|
- if line.includes? ".route("
- match = line.match(pattern)
- if match
- begin
- route_argument = match[1]
- callback_argument = match[2]
- details = Details.new(PathInfo.new(path, index + 1))
- result << Endpoint.new("#{route_argument}", callback_to_method(callback_argument), details)
- rescue
- end
- end
- end
- end
- end
- end
- end
- rescue e
- end
-
- result
- end
-
- def callback_to_method(str)
- method = str.split("(").first
- if !["get", "post", "put", "delete"].includes?(method)
- method = "get"
- end
-
- method.upcase
- end
-end
-
-def analyzer_rust_axum(options : Hash(String, String))
- instance = AnalyzerRustAxum.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/analyzer_rust_rocket.cr b/src/analyzer/analyzers/analyzer_rust_rocket.cr
deleted file mode 100644
index 92f3f918..00000000
--- a/src/analyzer/analyzers/analyzer_rust_rocket.cr
+++ /dev/null
@@ -1,51 +0,0 @@
-require "../../models/analyzer"
-
-class AnalyzerRustRocket < Analyzer
- def analyze
- # Source Analysis
- pattern = /#\[(get|post|delete|put)\("([^"]+)"(?:, data = "<([^>]+)>")?\)\]/
-
- begin
- Dir.glob("#{base_path}/**/*") do |path|
- next if File.directory?(path)
-
- if File.exists?(path) && File.extname(path) == ".rs"
- File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line.with_index do |line, index|
- if line.includes?("#[") && line.includes?(")]")
- match = line.match(pattern)
- if match
- begin
- callback_argument = match[1]
- route_argument = match[2]
-
- details = Details.new(PathInfo.new(path, index + 1))
- result << Endpoint.new("#{route_argument}", callback_to_method(callback_argument), details)
- rescue
- end
- end
- end
- end
- end
- end
- end
- rescue e
- end
-
- result
- end
-
- def callback_to_method(str)
- method = str.split("(").first
- if !["get", "post", "put", "delete"].includes?(method)
- method = "get"
- end
-
- method.upcase
- end
-end
-
-def analyzer_rust_rocket(options : Hash(String, String))
- instance = AnalyzerRustRocket.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/crystal/kemal.cr b/src/analyzer/analyzers/crystal/kemal.cr
new file mode 100644
index 00000000..f734a613
--- /dev/null
+++ b/src/analyzer/analyzers/crystal/kemal.cr
@@ -0,0 +1,173 @@
+require "../../../models/analyzer"
+
+module Analyzer::Crystal
+ class Kemal < Analyzer
+ def analyze
+ # Variables
+ is_public = true
+ public_folders = [] of String
+
+ # Source Analysis
+ begin
+ Dir.glob("#{@base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".cr" && !path.includes?("lib")
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ endpoint = line_to_endpoint(line)
+ if endpoint.method != ""
+ details = Details.new(PathInfo.new(path, index + 1))
+ endpoint.details = details
+ result << endpoint
+ last_endpoint = endpoint
+ end
+
+ param = line_to_param(line)
+ if param.name != ""
+ if last_endpoint.method != ""
+ last_endpoint.push_param(param)
+ end
+ end
+
+            if line.includes?("serve_static false") || line.includes?("serve_static(false)")
+ is_public = false
+ end
+
+ if line.includes? "public_folder"
+ begin
+ splited = line.split("public_folder")
+ public_folder = ""
+
+ if splited.size > 1
+ public_folder = splited[1].gsub("(", "").gsub(")", "").gsub(" ", "").gsub("\"", "").gsub("'", "")
+ if public_folder != ""
+ public_folders << public_folder
+ end
+ end
+ rescue
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ # Public Dir Analysis
+ if is_public
+ begin
+ Dir.glob("#{@base_path}/public/**/*") do |file|
+ next if File.directory?(file)
+ real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
+ relative_path = file.sub(real_path, "")
+ @result << Endpoint.new("/#{relative_path}", "GET")
+ end
+
+ public_folders.each do |folder|
+ Dir.glob("#{@base_path}/#{folder}/**/*") do |file|
+ next if File.directory?(file)
+ relative_path = get_relative_path(@base_path, file)
+ relative_path = get_relative_path(folder, relative_path)
+ @result << Endpoint.new("/#{relative_path}", "GET")
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+ end
+
+ result
+ end
+
+ def line_to_param(content : String) : Param
+ if content.includes? "env.params.query["
+ param = content.split("env.params.query[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "query")
+ end
+
+ if content.includes? "env.params.json["
+ param = content.split("env.params.json[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "json")
+ end
+
+ if content.includes? "env.params.body["
+ param = content.split("env.params.body[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "form")
+ end
+
+ if content.includes? "env.request.headers["
+ param = content.split("env.request.headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "header")
+ end
+
+ if content.includes? "env.request.cookies["
+ param = content.split("env.request.cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "cookie")
+ end
+
+ if content.includes? "cookies.get_raw("
+ param = content.split("cookies.get_raw(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "cookie")
+ end
+
+ Param.new("", "", "")
+ end
+
+ def line_to_endpoint(content : String) : Endpoint
+ content.scan(/get\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "GET")
+ end
+ end
+
+ content.scan(/post\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "POST")
+ end
+ end
+
+ content.scan(/put\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "PUT")
+ end
+ end
+
+ content.scan(/delete\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "DELETE")
+ end
+ end
+
+ content.scan(/patch\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "PATCH")
+ end
+ end
+
+ content.scan(/head\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "HEAD")
+ end
+ end
+
+ content.scan(/options\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "OPTIONS")
+ end
+ end
+
+ content.scan(/ws\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ endpoint = Endpoint.new("#{match[1]}", "GET")
+ endpoint.protocol = "ws"
+ return endpoint
+ end
+ end
+
+ Endpoint.new("", "")
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/crystal/lucky.cr b/src/analyzer/analyzers/crystal/lucky.cr
new file mode 100644
index 00000000..3fddb1c1
--- /dev/null
+++ b/src/analyzer/analyzers/crystal/lucky.cr
@@ -0,0 +1,138 @@
+require "../../../models/analyzer"
+
+module Analyzer::Crystal
+ class Lucky < Analyzer
+ def analyze
+ # Public Dir Analysis
+ begin
+ Dir.glob("#{@base_path}/public/**/*") do |file|
+ next if File.directory?(file)
+ real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
+ relative_path = file.sub(real_path, "")
+ @result << Endpoint.new("/#{relative_path}", "GET")
+ end
+ rescue e
+ logger.debug e
+ end
+
+ # Source Analysis
+ begin
+ Dir.glob("#{@base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".cr" && !path.includes?("lib")
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ endpoint = line_to_endpoint(line)
+ if endpoint.method != ""
+ details = Details.new(PathInfo.new(path, index + 1))
+ endpoint.details = details
+ result << endpoint
+ last_endpoint = endpoint
+ end
+
+ param = line_to_param(line)
+ if param.name != ""
+ if last_endpoint.method != ""
+ last_endpoint.push_param(param)
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ result
+ end
+
+ def line_to_param(content : String) : Param
+ if content.includes? "params.from_query[\""
+ param = content.split("params.from_query[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "query")
+ end
+
+ if content.includes? "params.from_json[\""
+ param = content.split("params.from_json[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "json")
+ end
+
+ if content.includes? "params.from_form_data[\""
+ param = content.split("params.from_form_data[\"")[1].split("\"]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "form")
+ end
+
+ if content.includes? "params.get("
+ param = content.split("params.get(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param.gsub(":", ""), "", "query")
+ end
+
+ if content.includes? "request.headers["
+ param = content.split("request.headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "header")
+ end
+
+ if content.includes? "cookies.get("
+ param = content.split("cookies.get(")[1].split(")")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "cookie")
+ end
+
+ if content.includes? "cookies["
+ param = content.split("cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+ return Param.new(param, "", "cookie")
+ end
+
+ Param.new("", "", "")
+ end
+
+ def line_to_endpoint(content : String) : Endpoint
+ content.scan(/get\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "GET")
+ end
+ end
+
+ content.scan(/post\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "POST")
+ end
+ end
+
+ content.scan(/put\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "PUT")
+ end
+ end
+
+ content.scan(/delete\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "DELETE")
+ end
+ end
+
+ content.scan(/patch\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "PATCH")
+ end
+ end
+
+ content.scan(/trace\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ return Endpoint.new("#{match[1]}", "TRACE")
+ end
+ end
+
+ content.scan(/ws\s+['"](.+?)['"]/) do |match|
+ if match.size > 1
+ endpoint = Endpoint.new("#{match[1]}", "GET")
+ endpoint.protocol = "ws"
+ return endpoint
+ end
+ end
+
+ Endpoint.new("", "")
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/csharp/aspnet_mvc.cr b/src/analyzer/analyzers/csharp/aspnet_mvc.cr
new file mode 100644
index 00000000..c7ec5348
--- /dev/null
+++ b/src/analyzer/analyzers/csharp/aspnet_mvc.cr
@@ -0,0 +1,48 @@
+require "../../../models/analyzer"
+
+module Analyzer::CSharp
+ class AspNetMvc < Analyzer
+ def analyze
+ # Static Analysis
+ locator = CodeLocator.instance
+ route_config_file = locator.get("cs-apinet-mvc-routeconfig")
+
+ if File.exists?("#{route_config_file}")
+ File.open("#{route_config_file}", "r", encoding: "utf-8", invalid: :skip) do |file|
+ maproute_check = false
+ maproute_buffer = ""
+
+ file.each_line.with_index do |line, index|
+ if line.includes? ".MapRoute("
+ maproute_check = true
+ maproute_buffer = line
+ end
+
+ if line.includes? ");"
+ maproute_check = false
+ if maproute_buffer != ""
+ buffer = maproute_buffer.gsub(/[\r\n]/, "")
+ buffer = buffer.gsub(/\s+/, "")
+ buffer.split(",").each do |item|
+ if item.includes? "url:"
+ url = item.gsub(/url:/, "").gsub(/"/, "")
+ details = Details.new(PathInfo.new(route_config_file, index + 1))
+ @result << Endpoint.new("/#{url}", "GET", details)
+ end
+ end
+
+ maproute_buffer = ""
+ end
+ end
+
+ if maproute_check
+ maproute_buffer += line
+ end
+ end
+ end
+ end
+
+ @result
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/elixir/elixir_phoenix.cr b/src/analyzer/analyzers/elixir/elixir_phoenix.cr
new file mode 100644
index 00000000..a3c73d82
--- /dev/null
+++ b/src/analyzer/analyzers/elixir/elixir_phoenix.cr
@@ -0,0 +1,64 @@
+require "../../../models/analyzer"
+
+module Analyzer::Elixir
+ class Phoenix < Analyzer
+ def analyze
+ # Source Analysis
+ begin
+ Dir.glob("#{@base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".ex"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ file.each_line.with_index do |line, index|
+ endpoints = line_to_endpoint(line)
+ endpoints.each do |endpoint|
+ if endpoint.method != ""
+ details = Details.new(PathInfo.new(path, index + 1))
+ endpoint.details = details
+ @result << endpoint
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ @result
+ end
+
+ def line_to_endpoint(line : String) : Array(Endpoint)
+ endpoints = Array(Endpoint).new
+
+ line.scan(/get\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ endpoints << Endpoint.new("#{match[1]}", "GET")
+ end
+
+ line.scan(/post\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ endpoints << Endpoint.new("#{match[1]}", "POST")
+ end
+
+ line.scan(/patch\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ endpoints << Endpoint.new("#{match[1]}", "PATCH")
+ end
+
+ line.scan(/put\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ endpoints << Endpoint.new("#{match[1]}", "PUT")
+ end
+
+ line.scan(/delete\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ endpoints << Endpoint.new("#{match[1]}", "DELETE")
+ end
+
+ line.scan(/socket\s+['"](.+?)['"]\s*,\s*(.+?)\s*/) do |match|
+ tmp = Endpoint.new("#{match[1]}", "GET")
+ tmp.protocol = "ws"
+ endpoints << tmp
+ end
+
+ endpoints
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/analyzer_example.cr b/src/analyzer/analyzers/example.cr
similarity index 74%
rename from src/analyzer/analyzers/analyzer_example.cr
rename to src/analyzer/analyzers/example.cr
index cae8a0cd..c94f1694 100644
--- a/src/analyzer/analyzers/analyzer_example.cr
+++ b/src/analyzer/analyzers/example.cr
@@ -8,11 +8,11 @@ class AnalyzerExample < Analyzer
next if File.directory?(path)
if File.exists?(path)
File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
- file.each_line.with_index do |line, index|
+ file.each_line.with_index do |_, _|
# For example (Add endpoint to result)
# endpoint = Endpoint.new("/", "GET")
# details = Details.new(PathInfo.new(path, index + 1))
- # endpoint.set_details(details)
+ # endpoint.details=details
# @result << endpoint
end
end
@@ -25,8 +25,3 @@ class AnalyzerExample < Analyzer
@result
end
end
-
-def analyzer_example(options : Hash(String, String))
- instance = AnalyzerExample.new(options)
- instance.analyze
-end
diff --git a/src/analyzer/analyzers/go/beego.cr b/src/analyzer/analyzers/go/beego.cr
new file mode 100644
index 00000000..a6f2e14b
--- /dev/null
+++ b/src/analyzer/analyzers/go/beego.cr
@@ -0,0 +1,145 @@
+require "../../../models/analyzer"
+require "../../../minilexers/golang"
+
+module Analyzer::Go
+ class Beego < Analyzer
+ def analyze
+ # Source Analysis
+ public_dirs = [] of (Hash(String, String))
+ groups = [] of Hash(String, String)
+ begin
+ Dir.glob("#{base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".go"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ details = Details.new(PathInfo.new(path, index + 1))
+ lexer = GolangLexer.new
+
+ if line.includes?(".Group(")
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ group_name = ""
+ group_path = ""
+ map.each do |token|
+ if token.type == :assign
+ group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
+ end
+
+ if token.type == :string
+ group_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ group_path = value + group_path
+ end
+ end
+ end
+ end
+
+ before = token
+ end
+
+ if group_name.size > 0 && group_path.size > 0
+ groups << {
+ group_name => group_path,
+ }
+ end
+ end
+
+ if line.includes?(".Get(") || line.includes?(".Post(") || line.includes?(".Put(") || line.includes?(".Delete(")
+ get_route_path(line, groups).tap do |route_path|
+ if route_path.size > 0
+ new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0].to_s.upcase, details)
+ result << new_endpoint
+ last_endpoint = new_endpoint
+ end
+ end
+ end
+
+ if line.includes?(".Any(") || line.includes?(".Handler(") || line.includes?(".Router(")
+ get_route_path(line, groups).tap do |route_path|
+ if route_path.size > 0
+ new_endpoint = Endpoint.new("#{route_path}", "GET", details)
+ result << new_endpoint
+ last_endpoint = new_endpoint
+ end
+ end
+ end
+
+ ["GetString", "GetStrings", "GetInt", "GetInt8", "GetUint8", "GetInt16", "GetUint16", "GetInt32", "GetUint32",
+ "GetInt64", "GetUint64", "GetBool", "GetFloat"].each do |pattern|
+ match = line.match(/#{pattern}\(\"(.*)\"\)/)
+ if match
+ param_name = match[1]
+ last_endpoint.params << Param.new(param_name, "", "query")
+ end
+ end
+
+ if line.includes?("GetCookie(")
+ match = line.match(/GetCookie\(\"(.*)\"\)/)
+ if match
+ cookie_name = match[1]
+ last_endpoint.params << Param.new(cookie_name, "", "cookie")
+ end
+ end
+
+ if line.includes?("GetSecureCookie(")
+ match = line.match(/GetSecureCookie\(\"(.*)\"\)/)
+ if match
+ cookie_name = match[1]
+ last_endpoint.params << Param.new(cookie_name, "", "cookie")
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ public_dirs.each do |p_dir|
+ full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
+ Dir.glob("#{full_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path)
+ if p_dir["static_path"].ends_with?("/")
+ p_dir["static_path"] = p_dir["static_path"][0..-2]
+ end
+
+ details = Details.new(PathInfo.new(path))
+ result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
+ end
+ end
+ end
+
+ result
+ end
+
+ def get_route_path(line : String, groups : Array(Hash(String, String))) : String
+ lexer = GolangLexer.new
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ map.each do |token|
+ if token.type == :string
+ final_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ final_path = value + final_path
+ end
+ end
+ end
+
+ return final_path
+ end
+
+ before = token
+ end
+
+ ""
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/go/echo.cr b/src/analyzer/analyzers/go/echo.cr
new file mode 100644
index 00000000..4b1af462
--- /dev/null
+++ b/src/analyzer/analyzers/go/echo.cr
@@ -0,0 +1,187 @@
+require "../../../models/analyzer"
+
+module Analyzer::Go
+ class Echo < Analyzer
+ def analyze
+ # Source Analysis
+ public_dirs = [] of (Hash(String, String))
+ groups = [] of Hash(String, String)
+
+ begin
+ Dir.glob("#{base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".go"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ details = Details.new(PathInfo.new(path, index + 1))
+ lexer = GolangLexer.new
+
+ if line.includes?(".Group(")
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ group_name = ""
+ group_path = ""
+ map.each do |token|
+ if token.type == :assign
+ group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
+ end
+
+ if token.type == :string
+ group_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ group_path = value + group_path
+ end
+ end
+ end
+ end
+
+ before = token
+ end
+
+ if group_name.size > 0 && group_path.size > 0
+ groups << {
+ group_name => group_path,
+ }
+ end
+ end
+
+ if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(")
+ get_route_path(line, groups).tap do |route_path|
+ if route_path.size > 0
+ new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details)
+ result << new_endpoint
+ last_endpoint = new_endpoint
+ end
+ end
+ end
+
+ if line.includes?("Param(") || line.includes?("FormValue(")
+ get_param(line).tap do |param|
+ if param.name.size > 0 && last_endpoint.method != ""
+ last_endpoint.params << param
+ end
+ end
+ end
+
+ if line.includes?("Static(")
+ get_static_path(line).tap do |static_path|
+ if static_path.size > 0
+ public_dirs << static_path
+ end
+ end
+ end
+
+ if line.includes?("Request().Header.Get(")
+ match = line.match(/Request\(\)\.Header\.Get\(\"(.*)\"\)/)
+ if match
+ header_name = match[1]
+ last_endpoint.params << Param.new(header_name, "", "header")
+ end
+ end
+
+ if line.includes?("Cookie(")
+ match = line.match(/Cookie\(\"(.*)\"\)/)
+ if match
+ cookie_name = match[1]
+ last_endpoint.params << Param.new(cookie_name, "", "cookie")
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ public_dirs.each do |p_dir|
+ full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
+ Dir.glob("#{full_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path)
+ if p_dir["static_path"].ends_with?("/")
+ p_dir["static_path"] = p_dir["static_path"][0..-2]
+ end
+
+ details = Details.new(PathInfo.new(path))
+ result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
+ end
+ end
+ end
+
+ result
+ end
+
+ def get_param(line : String) : Param
+ param_type = "json"
+ if line.includes?("QueryParam")
+ param_type = "query"
+ end
+ if line.includes?("FormValue")
+ param_type = "form"
+ end
+
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(")")
+ if second.size > 1
+ param_name = second[0].gsub("\"", "")
+ rtn = Param.new(param_name, "", param_type)
+
+ return rtn
+ end
+ end
+
+ Param.new("", "", "")
+ end
+
+ def get_static_path(line : String) : Hash(String, String)
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(",")
+ if second.size > 1
+ static_path = second[0].gsub("\"", "")
+ file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "")
+ rtn = {
+ "static_path" => static_path,
+ "file_path" => file_path,
+ }
+
+ return rtn
+ end
+ end
+
+ {
+ "static_path" => "",
+ "file_path" => "",
+ }
+ end
+
+ def get_route_path(line : String, groups : Array(Hash(String, String))) : String
+ lexer = GolangLexer.new
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ map.each do |token|
+ if token.type == :string
+ final_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ final_path = value + final_path
+ end
+ end
+ end
+
+ return final_path
+ end
+
+ before = token
+ end
+
+ ""
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/go/fiber.cr b/src/analyzer/analyzers/go/fiber.cr
new file mode 100644
index 00000000..cb75e89e
--- /dev/null
+++ b/src/analyzer/analyzers/go/fiber.cr
@@ -0,0 +1,198 @@
+require "../../../models/analyzer"
+
+module Analyzer::Go
+ class Fiber < Analyzer
+ def analyze
+ # Source Analysis
+ public_dirs = [] of (Hash(String, String))
+ groups = [] of Hash(String, String)
+
+ begin
+ Dir.glob("#{base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".go"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ details = Details.new(PathInfo.new(path, index + 1))
+ lexer = GolangLexer.new
+
+ if line.includes?(".Group(")
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ group_name = ""
+ group_path = ""
+ map.each do |token|
+ if token.type == :assign
+ group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
+ end
+
+ if token.type == :string
+ group_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ group_path = value + group_path
+ end
+ end
+ end
+ end
+
+ before = token
+ end
+
+ if group_name.size > 0 && group_path.size > 0
+ groups << {
+ group_name => group_path,
+ }
+ end
+ end
+
+ if line.includes?(".Get(") || line.includes?(".Post(") || line.includes?(".Put(") || line.includes?(".Delete(")
+ get_route_path(line, groups).tap do |route_path|
+ if route_path.size > 0
+ new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0].upcase, details)
+ if line.includes?("websocket.New(")
+ new_endpoint.protocol = "ws"
+ end
+ result << new_endpoint
+ last_endpoint = new_endpoint
+ end
+ end
+ end
+
+ if line.includes?(".Query(") || line.includes?(".FormValue(")
+ get_param(line).tap do |param|
+ if param.name.size > 0 && last_endpoint.method != ""
+ last_endpoint.params << param
+ end
+ end
+ end
+
+ if line.includes?("Static(")
+ get_static_path(line).tap do |static_path|
+ if static_path.size > 0
+ public_dirs << static_path
+ end
+ end
+ end
+
+ if line.includes?("GetRespHeader(")
+ match = line.match(/GetRespHeader\(\"(.*)\"\)/)
+ if match
+ header_name = match[1]
+ last_endpoint.params << Param.new(header_name, "", "header")
+ end
+ end
+
+ if line.includes?("Vary(")
+ match = line.match(/Vary\(\"(.*)\"\)/)
+ if match
+ header_value = match[1]
+ last_endpoint.params << Param.new("Vary", header_value, "header")
+ end
+ end
+
+ if line.includes?("Cookies(")
+ match = line.match(/Cookies\(\"(.*)\"\)/)
+ if match
+ cookie_name = match[1]
+ last_endpoint.params << Param.new(cookie_name, "", "cookie")
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ public_dirs.each do |p_dir|
+ full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
+ Dir.glob("#{full_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path)
+ if p_dir["static_path"].ends_with?("/")
+ p_dir["static_path"] = p_dir["static_path"][0..-2]
+ end
+
+ details = Details.new(PathInfo.new(path))
+ result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
+ end
+ end
+ end
+
+ result
+ end
+
+ def get_param(line : String) : Param
+ param_type = "json"
+ if line.includes?("Query")
+ param_type = "query"
+ end
+ if line.includes?("FormValue")
+ param_type = "form"
+ end
+
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(")")
+ if second.size > 1
+ param_name = second[0].gsub("\"", "")
+ rtn = Param.new(param_name, "", param_type)
+
+ return rtn
+ end
+ end
+
+ Param.new("", "", "")
+ end
+
+ def get_static_path(line : String) : Hash(String, String)
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(",")
+ if second.size > 1
+ static_path = second[0].gsub("\"", "")
+ file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "").gsub_repeatedly("//", "/")
+ rtn = {
+ "static_path" => static_path,
+ "file_path" => file_path,
+ }
+
+ return rtn
+ end
+ end
+
+ {
+ "static_path" => "",
+ "file_path" => "",
+ }
+ end
+
+ def get_route_path(line : String, groups : Array(Hash(String, String))) : String
+ lexer = GolangLexer.new
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ map.each do |token|
+ if token.type == :string
+ final_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ final_path = value + final_path
+ end
+ end
+ end
+
+ return final_path
+ end
+
+ before = token
+ end
+
+ ""
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/go/gin.cr b/src/analyzer/analyzers/go/gin.cr
new file mode 100644
index 00000000..aa12070d
--- /dev/null
+++ b/src/analyzer/analyzers/go/gin.cr
@@ -0,0 +1,189 @@
+require "../../../models/analyzer"
+require "../../../minilexers/golang"
+
+module Analyzer::Go
+ class Gin < Analyzer
+ def analyze
+ # Source Analysis
+ public_dirs = [] of (Hash(String, String))
+ groups = [] of Hash(String, String)
+ begin
+ Dir.glob("#{base_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path) && File.extname(path) == ".go"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ last_endpoint = Endpoint.new("", "")
+ file.each_line.with_index do |line, index|
+ details = Details.new(PathInfo.new(path, index + 1))
+ lexer = GolangLexer.new
+
+ if line.includes?(".Group(")
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ group_name = ""
+ group_path = ""
+ map.each do |token|
+ if token.type == :assign
+ group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "")
+ end
+
+ if token.type == :string
+ group_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ group_path = value + group_path
+ end
+ end
+ end
+ end
+
+ before = token
+ end
+
+ if group_name.size > 0 && group_path.size > 0
+ groups << {
+ group_name => group_path,
+ }
+ end
+ end
+
+ if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(")
+ get_route_path(line, groups).tap do |route_path|
+ if route_path.size > 0
+ new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details)
+ result << new_endpoint
+ last_endpoint = new_endpoint
+ end
+ end
+ end
+
+ ["Query", "PostForm", "GetHeader"].each do |pattern|
+ if line.includes?("#{pattern}(")
+ get_param(line).tap do |param|
+ if param.name.size > 0 && last_endpoint.method != ""
+ last_endpoint.params << param
+ end
+ end
+ end
+ end
+
+ if line.includes?("Static(")
+ get_static_path(line).tap do |static_path|
+ if static_path["static_path"].size > 0 && static_path["file_path"].size > 0
+ public_dirs << static_path
+ end
+ end
+ end
+
+ if line.includes?("Cookie(")
+ match = line.match(/Cookie\(\"(.*)\"\)/)
+ if match
+ cookie_name = match[1]
+ last_endpoint.params << Param.new(cookie_name, "", "cookie")
+ end
+ end
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+
+ public_dirs.each do |p_dir|
+ full_path = (base_path + "/" + p_dir["file_path"]).gsub_repeatedly("//", "/")
+ Dir.glob("#{full_path}/**/*") do |path|
+ next if File.directory?(path)
+ if File.exists?(path)
+ if p_dir["static_path"].ends_with?("/")
+ p_dir["static_path"] = p_dir["static_path"][0..-2]
+ end
+
+ details = Details.new(PathInfo.new(path))
+ result << Endpoint.new("#{p_dir["static_path"]}#{path.gsub(full_path, "")}", "GET", details)
+ end
+ end
+ end
+
+ result
+ end
+
+ def get_param(line : String) : Param
+ param_type = "json"
+ if line.includes?("Query(")
+ param_type = "query"
+ end
+ if line.includes?("PostForm(")
+ param_type = "form"
+ end
+ if line.includes?("GetHeader(")
+ param_type = "header"
+ end
+
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(")")
+ if second.size > 1
+ if line.includes?("DefaultQuery") || line.includes?("DefaultPostForm")
+ param_name = second[0].split(",")[0].gsub("\"", "")
+ rtn = Param.new(param_name, "", param_type)
+ else
+ param_name = second[0].gsub("\"", "")
+ rtn = Param.new(param_name, "", param_type)
+ end
+
+ return rtn
+ end
+ end
+
+ Param.new("", "", "")
+ end
+
+ def get_static_path(line : String) : Hash(String, String)
+ first = line.strip.split("(")
+ if first.size > 1
+ second = first[1].split(",")
+ if second.size > 1
+ static_path = second[0].gsub("\"", "")
+ file_path = second[1].gsub("\"", "").gsub(" ", "").gsub(")", "")
+ rtn = {
+ "static_path" => static_path,
+ "file_path" => file_path,
+ }
+
+ return rtn
+ end
+ end
+
+ {
+ "static_path" => "",
+ "file_path" => "",
+ }
+ end
+
+ def get_route_path(line : String, groups : Array(Hash(String, String))) : String
+ lexer = GolangLexer.new
+ map = lexer.tokenize(line)
+ before = Token.new(:unknown, "", 0)
+ map.each do |token|
+ if token.type == :string
+ final_path = token.value.to_s
+ groups.each do |group|
+ group.each do |key, value|
+ if before.value.to_s.includes? key
+ final_path = value + final_path
+ end
+ end
+ end
+
+ return final_path
+ end
+
+ before = token
+ end
+
+ ""
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/java/armeria.cr b/src/analyzer/analyzers/java/armeria.cr
new file mode 100644
index 00000000..394cdcdc
--- /dev/null
+++ b/src/analyzer/analyzers/java/armeria.cr
@@ -0,0 +1,66 @@
+require "../../../models/analyzer"
+
+module Analyzer::Java
+ class Armeria < Analyzer
+ REGEX_SERVER_CODE_BLOCK = /Server\s*\.builder\(\s*\)\s*\.[^;]*?build\(\)\s*\./
+ REGEX_SERVICE_CODE = /\.service(If|Under|)?\([^;]+?\)/
+ REGEX_ROUTE_CODE = /\.route\(\)\s*\.\s*(\w+)\s*\(([^\.]*)\)\./
+
+ def analyze
+ # Source Analysis
+ begin
+ Dir.glob("#{@base_path}/**/*") do |path|
+ next if File.directory?(path)
+
+ if File.exists?(path) && (path.ends_with?(".java") || path.ends_with?(".kt"))
+ details = Details.new(PathInfo.new(path))
+
+ content = File.read(path, encoding: "utf-8", invalid: :skip)
+            content.scan(REGEX_SERVER_CODE_BLOCK) do |server_codeblock_match|
+              server_codeblock = server_codeblock_match[0]
+
+ server_codeblock.scan(REGEX_SERVICE_CODE) do |service_code_match|
+ next if service_code_match.size != 2
+ endpoint_param_index = 0
+ if service_code_match[1] == "If"
+ endpoint_param_index = 1
+ end
+
+ service_code = service_code_match[0]
+ parameter_code = service_code.split("(")[1]
+ split_params = parameter_code.split(",")
+ next if split_params.size <= endpoint_param_index
+ endpoint = split_params[endpoint_param_index].strip
+
+ endpoint = endpoint[1..-2]
+ @result << Endpoint.new("#{endpoint}", "GET", details)
+ end
+
+ server_codeblock.scan(REGEX_ROUTE_CODE) do |route_code_match|
+ next if route_code_match.size != 3
+ method = route_code_match[1].upcase
+ if method == "PATH"
+ method = "GET"
+ end
+
+ next if !["GET", "POST", "DELETE", "PUT", "PATCH", "HEAD", "OPTIONS"].includes?(method)
+
+ endpoint = route_code_match[2].split(")")[0].strip
+ next if endpoint[0] != endpoint[-1]
+ next if endpoint[0] != '"'
+
+ endpoint = endpoint[1..-2]
+ @result << Endpoint.new("#{endpoint}", method, details)
+ end
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+ Fiber.yield
+
+ @result
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/java/jsp.cr b/src/analyzer/analyzers/java/jsp.cr
new file mode 100644
index 00000000..65e4c9e9
--- /dev/null
+++ b/src/analyzer/analyzers/java/jsp.cr
@@ -0,0 +1,54 @@
+require "../../../utils/utils.cr"
+require "../../../models/analyzer"
+
+module Analyzer::Java
+ class Jsp < Analyzer
+ def analyze
+ # Source Analysis
+ begin
+ Dir.glob("#{base_path}/**/*") do |path|
+ next if File.directory?(path)
+
+ relative_path = get_relative_path(base_path, path)
+
+ if File.exists?(path) && File.extname(path) == ".jsp"
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ params_query = [] of Param
+
+ file.each_line do |line|
+ if line.includes? "request.getParameter"
+ match = line.strip.match(/request.getParameter\("(.*?)"\)/)
+ if match
+ param_name = match[1]
+ params_query << Param.new(param_name, "", "query")
+ end
+ end
+
+ if line.includes? "${param."
+ match = line.strip.match(/\$\{param\.(.*?)\}/)
+ if match
+ param_name = match[1]
+ params_query << Param.new(param_name, "", "query")
+ end
+ end
+ rescue
+ next
+ end
+ details = Details.new(PathInfo.new(path))
+ result << Endpoint.new("/#{relative_path}", "GET", params_query, details)
+ end
+ end
+ end
+ rescue e
+ logger.debug e
+ end
+ Fiber.yield
+
+ result
+ end
+
+ def allow_patterns
+ ["$_GET", "$_POST", "$_REQUEST", "$_SERVER"]
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/java/spring.cr b/src/analyzer/analyzers/java/spring.cr
new file mode 100644
index 00000000..710887b3
--- /dev/null
+++ b/src/analyzer/analyzers/java/spring.cr
@@ -0,0 +1,450 @@
+require "../../../models/analyzer"
+require "../../../minilexers/java"
+require "../../../miniparsers/java"
+
+module Analyzer::Java
+  # Endpoint analyzer for Java Spring (MVC and WebFlux) sources.
+  class Spring < Analyzer
+    # Matches an entire functional-router code block, e.g. `route()…);`.
+    REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m
+    # Captures the HTTP verb and quoted path of a single route registration.
+    REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/
+    # Per-path cache of raw file contents, filled during analyze and reused
+    # by create_parser to avoid re-reading files from disk.
+    FILE_CONTENT_CACHE = Hash(String, String).new
+
+    # Walk the source tree once, collecting:
+    #   * the WebFlux base path from application.yml / application.properties
+    #     found under any */src directory,
+    #   * endpoints declared through Spring MVC annotations, and
+    #   * endpoints declared through functional (router DSL) route blocks.
+    def analyze
+      parser_map = Hash(String, JavaParser).new
+      package_map = Hash(String, Hash(String, ClassModel)).new
+      webflux_base_path_map = Hash(String, String).new
+
+      Dir.glob("#{@base_path}/**/*") do |path|
+        url = ""
+
+        # Extract the Webflux base path from 'application.yml' in specified directories
+        if File.directory?(path)
+          if path.ends_with?("/src")
+            application_yml_path = File.join(path, "main/resources/application.yml")
+            if File.exists?(application_yml_path)
+              begin
+                config = YAML.parse(File.read(application_yml_path))
+                spring = config["spring"]
+                webflux = spring["webflux"]
+                webflux_base_path = webflux["base-path"]
+
+                if webflux_base_path
+                  webflux_base_path_map[path] = webflux_base_path.as_s
+                end
+              rescue e
+                # Handle parsing errors if necessary
+              end
+            end
+
+            application_properties_path = File.join(path, "main/resources/application.properties")
+            if File.exists?(application_properties_path)
+              begin
+                properties = File.read(application_properties_path)
+                base_path = properties.match(/spring\.webflux\.base-path\s*=\s*(.*)/)
+                if base_path
+                  webflux_base_path = base_path[1]
+                  webflux_base_path_map[path] = webflux_base_path if webflux_base_path
+                end
+              rescue e
+                # Handle parsing errors if necessary
+              end
+            end
+          end
+        elsif File.exists?(path) && path.ends_with?(".java")
+          webflux_base_path = find_base_path(path, webflux_base_path_map)
+          # Load Java file content into cache for processing
+          content = File.read(path, encoding: "utf-8", invalid: :skip)
+          FILE_CONTENT_CACHE[path] = content
+
+          # Process files that include Spring MVC bindings for routing
+          spring_web_bind_package = "org.springframework.web.bind.annotation."
+          has_spring_bindings = content.includes?(spring_web_bind_package)
+          if has_spring_bindings
+            if parser_map.has_key?(path)
+              parser = parser_map[path]
+              tokens = parser.tokens
+            else
+              parser = create_parser(Path.new(path), content)
+              tokens = parser.tokens
+              parser_map[path] = parser
+            end
+
+            package_name = parser.get_package_name(tokens)
+            next if package_name == ""
+            root_source_directory : Path = parser.get_root_source_directory(path, package_name)
+            package_directory = Path.new(path).dirname
+
+            # Import packages
+            import_map = Hash(String, ClassModel).new
+            parser.import_statements.each do |import_statement|
+              import_path = import_statement.gsub(".", "/")
+              if import_path.ends_with?("/*")
+                # Wildcard import: parse every sibling .java file in the
+                # imported directory and collect its classes.
+                import_directory = root_source_directory.join(import_path[..-3])
+                if Dir.exists?(import_directory)
+                  Dir.glob("#{import_directory}/*.java") do |_path|
+                    next if path == _path
+                    if !parser_map.has_key?(_path)
+                      _parser = create_parser(Path.new(_path))
+                      parser_map[_path] = _parser
+                    else
+                      _parser = parser_map[_path]
+                    end
+
+                    _parser.classes.each do |package_class|
+                      import_map[package_class.name] = package_class
+                    end
+                  end
+                end
+              else
+                source_path = root_source_directory.join(import_path + ".java")
+                next if source_path.dirname == package_directory || !File.exists?(source_path)
+                if !parser_map.has_key?(source_path.to_s)
+                  _parser = create_parser(source_path)
+                  parser_map[source_path.to_s] = _parser
+                  _parser.classes.each do |package_class|
+                    import_map[package_class.name] = package_class
+                  end
+                else
+                  _parser = parser_map[source_path.to_s]
+                  _parser.classes.each do |package_class|
+                    import_map[package_class.name] = package_class
+                  end
+                end
+              end
+            end
+
+            # Import packages from the same directory
+            package_class_map = package_map[package_directory]?
+            if package_class_map.nil?
+              package_class_map = Hash(String, ClassModel).new
+              Dir.glob("#{package_directory}/*.java") do |_path|
+                next if path == _path
+                if !parser_map.has_key?(_path)
+                  _parser = create_parser(Path.new(_path))
+                  parser_map[_path] = _parser
+                else
+                  _parser = parser_map[_path]
+                end
+
+                _parser.classes.each do |package_class|
+                  package_class_map[package_class.name] = package_class
+                end
+
+                parser.classes.each do |package_class|
+                  package_class_map[package_class.name] = package_class
+                end
+
+                # NOTE(review): this cache write sits inside the glob loop, so a
+                # package directory with no sibling .java files never gets
+                # cached — confirm whether it was meant to run after the loop.
+                package_map[package_directory] = package_class_map
+              end
+            end
+
+            # Extract URL mappings and methods from Spring MVC annotated classes
+            class_map = package_class_map.merge(import_map)
+            parser.classes.each do |class_model|
+              class_annotation = class_model.annotations["RequestMapping"]?
+              if !class_annotation.nil?
+                next if class_annotation.params.size == 0
+                class_path_token = class_annotation.params[0][-1]
+                if class_path_token.type == :STRING_LITERAL
+                  # [1..-2] strips the surrounding quote characters.
+                  url = class_path_token.value[1..-2]
+                  if url.ends_with? "*"
+                    url = url[0..-2]
+                  end
+                end
+              end
+
+              class_model.methods.values.each do |method|
+                method.annotations.values.each do |method_annotation|
+                  url_paths = Array(String).new
+
+                  # Spring MVC method mappings
+                  request_methods = Array(String).new
+                  if method_annotation.name.ends_with? "Mapping"
+                    parameter_format = nil
+                    annotation_parameters = method_annotation.params
+                    annotation_parameters.each do |annotation_parameter_tokens|
+                      if annotation_parameter_tokens.size > 2
+                        annotation_parameter_key = annotation_parameter_tokens[0].value
+                        annotation_parameter_value = annotation_parameter_tokens[-1].value
+                        if annotation_parameter_key == "method"
+                          if ["}", "]"].includes?(annotation_parameter_value)
+                            # Handle methods declared with multiple HTTP verbs
+                            annotation_parameter_tokens.reverse_each do |token|
+                              break if token.value == "method"
+                              next if [:LBRACE, :RBRACE, :LBRACK, :RBRACK, :COMMA, :DOT].includes?(token.type)
+                              http_methods = ["GET", "POST", "PUT", "DELETE", "PATCH"]
+                              if http_methods.includes?(token.value)
+                                request_methods.push(token.value)
+                              end
+                            end
+                          else
+                            request_methods.push(annotation_parameter_value)
+                          end
+                        elsif annotation_parameter_key == "consumes"
+                          # Set parameter format based on the 'consumes' attribute of the annotation.
+                          if annotation_parameter_value.ends_with? "APPLICATION_FORM_URLENCODED_VALUE"
+                            parameter_format = "form"
+                          elsif annotation_parameter_value.ends_with? "APPLICATION_JSON_VALUE"
+                            parameter_format = "json"
+                          end
+                        end
+                      end
+                    end
+
+                    # Avoid a double slash when joining base path and URL.
+                    if webflux_base_path.ends_with?("/") && url.starts_with?("/")
+                      webflux_base_path = webflux_base_path[..-2]
+                    end
+
+                    # Parse and construct endpoints for methods annotated with 'RequestMapping' or specific HTTP methods
+                    if method_annotation.name == "RequestMapping"
+                      url_paths = [""]
+                      if method_annotation.params.size > 0
+                        url_paths = get_mapping_path(parser, tokens, method_annotation.params)
+                      end
+
+                      line = method_annotation.tokens[0].line
+                      details = Details.new(PathInfo.new(path, line))
+
+                      if request_methods.empty?
+                        # Handle default HTTP methods if no specific method is annotated
+                        ["GET", "POST", "PUT", "DELETE", "PATCH"].each do |_request_method|
+                          parameters = get_endpoint_parameters(parser, _request_method, method, parameter_format, class_map)
+                          url_paths.each do |url_path|
+                            @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", _request_method, parameters, details)
+                          end
+                        end
+                      else
+                        # Create endpoints for annotated HTTP methods
+                        url_paths.each do |url_path|
+                          request_methods.each do |request_method|
+                            parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map)
+                            @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", request_method, parameters, details)
+                          end
+                        end
+                      end
+                      break
+                    else
+                      # Handle other specific mapping annotations like 'GetMapping', 'PostMapping', etc
+                      mapping_annotations = ["GetMapping", "PostMapping", "PutMapping", "DeleteMapping", "PatchMapping"]
+                      mapping_index = mapping_annotations.index(method_annotation.name)
+                      if !mapping_index.nil?
+                        line = method_annotation.tokens[0].line
+                        # Strip the trailing "Mapping" (7 chars) to get the verb.
+                        request_method = mapping_annotations[mapping_index][0..-8].upcase
+                        if parameter_format.nil? && request_method == "POST"
+                          parameter_format = "form"
+                        end
+                        parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map)
+
+                        url_paths = [""]
+                        if method_annotation.params.size > 0
+                          url_paths = get_mapping_path(parser, tokens, method_annotation.params)
+                        end
+
+                        details = Details.new(PathInfo.new(path, line))
+                        url_paths.each do |url_path|
+                          @result << Endpoint.new("#{webflux_base_path}#{url}#{url_path}", request_method, parameters, details)
+                        end
+                        break
+                      end
+                    end
+                  end
+                end
+              end
+            end
+          else
+            # Extract and construct endpoints from reactive route configurations
+            content.scan(REGEX_ROUTER_CODE_BLOCK) do |route_code|
+              method_code = route_code[0]
+              method_code.scan(REGEX_ROUTE_CODE_LINE) do |match|
+                next if match.size != 4
+                method = match[2]
+                endpoint = match[3].gsub(/\n/, "")
+                details = Details.new(PathInfo.new(path))
+                @result << Endpoint.new("#{url}#{endpoint}", method, details)
+              end
+            end
+          end
+        end
+      end
+      Fiber.yield
+
+      @result
+    end
+
+    # Build a JavaParser for `path`, reusing cached file content when it is
+    # available. A non-empty explicit `content` bypasses both the cache and
+    # the disk read.
+    def create_parser(path : Path, content : String = "")
+      key = path.to_s
+      if content == ""
+        content = FILE_CONTENT_CACHE.fetch(key) { File.read(path, encoding: "utf-8", invalid: :skip) }
+      end
+
+      JavaParser.new(key, JavaLexer.new.tokenize(content))
+    end
+
+    # Return the base path registered for the longest prefix of
+    # `current_path`, or "" when no registered prefix matches.
+    def find_base_path(current_path : String, base_paths : Hash(String, String))
+      longest_first = base_paths.keys.sort_by { |key| -key.size }
+      matched = longest_first.find { |prefix| current_path.starts_with?(prefix) }
+      matched ? base_paths[matched] : ""
+    end
+
+    def get_mapping_path(parser : JavaParser, tokens : Array(Token), method_params : Array(Array(Token)))
+      # 1. Search for the value of the Mapping annotation.
+      # 2. If the value is a string literal, return the literal.
+      # 3. If the value is an array, return each element of the array.
+      # 4. In other cases, return an empty array.
+      url_paths = Array(String).new
+      if method_params[0].size != 0
+        # Default to the first annotation argument unless an explicit
+        # `value = …` named argument is present.
+        path_argument_index = 0
+        method_params.each_with_index do |mapping_parameter, index|
+          if mapping_parameter[0].type == :IDENTIFIER && mapping_parameter[0].value == "value"
+            path_argument_index = index
+          end
+        end
+
+        path_parameter_tokens = method_params[path_argument_index]
+        # Extract single and multiple mapping path
+        if path_parameter_tokens[-1].type == :STRING_LITERAL
+          # [1..-2] strips the surrounding quotes from the literal.
+          url_paths << path_parameter_tokens[-1].value[1..-2]
+        elsif path_parameter_tokens[-1].type == :RBRACE
+          # Walk an array literal like {"a", "b"} backwards, collecting each
+          # string element until the opening brace (or anything unexpected).
+          i = path_parameter_tokens.size - 2
+          while i > 0
+            parameter_token = path_parameter_tokens[i]
+            if parameter_token.type == :LBRACE
+              break
+            elsif parameter_token.type == :COMMA
+              i -= 1
+              next
+            elsif parameter_token.type == :STRING_LITERAL
+              url_paths << parameter_token.value[1..-2]
+            else
+              break
+            end
+
+            i -= 1
+          end
+        end
+      end
+
+      url_paths
+    end
+
+    # Derive the Param list for one endpoint from the method's formal
+    # parameters: annotated scalars become query/form/json/header params,
+    # HttpServletRequest bodies are scanned for getParameter/getHeader calls,
+    # and user-defined DTO classes contribute their settable fields.
+    def get_endpoint_parameters(parser : JavaParser, request_method : String, method : MethodModel, parameter_format : String | Nil, package_class_map : Hash(String, ClassModel)) : Array(Param)
+      endpoint_parameters = Array(Param).new
+      method.params.each do |method_param_tokens|
+        next if method_param_tokens.size == 0
+        if method_param_tokens[-1].type == :IDENTIFIER
+          if method_param_tokens[0].type == :AT
+            # The annotation decides where the parameter travels.
+            if method_param_tokens[1].value == "PathVariable"
+              next
+            elsif method_param_tokens[1].value == "RequestBody"
+              if parameter_format.nil?
+                parameter_format = "json"
+              end
+            elsif method_param_tokens[1].value == "RequestParam"
+              parameter_format = "query"
+            elsif method_param_tokens[1].value == "RequestHeader"
+              parameter_format = "header"
+            end
+          end
+
+          if parameter_format.nil?
+            parameter_format = "query"
+          end
+
+          default_value = nil
+          # Extract parameter name directly if not an identifier
+          parameter_name = method_param_tokens[-1].value
+          if method_param_tokens.size > 2
+            if method_param_tokens[2].type == :LPAREN
+              request_parameters = parser.parse_formal_parameters(method_param_tokens, 2)
+              request_parameters.each do |request_parameter_tokens|
+                if request_parameter_tokens.size > 2
+                  request_param_name = request_parameter_tokens[0].value
+                  request_param_value = request_parameter_tokens[-1].value
+
+                  # Extract 'name' from @RequestParam(value/defaultValue/name = "name")
+                  if request_param_name == "value"
+                    parameter_name = request_param_value[1..-2]
+                  elsif request_param_name == "name"
+                    parameter_name = request_param_value[1..-2]
+                  elsif request_param_name == "defaultValue"
+                    default_value = request_param_value[1..-2]
+                  end
+                end
+              end
+              # Handle direct string literal as parameter name, e.g., @RequestParam("name")
+              # NOTE(review): the size > 2 guard above only guarantees index 2;
+              # indexing [3] relies on "(" always being followed by a token —
+              # confirm the tokenizer guarantees this.
+              if method_param_tokens[3].type == :STRING_LITERAL
+                parameter_name_token = method_param_tokens[3]
+                parameter_name = parameter_name_token.value[1..-2]
+              end
+            end
+          end
+
+          argument_name = method_param_tokens[-1].value
+          parameter_type = method_param_tokens[-2].value
+          if ["long", "int", "integer", "char", "boolean", "string", "multipartfile"].index(parameter_type.downcase)
+            param_default_value = default_value.nil? ? "" : default_value
+            endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
+          elsif parameter_type == "HttpServletRequest"
+            # Scan the method body for argument.getParameter("…") and
+            # argument.getHeader("…") call sites.
+            i = 0
+            while i < method.body.size - 6
+              if [:TAB, :NEWLINE].index(method.body[i].type)
+                i += 1
+                next
+              end
+
+              # NOTE(review): unreachable (NEWLINE is consumed above) — and if
+              # it were ever reached, `next` without `i += 1` would loop forever.
+              next if method.body[i].type == :NEWLINE
+
+              if method.body[i].type == :IDENTIFIER && method.body[i].value == argument_name
+                if method.body[i + 1].type == :DOT
+                  if method.body[i + 2].type == :IDENTIFIER && method.body[i + 3].type == :LPAREN
+                    servlet_request_method_name = method.body[i + 2].value
+                    if method.body[i + 4].type == :STRING_LITERAL
+                      parameter_name = method.body[i + 4].value[1..-2]
+                      if servlet_request_method_name == "getParameter"
+                        unless endpoint_parameters.any? { |param| param.name == parameter_name }
+                          endpoint_parameters << Param.new(parameter_name, "", parameter_format)
+                        end
+                        i += 6
+                        next
+                      elsif servlet_request_method_name == "getHeader"
+                        unless endpoint_parameters.any? { |param| param.name == parameter_name }
+                          endpoint_parameters << Param.new(parameter_name, "", "header")
+                        end
+                        i += 6
+                        next
+                      end
+                    end
+                  end
+                end
+              end
+
+              i += 1
+            end
+          else
+            # Map fields of user-defined class to parameters.
+            if package_class_map.has_key?(parameter_type)
+              package_class = package_class_map[parameter_type]
+              package_class.fields.values.each do |field|
+                if field.access_modifier == "public" || field.has_setter?
+                  param_default_value = default_value.nil? ? field.init_value : default_value
+                  endpoint_parameters << Param.new(field.name, param_default_value, parameter_format)
+                end
+              end
+            end
+          end
+        end
+      end
+
+      endpoint_parameters
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/javascript/express.cr b/src/analyzer/analyzers/javascript/express.cr
new file mode 100644
index 00000000..39f0904b
--- /dev/null
+++ b/src/analyzer/analyzers/javascript/express.cr
@@ -0,0 +1,108 @@
+require "../../../models/analyzer"
+
+module Analyzer::Javascript
+ class Express < Analyzer
+    # Scan every file under the base path line by line, turning Express-style
+    # route registrations into endpoints and attaching any request-parameter
+    # accesses found on following lines to the most recent endpoint.
+    def analyze
+      # Source Analysis
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+          if File.exists?(path)
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              last_endpoint = Endpoint.new("", "")
+              file.each_line.with_index do |line, index|
+                endpoint = line_to_endpoint(line)
+                if endpoint.method != ""
+                  details = Details.new(PathInfo.new(path, index + 1))
+                  endpoint.details = details
+                  result << endpoint
+                  last_endpoint = endpoint
+                end
+
+                param = line_to_param(line)
+                if param.name != ""
+                  if last_endpoint.method != ""
+                    last_endpoint.push_param(param)
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        # Log instead of silently swallowing, consistent with other analyzers.
+        logger.debug e
+      end
+
+      result
+    end
+
+    # Extract the first argument (the route path) from a registration call
+    # such as `app.get('/path', handler)`. Returns "" when no path is found.
+    def express_get_endpoint(line : String)
+      api_path = ""
+      splited = line.split("(")
+      # `split` always yields at least one element, so the old `> 0` guard
+      # could never prevent the out-of-bounds access on `splited[1]` for a
+      # line without "(" — guard on > 1 instead.
+      if splited.size > 1
+        api_path = splited[1].split(",")[0].gsub(/['"]/, "")
+      end
+
+      api_path
+    end
+
+    # Map a source line to the Param it references: JSON body, query string,
+    # cookie, or header. Returns an empty Param when nothing matches.
+    def line_to_param(line : String) : Param
+      {"req.body." => "json", "req.query." => "query", "req.cookies." => "cookie"}.each do |marker, param_type|
+        if line.includes? marker
+          name = line.split(marker)[1].split(")")[0].split("}")[0].split(";")[0]
+          return Param.new(name, "", param_type)
+        end
+      end
+
+      if line.includes? "req.header("
+        name = line.split("req.header(")[1].split(")")[0].gsub(/['"]/, "")
+        return Param.new(name, "", "header")
+      end
+
+      Param.new("", "", "")
+    end
+
+    # Map a source line to an Endpoint when it contains an Express-style
+    # route registration (`.get('/…`, `.post('/…`, …). Returns an empty
+    # Endpoint when the line registers no route.
+    def line_to_endpoint(line : String) : Endpoint
+      {".get('/" => "GET", ".post('/" => "POST", ".put('/" => "PUT", ".delete('/" => "DELETE", ".patch('/" => "PATCH"}.each do |marker, http_method|
+        next unless line.includes? marker
+        api_path = express_get_endpoint(line)
+        return Endpoint.new(api_path, http_method) if api_path != ""
+      end
+
+      Endpoint.new("", "")
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/javascript/restify.cr b/src/analyzer/analyzers/javascript/restify.cr
new file mode 100644
index 00000000..2e055dba
--- /dev/null
+++ b/src/analyzer/analyzers/javascript/restify.cr
@@ -0,0 +1,108 @@
+require "../../../models/analyzer"
+
+module Analyzer::Javascript
+ class Restify < Analyzer
+    # Scan every file under the base path line by line, turning Restify-style
+    # route registrations into endpoints and attaching any request-parameter
+    # accesses found on following lines to the most recent endpoint.
+    def analyze
+      # Source Analysis
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+          if File.exists?(path)
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              last_endpoint = Endpoint.new("", "")
+              file.each_line.with_index do |line, index|
+                endpoint = line_to_endpoint(line)
+                if endpoint.method != ""
+                  details = Details.new(PathInfo.new(path, index + 1))
+                  endpoint.details = details
+                  result << endpoint
+                  last_endpoint = endpoint
+                end
+
+                param = line_to_param(line)
+                if param.name != ""
+                  if last_endpoint.method != ""
+                    last_endpoint.push_param(param)
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        # Log instead of silently swallowing, consistent with other analyzers.
+        logger.debug e
+      end
+
+      result
+    end
+
+    # Extract the first argument (the route path) from a registration call
+    # such as `server.get('/path', handler)`. Returns "" when none is found.
+    def express_get_endpoint(line : String)
+      api_path = ""
+      splited = line.split("(")
+      # `split` always yields at least one element, so the old `> 0` guard
+      # could never prevent the out-of-bounds access on `splited[1]` for a
+      # line without "(" — guard on > 1 instead.
+      if splited.size > 1
+        api_path = splited[1].split(",")[0].gsub(/['"]/, "")
+      end
+
+      api_path
+    end
+
+    # Map a source line to the Param it references: JSON body, query string,
+    # cookie, or header. Returns an empty Param when nothing matches.
+    def line_to_param(line : String) : Param
+      {"req.body." => "json", "req.query." => "query", "req.cookies." => "cookie"}.each do |marker, param_type|
+        if line.includes? marker
+          name = line.split(marker)[1].split(")")[0].split("}")[0].split(";")[0]
+          return Param.new(name, "", param_type)
+        end
+      end
+
+      if line.includes? "req.header("
+        name = line.split("req.header(")[1].split(")")[0].gsub(/['"]/, "")
+        return Param.new(name, "", "header")
+      end
+
+      Param.new("", "", "")
+    end
+
+    # Map a source line to an Endpoint when it contains a Restify-style
+    # route registration (`.get('/…`, `.post('/…`, …). Returns an empty
+    # Endpoint when the line registers no route.
+    def line_to_endpoint(line : String) : Endpoint
+      {".get('/" => "GET", ".post('/" => "POST", ".put('/" => "PUT", ".delete('/" => "DELETE", ".patch('/" => "PATCH"}.each do |marker, http_method|
+        next unless line.includes? marker
+        api_path = express_get_endpoint(line)
+        return Endpoint.new(api_path, http_method) if api_path != ""
+      end
+
+      Endpoint.new("", "")
+    end
+ end
+ end
+end
diff --git a/src/analyzer/analyzers/kotlin/spring.cr b/src/analyzer/analyzers/kotlin/spring.cr
new file mode 100644
index 00000000..8fdeb417
--- /dev/null
+++ b/src/analyzer/analyzers/kotlin/spring.cr
@@ -0,0 +1,491 @@
+require "../../../models/analyzer"
+require "../../../minilexers/kotlin"
+require "../../../miniparsers/kotlin"
+require "../../../utils/utils.cr"
+
+module Analyzer::Kotlin
+  # Endpoint analyzer for Kotlin Spring (MVC and WebFlux) sources.
+  class Spring < Analyzer
+    # Matches an entire functional-router code block, e.g. `route()…);`.
+    REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m
+    # Captures the HTTP verb and quoted path of a single route registration.
+    REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/
+    # Cache of raw file contents keyed by path (see fetch_file_content).
+    FILE_CONTENT_CACHE = Hash(String, String).new
+    KOTLIN_EXTENSION = "kt"
+    HTTP_METHODS = %w[GET POST PUT DELETE PATCH]
+
+    # Walk the source tree once: directories contribute the WebFlux base
+    # path (from application.yml/properties), .kt files contribute endpoints.
+    def analyze
+      parser_map = Hash(String, KotlinParser).new
+      package_map = Hash(String, Hash(String, KotlinParser::ClassModel)).new
+      webflux_base_path_map = Hash(String, String).new
+
+      Dir.glob("#{@base_path}/**/*") do |path|
+        next unless File.exists?(path)
+
+        if File.directory?(path)
+          process_directory(path, webflux_base_path_map)
+        elsif path.ends_with?(".#{KOTLIN_EXTENSION}")
+          process_kotlin_file(path, parser_map, package_map, webflux_base_path_map)
+        end
+      end
+
+      Fiber.yield
+      @result
+    end
+
+    # Process directory to extract WebFlux base path from 'application.yml'
+    # or 'application.properties' under any */src directory.
+    private def process_directory(path : String, webflux_base_path_map : Hash(String, String))
+      if path.ends_with?("/src")
+        application_yml_path = File.join(path, "main/resources/application.yml")
+        if File.exists?(application_yml_path)
+          begin
+            config = YAML.parse(File.read(application_yml_path))
+            # NOTE(review): YAML::Any#[] raises on a missing key rather than
+            # returning nil, so the nil checks below appear to rely on the
+            # surrounding rescue — confirm intended.
+            spring = config["spring"]
+            if spring
+              webflux = spring["webflux"]
+              if webflux
+                base_path = webflux["base-path"]
+                if base_path
+                  webflux_base_path = base_path.as_s
+                  webflux_base_path_map[path] = webflux_base_path if webflux_base_path
+                end
+              end
+            end
+          rescue e
+            # Handle parsing errors if necessary
+          end
+        end
+
+        application_properties_path = File.join(path, "main/resources/application.properties")
+        if File.exists?(application_properties_path)
+          begin
+            properties = File.read(application_properties_path)
+            base_path = properties.match(/spring\.webflux\.base-path\s*=\s*(.*)/)
+            if base_path
+              webflux_base_path = base_path[1]
+              webflux_base_path_map[path] = webflux_base_path if webflux_base_path
+            end
+          rescue e
+            # Handle parsing errors if necessary
+          end
+        end
+      end
+    end
+
+    # Process individual Kotlin files to analyze Spring WebFlux annotations:
+    # builds the class map from imports plus same-package files, resolves the
+    # applicable WebFlux base path, and hands off to annotation processing.
+    private def process_kotlin_file(path : String, parser_map : Hash(String, KotlinParser), package_map : Hash(String, Hash(String, KotlinParser::ClassModel)), webflux_base_path_map : Hash(String, String))
+      content = fetch_file_content(path)
+      parser = parser_map[path]? || create_parser(Path.new(path), content)
+      parser_map[path] ||= parser
+      tokens = parser.tokens
+
+      package_name = parser.get_package_name(tokens)
+      return if package_name.empty?
+
+      root_source_directory = parser.get_root_source_directory(path, package_name)
+      package_directory = Path.new(path).parent
+
+      import_map = process_imports(parser, root_source_directory, package_directory, path, parser_map)
+      package_class_map = package_map[package_directory.to_s]? || process_package_classes(package_directory, path, parser_map)
+      package_map[package_directory.to_s] ||= package_class_map
+
+      # Classes declared in this file win over imported/same-package ones.
+      class_map = package_class_map.merge(import_map)
+      parser.classes.each { |source_class| class_map[source_class.name] = source_class }
+
+      # Pick the base path whose directory prefix matches this file, if any.
+      match = webflux_base_path_map.find { |base_path, _| path.starts_with?(base_path) }
+      webflux_base_path = match ? match.last : ""
+      process_class_annotations(path, parser, class_map, webflux_base_path)
+    end
+
+    # Return the cached content for `path`, reading it from disk (and
+    # caching it) on first use.
+    private def fetch_file_content(path : String) : String
+      cached = FILE_CONTENT_CACHE[path]?
+      return cached if cached
+      FILE_CONTENT_CACHE[path] = File.read(path, encoding: "utf-8", invalid: :skip)
+    end
+
+    # Build a KotlinParser for `path`; falls back to the (cached) file
+    # content when no explicit content is supplied.
+    private def create_parser(path : Path, content : String = "") : KotlinParser
+      source = content.empty? ? fetch_file_content(path.to_s) : content
+      KotlinParser.new(path.to_s, KotlinLexer.new.tokenize(source))
+    end
+
+    # Resolve every import statement of `parser` into class models and
+    # return them as a map of class name to model.
+    private def process_imports(parser : KotlinParser, root_source_directory : Path, package_directory : Path, current_path : String, parser_map : Hash(String, KotlinParser)) : Hash(String, KotlinParser::ClassModel)
+      parser.import_statements.each_with_object(Hash(String, KotlinParser::ClassModel).new) do |statement, collected|
+        slash_path = statement.gsub(".", "/")
+        if slash_path.ends_with?("/*")
+          process_wildcard_import(root_source_directory, slash_path, current_path, parser_map, collected)
+        else
+          process_single_import(root_source_directory, slash_path, package_directory, parser_map, collected)
+        end
+      end
+    end
+
+    # Handle wildcard imports: parse every .kt file in the imported
+    # directory (except the current one) and collect its classes.
+    private def process_wildcard_import(root_source_directory : Path, import_path : String, current_path : String, parser_map : Hash(String, KotlinParser), import_map : Hash(String, KotlinParser::ClassModel))
+      # [0..-3] strips the trailing "/*" from the import path.
+      import_directory = root_source_directory.join(import_path[0..-3])
+      return unless Dir.exists?(import_directory)
+
+      # TODO: Be aware that the import file location might differ from the actual file system path.
+      Dir.glob("#{import_directory}/*.#{KOTLIN_EXTENSION}") do |path|
+        next if path == current_path
+        parser = parser_map[path]? || create_parser(Path.new(path))
+        parser_map[path] ||= parser
+        parser.classes.each { |package_class| import_map[package_class.name] = package_class }
+      end
+    end
+
+    # Handle single imports: parse the imported file (skipping files in the
+    # current package directory) and collect its classes.
+    private def process_single_import(root_source_directory : Path, import_path : String, package_directory : Path, parser_map : Hash(String, KotlinParser), import_map : Hash(String, KotlinParser::ClassModel))
+      source_path = root_source_directory.join("#{import_path}.#{KOTLIN_EXTENSION}")
+      return if source_path.dirname == package_directory || !File.exists?(source_path)
+      # TODO: Be aware that the import file location might differ from the actual file system path.
+      parser = parser_map[source_path.to_s]? || create_parser(source_path)
+      parser_map[source_path.to_s] ||= parser
+      parser.classes.each { |package_class| import_map[package_class.name] = package_class }
+    end
+
+    # Process all classes in the same package directory (excluding the
+    # current file), returning a map of class name to model.
+    private def process_package_classes(package_directory : Path, current_path : String, parser_map : Hash(String, KotlinParser)) : Hash(String, KotlinParser::ClassModel)
+      package_class_map = Hash(String, KotlinParser::ClassModel).new
+      Dir.glob("#{package_directory}/*.#{KOTLIN_EXTENSION}") do |path|
+        next if path == current_path
+        parser = parser_map[path]? || create_parser(Path.new(path))
+        parser_map[path] ||= parser
+        parser.classes.each { |package_class| package_class_map[package_class.name] = package_class }
+      end
+      package_class_map
+    end
+
+    # Process class annotations to find URL mappings and HTTP methods: the
+    # class-level @RequestMapping supplies the URL prefix shared by every
+    # method-level mapping of that class.
+    private def process_class_annotations(path : String, parser : KotlinParser, class_map : Hash(String, KotlinParser::ClassModel), webflux_base_path : String)
+      parser.classes.each do |class_model|
+        class_annotation = class_model.annotations["@RequestMapping"]?
+
+        url = class_annotation ? extract_url_from_annotation(class_annotation) : ""
+        class_model.methods.values.each do |method|
+          process_method_annotations(path, parser, method, class_map, webflux_base_path, url)
+        end
+      end
+    end
+
+    # Pull the mapping URL out of a class-level annotation: the last token
+    # of the first parameter, unquoted, with a trailing "*" wildcard
+    # stripped. Returns "" when no string literal is present.
+    private def extract_url_from_annotation(annotation_model : KotlinParser::AnnotationModel) : String
+      params = annotation_model.params
+      return "" if params.empty?
+
+      last_token = params[0][-1]
+      return "" unless last_token.type == :STRING_LITERAL
+
+      url = last_token.value[1..-2]
+      if url.ends_with?("*")
+        url[0..-2]
+      else
+        url
+      end
+    end
+
+    # Process method annotations to find specific mappings and create endpoints.
+    # Only annotations whose name ends in "Mapping" are considered; the paths
+    # come from the annotation's positional value plus its `value`/`path`
+    # named arguments.
+    private def process_method_annotations(path : String, parser : KotlinParser, method : KotlinParser::MethodModel, class_map : Hash(String, KotlinParser::ClassModel), webflux_base_path : String, url : String)
+      method.annotations.values.each do |method_annotation|
+        next unless method_annotation.name.ends_with?("Mapping")
+
+        request_optional, parameter_format = extract_request_methods_and_format(parser, method_annotation)
+        url_paths = method_annotation.name.starts_with?("@") ? extract_mapping_paths(parser, method_annotation) : [""]
+        details = Details.new(PathInfo.new(path, method_annotation.tokens[0].line))
+        url_paths += request_optional["values"]
+        url_paths += request_optional["paths"]
+
+        create_endpoints(webflux_base_path, url, url_paths, request_optional, parser, method, parameter_format, class_map, details)
+      end
+    end
+
+    # Extract HTTP methods and parameter format from annotation.
+    # Returns a tuple of:
+    #   * request_optional — "methods", "params", "headers", "values" and
+    #     "paths" arrays collected from the annotation's named array
+    #     arguments, and
+    #   * the parameter format ("form"/"json") implied by `consumes`, or nil.
+    private def extract_request_methods_and_format(parser : KotlinParser, annotation_model : KotlinParser::AnnotationModel) : Tuple(Hash(String, Array(String)), String | Nil)
+      parameter_format = nil
+      request_optional = Hash(String, Array(String)).new
+      request_optional["methods"] = Array(String).new
+      request_optional["params"] = Array(String).new
+      request_optional["headers"] = Array(String).new
+      request_optional["values"] = Array(String).new
+      request_optional["paths"] = Array(String).new
+
+      annotation_model.params.each do |tokens|
+        next if tokens.size < 3
+        # Only array-valued arguments ("[…]" or "arrayOf(…)") are handled here.
+        next if tokens[2].value != "[" && tokens[2].value != "arrayOf"
+        bracket_index = tokens[2].value != "arrayOf" ? tokens[2].index : tokens[2].index + 1
+
+        case tokens[0].value
+        when "method"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            # Accept both bare verbs and RequestMethod.VERB references.
+            method_index = param_tokens[0].value != "RequestMethod" ? 0 : 2
+            request_optional["methods"] << param_tokens[method_index].value
+          end
+        when "consumes"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            # Fixed: this condition was accidentally duplicated in a nested if.
+            if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
+              parameter_format = case param_tokens[0].value[1..-2].upcase
+                                 when "APPLICATION/X-WWW-FORM-URLENCODED"
+                                   "form"
+                                 when "APPLICATION/JSON"
+                                   "json"
+                                 else
+                                   nil
+                                 end
+              break
+            end
+          end
+        when "params"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
+              request_optional["params"] << param_tokens[0].value[1..-2]
+            end
+          end
+        when "headers"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
+              request_optional["headers"] << param_tokens[0].value[1..-2]
+            end
+          end
+        when "value"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
+              request_optional["values"] << param_tokens[0].value[1..-2]
+            end
+          end
+        when "path"
+          parser.parse_formal_parameters(bracket_index).each do |param_tokens|
+            if param_tokens.size > 0 && param_tokens[0].type == :STRING_LITERAL
+              request_optional["paths"] << param_tokens[0].value[1..-2]
+            end
+          end
+        end
+      end
+
+      if request_optional["methods"].empty?
+        if annotation_model.name == "@RequestMapping"
+          # Default to all HTTP methods if no method is specified
+          request_optional["methods"].concat(HTTP_METHODS)
+        else
+          # Extract HTTP method from annotation name
+          http_method = HTTP_METHODS.find { |method| annotation_model.name.upcase == "@#{method}MAPPING" }
+          request_optional["methods"].push(http_method) if http_method
+        end
+      end
+
+      {request_optional, parameter_format}
+    end
+
+    # Resolve the URL paths declared by a mapping annotation; an annotation
+    # without parameters maps to the single empty path.
+    private def extract_mapping_paths(parser : KotlinParser, annotation_model : KotlinParser::AnnotationModel) : Array(String)
+      if annotation_model.params.empty?
+        [""]
+      else
+        get_mapping_path(parser, annotation_model.params)
+      end
+    end
+
+    # Create endpoints for the extracted HTTP methods and paths.
+    # Emits one Endpoint into @result for each (url_path x HTTP method)
+    # combination of the mapping, resolving where parameters live
+    # (query/form/header) per method.
+    private def create_endpoints(webflux_base_path : String, url : String, url_paths : Array(String), request_optional : Hash(String, Array(String)), parser : KotlinParser, method : KotlinParser::MethodModel, parameter_format : String | Nil, class_map : Hash(String, KotlinParser::ClassModel), details : Details)
+      # Iterate over each URL path to create full URLs
+      url_paths.each do |url_path|
+        full_url = join_path(webflux_base_path, url, url_path)
+
+        # Iterate over each request method to create endpoints
+        request_optional["methods"].each do |request_method|
+          # Determine parameter format if not specified.
+          # NOTE(review): `||=` means the format resolved for the FIRST
+          # method sticks for all subsequent methods of this mapping —
+          # confirm that is intended.
+          parameter_format ||= determine_parameter_format(request_method)
+
+          # Get parameters for the endpoint
+          parameters = get_endpoint_parameters(parser, method, parameter_format, class_map)
+
+          # Add query or form parameters
+          add_params(parameters, request_optional["params"], parameter_format)
+
+          # Add header parameters
+          add_params(parameters, request_optional["headers"], "header")
+
+          # Create and store the endpoint
+          @result << Endpoint.new(full_url, request_method, parameters, details)
+        end
+      end
+    end
+
+    # Map an HTTP method to the default location of its parameters:
+    # request body ("form") for write methods, "query" for GET,
+    # nil for anything else.
+    private def determine_parameter_format(request_method)
+      body_methods = {"POST", "PUT", "DELETE", "PATCH"}
+      if body_methods.includes?(request_method)
+        "form"
+      elsif request_method == "GET"
+        "query"
+      end
+    end
+
+    # Append parameters (optionally encoded as "name=default") to `parameters`,
+    # skipping entries that are already present.
+    # params: Array of parameter strings
+    # default_format: Default format for the parameters (query, form, header);
+    #                 nil falls back to "query".
+    private def add_params(parameters, params, default_format)
+      params.each do |param|
+        format = default_format || "query"
+        # Split on the FIRST '=' only, so default values containing '='
+        # (e.g. "token=a=b") are preserved intact.
+        param, default_value = param.includes?("=") ? param.split("=", 2) : [param, ""]
+        new_param_obj = Param.new(param, default_value, format)
+
+        # Add parameter if it doesn't already exist in the parameters array
+        parameters << new_param_obj unless parameters.includes?(new_param_obj)
+      end
+    end
+
+    # Extract mapping paths from annotation parameters.
+    # Handles both a single string literal (value = "/a") and an array
+    # literal (value = ["/a", "/b"]); any non-literal entry aborts the scan.
+    private def get_mapping_path(parser : KotlinParser, method_params : Array(Array(Token))) : Array(String)
+      url_paths = Array(String).new
+      # Prefer the named "value" argument; otherwise fall back to the first.
+      path_argument_index = method_params.index { |param| param[0].value == "value" } || 0
+      path_parameter_tokens = method_params[path_argument_index]
+      if path_parameter_tokens[-1].type == :STRING_LITERAL
+        # Single literal: strip the surrounding quotes.
+        url_paths << path_parameter_tokens[-1].value[1..-2]
+      elsif path_parameter_tokens[-1].type == :RBRACE
+        # Array literal: walk backwards collecting string literals until the
+        # opening brace (or an unexpected token) is reached.
+        i = path_parameter_tokens.size - 2
+        while i > 0
+          parameter_token = path_parameter_tokens[i]
+          case parameter_token.type
+          when :LCURL
+            break
+          when :COMMA
+            i -= 1
+            next
+          when :STRING_LITERAL
+            url_paths << parameter_token.value[1..-2]
+          else
+            break
+          end
+          i -= 1
+        end
+      end
+
+      url_paths
+    end
+
+    # Get endpoint parameters from the method's annotation and signature.
+    # For each formal parameter: skips leading annotation tokens (and their
+    # parenthesised arguments), expands Spring's Pageable into
+    # page/size/sort, and flattens user-defined model classes recursively.
+    private def get_endpoint_parameters(parser : KotlinParser, method : KotlinParser::MethodModel, parameter_format : String | Nil, package_class_map : Hash(String, KotlinParser::ClassModel)) : Array(Param)
+      endpoint_parameters = Array(Param).new
+      method.params.each do |tokens|
+        next if tokens.size < 3
+
+        # Advance `i` past annotations such as @RequestParam("x") so it
+        # lands on the last annotation token before the parameter name.
+        i = 0
+        while i < tokens.size
+          case tokens[i + 1].type
+          when :ANNOTATION
+            i += 1
+          when :LPAREN
+            rparen = parser.find_bracket_partner(tokens[i + 1].index)
+            if rparen && tokens[i + (rparen - tokens[i + 1].index) + 2].type == :ANNOTATION
+              i += rparen - tokens[i + 1].index + 2
+            else
+              break
+            end
+          else
+            break
+          end
+        end
+
+        token = tokens[i]
+        # A trailing "?" marks a nullable Kotlin type; the type token then
+        # sits one position earlier.
+        parameter_index = tokens[-1].value != "?" ? -1 : -2
+        if tokens[parameter_index].value == "Pageable"
+          next if parameter_format.nil?
+          # Pageable expands to Spring's three standard paging fields.
+          endpoint_parameters << Param.new("page", "", parameter_format)
+          endpoint_parameters << Param.new("size", "", parameter_format)
+          endpoint_parameters << Param.new("sort", "", parameter_format)
+        else
+          name = token.value
+          parameter_format = get_parameter_format(name, parameter_format)
+          next if parameter_format.nil?
+
+          default_value, parameter_name, parameter_type = extract_parameter_details(tokens, parser, i)
+          next if parameter_name.empty? || parameter_type.nil?
+
+          param_default_value = default_value.nil? ? "" : default_value
+          # Primitive-ish types become direct params; anything else is
+          # treated as a user-defined class whose fields are flattened.
+          if ["long", "int", "integer", "char", "boolean", "string", "multipartfile"].includes?(parameter_type.downcase)
+            endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
+          else
+            add_user_defined_class_params(package_class_map, parameter_type, default_value, parameter_name, parameter_format, endpoint_parameters)
+          end
+        end
+      end
+      endpoint_parameters
+    end
+
+    # Resolve the parameter location implied by a Spring parameter
+    # annotation. Annotations with a fixed location win outright; the rest
+    # fall back to `current_format` (or a sensible default). @PathVariable
+    # yields nil because path variables are part of the URL itself.
+    private def get_parameter_format(name : String, current_format : String | Nil) : String | Nil
+      forced = {
+        "@RequestParam"  => "query",
+        "@RequestHeader" => "header",
+        "@CookieValue"   => "cookie",
+      }
+      return forced[name] if forced.has_key?(name)
+
+      case name
+      when "@PathVariable"
+        nil
+      when "@RequestBody"
+        current_format || "json"
+      when "@ModelAttribute"
+        current_format || "form"
+      else
+        current_format || "query"
+      end
+    end
+
+    # Extract details of parameters from tokens.
+    # Returns {default_value, parameter_name, parameter_type}. The name and
+    # default may come from annotation attributes (value/name/defaultValue)
+    # or, failing that, from the Kotlin declaration `name: Type` itself.
+    private def extract_parameter_details(tokens : Array(Token), parser : KotlinParser, index : Int32) : Tuple(String, String, String?)
+      default_value = ""
+      parameter_name = ""
+      parameter_type = nil
+
+      # Annotation with arguments, e.g. @RequestParam("id", defaultValue = "1")
+      if tokens[index + 1].type == :LPAREN
+        attributes = parser.parse_formal_parameters(tokens[index + 1].index)
+        attributes.each do |attribute_tokens|
+          if attribute_tokens.size > 2
+            attribute_name = attribute_tokens[0].value
+            attribute_value = attribute_tokens[2].value
+            case attribute_name
+            when "value", "name"
+              parameter_name = attribute_value
+            when "defaultValue"
+              default_value = attribute_value
+            end
+          else
+            # A single positional argument is the parameter name.
+            parameter_name = attribute_tokens[0].value
+          end
+        end
+      end
+
+      # Fall back to the declaration; a trailing "?" (nullable type) shifts
+      # the expected position of the colon by one.
+      colon_index = tokens[-1].value == "?" ? -3 : -2
+      if tokens[colon_index].type == :COLON
+        parameter_name = tokens[-3].value if parameter_name.empty? && tokens[-3].type == :IDENTIFIER
+        parameter_type = tokens[-1].type == :QUEST ? tokens[-2].value : tokens[-1].value if tokens[-1].type == :IDENTIFIER
+      elsif tokens[colon_index + 1].type == :RANGLE
+        # Generic type, e.g. `name: List<String>`.
+        parameter_type = tokens[-2].value
+        parameter_name = tokens[-6].value if tokens[-5].type == :COLON
+      end
+
+      # Strip surrounding double quotes from literal values.
+      default_value = default_value[1..-2] if default_value.size > 1 && default_value[0] == '"' && default_value[-1] == '"'
+      parameter_name = parameter_name[1..-2] if parameter_name.size > 1 && parameter_name[0] == '"' && parameter_name[-1] == '"'
+
+      {default_value, parameter_name, parameter_type}
+    end
+
+    # Add parameters from user-defined class fields.
+    # Flattens a model class into individual request parameters: enum
+    # classes become a single parameter, nested model-class fields recurse
+    # (`parameter_type != field.type` guards against direct self-reference;
+    # NOTE(review): longer reference cycles are not guarded — verify the
+    # parser cannot produce them).
+    private def add_user_defined_class_params(package_class_map : Hash(String, KotlinParser::ClassModel), parameter_type : String, default_value : String?, parameter_name : String, parameter_format : String | Nil, endpoint_parameters : Array(Param))
+      if package_class_map.has_key?(parameter_type)
+        package_class = package_class_map[parameter_type]
+        if package_class.enum_class?
+          param_default_value = default_value.nil? ? "" : default_value
+          endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format)
+        else
+          package_class.fields.values.each do |field|
+            if package_class_map.has_key?(field.type) && parameter_type != field.type
+              add_user_defined_class_params(package_class_map, field.type, field.init_value, field.name, parameter_format, endpoint_parameters)
+            else
+              # Only externally settable fields are bindable request params.
+              if field.access_modifier == "public" || field.has_setter?
+                param_default_value = default_value.nil? ? field.init_value : default_value
+                endpoint_parameters << Param.new(field.name, param_default_value, parameter_format)
+              end
+            end
+          end
+        end
+      end
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/php/php.cr b/src/analyzer/analyzers/php/php.cr
new file mode 100644
index 00000000..24ebcfd4
--- /dev/null
+++ b/src/analyzer/analyzers/php/php.cr
@@ -0,0 +1,70 @@
+require "../../../utils/utils.cr"
+require "../../../models/analyzer"
+
+module Analyzer::Php
+  # Static analyzer for plain PHP sources: every .php file becomes an
+  # endpoint at its relative path, with parameters inferred from
+  # superglobal accesses ($_GET / $_POST / $_REQUEST / $_SERVER).
+  class Php < Analyzer
+    def analyze
+      # Source Analysis
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+
+          relative_path = get_relative_path(base_path, path)
+
+          if File.exists?(path) && File.extname(path) == ".php"
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              params_query = [] of Param
+              params_body = [] of Param
+              methods = [] of String
+
+              file.each_line do |line|
+                if allow_patterns.any? { |pattern| line.includes? pattern }
+                  match = line.strip.match(/\$_(.*?)\['(.*?)'\]/)
+
+                  if match
+                    method = match[1]
+                    param_name = match[2]
+
+                    if method == "GET"
+                      params_query << Param.new(param_name, "", "query")
+                    elsif method == "POST"
+                      params_body << Param.new(param_name, "", "form")
+                      methods << "POST"
+                    elsif method == "REQUEST"
+                      # $_REQUEST is reachable via both query string and body.
+                      params_query << Param.new(param_name, "", "query")
+                      params_body << Param.new(param_name, "", "form")
+                      methods << "POST"
+                    elsif method == "SERVER"
+                      if param_name.includes? "HTTP_"
+                        # HTTP_X_FOO in $_SERVER is the X-FOO request header.
+                        param_name = param_name.sub("HTTP_", "").gsub("_", "-")
+                        params_query << Param.new(param_name, "", "header")
+                        params_body << Param.new(param_name, "", "header")
+                      end
+                    end
+                  end
+                end
+              rescue
+                # Undecodable line: skip it but keep scanning the file.
+                next
+              end
+
+              details = Details.new(PathInfo.new(path))
+              # Fix: dedupe methods — each matched POST/REQUEST param used to
+              # push "POST" again, emitting duplicate endpoints per file.
+              methods.uniq.each do |method|
+                result << Endpoint.new("/#{relative_path}", method, params_body, details)
+              end
+              result << Endpoint.new("/#{relative_path}", "GET", params_query, details)
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+      Fiber.yield
+
+      result
+    end
+
+    # Superglobal markers that flag a line as parameter-bearing.
+    def allow_patterns
+      ["$_GET", "$_POST", "$_REQUEST", "$_SERVER"]
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/python/django.cr b/src/analyzer/analyzers/python/django.cr
new file mode 100644
index 00000000..4ae6a5b8
--- /dev/null
+++ b/src/analyzer/analyzers/python/django.cr
@@ -0,0 +1,403 @@
+require "../../../models/analyzer"
+require "./python"
+require "json"
+
+module Analyzer::Python
+ class Django < Python
+    # Base path for the Django project (directory that contains the module
+    # named by ROOT_URLCONF).
+    @django_base_path : ::String = ""
+
+    # Regular expressions for extracting Django URL configurations
+    # Matches `ROOT_URLCONF = "pkg.urls"` in settings.py.
+    REGEX_ROOT_URLCONF = /\s*ROOT_URLCONF\s*=\s*r?['"]([^'"\\]*)['"]/
+    # Matches url()/path()/register() calls; captures the route string and
+    # the view expression.
+    REGEX_ROUTE_MAPPING = /(?:url|path|register)\s*\(\s*r?['"]([^"']*)['"][^,]*,\s*([^),]*)/
+    # Matches include("pkg.urls") references to nested URLconfs.
+    REGEX_INCLUDE_URLS = /include\s*\(\s*r?['"]([^'"\\]*)['"]/
+
+    # Map request parameters to their respective fields:
+    # request.<field> => {HTTP methods implied by use (nil = any), location}
+    REQUEST_PARAM_FIELD_MAP = {
+      "GET"     => {["GET"], "query"},
+      "POST"    => {["POST"], "form"},
+      "COOKIES" => {nil, "cookie"},
+      "META"    => {nil, "header"},
+      "data"    => {["POST", "PUT", "PATCH"], "form"},
+    }
+
+    # Map request parameter types to HTTP methods:
+    # location => methods it is valid for (nil = valid for all methods)
+    REQUEST_PARAM_TYPE_MAP = {
+      "query"  => nil,
+      "form"   => ["GET", "POST", "PUT", "PATCH"],
+      "cookie" => nil,
+      "header" => nil,
+    }
+
+    # Entry point: locate ROOT_URLCONF files, walk their urlpatterns into
+    # endpoints, then register every file under static/ as a GET route.
+    def analyze
+      endpoints = [] of Endpoint
+
+      # Find root Django URL configurations
+      root_django_urls_list = find_root_django_urls()
+      root_django_urls_list.each do |root_django_urls|
+        logger.debug "Found Django URL configurations in #{root_django_urls.filepath}"
+        @django_base_path = root_django_urls.basepath
+        extract_endpoints(root_django_urls).each do |endpoint|
+          endpoints << endpoint
+        end
+      end
+
+      # Find static files — served verbatim, so each is a GET endpoint.
+      begin
+        Dir.glob("#{@base_path}/static/**/*") do |file|
+          next if File.directory?(file)
+          relative_path = file.sub("#{@base_path}/static/", "")
+          endpoints << Endpoint.new("/#{relative_path}", "GET")
+        end
+      rescue e
+        logger.debug e
+      end
+
+      endpoints
+    end
+
+    # Find all root Django URLs.
+    # Scans every .py file for a ROOT_URLCONF assignment and resolves the
+    # dotted module name to an on-disk urls.py path.
+    # NOTE(review): each file is scanned in a spawned fiber that appends to
+    # root_django_urls_list; Fiber.yield inside the glob loop gives fibers a
+    # chance to run, but completion before `uniq` is returned is not
+    # guaranteed — verify results are not occasionally incomplete.
+    def find_root_django_urls : Array(DjangoUrls)
+      root_django_urls_list = [] of DjangoUrls
+
+      search_dir = @base_path
+      begin
+        Dir.glob("#{search_dir}/**/*") do |file|
+          spawn do
+            begin
+              next if File.directory?(file)
+              next if file.includes?("/site-packages/")
+              if file.ends_with? ".py"
+                content = File.read(file, encoding: "utf-8", invalid: :skip)
+                content.scan(REGEX_ROOT_URLCONF) do |match|
+                  next if match.size != 2
+                  # "pkg.urls" -> "pkg/urls.py"
+                  dotted_as_urlconf = match[1].split(".")
+                  relative_path = "#{dotted_as_urlconf.join("/")}.py"
+
+                  Dir.glob("#{search_dir}/**/#{relative_path}") do |filepath|
+                    # basepath = everything above the dotted module path.
+                    basepath = filepath.split("/")[..-(dotted_as_urlconf.size + 1)].join("/")
+                    root_django_urls_list << DjangoUrls.new("", filepath, basepath)
+                  end
+                end
+              end
+            rescue e : File::NotFoundError
+              logger.debug "File not found: #{file}"
+            end
+          end
+          Fiber.yield
+        end
+      rescue e
+        logger.debug e
+      end
+
+      root_django_urls_list.uniq
+    end
+
+    # Extract endpoints from a Django URL configuration file.
+    # Parses each `urlpatterns` entry, following include() references
+    # recursively and resolving view symbols through the import map.
+    def extract_endpoints(django_urls : DjangoUrls) : Array(Endpoint)
+      logger.debug "Extracting endpoints from #{django_urls.filepath}"
+      endpoints = [] of Endpoint
+      url_base_path = File.dirname(django_urls.filepath)
+
+      # Fix: read without leaking the handle — the original used
+      # `File.open` without a block and never closed the file.
+      content = File.read(django_urls.filepath, encoding: "utf-8", invalid: :skip)
+      package_map = find_imported_modules(@django_base_path, url_base_path, content)
+
+      # Temporary fix to parse only the string after "urlpatterns = ["
+      keywords = ["urlpatterns", "=", "["]
+      keywords.each do |keyword|
+        return endpoints unless content.includes? keyword
+        content = content.split(keyword, 2)[1]
+      end
+
+      # TODO: Parse correct urlpatterns from variable concatenation case
+      content.scan(REGEX_ROUTE_MAPPING) do |route_match|
+        next if route_match.size != 3
+        route = route_match[1]
+        # Drop regex anchors used by url() patterns.
+        route = route.gsub(/^\^/, "").gsub(/\$$/, "")
+        view = route_match[2].split(",")[0]
+        url = "/#{django_urls.prefix}/#{route}".gsub(/\/+/, "/")
+        new_django_urls = nil
+        view.scan(REGEX_INCLUDE_URLS) do |include_pattern_match|
+          # Detect nested URL configurations referenced via include()
+          next if include_pattern_match.size != 2
+          new_route_path = "#{@django_base_path}/#{include_pattern_match[1].gsub(".", "/")}.py"
+
+          if File.exists?(new_route_path)
+            new_django_urls = DjangoUrls.new("#{django_urls.prefix}#{route}", new_route_path, django_urls.basepath)
+            details = Details.new(PathInfo.new(new_route_path))
+            if new_django_urls.filepath != django_urls.filepath
+              extract_endpoints(new_django_urls).each do |endpoint|
+                endpoint.details = details
+                endpoints << endpoint
+              end
+            end
+          end
+        end
+        next if new_django_urls != nil
+
+        details = Details.new(PathInfo.new(django_urls.filepath))
+        if view == ""
+          endpoints << Endpoint.new(url, "GET", details)
+        else
+          dotted_as_names_split = view.split(".")
+
+          filepath = ""
+          function_or_class_name = ""
+          dotted_as_names_split.each_with_index do |name, index|
+            if (package_map.has_key? name) && (index < dotted_as_names_split.size)
+              filepath, package_type = package_map[name]
+              function_or_class_name = name
+              # For a FILE import the view symbol is the next dotted element.
+              if package_type == PackageType::FILE && index + 1 < dotted_as_names_split.size
+                function_or_class_name = dotted_as_names_split[index + 1]
+              end
+
+              break
+            end
+          end
+
+          if filepath != ""
+            extract_endpoints_from_file(url, filepath, function_or_class_name).each do |endpoint|
+              endpoint.details = details
+              endpoints << endpoint
+            end
+          else
+            # By default, Django allows requests with methods other than GET as well
+            endpoints << Endpoint.new(url, "GET", details)
+          end
+        end
+      end
+
+      endpoints
+    end
+
+    # Extract endpoints from a given file.
+    # Resolves `function_or_class_name` inside `filepath` as either a
+    # function-based or class-based view, inferring HTTP methods and
+    # request parameters from its body. Falls back to a single GET.
+    def extract_endpoints_from_file(url : ::String, filepath : ::String, function_or_class_name : ::String)
+      @logger.debug "Extracting endpoints from #{filepath}"
+
+      endpoints = Array(Endpoint).new
+      suspicious_http_methods = ["GET"]
+      suspicious_params = Array(Param).new
+
+      content = File.read(filepath, encoding: "utf-8", invalid: :skip)
+      content_lines = content.split "\n"
+
+      # Function Based View
+      function_start_index = content.index /def\s+#{function_or_class_name}\s*\(/
+      if !function_start_index.nil?
+        function_codeblock = parse_code_block(content[function_start_index..])
+        if !function_codeblock.nil?
+          lines = function_codeblock.split "\n"
+          function_define_line = lines[0]
+          lines = lines[1..]
+
+          # Check if the decorator line contains an HTTP method.
+          # NOTE(review): the unconditional `break` below means only the ONE
+          # line directly above the def is inspected — stacked decorators
+          # further up are ignored; confirm this is intended.
+          index = content_lines.index(function_define_line)
+          if !index.nil?
+            while index > 0
+              index -= 1
+
+              preceding_definition = content_lines[index]
+              if preceding_definition.size > 0 && preceding_definition[0] == '@'
+                HTTP_METHODS.each do |http_method_name|
+                  method_name_match = preceding_definition.downcase.match /[^a-zA-Z0-9](#{http_method_name})[^a-zA-Z0-9]/
+                  if !method_name_match.nil?
+                    suspicious_http_methods << http_method_name.upcase
+                  end
+                end
+              end
+
+              break
+            end
+          end
+
+          lines.each do |line|
+            # Check if line has 'request.method == "GET"' similar pattern
+            if line.includes? "request.method"
+              suspicious_code = line.split("request.method")[1].strip
+              HTTP_METHODS.each do |http_method_name|
+                method_name_match = suspicious_code.downcase.match /['"](#{http_method_name})['"]/
+                if !method_name_match.nil?
+                  suspicious_http_methods << http_method_name.upcase
+                end
+              end
+            end
+
+            extract_params_from_line(line, suspicious_http_methods).each do |param|
+              suspicious_params << param
+            end
+          end
+
+          suspicious_http_methods.uniq.each do |http_method_name|
+            endpoints << Endpoint.new(url, http_method_name, filter_params(http_method_name, suspicious_params))
+          end
+
+          return endpoints
+        end
+      end
+
+      # Class Based View
+      regext_http_methods = HTTP_METHODS.join "|"
+      class_start_index = content.index /class\s+#{function_or_class_name}\s*[\(:]/
+      if !class_start_index.nil?
+        class_codeblock = parse_code_block(content[class_start_index..])
+        if !class_codeblock.nil?
+          lines = class_codeblock.split "\n"
+          class_define_line = lines[0]
+          lines = lines[1..]
+
+          # Determine implicit HTTP methods based on class name
+          if class_define_line.includes? "Form"
+            suspicious_http_methods << "GET"
+            suspicious_http_methods << "POST"
+          elsif class_define_line.includes? "Delete"
+            suspicious_http_methods << "DELETE"
+            suspicious_http_methods << "POST"
+          elsif class_define_line.includes? "Create"
+            suspicious_http_methods << "POST"
+          elsif class_define_line.includes? "Update"
+            suspicious_http_methods << "POST"
+          end
+
+          # Check HTTP methods in class methods
+          lines.each do |line|
+            method_function_match = line.match(/\s+def\s+(#{regext_http_methods})\s*\(/)
+            if !method_function_match.nil?
+              suspicious_http_methods << method_function_match[1].upcase
+            end
+
+            extract_params_from_line(line, suspicious_http_methods).each do |param|
+              suspicious_params << param
+            end
+          end
+
+          suspicious_http_methods.uniq.each do |http_method_name|
+            endpoints << Endpoint.new(url, http_method_name, filter_params(http_method_name, suspicious_params))
+          end
+
+          return endpoints
+        end
+      end
+
+      # Default to GET method
+      [Endpoint.new(url, "GET")]
+    end
+
+    # Extract parameters from a line of code.
+    # Detects request.<field>["name"] / request.<field>.get("name") and
+    # form.cleaned_data accesses; also widens `endpoint_methods` in place
+    # when a field implies additional HTTP methods.
+    def extract_params_from_line(line : ::String, endpoint_methods : Array(::String))
+      suspicious_params = Array(Param).new
+
+      if line.includes? "request."
+        REQUEST_PARAM_FIELD_MAP.each do |field_name, tuple|
+          field_methods, param_type = tuple
+          matches = line.scan(/request\.#{field_name}\[[rf]?['"]([^'"]*)['"]\]/)
+          if matches.size == 0
+            matches = line.scan(/request\.#{field_name}\.get\([rf]?['"]([^'"]*)['"]/)
+          end
+
+          if matches.size != 0
+            matches.each do |match|
+              next if match.size != 2
+              param_name = match[1]
+              if field_name == "META"
+                # META["HTTP_X_FOO"] corresponds to the X_FOO request header.
+                if param_name.starts_with? "HTTP_"
+                  param_name = param_name[5..]
+                end
+              end
+
+              # If a specific parameter is found, allow the corresponding methods
+              if !field_methods.nil?
+                field_methods.each do |field_method|
+                  if !endpoint_methods.includes? field_method
+                    endpoint_methods << field_method
+                  end
+                end
+              end
+
+              suspicious_params << Param.new(param_name, "", param_type)
+            end
+          end
+        end
+      end
+
+      if line.includes? "form.cleaned_data"
+        matches = line.scan(/form\.cleaned_data\[[rf]?['"]([^'"]*)['"]\]/)
+        if matches.size == 0
+          matches = line.scan(/form\.cleaned_data\.get\([rf]?['"]([^'"]*)['"]/)
+        end
+
+        if matches.size != 0
+          matches.each do |match|
+            next if match.size != 2
+            suspicious_params << Param.new(match[1], "", "form")
+          end
+        end
+      end
+
+      suspicious_params
+    end
+
+    # Keep only the params that are valid for `method`, dropping duplicates
+    # (same name and param_type). A nil entry in REQUEST_PARAM_TYPE_MAP
+    # means the param type is valid for every HTTP method.
+    def filter_params(method : ::String, params : Array(Param))
+      kept = Array(Param).new
+      upper_method = method.upcase
+
+      params.each do |param|
+        allowed_methods = REQUEST_PARAM_TYPE_MAP.fetch(param.param_type, nil)
+        supported = allowed_methods.nil? || allowed_methods.any? { |m| m.upcase == upper_method }
+
+        duplicate = kept.any? do |existing|
+          existing.name == param.name && existing.param_type == param.param_type
+        end
+
+        kept << param if supported && !duplicate
+      end
+
+      kept
+    end
+
+    # Discriminates how an imported name resolves: a whole module file
+    # (FILE) versus a symbol defined in code (CODE).
+    module PackageType
+      FILE = 0
+      CODE = 1
+    end
+
+    # A discovered URLconf: URL prefix accumulated so far, path of the
+    # urls.py file, and the project base directory it was resolved against.
+    struct DjangoUrls
+      include JSON::Serializable
+      property prefix, filepath, basepath
+
+      def initialize(@prefix : ::String, @filepath : ::String, @basepath : ::String)
+        # basepath must be a real directory; fail fast otherwise.
+        if !File.directory? @basepath
+          raise "The basepath for DjangoUrls (#{@basepath}) does not exist or is not a directory."
+        end
+      end
+    end
+
+    # A single Django view reference: URL prefix, source file, symbol name.
+    struct DjangoView
+      include JSON::Serializable
+      property prefix, filepath, name
+
+      def initialize(@prefix : ::String, @filepath : ::String, @name : ::String)
+        # Fix: a view lives in a source *file*; the original checked
+        # File.directory? (copied from DjangoUrls), which would reject every
+        # valid filepath — the error message already says "does not exist".
+        if !File.exists? @filepath
+          raise "The filepath for DjangoView (#{@filepath}) does not exist."
+        end
+      end
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/python/fastapi.cr b/src/analyzer/analyzers/python/fastapi.cr
new file mode 100644
index 00000000..a4250626
--- /dev/null
+++ b/src/analyzer/analyzers/python/fastapi.cr
@@ -0,0 +1,330 @@
+require "../../../models/analyzer"
+require "./python"
+
+module Analyzer::Python
+ class FastAPI < Python
+ @fastapi_base_path : ::String = ""
+
+    # Entry point. Pass 1 finds the FastAPI() entry file and all APIRouter
+    # instances (with their prefixes); pass 2 scans each such file for
+    # @<instance>.<method>("/path") decorators and derives parameters from
+    # the decorated function's signature.
+    def analyze
+      include_router_map = Hash(::String, Hash(::String, Router)).new
+      fastapi_base_file : ::String = ""
+
+      begin
+        # Iterate through all Python files in the base path
+        Dir.glob("#{base_path}/**/*.py") do |path|
+          next if File.directory?(path)
+          next if path.includes?("/site-packages/")
+          source = File.read(path, encoding: "utf-8", invalid: :skip)
+
+          source.each_line do |line|
+            line = line.gsub(" ", "")
+            match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:fastapi\.)?FastAPI\(/
+            if !match.nil?
+              fastapi_instance_name = match[1]
+              # NOTE(review): the guard checks the *instance name* as a key
+              # while the map is keyed by *path* — confirm which is meant.
+              unless include_router_map.has_key?(fastapi_instance_name)
+                include_router_map[path] = {match[1] => Router.new("")}
+
+                # base path
+                fastapi_base_file = path
+                @fastapi_base_path = Path.new(File.dirname(path)).parent.to_s
+                break
+              end
+            end
+
+            # https://fastapi.tiangolo.com/tutorial/bigger-applications/
+            match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:fastapi\.)?APIRouter\(/
+            if !match.nil?
+              prefix = ""
+              router_instance_name = match[1]
+              param_codes = line.split("APIRouter", 2)[1]
+              prefix_match = param_codes.match /prefix\s*=\s*['"]([^'"]*)['"]/
+              if !prefix_match.nil? && prefix_match.size == 2
+                prefix = prefix_match[1]
+              end
+
+              if include_router_map.has_key?(path)
+                include_router_map[path][router_instance_name] = Router.new(prefix)
+              else
+                include_router_map[path] = {router_instance_name => Router.new(prefix)}
+              end
+            end
+          end
+        end
+      rescue e : Exception
+        logger.debug e.message
+      end
+
+      begin
+        # Resolve nested include_router() prefixes before endpoint scan.
+        configure_router_prefix(fastapi_base_file, include_router_map)
+
+        include_router_map.each do |path, router_map|
+          source = File.read(path, encoding: "utf-8", invalid: :skip)
+          import_modules = find_imported_modules(@fastapi_base_path, path, source)
+          codelines = source.split("\n")
+          router_map.each do |instance_name, router_class|
+            codelines.each_with_index do |line, index|
+              line.scan(/@#{instance_name}\.([a-zA-Z]+)\([rf]?['"]([^'"]*)['"](.*)/) do |match|
+                if match.size > 0
+                  http_method_name = match[1].downcase
+                  # websocket/route/api_route decorators default to GET here.
+                  if ["websocket", "route", "api_route"].includes?(http_method_name)
+                    http_method_name = "GET"
+                  elsif !HTTP_METHODS.includes?(http_method_name)
+                    next
+                  end
+
+                  http_method_name = http_method_name.upcase
+
+                  http_route_path = match[2]
+                  _extra_params = match[3]
+                  params = [] of Param
+
+                  # Get path params from route path
+                  query_params = [] of ::String
+                  http_route_path.scan(/\{(#{PYTHON_VAR_NAME_REGEX})\}/) do |route_match|
+                    if route_match.size > 0
+                      query_params << route_match[1]
+                    end
+                  end
+
+                  # Parsing extra params
+                  function_definition = parse_function_def(codelines, index + 1)
+                  if !function_definition.nil?
+                    function_params = function_definition.params
+                    if function_params.size > 0
+                      function_params.each do |param|
+                        # https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/#order-the-parameters-as-you-need-tricks
+                        next if param.name == "*"
+
+                        unless query_params.includes?(param.name)
+                          # Default value is numeric or string only
+                          default_value = return_literal_value(param.default)
+
+                          # Get param type by default value first
+                          param_type = infer_parameter_type(param.default) unless param.default.empty?
+
+                          # Get param type by type if not found
+                          if param_type.nil? && !param.type.empty?
+                            param_type = param.type
+                            # https://peps.python.org/pep-0593/
+                            param_type = param_type.split("Annotated[", 2)[-1].split(",", 2)[-1] if param_type.includes?("Annotated[")
+
+                            # https://peps.python.org/pep-0484/#union-types
+                            param_type = param_type.split("Union[", 2)[-1] if param_type.includes?("Union[")
+
+                            param_type = infer_parameter_type(param_type, true)
+                            param_type = "query" if param_type.nil? && param.type.empty?
+                          else
+                            param_type = "query" if param_type.nil?
+                          end
+
+                          if param_type.nil?
+                            # Unrecognized annotation: treat it as a model
+                            # class (or raw Request/dict) and flatten fields.
+                            if /^#{PYTHON_VAR_NAME_REGEX}$/.match(param.type)
+                              new_params = nil
+                              if ["Request", "dict"].includes?(param.type)
+                                function_codeblock = parse_code_block(codelines[index + 1..])
+                                next if function_codeblock.nil?
+                                new_params = find_dictionary_params(function_codeblock, param)
+                              elsif import_modules.has_key?(param.type)
+                                # Parse model class from module path
+                                import_module_path = import_modules[param.type].first
+
+                                # Skip if import module path is not identified
+                                next if import_module_path.empty?
+
+                                import_module_source = File.read(import_module_path, encoding: "utf-8", invalid: :skip)
+                                new_params = find_base_model_params(import_module_source, param.type, param.name)
+                              else
+                                # Parse model class from current source
+                                new_params = find_base_model_params(source, param.type, param.name)
+                              end
+
+                              next if new_params.nil?
+
+                              new_params.each do |model_param|
+                                params << model_param
+                              end
+                            end
+                          else
+                            # Add endpoint param
+                            params << Param.new(param.name, default_value, param_type)
+                          end
+                        end
+                      end
+                    end
+                  end
+
+                  details = Details.new(PathInfo.new(path, index + 1))
+                  result << Endpoint.new(router_class.join(http_route_path), http_method_name, params, details)
+                end
+              end
+            end
+          end
+        rescue e : Exception
+          logger.debug e.message
+        end
+      end
+      Fiber.yield
+
+      result
+    end
+
+    # Configures the prefix for each router.
+    # Starting from `file` (the FastAPI entry module), follows
+    # `<app>.include_router(<module>.router, prefix="...")` calls and
+    # propagates accumulated prefixes recursively through the router map.
+    def configure_router_prefix(file : ::String, include_router_map : Hash(::String, Hash(::String, Router)), router_prefix : ::String = "")
+      return if file.empty? || !File.exists?(file)
+
+      # Parse the source file for router configuration
+      source = File.read(file, encoding: "utf-8", invalid: :skip)
+      import_modules = find_imported_modules(@fastapi_base_path, file, source)
+      include_router_map[file].each do |instance_name, router_class|
+        router_class.prefix = router_prefix
+
+        # Parse '{app}.include_router({item}.router, prefix="{prefix}")' code
+        source.scan(/#{instance_name}\.include_router\(([^\)]*)\)/).each do |match|
+          if match.size > 0
+            params = match[1].split(",")
+            prefix = ""
+            router_instance_name = params[0].strip
+            if params.size != 1
+              select_params = params.select(&.strip.starts_with?("prefix"))
+              if select_params.size != 0
+                # Unquote prefix="..." or prefix='...'.
+                prefix = select_params.first.split("=")[1]
+                if prefix.count("\"") == 2
+                  prefix = prefix.split("\"")[1].split("\"")[0]
+                elsif prefix.count("'") == 2
+                  prefix = prefix.split("'")[1].split("'")[0]
+                end
+              end
+            end
+
+            # Register router's prefix recursively
+            prefix = router_class.join(prefix)
+            if router_instance_name.count(".") == 0
+              # Bare name: `include_router(router)` imported directly.
+              next unless import_modules.has_key?(router_instance_name)
+              import_module_path = import_modules[router_instance_name].first
+
+              next unless include_router_map.has_key?(import_module_path)
+              configure_router_prefix(import_module_path, include_router_map, prefix)
+            elsif router_instance_name.count(".") == 1
+              # Dotted name: `include_router(module.router)`.
+              module_name, _router_instance_name = router_instance_name.split(".")
+              next unless import_modules.has_key?(module_name)
+              import_module_path = import_modules[module_name].first
+
+              next unless include_router_map.has_key?(import_module_path)
+              configure_router_prefix(import_module_path, include_router_map, prefix)
+            end
+          end
+        end
+      end
+    end
+
+    # Infers the parameter location from a default-value expression or a
+    # type annotation. Returns "cookie" / "header" / "form" / "query" /
+    # "websocket", or nil when nothing matches.
+    # Fix: the original patterns used /(\b)*Name(\b)*/ — `(\b)*` can always
+    # match empty, so the word boundary was never enforced and e.g.
+    # "Profile" matched `File`. Real \b anchors restore the intent.
+    def infer_parameter_type(data : ::String, is_param_type = false) : ::String | Nil
+      if data.match(/\bCookie\b/)
+        "cookie"
+      elsif data.match(/\bHeader\b/)
+        "header"
+      elsif data.match(/\bBody\b/) || data.match(/\bForm\b/) ||
+            data.match(/\bFile\b/) || data.match(/\bUploadFile\b/)
+        "form"
+      elsif data.match(/\bQuery\b/)
+        "query"
+      elsif data.match(/\bWebSocket\b/)
+        "websocket"
+      elsif is_param_type
+        # Primitive annotations bind as query parameters by default.
+        ["str", "int", "float", "bool", "EmailStr"].each do |type|
+          return "query" if data.includes?(type)
+        end
+      end
+    end
+
+    # Finds the parameters for a base model class.
+    # Parses `class <class_name>(BaseModel):`-style bodies into one "form"
+    # Param per annotated field; a (str, Enum) class collapses to a single
+    # query param named after the function parameter.
+    # NOTE(review): the loop below reuses (shadows) the `param_name`
+    # argument as its per-line field name — intentional but fragile.
+    def find_base_model_params(source : ::String, class_name : ::String, param_name : ::String) : Array(Param)
+      params = [] of Param
+      class_codeblock = parse_code_block(source, /\s*class\s*#{class_name}\s*\(/)
+      return params if class_codeblock.nil?
+
+      # Parse the class code block to extract parameters
+      class_codeblock.split("\n").each_with_index do |line, index|
+        if index == 0
+          # Header line: check the base-class list.
+          param_code = line.split("(", 2)[-1].split(")")[0]
+          if param_code.match(/(\b)*str,\s*(enum\.){0,1}Enum(\b)*/)
+            return [Param.new(param_name.strip, "", "query")]
+          end
+          return params unless /^#{PYTHON_VAR_NAME_REGEX}$/.match(param_code)
+        else
+          # Stop at the first line that is not a `name: type [= default]`.
+          break unless line.split(":").size == 2
+
+          param_name, extra = line.split(":", 2)
+          param_type = ""
+          param_default = ""
+          param_type_and_default = extra.split("=", 2)
+          if param_type_and_default.size == 2
+            param_type, param_default = param_type_and_default
+          else
+            param_type = param_type_and_default[0]
+          end
+
+          if !param_name.empty? && !param_type.empty?
+            default_value = return_literal_value(param_default.strip)
+            params << Param.new(param_name.strip, default_value, "form")
+          end
+        end
+      end
+
+      params
+    end
+
+    # Finds parameters in dictionary structures.
+    # For a `Request` parameter, first locates variables assigned from
+    # `await <param>.json()` and then scans key accesses on them; for a
+    # plain `dict` parameter, scans key accesses on the parameter itself.
+    def find_dictionary_params(source : ::String, param : FunctionParameter) : Array(Param)
+      new_params = [] of Param
+      json_variable_names = [] of ::String
+      codelines = source.split("\n")
+      if param.type == "Request"
+        # Parse JSON variable names
+        codelines.each do |codeline|
+          match = codeline.match /(#{PYTHON_VAR_NAME_REGEX})\s*(?::\s*#{PYTHON_VAR_NAME_REGEX})?\s*=\s*(await\s*){0,1}#{param.name}.json\(\)/
+          json_variable_names << match[1] if !match.nil? && !json_variable_names.includes?(match[1])
+        end
+
+        new_params = find_json_params(codelines, json_variable_names)
+      elsif param.type == "dict"
+        json_variable_names << param.name
+        new_params = find_json_params(codelines, json_variable_names)
+      end
+
+      new_params
+    end
+ end
+
+  # Accumulates an APIRouter's URL prefix and joins it with route paths,
+  # keeping exactly one slash at the seam.
+  class Router
+    # property generates the same reader/writer pair the original defined
+    # by hand.
+    property prefix : ::String
+
+    def initialize(@prefix : ::String)
+    end
+
+    # Join `url` onto the stored prefix without doubling or dropping the
+    # separating slash.
+    def join(url : ::String) : ::String
+      if @prefix.ends_with?("/")
+        url = url.lchop("/")
+      elsif !url.starts_with?("/")
+        url = "/#{url}"
+      end
+
+      @prefix + url
+    end
+  end
+
+  # Extend ::String with a predicate answering "does this parse as a float?".
+  class ::String
+    def numeric?
+      # Fix: use the nil-returning to_f? instead of the original
+      # `self.to_f != nil rescue false`, which relied on to_f raising for
+      # invalid input (`!= nil` was always true when no exception fired).
+      !to_f?.nil?
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/python/flask.cr b/src/analyzer/analyzers/python/flask.cr
new file mode 100644
index 00000000..58f22466
--- /dev/null
+++ b/src/analyzer/analyzers/python/flask.cr
@@ -0,0 +1,501 @@
+require "../../../models/analyzer"
+require "../../../minilexers/python"
+require "../../../miniparsers/python"
+require "./python"
+
+module Analyzer::Python
+ class Flask < Python
+ # Reference: https://stackoverflow.com/a/16664376
+ # Reference: https://tedboy.github.io/flask/generated/generated/flask.Request.html
+ REQUEST_PARAM_FIELDS = {
+ "data" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
+ "args" => {["GET"], "query"},
+ "form" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
+ "files" => {["POST", "PUT", "PATCH", "DELETE"], "form"},
+ "values" => {["GET", "POST", "PUT", "PATCH", "DELETE"], "query"},
+ "json" => {["POST", "PUT", "PATCH", "DELETE"], "json"},
+ "cookie" => {nil, "cookie"},
+ "headers" => {nil, "header"},
+ }
+
+ REQUEST_PARAM_TYPES = {
+ "query" => nil,
+ "form" => ["POST", "PUT", "PATCH", "DELETE"],
+ "json" => ["POST", "PUT", "PATCH", "DELETE"],
+ "cookie" => nil,
+ "header" => nil,
+ }
+
+ @file_content_cache = Hash(::String, ::String).new
+ @parsers = Hash(::String, PythonParser).new
+ @routes = Hash(::String, Array(Tuple(Int32, ::String, ::String, ::String))).new
+
+ def analyze
+ flask_instances = Hash(::String, ::String).new
+ flask_instances["app"] ||= "" # Common flask instance name
+ blueprint_prefixes = Hash(::String, ::String).new
+ path_api_instances = Hash(::String, Hash(::String, ::String)).new
+ register_blueprint = Hash(::String, Hash(::String, ::String)).new
+
+ # Iterate through all Python files in the base path
+ Dir.glob("#{base_path}/**/*.py") do |path|
+ next if File.directory?(path)
+ next if path.includes?("/site-packages/")
+ @logger.debug "Analyzing #{path}"
+
+ File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+ lines = file.each_line.to_a
+ next unless lines.any?(&.includes?("flask"))
+ api_instances = Hash(::String, ::String).new
+ path_api_instances[path] = api_instances
+
+ lines.each_with_index do |line, line_index|
+ line = line.gsub(" ", "") # remove spaces for easier regex matching
+
+ # Identify Flask instance assignments
+ flask_match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:flask\.)?Flask\(/
+ if flask_match
+ flask_instance_name = flask_match[1]
+ api_instances[flask_instance_name] ||= ""
+ end
+
+ # Identify Blueprint instance assignments
+ blueprint_match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:flask\.)?Blueprint\(/
+ if blueprint_match
+ prefix = ""
+ blueprint_instance_name = blueprint_match[1]
+ param_codes = line.split("Blueprint", 2)[1]
+ prefix_match = param_codes.match /url_prefix=[rf]?['"]([^'"]*)['"]/
+ if !prefix_match.nil? && prefix_match.size == 2
+ prefix = prefix_match[1]
+ end
+
+ blueprint_prefixes[blueprint_instance_name] ||= prefix
+ api_instances[blueprint_instance_name] ||= prefix
+ end
+
+ # Identify Api instance assignments
+ init_app_match = line.match /(#{PYTHON_VAR_NAME_REGEX})\.init_app\((#{PYTHON_VAR_NAME_REGEX})/
+ if init_app_match
+ api_instance_name = init_app_match[1]
+ parser = get_parser(path)
+ if parser.@global_variables.has_key?(api_instance_name)
+ gv = parser.@global_variables[api_instance_name]
+ api_instances[api_instance_name] ||= ""
+ end
+ end
+
+ # Api from flask instance
+ flask_instances.each do |_flask_instance_name, _prefix|
+ api_match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:flask_restx\.)?Api\((app=)?#{_flask_instance_name}/
+ if api_match
+ api_instance_name = api_match[1]
+ api_instances[api_instance_name] ||= _prefix
+ end
+ end
+
+ # Api from blueprint instance
+ blueprint_prefixes.each do |_blueprint_instance_name, _prefix|
+ api_match = line.match /(#{PYTHON_VAR_NAME_REGEX})(?::#{PYTHON_VAR_NAME_REGEX})?=(?:flask_restx\.)?Api\((app=)?#{_blueprint_instance_name}/
+ if api_match
+ api_instance_name = api_match[1]
+ api_instances[api_instance_name] ||= _prefix
+ end
+ end
+
+ # Api Namespace
+ api_instances.each do |_api_instance_name, _prefix|
+ add_namespace_match = line.match /(#{_api_instance_name})\.add_namespace\((#{PYTHON_VAR_NAME_REGEX})/
+ if add_namespace_match
+ parser = get_parser(path)
+ if parser.@global_variables.has_key?(add_namespace_match[2])
+ gv = parser.@global_variables[add_namespace_match[2]]
+ if gv.type == "Namespace"
+ api_instances[gv.name] = extract_namespace_prefix(parser, add_namespace_match[2], _prefix)
+ end
+ end
+ end
+ end
+
+ # Temporary Addition: register_view
+ blueprint_prefixes.each do |blueprint_name, blueprint_prefix|
+ view_registration_match = line.match /#{blueprint_name},routes=(.*)\)/
+ if view_registration_match
+ route_paths = view_registration_match[1]
+ route_paths.scan /['"]([^'"]*)['"]/ do |path_str_match|
+ if !path_str_match.nil? && path_str_match.size == 2
+ route_path = path_str_match[1]
+ # Parse methods from reference views (TODO)
+ route_url = "#{blueprint_prefix}#{route_path}"
+ route_url = "/#{route_url}" unless route_url.starts_with?("/")
+ details = Details.new(PathInfo.new(path, line_index + 1))
+ result << Endpoint.new(route_url, "GET", details)
+ end
+ end
+ end
+ end
+
+ # Identify Blueprint registration
+ register_blueprint_match = line.match /(#{PYTHON_VAR_NAME_REGEX})\.register_blueprint\((#{DOT_NATION})/
+ if register_blueprint_match
+ url_prefix_match = line.match /url_prefix=[rf]?['"]([^'"]*)['"]/
+ if url_prefix_match
+ blueprint_name = register_blueprint_match[2]
+ parser = get_parser(path)
+ if parser.@global_variables.has_key?(blueprint_name)
+ gv = parser.@global_variables[blueprint_name]
+ if gv.type == "Blueprint"
+ register_blueprint[gv.path] ||= Hash(::String, ::String).new
+ register_blueprint[gv.path][blueprint_name] = url_prefix_match[1]
+ end
+ end
+ end
+ end
+
+ # Identify Flask route decorators
+ line.scan(/@(#{PYTHON_VAR_NAME_REGEX})\.route\([rf]?['"]([^'"]*)['"](.*)/) do |_match|
+ if _match.size > 0
+ router_name = _match[1]
+ route_path = _match[2]
+ extra_params = _match[3]
+ router_info = Tuple(Int32, ::String, ::String, ::String).new(line_index, path, route_path, extra_params)
+ @routes[router_name] ||= [] of Tuple(Int32, ::String, ::String, ::String)
+ @routes[router_name] << router_info
+ end
+ end
+ end
+ end
+ end
+
+ # Update the API instances with the blueprint prefixes
+ register_blueprint.each do |path, blueprint_info|
+ blueprint_info.each do |blueprint_name, blueprint_prefix|
+ if path_api_instances.has_key?(path)
+ api_instances = path_api_instances[path]
+ if api_instances.has_key?(blueprint_name)
+ api_instances[blueprint_name] = File.join(blueprint_prefix, api_instances[blueprint_name])
+ end
+ end
+ end
+ end
+
+ # Iterate through the routes and extract endpoints
+ @routes.each do |router_name, router_info_list|
+ router_info_list.each do |router_info|
+ line_index, path, route_path, extra_params = router_info
+ lines = fetch_file_content(path).lines
+ expect_params, class_def_index = extract_params_from_decorator(path, lines, line_index)
+ api_instances = path_api_instances[path]
+ if api_instances.has_key?(router_name)
+ prefix = api_instances[router_name]
+ else
+ parser = get_parser(path)
+ prefix = extract_namespace_prefix(parser, router_name, "")
+ end
+
+ is_class_router = false
+ indent = lines[class_def_index].index("def") || 0
+ unless lines[class_def_index].lstrip.starts_with?("def ")
+ if lines[class_def_index].lstrip.starts_with?("class ")
+ indent = lines[class_def_index].index("class") || 0
+ is_class_router = true
+ else
+ next # Skip if not a function and not a class
+ end
+ end
+
+ i = class_def_index
+ function_name_locations = Array(Tuple(Int32, ::String)).new
+ while i < lines.size
+ def_match = lines[i].match /(\s*)def\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*\(/
+ if def_match
+ # Stop when the indentation is less than or equal to the class indentation
+ break if is_class_router && def_match[1].size <= indent
+
+ # Stop when the first function is found
+ function_name_locations << Tuple.new(i, def_match[2])
+ break unless is_class_router
+ end
+
+ # Stop when the next class definition is found
+ if is_class_router && i != class_def_index
+ class_match = lines[i].match /(\s*)class\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*/
+ if class_match
+ break if class_match[1].size <= indent
+ end
+ end
+
+ i += 1
+ end
+
+ function_name_locations.each do |_class_def_index, _function_name|
+ if is_class_router
+ # Replace the class expect params with the function expect params
+ def_expect_params, _ = extract_params_from_decorator(path, lines, _class_def_index, :up)
+ if def_expect_params.size > 0
+ expect_params = def_expect_params
+ end
+ end
+
+ codeblock = parse_code_block(lines[_class_def_index..])
+ next if codeblock.nil?
+ codeblock_lines = codeblock.split("\n")
+
+ # Get the HTTP method from the function name when it is not specified in the route decorator
+ method = HTTP_METHODS.find { |http_method| _function_name.downcase == http_method.downcase } || "GET"
+ get_endpoints(method, route_path, extra_params, codeblock_lines, prefix).each do |endpoint|
+ details = Details.new(PathInfo.new(path, line_index + 1))
+ endpoint.details = details
+
+ # Add expect params as endpoint params
+ if expect_params.size > 0
+ expect_params.each do |param|
+ # Change the param type to form if the endpoint method is POST
+ if endpoint.method == "GET"
+ endpoint.push_param(Param.new(param.name, param.value, "query"))
+ else
+ endpoint.push_param(Param.new(param.name, param.value, "form"))
+ end
+ end
+ end
+ result << endpoint
+ end
+ end
+ end
+ end
+
+ Fiber.yield
+ result
+ end
+
+    # Fetch content of a file and cache it, so repeated route/parser lookups
+    # for the same path do not re-read the file from disk.
+    private def fetch_file_content(path : ::String) : ::String
+      @file_content_cache[path] ||= File.read(path, encoding: "utf-8", invalid: :skip)
+    end
+
+    # Builds a PythonParser for `path`; reads (and caches) the file content
+    # when none is supplied by the caller.
+    def create_parser(path : ::String, content : ::String = "") : PythonParser
+      source = content.empty? ? fetch_file_content(path) : content
+      @logger.debug "Tokenizing #{path}"
+      tokens = PythonLexer.new.tokenize(source)
+      @logger.debug "Parsing #{path}"
+      result = PythonParser.new(path, tokens, @parsers)
+      @logger.debug "Parsed #{path}"
+      result
+    end
+
+    # Returns the memoized parser for `path`, creating it on first use.
+    def get_parser(path : ::String, content : ::String = "") : PythonParser
+      cached = @parsers[path]?
+      return cached unless cached.nil?
+
+      @parsers[path] = create_parser(path, content)
+    end
+
+ # Extracts endpoint information from the given route and code block
+ def get_endpoints(method : ::String, route_path : ::String, extra_params : ::String, codeblock_lines : Array(::String), prefix : ::String)
+ endpoints = [] of Endpoint
+ methods = [] of ::String
+ suspicious_params = [] of Param
+
+ if !prefix.ends_with?("/") && !route_path.starts_with?("/")
+ prefix = "#{prefix}/"
+ end
+
+ # Parse declared methods from route decorator
+ methods_match = extra_params.match /methods\s*=\s*(.*)/
+ if !methods_match.nil? && methods_match.size == 2
+ declare_methods = methods_match[1].downcase
+ HTTP_METHODS.each do |method_name|
+ if declare_methods.includes? method_name
+ methods << method_name.upcase
+ end
+ end
+ else
+ methods << method.upcase
+ end
+
+ json_variable_names = [] of ::String
+ # Parse JSON variable names
+ codeblock_lines.each do |codeblock_line|
+ match = codeblock_line.match /([a-zA-Z_][a-zA-Z0-9_]*).*=\s*json\.loads\(request\.data/
+ if !match.nil? && match.size == 2 && !json_variable_names.includes?(match[1])
+ json_variable_names << match[1]
+ end
+
+ match = codeblock_line.match /([a-zA-Z_][a-zA-Z0-9_]*).*=\s*request\.json/
+ if !match.nil? && match.size == 2 && !json_variable_names.includes?(match[1])
+ json_variable_names << match[1]
+ end
+ end
+
+ # Parse declared parameters
+ codeblock_lines.each do |codeblock_line|
+ REQUEST_PARAM_FIELDS.each do |field_name, tuple|
+ _, noir_param_type = tuple
+ matches = codeblock_line.scan(/request\.#{field_name}\[[rf]?['"]([^'"]*)['"]\]/)
+ if matches.size == 0
+ matches = codeblock_line.scan(/request\.#{field_name}\.get\([rf]?['"]([^'"]*)['"]/)
+ end
+ if matches.size == 0
+ noir_param_type = "json"
+ json_variable_names.each do |json_variable_name|
+ matches = codeblock_line.scan(/[^a-zA-Z_]#{json_variable_name}\[[rf]?['"]([^'"]*)['"]\]/)
+ if matches.size == 0
+ matches = codeblock_line.scan(/[^a-zA-Z_]#{json_variable_name}\.get\([rf]?['"]([^'"]*)['"]/)
+ end
+
+ if matches.size > 0
+ break
+ end
+ end
+ end
+
+ matches.each do |parameter_match|
+ next if parameter_match.size != 2
+ param_name = parameter_match[1]
+
+ suspicious_params << Param.new(param_name, "", noir_param_type)
+ end
+ end
+ end
+
+ methods.uniq.each do |http_method_name|
+ route_url = "#{prefix}#{route_path}"
+ route_url = "/#{route_url}" unless route_url.starts_with?("/")
+
+ params = get_filtered_params(http_method_name, suspicious_params)
+ endpoints << Endpoint.new(route_url.gsub("//", "/"), http_method_name, params)
+ end
+
+ endpoints
+ end
+
+    # Filters the parameters based on the HTTP method.
+    # A param is kept when its location (query/form/json/cookie/header) is
+    # valid for `method` (a nil entry in REQUEST_PARAM_TYPES means "any
+    # method") and no param with the same name and type was kept already.
+    def get_filtered_params(method : ::String, params : Array(Param)) : Array(Param)
+      # Split to other module (duplicated method with analyzer_django)
+      upper_method = method.upcase
+      kept = Array(Param).new
+
+      params.each do |param|
+        support_methods = REQUEST_PARAM_TYPES.fetch(param.param_type, nil)
+        supported = support_methods.nil? || support_methods.any? { |m| m.upcase == upper_method }
+        duplicated = kept.any? { |other| other.name == param.name && other.param_type == param.param_type }
+
+        kept << param if supported && !duplicated
+      end
+
+      kept
+    end
+
+ # Extracts parameters from the decorator
+ def extract_params_from_decorator(path : ::String, lines : Array(::String), line_index : Int32, direction : Symbol = :down) : Tuple(Array(Param), Int32)
+ params = [] of Param
+ codeline_index = (direction == :down) ? line_index + 1 : line_index - 1
+
+ # Iterate through the lines until the decorator ends
+ while (direction == :down && codeline_index < lines.size) || (direction == :up && codeline_index >= 0)
+ decorator_match = lines[codeline_index].match /\s*@/
+ break if decorator_match.nil?
+
+ # Extract parameters from the expect decorator
+ # https://flask-restx.readthedocs.io/en/latest/swagger.html#the-api-expect-decorator
+ expect_match = lines[codeline_index].match /\s*@.+\.expect\(\s*(#{DOT_NATION})/
+ if !expect_match.nil?
+ parser = get_parser(path)
+ if parser.@global_variables.has_key?(expect_match[1])
+ gv = parser.@global_variables[expect_match[1]]
+ if gv.type == "Namespace.model"
+ model = gv.value.split("model(", 2)[1]
+ parameter_dict_literal = model.split("{", 1)[-1]
+
+ field_pos_list = [] of Tuple(Int32, Int32)
+ parameter_dict_literal.scan(/['"]([^'"]*)['"]:\s*fields\./) do |match|
+ match_begin = match.begin(0)
+ match_end = match.end(0)
+ field_pos_list << Tuple.new(match_begin, match_end)
+ end
+
+ field_pos_list.each_with_index do |field_pos, index|
+ field_begin_pos = field_pos[0]
+ field_end_pos = -1
+ if field_pos_list.size != 0 && index != field_pos_list.size - 1
+ next_field_start_pos = field_pos_list[index + 1][0]
+ field_end_pos += next_field_start_pos + field_pos[1]
+ end
+
+ field_literal = parameter_dict_literal[field_begin_pos..field_end_pos]
+ field_key_literal, field_value_literal = field_literal.split(":", 2)
+ field_key = field_key_literal.strip[1..-2]
+ default_value = ""
+ default_assign_match = /default=(.+)/.match(field_value_literal)
+ if default_assign_match
+ rindex = default_assign_match[1].rindex(",")
+ rindex = default_assign_match[1].rindex(")") if rindex.nil?
+ unless rindex.nil?
+ default_value = default_assign_match[1][..rindex - 1].strip
+ if default_value[0] == "'" || default_value[0] == '"'
+ default_value = default_value[1..-2]
+ end
+ end
+ end
+
+ params << Param.new(field_key, default_value, "query")
+ end
+ end
+ end
+ end
+
+ codeline_index += (direction == :down ? 1 : -1)
+ end
+
+ return params, [lines.size - 1, codeline_index].min
+ end
+
+    # Function to extract namespace from the parser and update the prefix.
+    #
+    # Looks up `key` in the parser's global variables; when it is a
+    # flask_restx `Namespace`, extracts its path segment (the explicit
+    # `path=` keyword argument wins over the first positional argument) and
+    # joins it onto `_prefix`. Returns `_prefix` unchanged otherwise.
+    private def extract_namespace_prefix(parser : PythonParser, key : ::String, _prefix : ::String) : ::String
+      # Check if the parser's global variables contain the given key
+      if parser.@global_variables.has_key?(key)
+        gv = parser.@global_variables[key]
+
+        # If the global variable is of type "Namespace"
+        if gv.type == "Namespace"
+          # Extract namespace value from the global variable
+          namespace = gv.value.split("Namespace(", 2)[1]
+          if namespace.includes?("path=")
+            # Explicit path= keyword: take its value up to the next ')' or ','
+            namespace = namespace.split("path=")[1].split(")")[0].split(",")[0]
+          else
+            # Positional form: first argument is the namespace name
+            namespace = namespace.split(",")[0].split(")")[0].strip
+          end
+
+          # Clean up the namespace string by removing surrounding quotes
+          if namespace.starts_with?("'") || namespace.starts_with?("\"")
+            namespace = namespace[1..]
+          end
+          if namespace.ends_with?("'") || namespace.ends_with?("\"")
+            namespace = namespace[..-2]
+          end
+
+          _prefix = File.join(_prefix, namespace)
+        end
+      end
+      _prefix
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/python/python.cr b/src/analyzer/analyzers/python/python.cr
new file mode 100644
index 00000000..92afacd7
--- /dev/null
+++ b/src/analyzer/analyzers/python/python.cr
@@ -0,0 +1,425 @@
+require "../../../models/analyzer"
+require "json"
+
+module Analyzer::Python
+ class Python < Analyzer
+ # HTTP method names commonly used in REST APIs
+ HTTP_METHODS = ["get", "post", "put", "patch", "delete", "head", "options", "trace"]
+ # Indentation size in spaces; different sizes can cause analysis issues
+ INDENTATION_SIZE = 4
+ # Regex for valid Python variable names
+ PYTHON_VAR_NAME_REGEX = /[a-zA-Z_][a-zA-Z0-9_]*/
+ # Regex for valid Python module names
+ DOT_NATION = /[a-zA-Z_][a-zA-Z0-9_.]*/
+
+ # Parses the definition of a function from the source lines starting at a given index
+ def parse_function_def(source_lines : Array(::String), start_index : Int32) : FunctionDefinition | Nil
+ parameters = [] of FunctionParameter
+ def_line = source_lines[start_index]
+ return nil unless def_line.includes?("def ")
+
+ # Extract the function name and parameter line
+ name = def_line.split("def ", 2)[1].split("(", 2)[0].strip
+ param_line = def_line.split("(", 2)[1]
+
+ index = 0
+ param_name = ""
+ param_type = ""
+ param_default = ""
+
+ is_option = false
+ is_default = false
+ bracket_count = 0
+ parentheses_count = 1
+
+ line_index = start_index
+ # Iterate over the parameter line to parse each parameter
+ while parentheses_count != 0
+ while index < param_line.size
+ char = param_line[index]
+ if char == '['
+ bracket_count += 1
+ elsif char == ']'
+ bracket_count -= 1
+ elsif bracket_count == 0
+ if char == '('
+ parentheses_count += 1
+ elsif parentheses_count == 1 && char == '='
+ is_default = true
+ index += 1
+ next
+ elsif parentheses_count == 1 && char == ','
+ parameters << FunctionParameter.new(param_name.strip, param_type.strip, param_default.strip)
+
+ param_name = ""
+ param_type = ""
+ param_default = ""
+ is_option = false
+ is_default = false
+ index += 1
+ next
+ elsif char == ')'
+ parentheses_count -= 1
+ if parentheses_count == 0
+ if param_name.size != 0
+ parameters << FunctionParameter.new(param_name.strip, param_type.strip, param_default.strip)
+ end
+ break
+ end
+ elsif char == ':'
+ is_option = true
+ index += 1
+ next
+ end
+ end
+
+ if is_default
+ param_default += char
+ elsif is_option
+ param_type += char
+ else
+ param_name += char
+ end
+
+ index += 1
+ end
+
+ line_index += 1
+ if line_index < source_lines.size
+ param_line = source_lines[line_index]
+ index = 0
+ next
+ end
+
+ break
+ end
+
+ FunctionDefinition.new(name, parameters)
+ end
+
+ # Finds all the modules imported in a given Python file
+ def find_imported_modules(app_base_path : ::String, file_path : ::String, content : ::String? = nil) : Hash(::String, Tuple(::String, Int32))
+ # If content is not provided, read it from the file
+ content = File.read(file_path, encoding: "utf-8", invalid: :skip) if content.nil?
+
+ file_base_path = file_path
+ file_base_path = File.dirname(file_path) if file_path.ends_with? ".py"
+
+ import_map = Hash(::String, Tuple(::String, Int32)).new
+ offset = 0
+ content.each_line do |line|
+ package_path = app_base_path
+ from_import = ""
+ imports = ""
+
+ # Check if the line starts with "from" or "import"
+ if line.starts_with?("from")
+ line.scan(/from\s*([^'"\s\\]*)\s*import\s*(.*)/) do |match|
+ next if match.size != 3
+ from_import = match[1]
+ imports = match[2]
+ end
+ elsif line.starts_with?("import")
+ line.scan(/import\s*([^'"\s\\]*)/) do |match|
+ next if match.size != 2
+ imports = match[1]
+ end
+ end
+
+ unless imports.empty?
+ round_bracket_index = line.index('(')
+ if !round_bracket_index.nil?
+ # Parse 'import (\n a,\n b,\n c)' pattern
+ index = offset + round_bracket_index + 1
+ while index < content.size && content[index] != ')'
+ index += 1
+ end
+ imports = content[(offset + round_bracket_index + 1)..(index - 1)].strip
+ end
+
+ # Handle relative paths
+ if from_import.starts_with?("..")
+ package_path = File.join(file_base_path, "..")
+ from_import = from_import[2..]
+ elsif from_import.starts_with?(".")
+ package_path = file_base_path
+ from_import = from_import[1..]
+ end
+
+ imports.split(",").each do |import|
+ import = import.strip
+ if import.starts_with?("..")
+ package_path = File.join(file_base_path, "..")
+ elsif import.starts_with?(".")
+ package_path = file_base_path
+ end
+
+ dotted_as_names = import
+ dotted_as_names = "#{from_import}.#{import}" unless from_import.empty?
+
+ # Create package map (Hash[name => filepath, ...])
+ import_package_map = find_imported_package(package_path, dotted_as_names)
+ next if import_package_map.empty?
+ import_package_map.each do |name, filepath, package_type|
+ import_map[name] = {filepath, package_type}
+ end
+ end
+ end
+
+ offset += line.size + 1
+ end
+
+ import_map
+ end
+
+ # Finds the package path for imported modules
+ def find_imported_package(package_path : ::String, dotted_as_names : ::String) : Array(Tuple(::String, ::String, Int32))
+ package_map = Array(Tuple(::String, ::String, Int32)).new
+
+ py_path = ""
+ is_positive_travel = false
+ dotted_as_names_split = dotted_as_names.split(".")
+
+ dotted_as_names_split.each_with_index do |names, index|
+ travel_package_path = File.join(package_path, names)
+
+ py_guess = "#{travel_package_path}.py"
+ if File.directory?(travel_package_path)
+ package_path = travel_package_path
+ is_positive_travel = true
+ elsif dotted_as_names_split.size - 2 <= index && File.exists?(py_guess)
+ py_path = py_guess
+ is_positive_travel = true
+ else
+ break
+ end
+ end
+
+ if is_positive_travel
+ names = dotted_as_names_split[-1]
+ names.split(",").each do |name|
+ import = name.strip
+ next if import.empty?
+
+ alias_name = nil
+ if import.includes?(" as ")
+ import, alias_name = import.split(" as ")
+ end
+
+ package_type = File.exists?(File.join(package_path, "#{import}.py")) ? PackageType::FILE : PackageType::CODE
+
+ if !alias_name.nil?
+ package_map << {alias_name, py_path, package_type}
+ else
+ package_map << {import, py_path, package_type}
+ end
+ end
+ end
+
+ package_map
+ end
+
+ # Finds all parameters in JSON objects within a given code block
+ def find_json_params(codeblock_lines : Array(::String), json_var_names : Array(::String)) : Array(Param)
+ params = [] of Param
+
+ codeblock_lines.each do |codeblock_line|
+ json_var_names.each do |json_var_name|
+ matches = codeblock_line.scan(/[^a-zA-Z_]#{json_var_name}\[[rf]?['"]([^'"]*)['"]\]/)
+ if matches.size == 0
+ matches = codeblock_line.scan(/[^a-zA-Z_]#{json_var_name}\.get\([rf]?['"]([^'"]*)['"]/)
+ end
+
+ if !matches.nil?
+ matches.each do |match|
+ if match.size > 0
+ params << Param.new(match[1], "", "json")
+ end
+ end
+ end
+ end
+ end
+
+ params
+ end
+
+ # Parses a function or class definition from a string or an array of strings
+ def parse_code_block(data : ::String | Array(::String), after : Regex | Nil = nil) : ::String | Nil
+ content = ""
+ lines = [] of ::String
+ if data.is_a?(::String)
+ lines = data.split("\n")
+ content = data
+ else
+ lines = data
+ content = data.join("\n")
+ end
+
+ # Remove lines before the "after" line if provided
+ unless after.nil?
+ line_size = lines.size
+ lines.each_with_index do |line, index|
+ if line.starts_with?(after)
+ lines = lines[index..]
+ content = lines.join("\n")
+ break
+ end
+ end
+
+ # If no line starts with "after", return nil
+ return nil if line_size == lines.size
+ end
+
+ # Infer indentation size
+ indent_size = 0
+ if lines.size > 0
+ while indent_size < lines[0].size && lines[0][indent_size] == ' '
+ # Only spaces, no tabs
+ indent_size += 1
+ end
+
+ indent_size += INDENTATION_SIZE
+ end
+
+ # Parse function or class code block
+ if indent_size > 0
+ double_quote_open, single_quote_open = [false, false]
+ double_comment_open, single_comment_open = [false, false]
+ end_index = lines[0].size + 1
+ lines[1..].each do |line|
+ line_index = 0
+ clear_line = line
+ while line_index < line.size
+ if line_index < line.size - 2
+ if !single_quote_open && !double_quote_open
+ if !double_comment_open && line[line_index..line_index + 2] == "'''"
+ single_comment_open = !single_comment_open
+ line_index += 3
+ next
+ elsif !single_comment_open && line[line_index..line_index + 2] == "\"\"\""
+ double_comment_open = !double_comment_open
+ line_index += 3
+ next
+ end
+ end
+ end
+
+ if !single_comment_open && !double_comment_open
+ if !single_quote_open && line[line_index] == '"' && line[line_index - 1] != '\\'
+ double_quote_open = !double_quote_open
+ elsif !double_quote_open && line[line_index] == '\'' && line[line_index - 1] != '\\'
+ single_quote_open = !single_quote_open
+ elsif !single_quote_open && !double_quote_open && line[line_index] == '#' && line[line_index - 1] != '\\'
+ clear_line = line[..(line_index - 1)]
+ break
+ end
+ end
+
+ line_index += 1
+ end
+
+ open_status = single_comment_open || double_comment_open || single_quote_open || double_quote_open
+ if clear_line[0..(indent_size - 1)].strip == "" || open_status
+ end_index += line.size + 1
+ else
+ break
+ end
+ end
+
+ end_index -= 1
+ return content[..end_index].strip
+ end
+
+ nil
+ end
+
+    # Returns the literal value from a string if it represents a number or a
+    # quoted string; returns "" for anything else.
+    #
+    # FIX: the previous implementation computed the value inside an `if`
+    # expression but then fell through to a trailing `""`, which — being the
+    # method's last expression — made it ALWAYS return an empty string.
+    # Explicit early returns restore the intended behavior.
+    def return_literal_value(data : ::String) : ::String
+      # Numeric literals are returned verbatim.
+      return data if data.numeric?
+
+      # Quoted strings are returned without their surrounding quotes.
+      if data.size != 0 && data[0] == data[-1] && ['"', '\''].includes?(data[0])
+        return data[1..-2]
+      end
+
+      ""
+    end
+
+    # Marker values describing how an imported name resolves:
+    # FILE when a matching `.py` file exists on disk, CODE otherwise.
+    # Kept as plain Int32 constants because they travel inside
+    # Tuple(::String, ::String, Int32) package maps.
+    module PackageType
+      FILE = 0
+      CODE = 1
+    end
+
+    # Value object describing a single parameter of a parsed Python function:
+    # its name, optional type annotation, and optional default value.
+    class FunctionParameter
+      # `property` generates the name/type/default getters and setters,
+      # replacing the hand-written accessor boilerplate with an identical
+      # public interface.
+      property name : ::String
+      property type : ::String
+      property default : ::String
+
+      def initialize(@name : ::String, @type : ::String, @default : ::String)
+      end
+
+      # Human-readable representation used for debugging output.
+      def to_s : ::String
+        if @type.size != 0
+          if @default.size != 0
+            "Name(#{@name}): Type(#{@type}) = Default(#{@default})"
+          else
+            "Name(#{@name}): Type(#{@type})"
+          end
+        else
+          "Name(#{@name})"
+        end
+      end
+    end
+
+    # A parsed Python function definition: its name plus parameter list.
+    class FunctionDefinition
+      # Read access to the function name (previously stored in @name but
+      # inaccessible from outside; exposing it is backward-compatible).
+      getter name : ::String
+      # Read access to the parsed parameters.
+      getter params : Array(FunctionParameter)
+
+      def initialize(@name : ::String, @params : Array(FunctionParameter))
+      end
+
+      # Appends a parameter discovered after construction.
+      def add_parameter(param : FunctionParameter)
+        @params << param
+      end
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/ruby/hanami.cr b/src/analyzer/analyzers/ruby/hanami.cr
new file mode 100644
index 00000000..6d1fd68d
--- /dev/null
+++ b/src/analyzer/analyzers/ruby/hanami.cr
@@ -0,0 +1,72 @@
+require "../../../models/analyzer"
+
+module Analyzer::Ruby
+ class Hanami < Analyzer
+    def analyze
+      # Config Analysis: Hanami declares all routes in config/routes.rb.
+      path = "#{@base_path}/config/routes.rb"
+      if File.exists?(path)
+        File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+          file.each_line.with_index do |line, index|
+            details = Details.new(PathInfo.new(path, index + 1))
+            endpoint = line_to_endpoint(line, details)
+            # line_to_endpoint returns a blank endpoint when no route matched.
+            # (The old last_endpoint bookkeeping was dead code and is removed.)
+            @result << endpoint if endpoint.method != ""
+          end
+        end
+      end
+
+      @result
+    end
+
+    # Maps a single routes.rb line to an Endpoint; returns a blank Endpoint
+    # (empty path and method) when the line declares no route.
+    def line_to_endpoint(content : String, details : Details) : Endpoint
+      # One scan per HTTP verb, replacing seven near-identical copies of the
+      # same match-and-return block. Crystal hash literals preserve insertion
+      # order, so the original match priority (get first ... options last) is
+      # unchanged.
+      {"get" => "GET", "post" => "POST", "put" => "PUT", "delete" => "DELETE",
+       "patch" => "PATCH", "head" => "HEAD", "options" => "OPTIONS"}.each do |keyword, method|
+        content.scan(/#{keyword}\s+['"](.+?)['"]/) do |match|
+          if match.size > 1
+            return Endpoint.new("#{match[1]}", method, details)
+          end
+        end
+      end
+
+      Endpoint.new("", "")
+    end
+ end
+end
diff --git a/src/analyzer/analyzers/ruby/rails.cr b/src/analyzer/analyzers/ruby/rails.cr
new file mode 100644
index 00000000..efe23f8a
--- /dev/null
+++ b/src/analyzer/analyzers/ruby/rails.cr
@@ -0,0 +1,284 @@
+require "../../../models/analyzer"
+
+module Analyzer::Ruby
+  # Endpoint analyzer for Ruby on Rails applications.
+  # Combines three sources: files under public/ (served as GET endpoints),
+  # explicit verb routes in config/routes.rb, and RESTful endpoints derived
+  # from controllers referenced by `resources`/`resource` declarations.
+  class Rails < Analyzer
+    def analyze
+      # Public Dir Analysis
+      begin
+        Dir.glob("#{@base_path}/public/**/*") do |file|
+          next if File.directory?(file)
+          real_path = "#{@base_path}/public/".gsub(/\/+/, '/')
+          relative_path = file.sub(real_path, "")
+          details = Details.new(PathInfo.new(file))
+          @result << Endpoint.new("/#{relative_path}", "GET", details)
+        end
+      rescue e
+        logger.debug e
+      end
+
+      # Config Analysis
+      if File.exists?("#{@base_path}/config/routes.rb")
+        File.open("#{@base_path}/config/routes.rb", "r", encoding: "utf-8", invalid: :skip) do |file|
+          file.each_line do |line|
+            stripped_line = line.strip
+            if stripped_line.size > 0 && stripped_line[0] != '#'
+              # `resources :posts` declarations: try the matching controller
+              # file with singular and common plural suffixes.
+              line.scan(/resources?\s+:.*/) do |match|
+                splited = match[0].split(":")
+                if splited.size > 1
+                  resource = splited[1].split(",")[0]
+
+                  # NOTE(review): `@result +=` depends on the left operand being
+                  # read before the call, since controller_to_endpoint reassigns
+                  # @result internally.
+                  @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}_controller.rb", @url, resource)
+                  @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}s_controller.rb", @url, resource)
+                  @result += controller_to_endpoint("#{@base_path}/app/controllers/#{resource}es_controller.rb", @url, resource)
+                end
+              end
+
+              # Explicit verb routes, e.g. `get "/login"`.
+              details = Details.new(PathInfo.new("#{@base_path}/config/routes.rb"))
+              line.scan(/get\s+['"](.+?)['"]/) do |match|
+                @result << Endpoint.new("#{match[1]}", "GET", details)
+              end
+              line.scan(/post\s+['"](.+?)['"]/) do |match|
+                @result << Endpoint.new("#{match[1]}", "POST", details)
+              end
+              line.scan(/put\s+['"](.+?)['"]/) do |match|
+                @result << Endpoint.new("#{match[1]}", "PUT", details)
+              end
+              line.scan(/delete\s+['"](.+?)['"]/) do |match|
+                @result << Endpoint.new("#{match[1]}", "DELETE", details)
+              end
+              line.scan(/patch\s+['"](.+?)['"]/) do |match|
+                @result << Endpoint.new("#{match[1]}", "PATCH", details)
+              end
+            end
+          end
+        end
+      end
+
+      @result
+    end
+
+    # Derives RESTful endpoints (index/show/create/update/destroy) for one
+    # controller file. Query/body params come from `params.require(...).permit`
+    # and `params[:...]`; header/cookie params from `request.headers[...]` and
+    # `cookies`/`cookies.signed`/`cookies.encrypted` usages.
+    # NOTE(review): reassigns @result and @url as a side effect; callers use
+    # the returned array, not the instance state.
+    def controller_to_endpoint(path : String, @url : String, resource : String)
+      @result = [] of Endpoint
+
+      if File.exists?(path)
+        File.open(path, "r", encoding: "utf-8", invalid: :skip) do |controller_file|
+          param_type = "form"
+          params_query = [] of Param
+          params_body = [] of Param
+          params_method = Hash(String, Array(Param)).new
+          methods = [] of String
+          this_method = ""
+
+          # Controllers that render JSON are assumed to accept JSON bodies.
+          controller_content = controller_file.gets_to_end
+          if controller_content.includes? "render json:"
+            param_type = "json"
+          end
+
+          controller_file.rewind
+          controller_file.each_line do |controller_line|
+            # Track which RESTful action the current line belongs to.
+            if controller_line.includes? "def "
+              func_name = controller_line.split("def ")[1].split("(")[0]
+              case func_name
+              when "index"
+                methods << "GET/INDEX"
+                this_method = func_name
+              when "show"
+                methods << "GET/SHOW"
+                this_method = func_name
+              when "create"
+                methods << "POST"
+                this_method = func_name
+              when "update"
+                methods << "PUT"
+                this_method = func_name
+              when "destroy"
+                methods << "DELETE"
+                this_method = func_name
+              end
+            end
+
+            if controller_line.includes? "params.require"
+              splited_param = controller_line.strip.split("permit")
+              if splited_param.size > 1
+                # NOTE(review): gsub("s", "") strips every "s" from the permit
+                # list (e.g. "password" -> "paword"), not just a plural suffix
+                # — verify against upstream intent before changing.
+                tparam = splited_param[1].gsub("(", "").gsub(")", "").gsub("s", "").gsub(":", "")
+                tparam.split(",").each do |param|
+                  params_body << Param.new(param.strip, "", param_type)
+                  params_query << Param.new(param.strip, "", "query")
+                end
+              end
+            end
+
+            if controller_line.includes? "params[:"
+              splited_param = controller_line.strip.split("params[:")[1]
+              if splited_param
+                param = splited_param.split("]")[0]
+                params_body << Param.new(param.strip, "", param_type)
+                params_query << Param.new(param.strip, "", "query")
+              end
+            end
+
+            if controller_line.includes? "request.headers["
+              splited_param = controller_line.strip.split("request.headers[")[1]
+              if splited_param
+                param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
+                param_line = Param.new(param.strip, "", "header")
+                if params_method.has_key? this_method
+                  params_method[this_method] << param_line
+                else
+                  params_method[this_method] = [] of Param
+                  params_method[this_method] << param_line
+                end
+              end
+            end
+
+            if controller_line.includes? "cookies[:"
+              splited_param = controller_line.strip.split("cookies[:")[1]
+              if splited_param
+                param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
+                if this_method != ""
+                  param_line = Param.new(param.strip, "", "cookie")
+                  if params_method.has_key? this_method
+                    params_method[this_method] << param_line
+                  else
+                    params_method[this_method] = [] of Param
+                    params_method[this_method] << param_line
+                  end
+                end
+              end
+            end
+
+            if controller_line.includes? "cookies.signed[:"
+              splited_param = controller_line.strip.split("cookies.signed[:")[1]
+              if splited_param
+                param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
+                if this_method != ""
+                  param_line = Param.new(param.strip, "", "cookie")
+                  if params_method.has_key? this_method
+                    params_method[this_method] << param_line
+                  else
+                    params_method[this_method] = [] of Param
+                    params_method[this_method] << param_line
+                  end
+                end
+              end
+            end
+
+            if controller_line.includes? "cookies.encrypted[:"
+              splited_param = controller_line.strip.split("cookies.encrypted[:")[1]
+              if splited_param
+                param = splited_param.split("]")[0].gsub("'", "").gsub("\"", "")
+                if this_method != ""
+                  param_line = Param.new(param.strip, "", "cookie")
+                  if params_method.has_key? this_method
+                    params_method[this_method] << param_line
+                  else
+                    params_method[this_method] = [] of Param
+                    params_method[this_method] << param_line
+                  end
+                end
+              end
+            end
+          end
+
+          # Params whose names repeat across the controller are treated as
+          # shared query params for every derived endpoint.
+          deduplication_params_query = [] of Param
+          get_param_duplicated : Array(String) = [] of String
+
+          params_query.each do |get_param|
+            if get_param_duplicated.includes? get_param.name
+              deduplication_params_query << get_param
+            else
+              get_param_duplicated << get_param.name
+            end
+          end
+
+          details = Details.new(PathInfo.new(path))
+          methods.each do |method|
+            if method == "GET/INDEX"
+              if params_method.has_key? "index"
+                index_params = [] of Param
+                params_method["index"].each do |param|
+                  index_params << param
+                end
+              end
+
+              index_params ||= [] of Param
+              deduplication_params_query ||= [] of Param
+              last_params = index_params + deduplication_params_query
+              @result << Endpoint.new("/#{resource}", "GET", last_params, details)
+            elsif method == "GET/SHOW"
+              if params_method.has_key? "show"
+                show_params = [] of Param
+                params_method["show"].each do |param|
+                  show_params << param
+                end
+              end
+              show_params ||= [] of Param
+              deduplication_params_query ||= [] of Param
+              last_params = show_params + deduplication_params_query
+              @result << Endpoint.new("/#{resource}/1", "GET", last_params, details)
+            else
+              if method == "POST"
+                if params_method.has_key? "create"
+                  create_params = [] of Param
+                  params_method["create"].each do |param|
+                    create_params << param
+                  end
+                end
+                create_params ||= [] of Param
+                params_body ||= [] of Param
+                last_params = create_params + params_body
+                @result << Endpoint.new("/#{resource}", method, last_params, details)
+              elsif method == "DELETE"
+                params_delete = [] of Param
+                # Fix: header/cookie params for the destroy action are stored
+                # under the action name "destroy", not the HTTP verb "delete".
+                if params_method.has_key? "destroy"
+                  params_method["destroy"].each do |param|
+                    params_delete << param
+                  end
+                end
+                @result << Endpoint.new("/#{resource}/1", method, params_delete, details)
+              else
+                if params_method.has_key? "update"
+                  update_params = [] of Param
+                  params_method["update"].each do |param|
+                    update_params << param
+                  end
+                end
+                update_params ||= [] of Param
+                params_body ||= [] of Param
+                last_params = update_params + params_body
+                @result << Endpoint.new("/#{resource}/1", method, last_params, details)
+              end
+            end
+          end
+        end
+      end
+
+      @result
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/ruby/sinatra.cr b/src/analyzer/analyzers/ruby/sinatra.cr
new file mode 100644
index 00000000..fd4e0c05
--- /dev/null
+++ b/src/analyzer/analyzers/ruby/sinatra.cr
@@ -0,0 +1,92 @@
+require "../../../models/analyzer"
+
+module Analyzer::Ruby
+  # Endpoint analyzer for the Sinatra web framework.
+  # Walks every file under the base path, extracting route declarations
+  # and the request parameters (query, header, cookie) used near them.
+  class Sinatra < Analyzer
+    # HTTP verbs recognized in route declarations, in matching priority order.
+    HTTP_METHODS = %w(get post put delete patch head options)
+
+    def analyze
+      # Source Analysis
+      begin
+        Dir.glob("#{@base_path}/**/*") do |path|
+          next if File.directory?(path)
+          if File.exists?(path)
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              last_endpoint = Endpoint.new("", "")
+              file.each_line.with_index do |line, index|
+                endpoint = line_to_endpoint(line)
+                if endpoint.method != ""
+                  details = Details.new(PathInfo.new(path, index + 1))
+                  endpoint.details = details
+                  @result << endpoint
+                  last_endpoint = endpoint
+                end
+
+                # Any parameter found on this line is attributed to the most
+                # recently seen endpoint in the same file.
+                param = line_to_param(line)
+                if param.name != ""
+                  if last_endpoint.method != ""
+                    last_endpoint.push_param(param)
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+
+      @result
+    end
+
+    # Extracts a request parameter from a source line.
+    # Returns a Param with an empty name when no parameter usage is found.
+    def line_to_param(content : String) : Param
+      if content.includes? "param["
+        param = content.split("param[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+        return Param.new(param, "", "query")
+      end
+
+      if content.includes? "params["
+        param = content.split("params[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+        return Param.new(param, "", "query")
+      end
+
+      if content.includes? "request.env["
+        param = content.split("request.env[")[1].split("]")[0].gsub("\"", "").gsub("'", "")
+        return Param.new(param, "", "header")
+      end
+
+      if content.includes? "headers["
+        param = content.split("headers[")[1].split("]")[0].gsub("\"", "").gsub("'", "").gsub(":", "")
+        return Param.new(param, "", "header")
+      end
+
+      if content.includes? "cookies["
+        param = content.split("cookies[")[1].split("]")[0].gsub("\"", "").gsub("'", "").gsub(":", "")
+        return Param.new(param, "", "cookie")
+      end
+
+      Param.new("", "", "")
+    end
+
+    # Maps a single source line to an Endpoint; returns an Endpoint with
+    # an empty method when the line declares no route.
+    def line_to_endpoint(content : String) : Endpoint
+      HTTP_METHODS.each do |http_method|
+        content.scan(/#{http_method}\s+['"](.+?)['"]/) do |match|
+          if match.size > 1
+            return Endpoint.new("#{match[1]}", http_method.upcase)
+          end
+        end
+      end
+
+      Endpoint.new("", "")
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/rust/actix_web.cr b/src/analyzer/analyzers/rust/actix_web.cr
new file mode 100644
index 00000000..9afc9243
--- /dev/null
+++ b/src/analyzer/analyzers/rust/actix_web.cr
@@ -0,0 +1,54 @@
+require "../../../models/analyzer"
+
+module Analyzer::Rust
+  # Endpoint analyzer for the Rust actix-web framework.
+  # Scans .rs files for route attribute macros such as #[get("/path")].
+  class ActixWeb < Analyzer
+    def analyze
+      # Source Analysis
+      pattern = /#\[(get|post|put|delete|patch)\("([^"]+)"\)\]/
+
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+
+          if File.exists?(path) && File.extname(path) == ".rs"
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              file.each_line.with_index do |line, index|
+                if line.includes? "#["
+                  match = line.match(pattern)
+                  if match
+                    begin
+                      route_argument = match[2]
+                      callback_argument = match[1]
+                      details = Details.new(PathInfo.new(path, index + 1))
+                      result << Endpoint.new("#{route_argument}", callback_to_method(callback_argument), details)
+                    rescue
+                    end
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+
+      result
+    end
+
+    # Maps an attribute-macro name to an HTTP method string.
+    # Names outside the known set fall back to GET.
+    def callback_to_method(str)
+      method = str.split("(").first
+      # "patch" must be listed: the route pattern above matches it, and
+      # omitting it here would misreport PATCH routes as GET.
+      if !["get", "post", "put", "delete", "patch"].includes?(method)
+        method = "get"
+      end
+
+      method.upcase
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/rust/axum.cr b/src/analyzer/analyzers/rust/axum.cr
new file mode 100644
index 00000000..a095b611
--- /dev/null
+++ b/src/analyzer/analyzers/rust/axum.cr
@@ -0,0 +1,54 @@
+require "../../../models/analyzer"
+
+module Analyzer::Rust
+  # Endpoint analyzer for the Rust axum framework.
+  # Scans .rs files for router registrations such as .route("/path", get(handler)).
+  class Axum < Analyzer
+    def analyze
+      # Source Analysis
+      pattern = /\.route\("([^"]+)",\s*([^)]+)\)/
+
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+
+          if File.exists?(path) && File.extname(path) == ".rs"
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              file.each_line.with_index do |line, index|
+                if line.includes? ".route("
+                  match = line.match(pattern)
+                  if match
+                    begin
+                      route_argument = match[1]
+                      callback_argument = match[2]
+                      details = Details.new(PathInfo.new(path, index + 1))
+                      result << Endpoint.new("#{route_argument}", callback_to_method(callback_argument), details)
+                    rescue
+                    end
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+
+      result
+    end
+
+    # Maps an axum method-router call (e.g. "get(handler)") to an HTTP
+    # method string. Unknown callbacks fall back to GET.
+    def callback_to_method(str)
+      method = str.split("(").first
+      # axum also provides patch/head/options method routers; without them
+      # here those routes would be misreported as GET.
+      if !["get", "post", "put", "delete", "patch", "head", "options"].includes?(method)
+        method = "get"
+      end
+
+      method.upcase
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/rust/rocket.cr b/src/analyzer/analyzers/rust/rocket.cr
new file mode 100644
index 00000000..ed5aa950
--- /dev/null
+++ b/src/analyzer/analyzers/rust/rocket.cr
@@ -0,0 +1,54 @@
+require "../../../models/analyzer"
+
+module Analyzer::Rust
+  # Endpoint analyzer for the Rust Rocket framework.
+  # Scans .rs files for route attribute macros such as
+  # #[get("/path")] or #[post("/path", data = "<form>")].
+  class Rocket < Analyzer
+    def analyze
+      # Source Analysis
+      pattern = /#\[(get|post|delete|put|patch|head|options)\("([^"]+)"(?:, data = "<([^>]+)>")?\)\]/
+
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+
+          if File.exists?(path) && File.extname(path) == ".rs"
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              file.each_line.with_index do |line, index|
+                if line.includes?("#[") && line.includes?(")]")
+                  match = line.match(pattern)
+                  if match
+                    begin
+                      callback_argument = match[1]
+                      route_argument = match[2]
+
+                      details = Details.new(PathInfo.new(path, index + 1))
+                      result << Endpoint.new("#{route_argument}", callback_to_method(callback_argument), details)
+                    rescue
+                    end
+                  end
+                end
+              end
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+
+      result
+    end
+
+    # Maps an attribute-macro name to an HTTP method string.
+    # Names outside the known set fall back to GET.
+    def callback_to_method(str)
+      method = str.split("(").first
+      if !["get", "post", "put", "delete", "patch", "head", "options"].includes?(method)
+        method = "get"
+      end
+
+      method.upcase
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/specification/har.cr b/src/analyzer/analyzers/specification/har.cr
new file mode 100644
index 00000000..8108242f
--- /dev/null
+++ b/src/analyzer/analyzers/specification/har.cr
@@ -0,0 +1,65 @@
+require "../../../models/analyzer"
+
+module Analyzer::Specification
+  # Endpoint analyzer for HAR (HTTP Archive) capture files.
+  # Imports each recorded request whose URL falls under @url as an Endpoint,
+  # carrying over its query/header/cookie/body parameters.
+  class Har < Analyzer
+    def analyze
+      locator = CodeLocator.instance
+      har_files = locator.all("har-path")
+
+      if har_files.is_a?(Array(String)) && @url != ""
+        har_files.each do |har_file|
+          if File.exists?(har_file)
+            data = HAR.from_file(har_file)
+            logger.debug "Open #{har_file} file"
+            data.entries.each do |entry|
+              if entry.request.url.includes? @url
+                # Keep only the path portion relative to the target URL.
+                path = entry.request.url.to_s.gsub(@url, "")
+                endpoint = Endpoint.new(path, entry.request.method)
+
+                entry.request.query_string.each do |query|
+                  endpoint.params << Param.new(query.name, query.value, "query")
+                end
+
+                # An "Upgrade: websocket" header marks the endpoint as ws.
+                is_websocket = false
+                entry.request.headers.each do |header|
+                  endpoint.params << Param.new(header.name, header.value, "header")
+                  if header.name == "Upgrade" && header.value == "websocket"
+                    is_websocket = true
+                  end
+                end
+
+                entry.request.cookies.each do |cookie|
+                  endpoint.params << Param.new(cookie.name, cookie.value, "cookie")
+                end
+
+                # Body params are typed by the recorded MIME type
+                # ("json" for application/json, generic "body" otherwise).
+                post_data = entry.request.post_data
+                if post_data
+                  params = post_data.params
+                  mime_type = post_data.mime_type
+                  param_type = "body"
+                  if mime_type == "application/json"
+                    param_type = "json"
+                  end
+                  if params
+                    params.each do |param|
+                      endpoint.params << Param.new(param.name, param.value.to_s, param_type)
+                    end
+                  end
+                end
+
+                details = Details.new(PathInfo.new(har_file, 0))
+                endpoint.details = details
+                if is_websocket
+                  endpoint.protocol = "ws"
+                end
+                @result << endpoint
+              end
+            end
+          end
+        end
+      end
+
+      @result
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/specification/oas2.cr b/src/analyzer/analyzers/specification/oas2.cr
new file mode 100644
index 00000000..30183f79
--- /dev/null
+++ b/src/analyzer/analyzers/specification/oas2.cr
@@ -0,0 +1,131 @@
+require "../../../models/analyzer"
+
+module Analyzer::Specification
+  # Endpoint analyzer for OpenAPI/Swagger 2.0 specification files.
+  # Reads every discovered JSON and YAML spec, honoring basePath and the
+  # per-operation parameter list (query/formData/header).
+  class Oas2 < Analyzer
+    def analyze
+      locator = CodeLocator.instance
+      swagger_jsons = locator.all("swagger-json")
+      swagger_yamls = locator.all("swagger-yaml")
+
+      # JSON specs
+      if swagger_jsons.is_a?(Array(String))
+        swagger_jsons.each do |swagger_json|
+          if File.exists?(swagger_json)
+            details = Details.new(PathInfo.new(swagger_json))
+            content = File.read(swagger_json, encoding: "utf-8", invalid: :skip)
+            json_obj = JSON.parse(content)
+            base_path = ""
+            begin
+              if json_obj["basePath"].to_s != ""
+                base_path = json_obj["basePath"].to_s
+              end
+            rescue e
+              @logger.debug "Exception of #{swagger_json}/basePath"
+              @logger.debug_sub e
+            end
+
+            begin
+              paths = json_obj["paths"].as_h
+              paths.each do |path, path_obj|
+                path_obj.as_h.each do |method, method_obj|
+                  params = [] of Param
+
+                  if method_obj.as_h.has_key?("parameters")
+                    method_obj["parameters"].as_a.each do |param_obj|
+                      param_name = param_obj["name"].to_s
+                      if param_obj["in"] == "query"
+                        param = Param.new(param_name, "", "query")
+                        params << param
+                      # NOTE(review): "form" is not a valid OAS2 `in` value
+                      # (the spec defines "formData"); confirm whether this
+                      # branch and its "json" mapping are intentional.
+                      elsif param_obj["in"] == "form"
+                        param = Param.new(param_name, "", "json")
+                        params << param
+                      elsif param_obj["in"] == "formData"
+                        param = Param.new(param_name, "", "form")
+                        params << param
+                      elsif param_obj["in"] == "header"
+                        param = Param.new(param_name, "", "header")
+                        params << param
+                      end
+                    end
+                    @result << Endpoint.new(base_path + path, method.upcase, params, details)
+                  else
+                    @result << Endpoint.new(base_path + path, method.upcase, details)
+                  end
+                rescue e
+                  @logger.debug "Exception of #{swagger_json}/paths/path/method"
+                  @logger.debug_sub e
+                end
+              rescue e
+                @logger.debug "Exception of #{swagger_json}/paths/path"
+                @logger.debug_sub e
+              end
+            rescue e
+              @logger.debug "Exception of #{swagger_json}/paths"
+              @logger.debug_sub e
+            end
+          end
+        end
+      end
+
+      # YAML specs (same logic as the JSON branch above)
+      if swagger_yamls.is_a?(Array(String))
+        swagger_yamls.each do |swagger_yaml|
+          if File.exists?(swagger_yaml)
+            details = Details.new(PathInfo.new(swagger_yaml))
+            content = File.read(swagger_yaml, encoding: "utf-8", invalid: :skip)
+            yaml_obj = YAML.parse(content)
+            base_path = ""
+            begin
+              if yaml_obj["basePath"].to_s != ""
+                base_path = yaml_obj["basePath"].to_s
+              end
+            rescue e
+              @logger.debug "Exception of #{swagger_yaml}/basePath"
+              @logger.debug_sub e
+            end
+
+            begin
+              paths = yaml_obj["paths"].as_h
+              paths.each do |path, path_obj|
+                path_obj.as_h.each do |method, method_obj|
+                  params = [] of Param
+
+                  if method_obj.as_h.has_key?("parameters")
+                    method_obj["parameters"].as_a.each do |param_obj|
+                      param_name = param_obj["name"].to_s
+                      if param_obj["in"] == "query"
+                        param = Param.new(param_name, "", "query")
+                        params << param
+                      elsif param_obj["in"] == "form"
+                        param = Param.new(param_name, "", "json")
+                        params << param
+                      elsif param_obj["in"] == "formData"
+                        param = Param.new(param_name, "", "form")
+                        params << param
+                      elsif param_obj["in"] == "header"
+                        param = Param.new(param_name, "", "header")
+                        params << param
+                      end
+                    end
+                    @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, params, details)
+                  else
+                    @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, details)
+                  end
+                rescue e
+                  @logger.debug "Exception of #{swagger_yaml}/paths/path/method"
+                  @logger.debug_sub e
+                end
+              rescue e
+                @logger.debug "Exception of #{swagger_yaml}/paths/path"
+                @logger.debug_sub e
+              end
+            rescue e
+              @logger.debug "Exception of #{swagger_yaml}/paths"
+              @logger.debug_sub e
+            end
+          end
+        end
+      end
+
+      @result
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/specification/oas3.cr b/src/analyzer/analyzers/specification/oas3.cr
new file mode 100644
index 00000000..4e33deaf
--- /dev/null
+++ b/src/analyzer/analyzers/specification/oas3.cr
@@ -0,0 +1,286 @@
+require "../../../models/analyzer"
+
+module Analyzer::Specification
+  # Endpoint analyzer for OpenAPI 3.x specification files (JSON and YAML).
+  class Oas3 < Analyzer
+    # Picks the spec server whose host matches @url and appends that
+    # server's path to @url; falls back to @url when no server matches.
+    def get_base_path(servers)
+      base_path = @url
+      servers.as_a.each do |server_obj|
+        if server_obj["url"].to_s.starts_with?("http")
+          user_uri = URI.parse(@url)
+          source_uri = URI.parse(server_obj["url"].to_s)
+          if user_uri.host == source_uri.host
+            base_path = @url + source_uri.path
+            break
+          end
+        end
+      end
+
+      base_path
+    end
+
+    # Walks every discovered OAS3 spec and emits one Endpoint per
+    # path/method pair, with query/header/cookie/body parameters.
+    def analyze
+      locator = CodeLocator.instance
+      oas3_jsons = locator.all("oas3-json")
+      oas3_yamls = locator.all("oas3-yaml")
+      base_path = @url
+
+      # JSON specs
+      if oas3_jsons.is_a?(Array(String))
+        oas3_jsons.each do |oas3_json|
+          if File.exists?(oas3_json)
+            details = Details.new(PathInfo.new(oas3_json))
+            content = File.read(oas3_json, encoding: "utf-8", invalid: :skip)
+            json_obj = JSON.parse(content)
+
+            begin
+              base_path = get_base_path json_obj["servers"]
+            rescue e
+              @logger.debug "Exception of #{oas3_json}/servers"
+              @logger.debug_sub e
+            end
+
+            begin
+              paths = json_obj["paths"].as_h
+              paths.each do |path, path_obj|
+                # Path-level "parameters"/"requestBody" entries apply to
+                # every method collected for this path (appended at the end).
+                params_of_path = [] of Param
+                methods_of_path = [] of String
+                path_obj.as_h.each do |method, method_obj|
+                  params = [] of Param
+
+                  # Param in Path
+                  begin
+                    if method == "parameters"
+                      method_obj.as_a.each do |param_obj|
+                        param_name = param_obj["name"].to_s
+                        if param_obj["in"] == "query"
+                          param = Param.new(param_name, "", "query")
+                          params_of_path << param
+                        elsif param_obj["in"] == "header"
+                          param = Param.new(param_name, "", "header")
+                          params_of_path << param
+                        elsif param_obj["in"] == "cookie"
+                          param = Param.new(param_name, "", "cookie")
+                          params_of_path << param
+                        end
+                      end
+                    end
+
+                    if method == "requestBody"
+                      method_obj["content"].as_h.each do |content_type, content_obj|
+                        if content_type == "application/json"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name.to_s, "", "json")
+                            params_of_path << param
+                          end
+                        elsif content_type == "application/x-www-form-urlencoded"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name.to_s, "", "form")
+                            params_of_path << param
+                          end
+                        end
+                      end
+                    end
+                  rescue e
+                    @logger.debug "Exception of #{oas3_json}/paths/parameters"
+                    @logger.debug_sub e
+                  end
+
+                  # Param in Method
+                  begin
+                    if method_obj.is_a?(JSON::Any) || method_obj.is_a?(Hash(String, JSON::Any))
+                      if method_obj.as_h.has_key?("parameters")
+                        method_obj["parameters"].as_a.each do |param_obj|
+                          param_name = param_obj["name"].to_s
+                          if param_obj["in"] == "query"
+                            param = Param.new(param_name, "", "query")
+                            params << param
+                          elsif param_obj["in"] == "header"
+                            param = Param.new(param_name, "", "header")
+                            params << param
+                          elsif param_obj["in"] == "cookie"
+                            param = Param.new(param_name, "", "cookie")
+                            params << param
+                          end
+                        end
+                      end
+                    end
+                  rescue e
+                    @logger.debug "Exception of #{oas3_json}/paths/method/parameters"
+                    @logger.debug_sub e
+                  end
+
+                  begin
+                    if method_obj.as_h.has_key?("requestBody")
+                      method_obj["requestBody"]["content"].as_h.each do |content_type, content_obj|
+                        if content_type == "application/json"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name, "", "json")
+                            params << param
+                          end
+                        elsif content_type == "application/x-www-form-urlencoded"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name, "", "form")
+                            params << param
+                          end
+                        end
+                      end
+                    end
+                  rescue e
+                    @logger.debug "Exception of #{oas3_json}/paths/method/parameters"
+                    @logger.debug_sub e
+                  end
+
+                  # "parameters"/"requestBody" keys are not HTTP methods; skip them.
+                  if params.size > 0 && (method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY")
+                    @result << Endpoint.new(base_path + path, method.upcase, params, details)
+                    methods_of_path << method.to_s.upcase
+                  elsif method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY"
+                    @result << Endpoint.new(base_path + path, method.upcase, details)
+                    methods_of_path << method.to_s.upcase
+                  end
+                rescue e
+                  @logger.debug "Exception of #{oas3_json}/paths/endpoint"
+                  @logger.debug_sub e
+                end
+                if params_of_path.size > 0
+                  methods_of_path.each do |method_path|
+                    @result << Endpoint.new(base_path + path, method_path.upcase, params_of_path, details)
+                  end
+                end
+              end
+            rescue e
+              @logger.debug "Exception of #{oas3_json}/paths"
+              @logger.debug_sub e
+            end
+          end
+        end
+      end
+
+      # YAML specs (same logic as the JSON branch above)
+      if oas3_yamls.is_a?(Array(String))
+        oas3_yamls.each do |oas3_yaml|
+          if File.exists?(oas3_yaml)
+            details = Details.new(PathInfo.new(oas3_yaml))
+            content = File.read(oas3_yaml, encoding: "utf-8", invalid: :skip)
+            yaml_obj = YAML.parse(content)
+
+            begin
+              base_path = get_base_path yaml_obj["servers"]
+            rescue e
+              @logger.debug "Exception of #{oas3_yaml}/servers"
+              @logger.debug_sub e
+            end
+
+            begin
+              paths = yaml_obj["paths"].as_h
+              paths.each do |path, path_obj|
+                params_of_path = [] of Param
+                methods_of_path = [] of String
+                path_obj.as_h.each do |method, method_obj|
+                  params = [] of Param
+
+                  # Param in Path
+                  begin
+                    if method == "parameters"
+                      method_obj.as_a.each do |param_obj|
+                        param_name = param_obj["name"].to_s
+                        if param_obj["in"] == "query"
+                          param = Param.new(param_name, "", "query")
+                          params_of_path << param
+                        elsif param_obj["in"] == "header"
+                          param = Param.new(param_name, "", "header")
+                          params_of_path << param
+                        elsif param_obj["in"] == "cookie"
+                          param = Param.new(param_name, "", "cookie")
+                          params_of_path << param
+                        end
+                      end
+                    end
+
+                    if method == "requestBody"
+                      method_obj["content"].as_h.each do |content_type, content_obj|
+                        if content_type == "application/json"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name.to_s, "", "json")
+                            params_of_path << param
+                          end
+                        elsif content_type == "application/x-www-form-urlencoded"
+                          content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                            param = Param.new(param_name.to_s, "", "form")
+                            params_of_path << param
+                          end
+                        end
+                      end
+                    end
+                  rescue e
+                    @logger.debug "Exception of #{oas3_yaml}/paths/parameters"
+                    @logger.debug_sub e
+                  end
+
+                  # Param in Method
+                  begin
+                    if method_obj.is_a?(YAML::Any) || method_obj.is_a?(Hash(String, YAML::Any))
+                      if method_obj.as_h.has_key?("parameters")
+                        method_obj["parameters"].as_a.each do |param_obj|
+                          param_name = param_obj["name"].to_s
+                          if param_obj["in"] == "query"
+                            param = Param.new(param_name, "", "query")
+                            params << param
+                          elsif param_obj["in"] == "header"
+                            param = Param.new(param_name, "", "header")
+                            params << param
+                          elsif param_obj["in"] == "cookie"
+                            param = Param.new(param_name, "", "cookie")
+                            params << param
+                          end
+                        end
+                      end
+
+                      if method_obj.as_h.has_key?("requestBody")
+                        method_obj["requestBody"]["content"].as_h.each do |content_type, content_obj|
+                          if content_type == "application/json"
+                            content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                              param = Param.new(param_name.to_s, "", "json")
+                              params << param
+                            end
+                          elsif content_type == "application/x-www-form-urlencoded"
+                            content_obj["schema"]["properties"].as_h.each do |param_name, _|
+                              param = Param.new(param_name.to_s, "", "form")
+                              params << param
+                            end
+                          end
+                        end
+                      end
+                    end
+                  rescue e
+                    @logger.debug "Exception of #{oas3_yaml}/paths/method/parameters"
+                    @logger.debug_sub e
+                  end
+
+                  if params.size > 0 && (method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY")
+                    @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, params, details)
+                    methods_of_path << method.to_s.upcase
+                  elsif method.to_s.upcase != "PARAMETERS" && method.to_s.upcase != "REQUESTBODY"
+                    @result << Endpoint.new(base_path + path.to_s, method.to_s.upcase, details)
+                    methods_of_path << method.to_s.upcase
+                  end
+                end
+
+                if params_of_path.size > 0
+                  methods_of_path.each do |method_path|
+                    @result << Endpoint.new(base_path + path.to_s, method_path.to_s.upcase, params_of_path, details)
+                  end
+                end
+              rescue e
+                @logger.debug "Exception of #{oas3_yaml}/paths/endpoint"
+                @logger.debug_sub e
+              end
+            rescue e
+              @logger.debug "Exception of #{oas3_yaml}/paths"
+              @logger.debug_sub e
+            end
+          end
+        end
+      end
+
+      @result
+    end
+  end
+end
diff --git a/src/analyzer/analyzers/specification/raml.cr b/src/analyzer/analyzers/specification/raml.cr
new file mode 100644
index 00000000..e3f4d055
--- /dev/null
+++ b/src/analyzer/analyzers/specification/raml.cr
@@ -0,0 +1,68 @@
+require "../../../models/analyzer"
+
+module Analyzer::Specification
+  # Endpoint analyzer for RAML API specification files.
+  # NOTE(review): every top-level YAML key is treated as a path; RAML
+  # metadata keys (title, version, ...) only drop out via the rescue below.
+  class RAML < Analyzer
+    def analyze
+      locator = CodeLocator.instance
+      raml_specs = locator.all("raml-spec")
+
+      if raml_specs.is_a?(Array(String))
+        raml_specs.each do |raml_spec|
+          if File.exists?(raml_spec)
+            details = Details.new(PathInfo.new(raml_spec))
+
+            content = File.read(raml_spec, encoding: "utf-8", invalid: :skip)
+            yaml_obj = YAML.parse(content)
+            yaml_obj.as_h.each do |path, path_obj|
+              begin
+                path_obj.as_h.each do |method, method_obj|
+                  params = [] of Param
+
+                  if method_obj.as_h.has_key? "queryParameters"
+                    method_obj["queryParameters"].as_h.each do |param_name, _|
+                      param = Param.new(param_name.to_s, "", "query")
+                      params << param
+                    end
+                  end
+
+                  # Body params are read from the example payload's keys.
+                  if method_obj.as_h.has_key? "body"
+                    method_obj["body"].as_h.each do |content_type, content_obj|
+                      if content_type == "application/json"
+                        content_obj["example"].as_h.each do |param_name, _|
+                          param = Param.new(param_name.to_s, "", "json")
+                          params << param
+                        end
+                      elsif content_type == "application/x-www-form-urlencoded"
+                        content_obj["example"].as_h.each do |param_name, _|
+                          param = Param.new(param_name.to_s, "", "form")
+                          params << param
+                        end
+                      end
+                    end
+                  end
+
+                  if method_obj.as_h.has_key? "headers"
+                    method_obj["headers"].as_h.each do |param_name, _|
+                      param = Param.new(param_name.to_s, "", "header")
+                      params << param
+                    end
+                  end
+
+                  @result << Endpoint.new(path.to_s, method.to_s.upcase, params, details)
+                rescue e
+                  @logger.debug "Exception of #{raml_spec}/paths/#{path}/#{method}"
+                  @logger.debug_sub e
+                end
+              rescue e
+                @logger.debug "Exception of #{raml_spec}/paths/#{path}"
+                @logger.debug_sub e
+              end
+            end
+          end
+        end
+      end
+
+      @result
+    end
+  end
+end
diff --git a/src/completions.cr b/src/completions.cr
index 84463346..5a215f01 100644
--- a/src/completions.cr
+++ b/src/completions.cr
@@ -1,92 +1,156 @@
def generate_zsh_completion_script
<<-SCRIPT
- #compdef noir
+#compdef noir
- _arguments \\
- '-b[Set base path]:path:_files' \\
- '-u[Set base URL for endpoints]:URL:_urls' \\
- '-f[Set output format]:format:(plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie)' \\
- '-o[Write result to file]:path:_files' \\
- '--set-pvalue[Specifies the value of the identified parameter]:value:' \\
- '--include-path[Include file path in the plain result]' \\
- '--no-color[Disable color output]' \\
- '--no-log[Displaying only the results]' \\
- '-T[Activates all taggers for full analysis coverage]' \\
- '--use-taggers[Activates specific taggers]:values:' \\
- '--list-taggers[Lists all available taggers]' \\
- '--send-req[Send results to a web request]' \\
- '--send-proxy[Send results to a web request via an HTTP proxy]:proxy:' \\
- '--send-es[Send results to Elasticsearch]:es:' \\
- '--with-headers[Add custom headers to be included in the delivery]:headers:' \\
- '--use-matchers[Send URLs that match specific conditions to the Deliver]:string:' \\
- '--use-filters[Exclude URLs that match specified conditions and send the rest to Deliver]:string:' \\
- '--diff-path[Specify the path to the old version of the source code for comparison]:path:_files' \\
- '-t[Specify the technologies to use]:techs:' \\
- '--exclude-techs[Specify the technologies to be excluded]:techs:' \\
- '--list-techs[Show all technologies]' \\
- '--config-file[Specify the path to a configuration file in YAML format]:path:_files' \\
- '--concurrency[Set concurrency]:concurrency:' \\
- '-d[Show debug messages]' \\
- '-v[Show version]' \\
- '--build-info[Show version and Build info]' \\
- '-h[Show help]'
- SCRIPT
+_arguments \\
+ '-b[Set base path]:path:_files' \\
+ '-u[Set base URL for endpoints]:URL:_urls' \\
+ '-f[Set output format]:format:(plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie)' \\
+ '-o[Write result to file]:path:_files' \\
+ '--set-pvalue[Specifies the value of the identified parameter]:value:' \\
+ '--set-pvalue-header[Specifies the value of the identified parameter for headers]:value:' \\
+ '--set-pvalue-cookie[Specifies the value of the identified parameter for cookies]:value:' \\
+ '--set-pvalue-query[Specifies the value of the identified parameter for query parameters]:value:' \\
+ '--set-pvalue-form[Specifies the value of the identified parameter for form data]:value:' \\
+ '--set-pvalue-json[Specifies the value of the identified parameter for JSON data]:value:' \\
+ '--set-pvalue-path[Specifies the value of the identified parameter for path parameters]:value:' \\
+ '--status-codes[Display HTTP status codes for discovered endpoints]' \\
+ '--exclude-codes[Exclude specific HTTP response codes (comma-separated)]:status:' \\
+ '--include-path[Include file path in the plain result]' \\
+ '--no-color[Disable color output]' \\
+ '--no-log[Displaying only the results]' \\
+ '-T[Activates all taggers for full analysis coverage]' \\
+ '--use-taggers[Activates specific taggers]:values:' \\
+ '--list-taggers[Lists all available taggers]' \\
+ '--send-req[Send results to a web request]' \\
+ '--send-proxy[Send results to a web request via an HTTP proxy]:proxy:' \\
+ '--send-es[Send results to Elasticsearch]:es:' \\
+ '--with-headers[Add custom headers to be included in the delivery]:headers:' \\
+ '--use-matchers[Send URLs that match specific conditions to the Deliver]:string:' \\
+ '--use-filters[Exclude URLs that match specified conditions and send the rest to Deliver]:string:' \\
+ '--diff-path[Specify the path to the old version of the source code for comparison]:path:_files' \\
+ '-t[Specify the technologies to use]:techs:' \\
+ '--exclude-techs[Specify the technologies to be excluded]:techs:' \\
+ '--list-techs[Show all technologies]' \\
+ '--config-file[Specify the path to a configuration file in YAML format]:path:_files' \\
+ '--concurrency[Set concurrency]:concurrency:' \\
+ '-d[Show debug messages]' \\
+ '-v[Show version]' \\
+ '--build-info[Show version and Build info]' \\
+ '-h[Show help]'
+SCRIPT
end
def generate_bash_completion_script
<<-SCRIPT
- _noir_completions() {
- local cur prev opts
- COMPREPLY=()
- cur="${COMP_WORDS[COMP_CWORD]}"
- prev="${COMP_WORDS[COMP_CWORD-1]}"
- opts="
- -b --base-path
- -u --url
- -f --format
- -o --output
- --set-pvalue
- --include-path
- --no-color
- --no-log
- -T --use-all-taggers
- --use-taggers
- --list-taggers
- --send-req
- --send-proxy
- --send-es
- --with-headers
- --use-matchers
- --use-filters
- --diff-path
- -t --techs
- --exclude-techs
- --list-techs
- --config-file
- --concurrency
- -d --debug
- -v --version
- --build-info
- -h --help
- "
+_noir_completions() {
+ local cur prev opts
+ COMPREPLY=()
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ prev="${COMP_WORDS[COMP_CWORD-1]}"
+ opts="
+ -b --base-path
+ -u --url
+ -f --format
+ -o --output
+ --set-pvalue
+ --set-pvalue-header
+ --set-pvalue-cookie
+ --set-pvalue-query
+ --set-pvalue-form
+ --set-pvalue-json
+ --set-pvalue-path
+ --status-codes
+ --exclude-codes
+ --include-path
+ --no-color
+ --no-log
+ -T --use-all-taggers
+ --use-taggers
+ --list-taggers
+ --send-req
+ --send-proxy
+ --send-es
+ --with-headers
+ --use-matchers
+ --use-filters
+ --diff-path
+ -t --techs
+ --exclude-techs
+ --list-techs
+ --config-file
+ --concurrency
+ -d --debug
+ -v --version
+ --build-info
+ -h --help
+ "
- case "${prev}" in
- -f|--format)
- COMPREPLY=( $(compgen -W "plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie" -- "${cur}") )
- return 0
- ;;
- --send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|-o|-b|-u)
- COMPREPLY=( $(compgen -f -- "${cur}") )
- return 0
- ;;
- *)
- ;;
- esac
-
- COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
+ case "${prev}" in
+ -f|--format)
+ COMPREPLY=( $(compgen -W "plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie" -- "${cur}") )
+ return 0
+ ;;
+ --send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|-o|-b|-u)
+ COMPREPLY=( $(compgen -f -- "${cur}") )
return 0
- }
+ ;;
+ *)
+ ;;
+ esac
+
+ COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
+ return 0
+}
+
+complete -F _noir_completions noir
+SCRIPT
+end
+
+def generate_fish_completion_script
+ <<-SCRIPT
+function __fish_noir_needs_command
+ set -l cmd (commandline -opc)
+ if test (count $cmd) -eq 1
+ return 0
+ end
+ return 1
+end
- complete -F _noir_completions noir
- SCRIPT
+complete -c noir -n '__fish_noir_needs_command' -a '-b' -d 'Set base path'
+complete -c noir -n '__fish_noir_needs_command' -a '-u' -d 'Set base URL for endpoints'
+complete -c noir -n '__fish_noir_needs_command' -a '-f' -d 'Set output format'
+complete -c noir -n '__fish_noir_needs_command' -a '-o' -d 'Write result to file'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue' -d 'Specifies the value of the identified parameter'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-header' -d 'Specifies the value of the identified parameter for headers'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-cookie' -d 'Specifies the value of the identified parameter for cookies'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-query' -d 'Specifies the value of the identified parameter for query parameters'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-form' -d 'Specifies the value of the identified parameter for form data'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-json' -d 'Specifies the value of the identified parameter for JSON data'
+complete -c noir -n '__fish_noir_needs_command' -a '--set-pvalue-path' -d 'Specifies the value of the identified parameter for path parameters'
+complete -c noir -n '__fish_noir_needs_command' -a '--status-codes' -d 'Display HTTP status codes for discovered endpoints'
+complete -c noir -n '__fish_noir_needs_command' -a '--exclude-codes' -d 'Exclude specific HTTP response codes (comma-separated)'
+complete -c noir -n '__fish_noir_needs_command' -a '--include-path' -d 'Include file path in the plain result'
+complete -c noir -n '__fish_noir_needs_command' -a '--no-color' -d 'Disable color output'
+complete -c noir -n '__fish_noir_needs_command' -a '--no-log' -d 'Displaying only the results'
+complete -c noir -n '__fish_noir_needs_command' -a '-T' -d 'Activates all taggers for full analysis coverage'
+complete -c noir -n '__fish_noir_needs_command' -a '--use-taggers' -d 'Activates specific taggers'
+complete -c noir -n '__fish_noir_needs_command' -a '--list-taggers' -d 'Lists all available taggers'
+complete -c noir -n '__fish_noir_needs_command' -a '--send-req' -d 'Send results to a web request'
+complete -c noir -n '__fish_noir_needs_command' -a '--send-proxy' -d 'Send results to a web request via an HTTP proxy'
+complete -c noir -n '__fish_noir_needs_command' -a '--send-es' -d 'Send results to Elasticsearch'
+complete -c noir -n '__fish_noir_needs_command' -a '--with-headers' -d 'Add custom headers to be included in the delivery'
+complete -c noir -n '__fish_noir_needs_command' -a '--use-matchers' -d 'Send URLs that match specific conditions to the Deliver'
+complete -c noir -n '__fish_noir_needs_command' -a '--use-filters' -d 'Exclude URLs that match specified conditions and send the rest to Deliver'
+complete -c noir -n '__fish_noir_needs_command' -a '--diff-path' -d 'Specify the path to the old version of the source code for comparison'
+complete -c noir -n '__fish_noir_needs_command' -a '-t' -d 'Specify the technologies to use'
+complete -c noir -n '__fish_noir_needs_command' -a '--exclude-techs' -d 'Specify the technologies to be excluded'
+complete -c noir -n '__fish_noir_needs_command' -a '--list-techs' -d 'Show all technologies'
+complete -c noir -n '__fish_noir_needs_command' -a '--config-file' -d 'Specify the path to a configuration file in YAML format'
+complete -c noir -n '__fish_noir_needs_command' -a '--concurrency' -d 'Set concurrency'
+complete -c noir -n '__fish_noir_needs_command' -a '-d' -d 'Show debug messages'
+complete -c noir -n '__fish_noir_needs_command' -a '-v' -d 'Show version'
+complete -c noir -n '__fish_noir_needs_command' -a '--build-info' -d 'Show version and Build info'
+complete -c noir -n '__fish_noir_needs_command' -a '-h' -d 'Show help'
+SCRIPT
end
diff --git a/src/config_initializer.cr b/src/config_initializer.cr
index 68694455..fbebd191 100644
--- a/src/config_initializer.cr
+++ b/src/config_initializer.cr
@@ -1,23 +1,15 @@
require "file"
require "yaml"
+require "./utils/home.cr"
class ConfigInitializer
@config_dir : String
@config_file : String
- @default_config : Hash(String, String) = {"key" => "default_value"} # Replace with your default config
+ @default_config : Hash(String, YAML::Any) = {"key" => YAML::Any.new("default_value")} # Replace with your default config
def initialize
# Define the config directory and file based on ENV variables
- if ENV.has_key? "NOIR_HOME"
- @config_dir = ENV["NOIR_HOME"]
- else
- # Define the config directory and file based on the OS
- {% if flag?(:windows) %}
- @config_dir = "#{ENV["APPDATA"]}\\noir"
- {% else %}
- @config_dir = "#{ENV["HOME"]}/.config/noir"
- {% end %}
- end
+ @config_dir = get_home
@config_file = File.join(@config_dir, "config.yaml")
@@ -29,6 +21,7 @@ class ConfigInitializer
def setup
# Create the directory if it doesn't exist
Dir.mkdir(@config_dir) unless Dir.exists?(@config_dir)
+ Dir.mkdir("#{@config_dir}/passive_rules") unless Dir.exists?("#{@config_dir}/passive_rules")
# Create the config file if it doesn't exist
File.write(@config_file, generate_config_file) unless File.exists?(@config_file)
@@ -45,9 +38,38 @@ class ConfigInitializer
begin
parsed_yaml = YAML.parse(File.read(@config_file)).as_h
symbolized_hash = parsed_yaml.transform_keys(&.to_s)
- stringlized_hash = symbolized_hash.transform_values(&.to_s)
- stringlized_hash
+ # Transform specific keys from "yes"/"no" to true/false for old version noir config
+ ["color", "debug", "include_path", "nolog", "send_req", "all_taggers"].each do |key|
+ if symbolized_hash[key] == "yes"
+ symbolized_hash[key] = YAML::Any.new(true)
+ elsif symbolized_hash[key] == "no"
+ symbolized_hash[key] = YAML::Any.new(false)
+ end
+ end
+
+ # Transform specific keys for array and string config values
+ [
+ "send_with_headers", "use_filters", "use_matchers",
+ "set_pvalue", "set_pvalue_header", "set_pvalue_cookie",
+ "set_pvalue_query", "set_pvalue_form", "set_pvalue_json", "set_pvalue_path",
+ ].each do |key|
+ if symbolized_hash[key].to_s == ""
+ # If the value is an empty string, initialize it as an empty array of YAML::Any
+ symbolized_hash[key] = YAML::Any.new([] of YAML::Any)
+ else
+ begin
+ # If the value is already an array, ensure it is treated as an array of YAML::Any
+ symbolized_hash[key].as_a
+ rescue
+ # If the value is a string, wrap it in an array of YAML::Any
+ symbolized_hash[key] = YAML::Any.new([YAML::Any.new(symbolized_hash[key].to_s)])
+ end
+ end
+ end
+
+ final_options = default_options.merge(symbolized_hash) { |_, _, new_val| new_val }
+ final_options
rescue e : Exception
puts "Failed to read config file: #{e.message}"
puts "Using default config."
@@ -58,28 +80,38 @@ class ConfigInitializer
def default_options
noir_options = {
- "base" => "",
- "color" => "yes",
- "config_file" => "",
- "concurrency" => "100",
- "debug" => "no",
- "exclude_techs" => "",
- "format" => "plain",
- "include_path" => "no",
- "nolog" => "no",
- "output" => "",
- "send_es" => "",
- "send_proxy" => "",
- "send_req" => "no",
- "send_with_headers" => "",
- "set_pvalue" => "",
- "techs" => "",
- "url" => "",
- "use_filters" => "",
- "use_matchers" => "",
- "all_taggers" => "no",
- "use_taggers" => "",
- "diff" => "",
+ "base" => YAML::Any.new(""),
+ "color" => YAML::Any.new(true),
+ "config_file" => YAML::Any.new(""),
+ "concurrency" => YAML::Any.new("100"),
+ "debug" => YAML::Any.new(false),
+ "exclude_codes" => YAML::Any.new(""),
+ "exclude_techs" => YAML::Any.new(""),
+ "format" => YAML::Any.new("plain"),
+ "include_path" => YAML::Any.new(false),
+ "nolog" => YAML::Any.new(false),
+ "output" => YAML::Any.new(""),
+ "send_es" => YAML::Any.new(""),
+ "send_proxy" => YAML::Any.new(""),
+ "send_req" => YAML::Any.new(false),
+ "send_with_headers" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_header" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_cookie" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_query" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_form" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_json" => YAML::Any.new([] of YAML::Any),
+ "set_pvalue_path" => YAML::Any.new([] of YAML::Any),
+ "status_codes" => YAML::Any.new(false),
+ "techs" => YAML::Any.new(""),
+ "url" => YAML::Any.new(""),
+ "use_filters" => YAML::Any.new([] of YAML::Any),
+ "use_matchers" => YAML::Any.new([] of YAML::Any),
+ "all_taggers" => YAML::Any.new(false),
+ "use_taggers" => YAML::Any.new(""),
+ "diff" => YAML::Any.new(""),
+ "passive_scan" => YAML::Any.new(false),
+ "passive_scan_path" => YAML::Any.new([] of YAML::Any),
}
noir_options
@@ -100,7 +132,7 @@ class ConfigInitializer
base: "#{options["base"]}"
# Whether to use color in the output
- color: "#{options["color"]}"
+ color: #{options["color"]}
# The configuration file to use
config_file: "#{options["config_file"]}"
@@ -109,7 +141,10 @@ class ConfigInitializer
concurrency: "#{options["concurrency"]}"
# Whether to enable debug mode
- debug: "#{options["debug"]}"
+ debug: #{options["debug"]}
+
+ # The status codes to exclude
+ exclude_codes: "#{options["exclude_codes"]}"
# Technologies to exclude
exclude_techs: "#{options["exclude_techs"]}"
@@ -118,10 +153,10 @@ class ConfigInitializer
format: "#{options["format"]}"
# Whether to include the path in the output
- include_path: "#{options["include_path"]}"
+ include_path: #{options["include_path"]}
# Whether to disable logging
- nolog: "#{options["nolog"]}"
+ nolog: #{options["nolog"]}
# The output file to write to
output: "#{options["output"]}"
@@ -135,14 +170,23 @@ class ConfigInitializer
send_proxy: "#{options["send_proxy"]}"
# Whether to send a request
- send_req: "#{options["send_req"]}"
+ send_req: #{options["send_req"]}
- # Whether to send headers with the request
+ # Whether to send headers with the request (Array of strings)
# e.g "Authorization: Bearer token"
- send_with_headers: "#{options["send_with_headers"]}"
+ send_with_headers:
- # The value to set for pvalue
- set_pvalue: "#{options["set_pvalue"]}"
+ # The value to set for pvalue (Array of strings)
+ set_pvalue:
+ set_pvalue_header:
+ set_pvalue_cookie:
+ set_pvalue_query:
+ set_pvalue_form:
+ set_pvalue_json:
+ set_pvalue_path:
+
+ # The status codes to use
+ status_codes: #{options["status_codes"]}
# The technologies to use
techs: "#{options["techs"]}"
@@ -150,14 +194,14 @@ class ConfigInitializer
# The URL to use
url: "#{options["url"]}"
- # Whether to use filters
- use_filters: "#{options["use_filters"]}"
+ # Whether to use filters (Array of strings)
+ use_filters:
- # Whether to use matchers
- use_matchers: "#{options["use_matchers"]}"
+ # Whether to use matchers (Array of strings)
+ use_matchers:
# Whether to use all taggers
- all_taggers: "#{options["all_taggers"]}"
+ all_taggers: #{options["all_taggers"]}
# The taggers to use
# e.g "tagger1,tagger2"
@@ -167,6 +211,11 @@ class ConfigInitializer
# The diff file to use
diff: "#{options["diff"]}"
+ # The passive rules to use
+ # e.g /path/to/rules
+ passive_scan: false
+ passive_scan_path: []
+
CONTENT
content
diff --git a/src/detector/detector.cr b/src/detector/detector.cr
index 509e6b5b..b1135b95 100644
--- a/src/detector/detector.cr
+++ b/src/detector/detector.cr
@@ -1,47 +1,53 @@
-require "./detectors/*"
+require "./detectors/**"
require "../models/detector"
+require "../models/passive_scan"
+require "../passive_scan/detect.cr"
+require "yaml"
macro defind_detectors(detectors)
{% for detector, index in detectors %}
- instance = {{detector}}.new(options)
+ instance = Detector::{{detector}}.new(options)
instance.set_name
detector_list << instance
{% end %}
end
-def detect_techs(base_path : String, options : Hash(String, String), logger : NoirLogger)
+def detect_techs(base_path : String, options : Hash(String, YAML::Any), passive_scans : Array(PassiveScan), logger : NoirLogger)
techs = [] of String
+ passive_result = [] of PassiveScanResult
detector_list = [] of Detector
+ mutex = Mutex.new
# Define detectors
defind_detectors([
- DetectorCSharpAspNetMvc,
- DetectorCrystalKemal,
- DetectorCrystalLucky,
- DetectorElixirPhoenix,
- DetectorGoBeego,
- DetectorGoEcho,
- DetectorGoFiber,
- DetectorGoGin,
- DetectorHar,
- DetectorJavaArmeria,
- DetectorJavaJsp,
- DetectorJavaSpring,
- DetectorJsExpress,
- DetectorJsRestify,
- DetectorKotlinSpring,
- DetectorOas2,
- DetectorOas3,
- DetectorPhpPure,
- DetectorPythonDjango,
- DetectorPythonFastAPI,
- DetectorPythonFlask,
- DetectorRAML,
- DetectorRubyHanami,
- DetectorRubyRails,
- DetectorRubySinatra,
- DetectorRustAxum,
- DetectorRustRocket,
+ CSharp::AspNetMvc,
+ Crystal::Kemal,
+ Crystal::Lucky,
+ Elixir::Phoenix,
+ Go::Beego,
+ Go::Echo,
+ Go::Fiber,
+ Go::Gin,
+ Specification::Har,
+ Java::Armeria,
+ Java::Jsp,
+ Java::Spring,
+ Javascript::Express,
+ Javascript::Restify,
+ Kotlin::Spring,
+ Specification::Oas2,
+ Specification::Oas3,
+ Php::Php,
+ Python::Django,
+ Python::FastAPI,
+ Python::Flask,
+ Specification::RAML,
+ Ruby::Hanami,
+ Ruby::Rails,
+ Ruby::Sinatra,
+ Rust::Axum,
+ Rust::Rocket,
+ Rust::ActixWeb,
])
channel = Channel(String).new
@@ -51,7 +57,7 @@ def detect_techs(base_path : String, options : Hash(String, String), logger : No
end
end
- options["concurrency"].to_i.times do
+ options["concurrency"].to_s.to_i.times do
spawn do
loop do
begin
@@ -65,6 +71,11 @@ def detect_techs(base_path : String, options : Hash(String, String), logger : No
techs << detector.name
end
end
+
+ results = NoirPassiveScan.detect(file, content, passive_scans, logger)
+ mutex.synchronize do
+ passive_result.concat(results)
+ end
rescue e : File::NotFoundError
logger.debug "File not found: #{file}"
end
@@ -73,5 +84,5 @@ def detect_techs(base_path : String, options : Hash(String, String), logger : No
end
Fiber.yield
- techs.uniq
+ {techs.uniq, passive_result}
end
diff --git a/src/detector/detectors/crystal/kemal.cr b/src/detector/detectors/crystal/kemal.cr
new file mode 100644
index 00000000..f02816ff
--- /dev/null
+++ b/src/detector/detectors/crystal/kemal.cr
@@ -0,0 +1,16 @@
+require "../../../models/detector"
+
+module Detector::Crystal
+ class Kemal < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("kemalcr/kemal")
+ check = check && filename.includes?("shard.yml")
+
+ check
+ end
+
+ def set_name
+ @name = "crystal_kemal"
+ end
+ end
+end
diff --git a/src/detector/detectors/crystal/lucky.cr b/src/detector/detectors/crystal/lucky.cr
new file mode 100644
index 00000000..51e375eb
--- /dev/null
+++ b/src/detector/detectors/crystal/lucky.cr
@@ -0,0 +1,16 @@
+require "../../../models/detector"
+
+module Detector::Crystal
+ class Lucky < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("luckyframework/lucky")
+ check = check && filename.includes?("shard.yml")
+
+ check
+ end
+
+ def set_name
+ @name = "crystal_lucky"
+ end
+ end
+end
diff --git a/src/detector/detectors/crystal_kemal.cr b/src/detector/detectors/crystal_kemal.cr
deleted file mode 100644
index 5374a075..00000000
--- a/src/detector/detectors/crystal_kemal.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-require "../../models/detector"
-
-class DetectorCrystalKemal < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("kemalcr/kemal")
- check = check && filename.includes?("shard.yml")
-
- check
- end
-
- def set_name
- @name = "crystal_kemal"
- end
-end
diff --git a/src/detector/detectors/crystal_lucky.cr b/src/detector/detectors/crystal_lucky.cr
deleted file mode 100644
index 42fcf46a..00000000
--- a/src/detector/detectors/crystal_lucky.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-require "../../models/detector"
-
-class DetectorCrystalLucky < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("luckyframework/lucky")
- check = check && filename.includes?("shard.yml")
-
- check
- end
-
- def set_name
- @name = "crystal_lucky"
- end
-end
diff --git a/src/detector/detectors/cs_aspnet_mvc.cr b/src/detector/detectors/cs_aspnet_mvc.cr
deleted file mode 100644
index f5f7e24b..00000000
--- a/src/detector/detectors/cs_aspnet_mvc.cr
+++ /dev/null
@@ -1,24 +0,0 @@
-require "../../models/detector"
-
-class DetectorCSharpAspNetMvc < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("Microsoft.AspNet.Mvc")
- check = check && filename.includes?("packages.config")
- check_routeconfig filename, file_contents
-
- check
- end
-
- def check_routeconfig(filename : String, file_contents : String)
- check = file_contents.includes?(".MapRoute")
- check = check && filename.includes?("RouteConfig.cs")
- if check
- locator = CodeLocator.instance
- locator.set("cs-apinet-mvc-routeconfig", filename)
- end
- end
-
- def set_name
- @name = "c#-aspnet-mvc"
- end
-end
diff --git a/src/detector/detectors/csharp/aspnet_mvc.cr b/src/detector/detectors/csharp/aspnet_mvc.cr
new file mode 100644
index 00000000..e2f5b16e
--- /dev/null
+++ b/src/detector/detectors/csharp/aspnet_mvc.cr
@@ -0,0 +1,26 @@
+require "../../../models/detector"
+
+module Detector::CSharp
+ class AspNetMvc < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("Microsoft.AspNet.Mvc")
+ check = check && filename.includes?("packages.config")
+ check_routeconfig filename, file_contents
+
+ check
+ end
+
+ def check_routeconfig(filename : String, file_contents : String)
+ check = file_contents.includes?(".MapRoute")
+ check = check && filename.includes?("RouteConfig.cs")
+ if check
+ locator = CodeLocator.instance
+ locator.set("cs-apinet-mvc-routeconfig", filename)
+ end
+ end
+
+ def set_name
+ @name = "c#-aspnet-mvc"
+ end
+ end
+end
diff --git a/src/detector/detectors/elixir/phoenix.cr b/src/detector/detectors/elixir/phoenix.cr
new file mode 100644
index 00000000..e356f7f1
--- /dev/null
+++ b/src/detector/detectors/elixir/phoenix.cr
@@ -0,0 +1,16 @@
+require "../../../models/detector"
+
+module Detector::Elixir
+ class Phoenix < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("ElixirPhoenix")
+ check = check && filename.includes?("mix.exs")
+
+ check
+ end
+
+ def set_name
+ @name = "elixir_phoenix"
+ end
+ end
+end
diff --git a/src/detector/detectors/elixir_phoenix.cr b/src/detector/detectors/elixir_phoenix.cr
deleted file mode 100644
index 1010f1bf..00000000
--- a/src/detector/detectors/elixir_phoenix.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-require "../../models/detector"
-
-class DetectorElixirPhoenix < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("ElixirPhoenix")
- check = check && filename.includes?("mix.exs")
-
- check
- end
-
- def set_name
- @name = "elixir_phoenix"
- end
-end
diff --git a/src/detector/detectors/go/beego.cr b/src/detector/detectors/go/beego.cr
new file mode 100644
index 00000000..ba1b59c9
--- /dev/null
+++ b/src/detector/detectors/go/beego.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Go
+ class Beego < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? "go.mod") && (file_contents.includes? "github.com/beego/beego")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "go_beego"
+ end
+ end
+end
diff --git a/src/detector/detectors/go/echo.cr b/src/detector/detectors/go/echo.cr
new file mode 100644
index 00000000..d8360b84
--- /dev/null
+++ b/src/detector/detectors/go/echo.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Go
+ class Echo < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? "go.mod") && (file_contents.includes? "github.com/labstack/echo")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "go_echo"
+ end
+ end
+end
diff --git a/src/detector/detectors/go/fiber.cr b/src/detector/detectors/go/fiber.cr
new file mode 100644
index 00000000..c14152a6
--- /dev/null
+++ b/src/detector/detectors/go/fiber.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Go
+ class Fiber < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? "go.mod") && (file_contents.includes? "github.com/gofiber/fiber")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "go_fiber"
+ end
+ end
+end
diff --git a/src/detector/detectors/go/gin.cr b/src/detector/detectors/go/gin.cr
new file mode 100644
index 00000000..52f72b10
--- /dev/null
+++ b/src/detector/detectors/go/gin.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Go
+ class Gin < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? "go.mod") && (file_contents.includes? "github.com/gin-gonic/gin")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "go_gin"
+ end
+ end
+end
diff --git a/src/detector/detectors/go_beego.cr b/src/detector/detectors/go_beego.cr
deleted file mode 100644
index 9e74b57a..00000000
--- a/src/detector/detectors/go_beego.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorGoBeego < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? "go.mod") && (file_contents.includes? "github.com/beego/beego")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "go_beego"
- end
-end
diff --git a/src/detector/detectors/go_echo.cr b/src/detector/detectors/go_echo.cr
deleted file mode 100644
index c5a77418..00000000
--- a/src/detector/detectors/go_echo.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorGoEcho < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? "go.mod") && (file_contents.includes? "github.com/labstack/echo")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "go_echo"
- end
-end
diff --git a/src/detector/detectors/go_fiber.cr b/src/detector/detectors/go_fiber.cr
deleted file mode 100644
index 62863251..00000000
--- a/src/detector/detectors/go_fiber.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorGoFiber < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? "go.mod") && (file_contents.includes? "github.com/gofiber/fiber")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "go_fiber"
- end
-end
diff --git a/src/detector/detectors/go_gin.cr b/src/detector/detectors/go_gin.cr
deleted file mode 100644
index 90fddbf9..00000000
--- a/src/detector/detectors/go_gin.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorGoGin < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? "go.mod") && (file_contents.includes? "github.com/gin-gonic/gin")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "go_gin"
- end
-end
diff --git a/src/detector/detectors/har.cr b/src/detector/detectors/har.cr
deleted file mode 100644
index 44cc53ae..00000000
--- a/src/detector/detectors/har.cr
+++ /dev/null
@@ -1,29 +0,0 @@
-require "../../models/detector"
-require "../../utils/json"
-require "../../utils/yaml"
-require "../../models/code_locator"
-require "har"
-
-class DetectorHar < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? ".har") || (filename.includes? ".json")
- if valid_json? file_contents
- begin
- data = HAR.from_string(file_contents)
- if data.version.to_s.includes? "1."
- locator = CodeLocator.instance
- locator.push("har-path", filename)
- return true
- end
- rescue
- end
- end
- end
-
- false
- end
-
- def set_name
- @name = "har"
- end
-end
diff --git a/src/detector/detectors/java/armeria.cr b/src/detector/detectors/java/armeria.cr
new file mode 100644
index 00000000..7ba8f11f
--- /dev/null
+++ b/src/detector/detectors/java/armeria.cr
@@ -0,0 +1,20 @@
+require "../../../models/detector"
+
+module Detector::Java
+ class Armeria < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (
+ (filename.includes? "pom.xml") || (filename.includes? "build.gradle") ||
+ (filename.includes? "build.gradle.kts") || (filename.includes? "settings.gradle.kts")
+ ) && (file_contents.includes? "com.linecorp.armeria")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "java_armeria"
+ end
+ end
+end
diff --git a/src/detector/detectors/java/jsp.cr b/src/detector/detectors/java/jsp.cr
new file mode 100644
index 00000000..d4292507
--- /dev/null
+++ b/src/detector/detectors/java/jsp.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Java
+ class Jsp < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("<%")
+ check = check && file_contents.includes?("%>")
+ check = check && filename.includes?(".jsp")
+
+ check
+ end
+
+ def set_name
+ @name = "java_jsp"
+ end
+ end
+end
diff --git a/src/detector/detectors/java/spring.cr b/src/detector/detectors/java/spring.cr
new file mode 100644
index 00000000..295d27ca
--- /dev/null
+++ b/src/detector/detectors/java/spring.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Java
+ class Spring < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.ends_with? ".java") && (file_contents.includes? "org.springframework")
+ return true
+ end
+
+ false
+ end
+
+ def set_name
+ @name = "java_spring"
+ end
+ end
+end
diff --git a/src/detector/detectors/java_armeria.cr b/src/detector/detectors/java_armeria.cr
deleted file mode 100644
index 773f8d16..00000000
--- a/src/detector/detectors/java_armeria.cr
+++ /dev/null
@@ -1,18 +0,0 @@
-require "../../models/detector"
-
-class DetectorJavaArmeria < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (
- (filename.includes? "pom.xml") || (filename.includes? "build.gradle") ||
- (filename.includes? "build.gradle.kts") || (filename.includes? "settings.gradle.kts")
- ) && (file_contents.includes? "com.linecorp.armeria")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "java_armeria"
- end
-end
diff --git a/src/detector/detectors/java_jsp.cr b/src/detector/detectors/java_jsp.cr
deleted file mode 100644
index e0cca03f..00000000
--- a/src/detector/detectors/java_jsp.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorJavaJsp < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("<%")
- check = check && file_contents.includes?("%>")
- check = check && filename.includes?(".jsp")
-
- check
- end
-
- def set_name
- @name = "java_jsp"
- end
-end
diff --git a/src/detector/detectors/java_spring.cr b/src/detector/detectors/java_spring.cr
deleted file mode 100644
index f5466e5b..00000000
--- a/src/detector/detectors/java_spring.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorJavaSpring < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.ends_with? ".java") && (file_contents.includes? "org.springframework")
- return true
- end
-
- false
- end
-
- def set_name
- @name = "java_spring"
- end
-end
diff --git a/src/detector/detectors/javascript/express.cr b/src/detector/detectors/javascript/express.cr
new file mode 100644
index 00000000..b5bcb0ee
--- /dev/null
+++ b/src/detector/detectors/javascript/express.cr
@@ -0,0 +1,19 @@
+require "../../../models/detector"
+
+module Detector::Javascript
+ class Express < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? ".js") && (file_contents.includes? "require('express')")
+ true
+ elsif (filename.includes? ".js") && (file_contents.includes? "require(\"express\")")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "js_express"
+ end
+ end
+end
diff --git a/src/detector/detectors/javascript/restify.cr b/src/detector/detectors/javascript/restify.cr
new file mode 100644
index 00000000..b8322507
--- /dev/null
+++ b/src/detector/detectors/javascript/restify.cr
@@ -0,0 +1,23 @@
+require "../../../models/detector"
+
+module Detector::Javascript
+ class Restify < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? ".js") && (file_contents.includes? "require('restify')")
+ true
+ elsif (filename.includes? ".js") && (file_contents.includes? "require(\"restify\")")
+ true
+ elsif (filename.includes? ".ts") && (file_contents.includes? "server")
+ true
+ elsif (filename.includes? ".ts") && (file_contents.includes? "require(\"restify\")")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "js_restify"
+ end
+ end
+end
diff --git a/src/detector/detectors/js_express.cr b/src/detector/detectors/js_express.cr
deleted file mode 100644
index ada6c9d2..00000000
--- a/src/detector/detectors/js_express.cr
+++ /dev/null
@@ -1,17 +0,0 @@
-require "../../models/detector"
-
-class DetectorJsExpress < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? ".js") && (file_contents.includes? "require('express')")
- true
- elsif (filename.includes? ".js") && (file_contents.includes? "require(\"express\")")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "js_express"
- end
-end
diff --git a/src/detector/detectors/js_restify.cr b/src/detector/detectors/js_restify.cr
deleted file mode 100644
index eea22a90..00000000
--- a/src/detector/detectors/js_restify.cr
+++ /dev/null
@@ -1,21 +0,0 @@
-require "../../models/detector"
-
-class DetectorJsRestify < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? ".js") && (file_contents.includes? "require('restify')")
- true
- elsif (filename.includes? ".js") && (file_contents.includes? "require(\"restify\")")
- true
- elsif (filename.includes? ".ts") && (file_contents.includes? "server")
- true
- elsif (filename.includes? ".ts") && (file_contents.includes? "require(\"restify\")")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "js_restify"
- end
-end
diff --git a/src/detector/detectors/kotlin/spring.cr b/src/detector/detectors/kotlin/spring.cr
new file mode 100644
index 00000000..e6e06459
--- /dev/null
+++ b/src/detector/detectors/kotlin/spring.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Kotlin
+ class Spring < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.ends_with? ".kt") && (file_contents.includes? "org.springframework")
+ return true
+ end
+
+ false
+ end
+
+ def set_name
+ @name = "kotlin_spring"
+ end
+ end
+end
diff --git a/src/detector/detectors/kotlin_spring.cr b/src/detector/detectors/kotlin_spring.cr
deleted file mode 100644
index 9fa3ad41..00000000
--- a/src/detector/detectors/kotlin_spring.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorKotlinSpring < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.ends_with? ".kt") && (file_contents.includes? "org.springframework")
- return true
- end
-
- false
- end
-
- def set_name
- @name = "kotlin_spring"
- end
-end
diff --git a/src/detector/detectors/oas2.cr b/src/detector/detectors/oas2.cr
deleted file mode 100644
index 232776d2..00000000
--- a/src/detector/detectors/oas2.cr
+++ /dev/null
@@ -1,37 +0,0 @@
-require "../../models/detector"
-require "../../utils/json"
-require "../../utils/yaml"
-require "../../models/code_locator"
-
-class DetectorOas2 < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = false
- if valid_json? file_contents
- data = JSON.parse(file_contents)
- begin
- if data["swagger"].as_s.includes? "2."
- check = true
- locator = CodeLocator.instance
- locator.push("swagger-json", filename)
- end
- rescue
- end
- elsif valid_yaml? file_contents
- data = YAML.parse(file_contents)
- begin
- if data["swagger"].as_s.includes? "2."
- check = true
- locator = CodeLocator.instance
- locator.push("swagger-yaml", filename)
- end
- rescue
- end
- end
-
- check
- end
-
- def set_name
- @name = "oas2"
- end
-end
diff --git a/src/detector/detectors/oas3.cr b/src/detector/detectors/oas3.cr
deleted file mode 100644
index b47d66b6..00000000
--- a/src/detector/detectors/oas3.cr
+++ /dev/null
@@ -1,37 +0,0 @@
-require "../../models/detector"
-require "../../utils/json"
-require "../../utils/yaml"
-require "../../models/code_locator"
-
-class DetectorOas3 < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = false
- if valid_json? file_contents
- data = JSON.parse(file_contents)
- begin
- if data["openapi"].as_s.includes? "3."
- check = true
- locator = CodeLocator.instance
- locator.push("oas3-json", filename)
- end
- rescue
- end
- elsif valid_yaml? file_contents
- data = YAML.parse(file_contents)
- begin
- if data["openapi"].as_s.includes? "3."
- check = true
- locator = CodeLocator.instance
- locator.push("oas3-yaml", filename)
- end
- rescue
- end
- end
-
- check
- end
-
- def set_name
- @name = "oas3"
- end
-end
diff --git a/src/detector/detectors/php/php.cr b/src/detector/detectors/php/php.cr
new file mode 100644
index 00000000..fe4ac883
--- /dev/null
+++ b/src/detector/detectors/php/php.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Php
+ class Php < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("")
+ check = check || file_contents.includes?("?>")
+ check = check && filename.includes?(".php")
+
+ check
+ end
+
+ def set_name
+ @name = "php_pure"
+ end
+ end
+end
diff --git a/src/detector/detectors/php_pure.cr b/src/detector/detectors/php_pure.cr
deleted file mode 100644
index feb86f4f..00000000
--- a/src/detector/detectors/php_pure.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorPhpPure < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("")
- check = check || file_contents.includes?("?>")
- check = check && filename.includes?(".php")
-
- check
- end
-
- def set_name
- @name = "php_pure"
- end
-end
diff --git a/src/detector/detectors/python/django.cr b/src/detector/detectors/python/django.cr
new file mode 100644
index 00000000..4b5a8e1a
--- /dev/null
+++ b/src/detector/detectors/python/django.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Python
+ class Django < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? ".py") && (file_contents.includes? "from django.")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "python_django"
+ end
+ end
+end
diff --git a/src/detector/detectors/python/fastapi.cr b/src/detector/detectors/python/fastapi.cr
new file mode 100644
index 00000000..6055ac66
--- /dev/null
+++ b/src/detector/detectors/python/fastapi.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Python
+ class FastAPI < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.ends_with? ".py") && (file_contents.includes? "from fastapi")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "python_fastapi"
+ end
+ end
+end
diff --git a/src/detector/detectors/python/flask.cr b/src/detector/detectors/python/flask.cr
new file mode 100644
index 00000000..f955682f
--- /dev/null
+++ b/src/detector/detectors/python/flask.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Python
+ class Flask < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? ".py") && (file_contents.includes? "from flask")
+ true
+ else
+ false
+ end
+ end
+
+ def set_name
+ @name = "python_flask"
+ end
+ end
+end
diff --git a/src/detector/detectors/python_django.cr b/src/detector/detectors/python_django.cr
deleted file mode 100644
index 0cc8af4c..00000000
--- a/src/detector/detectors/python_django.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorPythonDjango < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? ".py") && (file_contents.includes? "from django.")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "python_django"
- end
-end
diff --git a/src/detector/detectors/python_fastapi.cr b/src/detector/detectors/python_fastapi.cr
deleted file mode 100644
index c7358cd0..00000000
--- a/src/detector/detectors/python_fastapi.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorPythonFastAPI < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.ends_with? ".py") && (file_contents.includes? "from fastapi")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "python_fastapi"
- end
-end
diff --git a/src/detector/detectors/python_flask.cr b/src/detector/detectors/python_flask.cr
deleted file mode 100644
index ed675566..00000000
--- a/src/detector/detectors/python_flask.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorPythonFlask < Detector
- def detect(filename : String, file_contents : String) : Bool
- if (filename.includes? ".py") && (file_contents.includes? "from flask")
- true
- else
- false
- end
- end
-
- def set_name
- @name = "python_flask"
- end
-end
diff --git a/src/detector/detectors/raml.cr b/src/detector/detectors/raml.cr
deleted file mode 100644
index 903006ab..00000000
--- a/src/detector/detectors/raml.cr
+++ /dev/null
@@ -1,26 +0,0 @@
-require "../../models/detector"
-require "../../utils/yaml"
-require "../../models/code_locator"
-
-class DetectorRAML < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = false
- if valid_yaml? file_contents
- if file_contents.includes? "#%RAML"
- begin
- YAML.parse(file_contents)
- check = true
- locator = CodeLocator.instance
- locator.push("raml-spec", filename)
- rescue
- end
- end
- end
-
- check
- end
-
- def set_name
- @name = "raml"
- end
-end
diff --git a/src/detector/detectors/ruby/hanami.cr b/src/detector/detectors/ruby/hanami.cr
new file mode 100644
index 00000000..1dfad0c6
--- /dev/null
+++ b/src/detector/detectors/ruby/hanami.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Ruby
+ class Hanami < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("gem 'hanami'")
+ check = check || file_contents.includes?("gem \"hanami\"")
+ check = check && filename.includes?("Gemfile")
+
+ check
+ end
+
+ def set_name
+ @name = "ruby_hanami"
+ end
+ end
+end
diff --git a/src/detector/detectors/ruby/rails.cr b/src/detector/detectors/ruby/rails.cr
new file mode 100644
index 00000000..d0c6fe3d
--- /dev/null
+++ b/src/detector/detectors/ruby/rails.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Ruby
+ class Rails < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("gem 'rails'")
+ check = check || file_contents.includes?("gem \"rails\"")
+ check = check && filename.includes?("Gemfile")
+
+ check
+ end
+
+ def set_name
+ @name = "ruby_rails"
+ end
+ end
+end
diff --git a/src/detector/detectors/ruby/sinatra.cr b/src/detector/detectors/ruby/sinatra.cr
new file mode 100644
index 00000000..90eb67ff
--- /dev/null
+++ b/src/detector/detectors/ruby/sinatra.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Ruby
+ class Sinatra < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("gem 'sinatra'")
+ check = check || file_contents.includes?("gem \"sinatra\"")
+ check = check && filename.includes?("Gemfile")
+
+ check
+ end
+
+ def set_name
+ @name = "ruby_sinatra"
+ end
+ end
+end
diff --git a/src/detector/detectors/ruby_hanami.cr b/src/detector/detectors/ruby_hanami.cr
deleted file mode 100644
index 8ce9c0fd..00000000
--- a/src/detector/detectors/ruby_hanami.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorRubyHanami < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("gem 'hanami'")
- check = check || file_contents.includes?("gem \"hanami\"")
- check = check && filename.includes?("Gemfile")
-
- check
- end
-
- def set_name
- @name = "ruby_hanami"
- end
-end
diff --git a/src/detector/detectors/ruby_rails.cr b/src/detector/detectors/ruby_rails.cr
deleted file mode 100644
index 4dc665ff..00000000
--- a/src/detector/detectors/ruby_rails.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorRubyRails < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("gem 'rails'")
- check = check || file_contents.includes?("gem \"rails\"")
- check = check && filename.includes?("Gemfile")
-
- check
- end
-
- def set_name
- @name = "ruby_rails"
- end
-end
diff --git a/src/detector/detectors/ruby_sinatra.cr b/src/detector/detectors/ruby_sinatra.cr
deleted file mode 100644
index 19b6e06e..00000000
--- a/src/detector/detectors/ruby_sinatra.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorRubySinatra < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("gem 'sinatra'")
- check = check || file_contents.includes?("gem \"sinatra\"")
- check = check && filename.includes?("Gemfile")
-
- check
- end
-
- def set_name
- @name = "ruby_sinatra"
- end
-end
diff --git a/src/detector/detectors/rust/actix_web.cr b/src/detector/detectors/rust/actix_web.cr
new file mode 100644
index 00000000..fa60290d
--- /dev/null
+++ b/src/detector/detectors/rust/actix_web.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Rust
+ class ActixWeb < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("actix-web")
+ check = check && file_contents.includes?("dependencies")
+ check = check && filename.includes?("Cargo.toml")
+
+ check
+ end
+
+ def set_name
+ @name = "rust_actix_web"
+ end
+ end
+end
diff --git a/src/detector/detectors/rust/axum.cr b/src/detector/detectors/rust/axum.cr
new file mode 100644
index 00000000..335dca40
--- /dev/null
+++ b/src/detector/detectors/rust/axum.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Rust
+ class Axum < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("axum")
+ check = check && file_contents.includes?("dependencies")
+ check = check && filename.includes?("Cargo.toml")
+
+ check
+ end
+
+ def set_name
+ @name = "rust_axum"
+ end
+ end
+end
diff --git a/src/detector/detectors/rust/rocket.cr b/src/detector/detectors/rust/rocket.cr
new file mode 100644
index 00000000..28599007
--- /dev/null
+++ b/src/detector/detectors/rust/rocket.cr
@@ -0,0 +1,17 @@
+require "../../../models/detector"
+
+module Detector::Rust
+ class Rocket < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = file_contents.includes?("rocket")
+ check = check && file_contents.includes?("dependencies")
+ check = check && filename.includes?("Cargo.toml")
+
+ check
+ end
+
+ def set_name
+ @name = "rust_rocket"
+ end
+ end
+end
diff --git a/src/detector/detectors/rust_axum.cr b/src/detector/detectors/rust_axum.cr
deleted file mode 100644
index 4977385c..00000000
--- a/src/detector/detectors/rust_axum.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorRustAxum < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("axum")
- check = check && file_contents.includes?("dependencies")
- check = check && filename.includes?("Cargo.toml")
-
- check
- end
-
- def set_name
- @name = "rust_axum"
- end
-end
diff --git a/src/detector/detectors/rust_rocket.cr b/src/detector/detectors/rust_rocket.cr
deleted file mode 100644
index e7bc77d2..00000000
--- a/src/detector/detectors/rust_rocket.cr
+++ /dev/null
@@ -1,15 +0,0 @@
-require "../../models/detector"
-
-class DetectorRustRocket < Detector
- def detect(filename : String, file_contents : String) : Bool
- check = file_contents.includes?("rocket")
- check = check && file_contents.includes?("dependencies")
- check = check && filename.includes?("Cargo.toml")
-
- check
- end
-
- def set_name
- @name = "rust_rocket"
- end
-end
diff --git a/src/detector/detectors/specification/har.cr b/src/detector/detectors/specification/har.cr
new file mode 100644
index 00000000..7da41c7a
--- /dev/null
+++ b/src/detector/detectors/specification/har.cr
@@ -0,0 +1,31 @@
+require "../../../models/detector"
+require "../../../utils/json"
+require "../../../utils/yaml"
+require "../../../models/code_locator"
+require "har"
+
+module Detector::Specification
+ class Har < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ if (filename.includes? ".har") || (filename.includes? ".json")
+ if valid_json? file_contents
+ begin
+ data = HAR.from_string(file_contents)
+ if data.version.to_s.includes? "1."
+ locator = CodeLocator.instance
+ locator.push("har-path", filename)
+ return true
+ end
+ rescue
+ end
+ end
+ end
+
+ false
+ end
+
+ def set_name
+ @name = "har"
+ end
+ end
+end
diff --git a/src/detector/detectors/specification/oas2.cr b/src/detector/detectors/specification/oas2.cr
new file mode 100644
index 00000000..59494c07
--- /dev/null
+++ b/src/detector/detectors/specification/oas2.cr
@@ -0,0 +1,39 @@
+require "../../../models/detector"
+require "../../../utils/json"
+require "../../../utils/yaml"
+require "../../../models/code_locator"
+
+module Detector::Specification
+ class Oas2 < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = false
+ if valid_json? file_contents
+ data = JSON.parse(file_contents)
+ begin
+ if data["swagger"].as_s.includes? "2."
+ check = true
+ locator = CodeLocator.instance
+ locator.push("swagger-json", filename)
+ end
+ rescue
+ end
+ elsif valid_yaml? file_contents
+ data = YAML.parse(file_contents)
+ begin
+ if data["swagger"].as_s.includes? "2."
+ check = true
+ locator = CodeLocator.instance
+ locator.push("swagger-yaml", filename)
+ end
+ rescue
+ end
+ end
+
+ check
+ end
+
+ def set_name
+ @name = "oas2"
+ end
+ end
+end
diff --git a/src/detector/detectors/specification/oas3.cr b/src/detector/detectors/specification/oas3.cr
new file mode 100644
index 00000000..5202a6d6
--- /dev/null
+++ b/src/detector/detectors/specification/oas3.cr
@@ -0,0 +1,39 @@
+require "../../../models/detector"
+require "../../../utils/json"
+require "../../../utils/yaml"
+require "../../../models/code_locator"
+
+module Detector::Specification
+ class Oas3 < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = false
+ if valid_json? file_contents
+ data = JSON.parse(file_contents)
+ begin
+ if data["openapi"].as_s.includes? "3."
+ check = true
+ locator = CodeLocator.instance
+ locator.push("oas3-json", filename)
+ end
+ rescue
+ end
+ elsif valid_yaml? file_contents
+ data = YAML.parse(file_contents)
+ begin
+ if data["openapi"].as_s.includes? "3."
+ check = true
+ locator = CodeLocator.instance
+ locator.push("oas3-yaml", filename)
+ end
+ rescue
+ end
+ end
+
+ check
+ end
+
+ def set_name
+ @name = "oas3"
+ end
+ end
+end
diff --git a/src/detector/detectors/specification/raml.cr b/src/detector/detectors/specification/raml.cr
new file mode 100644
index 00000000..a206c619
--- /dev/null
+++ b/src/detector/detectors/specification/raml.cr
@@ -0,0 +1,28 @@
+require "../../../models/detector"
+require "../../../utils/yaml"
+require "../../../models/code_locator"
+
+module Detector::Specification
+ class RAML < Detector
+ def detect(filename : String, file_contents : String) : Bool
+ check = false
+ if valid_yaml? file_contents
+ if file_contents.includes? "#%RAML"
+ begin
+ YAML.parse(file_contents)
+ check = true
+ locator = CodeLocator.instance
+ locator.push("raml-spec", filename)
+ rescue
+ end
+ end
+ end
+
+ check
+ end
+
+ def set_name
+ @name = "raml"
+ end
+ end
+end
diff --git a/src/minilexers/java.cr b/src/minilexers/java.cr
index 40e58429..915e8eae 100644
--- a/src/minilexers/java.cr
+++ b/src/minilexers/java.cr
@@ -363,13 +363,17 @@ class JavaLexer < MiniLexer
when '@' then self << Tuple.new(:AT, "@")
when '{' then self << Tuple.new(:LBRACE, "{")
when '}' then self << Tuple.new(:RBRACE, "}")
+ when '[' then self << Tuple.new(:LBRACK, "[")
+ when ']' then self << Tuple.new(:RBRACK, "]")
+ when '<' then self << Tuple.new(:LT, "<")
+ when '>' then self << Tuple.new(:GT, ">")
when ';' then self << Tuple.new(:SEMI, ";")
when '='
if @input[@position + 1] == '='
@position += 1
- Tuple.new(:EQUAL, "==")
+ self << Tuple.new(:EQUAL, "==")
else
- Tuple.new(:ASSIGN, "=")
+ self << Tuple.new(:ASSIGN, "=")
end
when '\t' then self << Tuple.new(:TAB, "\t")
when '\n'
diff --git a/src/minilexers/python.cr b/src/minilexers/python.cr
new file mode 100644
index 00000000..699f3e62
--- /dev/null
+++ b/src/minilexers/python.cr
@@ -0,0 +1,241 @@
+require "../models/minilexer/*"
+
+class PythonLexer < MiniLexer
+ # Python Keywords
+ # Regular Expressions for Tokens
+ IDENTIFIER = /^[a-zA-Z_]\w*/ # Unsupport unicode characters for now
+
+ # Token Definitions
+ PUNCTUATION = {
+ '.' => :DOT, ',' => :COMMA, '(' => :LPAREN, ')' => :RPAREN,
+ '{' => :LCURL, '}' => :RCURL, '[' => :LSQUARE, ']' => :RSQUARE,
+ ';' => :SEMI, ':' => :COLON, '?' => :QUESTION,
+ }
+
+ # https://docs.python.org/3.12/reference/lexical_analysis.html#keywords
+ KEYWORDS = {
+ "False" => :FALSE,
+ "await" => :AWAIT,
+ "else" => :ELSE,
+ "import" => :IMPORT,
+ "pass" => :PASS,
+ "None" => :NONE,
+ "break" => :BREAK,
+ "except" => :EXCEPT,
+ "in" => :IN,
+ "raise" => :RAISE,
+ "True" => :TRUE,
+ "class" => :CLASS,
+ "finally" => :FINALLY,
+ "is" => :IS,
+ "return" => :RETURN,
+ "and" => :AND,
+ "continue" => :CONTINUE,
+ "for" => :FOR,
+ "lambda" => :LAMBDA,
+ "try" => :TRY,
+ "as" => :AS,
+ "def" => :DEF,
+ "from" => :FROM,
+ "nonlocal" => :NONLOCAL,
+ "while" => :WHILE,
+ "assert" => :ASSERT,
+ "del" => :DEL,
+ "global" => :GLOBAL,
+ "not" => :NOT,
+ "with" => :WITH,
+ "async" => :ASYNC,
+ "elif" => :ELIF,
+ "if" => :IF,
+ "or" => :OR,
+ "yield" => :YIELD,
+ }
+
+ # https://docs.python.org/3.12/library/token.html#module-token
+ OPERATORS = {
+ '+' => :ADD, '-' => :SUB, '*' => :MULT, '/' => :DIV, '%' => :MOD,
+ '=' => :ASSIGN, "==" => :EQUAL, "!=" => :NOTEQUAL, '>' => :RANGLE, '<' => :LANGLE,
+ ">=" => :GE, "<=" => :LE, "&&" => :AND, "||" => :OR, '!' => :BANG,
+ "++" => :INC, "--" => :DEC, "+=" => :ADD_ASSIGN, "-=" => :SUB_ASSIGN,
+ "*=" => :MUL_ASSIGN, "/=" => :DIV_ASSIGN, "%=" => :MOD_ASSIGN,
+ '&' => :BITAND, '|' => :BITOR, '^' => :CARET, '~' => :TILDE,
+ "->" => :ARROW, "=>" => :DOUBLE_ARROW, "?:" => :ELVIS,
+ "<<" => :LEFTSHIFT, ">>" => :RIGHTSHIFT, "**" => :DOUBLESTAR, "&=" => :AMPEREQUAL,
+ "|=" => :VBAREQUAL, "^=" => :CIRCUMFLEXEQUAL, "<<=" => :LEFTSHIFTEQUAL,
+ ">>=" => :RIGHTSHIFTEQUAL, "**=" => :DOUBLESTAREQUAL, "//" => :DOUBLESLASH,
+ "//=" => :DOUBLESLASHEQUAL, "@" => :AT, "@=" => :ATEQUAL,
+ "..." => :ELLIPSIS, ":=" => :COLONEQUAL,
+ "!" => :EXCLAMATION,
+ }
+
+ def initialize
+ super
+ end
+
+ def tokenize_logic(@input : String) : Array(Token)
+ @tokens.clear
+ while @position < @input.size
+ case @input[@position]
+ when '\n'
+ match_newline
+ match_indentation
+ when '#'
+ match_comment
+ when '0'..'9'
+ match_number
+ when '"', '\'', "f"
+ match_string
+ when '.', ',', '(', ')', '{', '}', '[', ']', ';', '?', ':'
+ match_punctuation
+ when '+', '-', '*', '/', '%', '&', '|', '^', '!', '=', '<', '>', '~'
+ match_operator
+ else
+ match_other
+ end
+ end
+ self << Tuple.new(:EOF, "")
+ @tokens
+ end
+
+ private def match_indentation
+ match = @input[@position..].match(/^[\t ]+\b/)
+ if match
+ self << Tuple.new(:INDENT, match[0])
+ @position += match[0].size
+ end
+ end
+
+ private def match_newline
+ while @position < @input.size && @input[@position] == '\n'
+ self << Tuple.new(:NEWLINE, @input[@position])
+ @position += 1
+ end
+ end
+
+ private def match_comment
+ start_pos = @position
+ while @position < @input.size && @input[@position] != '\n'
+ @position += 1
+ end
+ self << Tuple.new(:COMMENT, @input[start_pos...@position])
+ end
+
+ private def match_multiline_string
+ delimiter = @input[@position..@position + 2]
+ start_pos = @position
+ @position += 3
+ while @position < @input.size && !@input[@position..@position + 2].starts_with?(delimiter)
+ if @input[@position] == '\\'
+ @position += 1 # Skip escaped character
+ end
+
+ @position += 1
+ end
+ @position += 3 # Skip closing delimiter
+ self << Tuple.new(:MULTILINE_STRING, @input[start_pos...@position])
+ end
+
+ private def match_number
+ start_pos = @position
+ if @input[@position..].starts_with?("0x")
+ @position += 2 # Skip 0x
+ while @position < @input.size && @input[@position].to_s =~ /[0-9a-fA-F]/
+ @position += 1
+ end
+ else
+ while @position < @input.size && @input[@position].to_s =~ /\d/
+ @position += 1
+ end
+
+ if @position < @input.size && @input[@position] == '.'
+ @position += 1
+ while @position < @input.size && @input[@position].to_s =~ /\d/
+ @position += 1
+ end
+ end
+
+ if @position < @input.size && @input[@position].to_s =~ /[eE]/
+ @position += 1
+ if @input[@position].to_s =~ /[+-]/
+ @position += 1
+ end
+ while @position < @input.size && @input[@position].to_s =~ /\d/
+ @position += 1
+ end
+ end
+ end
+ if start_pos == @position
+ self << Tuple.new(:NUMBER, @input[start_pos])
+ else
+ self << Tuple.new(:NUMBER, @input[start_pos...@position])
+ end
+ end
+
+ private def match_string
+ c = @input[@position]
+ if c == '"' && @input[@position...@position + 3] == "\"\"\""
+ match_multiline_string
+ elsif c == '\'' && @input[@position...@position + 3] == "'''"
+ match_multiline_string
+ elsif c == 'f' && (@input[@position + 1] == '"' || @input[@position + 1] == '\'')
+ @position += 1
+ match_string
+ self << Tuple.new(:FSTRING, @input[@position])
+ else
+ start_pos = @position
+ @position += 1
+ while @position < @input.size && @input[@position] != c
+ if @input[@position] == '\\'
+ @position += 1 # Skip escaped character
+ end
+
+ @position += 1
+ end
+
+ @position += 1
+ self << Tuple.new(:STRING, @input[start_pos...@position])
+ end
+ end
+
+ private def match_punctuation
+ char = @input[@position]
+ type = PUNCTUATION[char]? || :UNKNOWN
+ self << Tuple.new(type, char)
+ @position += 1
+ end
+
+ private def match_operator
+ # Match longer operators first by checking next characters
+ if @position + 1 < @input.size && OPERATORS.has_key?(@input[@position..@position + 1])
+ op = @input[@position..@position + 1]
+ @position += 2
+ elsif OPERATORS.has_key?(@input[@position])
+ op = @input[@position]
+ @position += 1
+ else
+ op = nil
+ end
+
+ if op
+ type = OPERATORS[op]
+ self << Tuple.new(type, op)
+ else
+ # Handle the case where the operator is unknown
+ self << Tuple.new(:UNKNOWN, @input[@position])
+ @position += 1
+ end
+ end
+
+ private def match_other
+ if match = IDENTIFIER.match(@input[@position..])
+ token_type = KEYWORDS.has_key?(match[0]) ? KEYWORDS[match[0]] : :IDENTIFIER
+ self << Tuple.new(token_type, match[0])
+ @position += match[0].size
+ else
+ if @input[@position] != ' ' # Skip whitespace
+ self << Tuple.new(:UNKNOWN, @input[@position])
+ end
+ @position += 1
+ end
+ end
+end
diff --git a/src/miniparsers/java.cr b/src/miniparsers/java.cr
index 2a4973d9..5e176bd2 100644
--- a/src/miniparsers/java.cr
+++ b/src/miniparsers/java.cr
@@ -96,27 +96,28 @@ class JavaParser
end
def parse_formal_parameters(tokens : Array(Token), param_start_index : Int32)
- lparen_count = 0
- rparen_count = 0
- lbrace_count = 0
- rbrace_count = 0
parameters = Array(Array(Token)).new
- parameter_token = Array(Token).new
return parameters if tokens.size <= param_start_index
- while param_start_index < tokens.size
- if tokens[param_start_index].type == :TAB
- param_start_index += 1
- elsif tokens[param_start_index].type == :NEWLINE
- param_start_index += 1
- elsif tokens[param_start_index].type == :LPAREN
+ lparen_index = param_start_index
+ while lparen_index < tokens.size
+ if tokens[lparen_index].type == :TAB
+ lparen_index += 1
+ elsif tokens[lparen_index].type == :NEWLINE
+ lparen_index += 1
+ elsif tokens[lparen_index].type == :LPAREN
break
else
+ # No parameters or wrong index was given
return parameters
end
end
- cursor = param_start_index
+ # Parse the formal parameters between ( and )
+ lparen_count = 0
+ other_open_count = 0
+ cursor = lparen_index
+ parameter_token = Array(Token).new # Add this line to declare the parameter_token variable
while cursor < tokens.size
token = tokens[cursor]
if token.type == :LPAREN
@@ -124,26 +125,28 @@ class JavaParser
if lparen_count > 1
parameter_token << token
end
- elsif token.type == :LBRACE
- lbrace_count += 1
+ elsif token.type == :LBRACE || token.type == :LBRACK || token.type == :LT
+ other_open_count += 1
parameter_token << token
- elsif token.type == :RBRACE
- rbrace_count += 1
+ elsif token.type == :RBRACE || token.type == :RBRACK || token.type == :GT
+ other_open_count -= 1
parameter_token << token
- elsif lbrace_count == rbrace_count && lparen_count - 1 == rparen_count && token.type == :COMMA
+ elsif token.type == :COMMA && other_open_count == 0 && lparen_count == 1
parameters << parameter_token
parameter_token = Array(Token).new
- elsif lparen_count > 0
- if token.type == :RPAREN
- rparen_count += 1
- if lparen_count == rparen_count
- parameters << parameter_token
- break
- else
- parameter_token << token
+ else
+ if token.type != :RPAREN
+ if token.type == :TAB || token.type == :NEWLINE # Skip TAB and NEWLINE tokens
+ cursor += 1
+ next
end
+ parameter_token << token # Add token to the parameter token list
else
- unless token.type == :TAB || token.type == :NEWLINE
+ lparen_count -= 1
+ if lparen_count == 0
+ parameters << parameter_token
+ break # End of the formal parameters
+ else
parameter_token << token
end
end
diff --git a/src/miniparsers/kotlin.cr b/src/miniparsers/kotlin.cr
index 702628b8..452c143b 100644
--- a/src/miniparsers/kotlin.cr
+++ b/src/miniparsers/kotlin.cr
@@ -332,7 +332,7 @@ class KotlinParser
lcurl_nesting, paren_nesting = 0, 0
class_tokens = Array(Token).new
class_tokens << token
- elsif start_class
+ elsif start_class && lcurl_nesting > 0
# Recursively parse nested classes
parse_classes(tokens, index)
end
diff --git a/src/miniparsers/python.cr b/src/miniparsers/python.cr
new file mode 100644
index 00000000..6cc3bb15
--- /dev/null
+++ b/src/miniparsers/python.cr
@@ -0,0 +1,372 @@
+require "../minilexers/python"
+require "../models/minilexer/token"
+
+# Minimal Python source parser used for cross-file resolution of imports
+# and module-level (global) variables. Operates on the token stream
+# produced by PythonLexer; it does not build a full AST.
+class PythonParser
+  property tokens : Array(Token)
+  property path : String
+
+  # path: the .py file this parser represents.
+  # tokens: lexed token stream for that file.
+  # parsers: shared cache of parsers keyed by absolute/relative file path,
+  #          so each file is parsed at most once per run.
+  # visited: paths already on the current import chain (cycle guard).
+  def initialize(@path : String, @tokens : Array(Token), @parsers : Hash(String, PythonParser), @visited : Array(String) = Array(String).new)
+    @import_statements = Hash(String, ImportModel).new
+    @global_variables = Hash(String, GlobalVariables).new
+    @basedir = File.dirname(@path)
+    @is_package_file = File.exists?(File.dirname(@path) + "/__init__.py")
+    # Climb out of the package: keep going up while the directory still
+    # contains an __init__.py, so @basedir ends at the package root's parent.
+    while @basedir != "" && File.exists?(@basedir + "/__init__.py")
+      @basedir = File.dirname(@basedir)
+    end
+
+    @debug = false
+    @visited << path
+    parse
+  end
+
+  # Parse imports first so parse_global_variables can see variables
+  # merged in from imported modules.
+  def parse
+    parse_import_statements(@tokens)
+    parse_global_variables()
+  end
+
+  # Create a parser for the given path. Reads the file from disk unless
+  # `content` is supplied (invalid UTF-8 bytes are skipped).
+  def create_parser(path : Path, content : String = "") : PythonParser
+    if content == ""
+      content = File.read(path, encoding: "utf-8", invalid: :skip)
+    end
+
+    lexer = PythonLexer.new
+    tokens = lexer.tokenize(content)
+    parser = PythonParser.new(path.to_s, tokens, @parsers, @visited.dup)
+    parser
+  end
+
+  # Get the parser for the given path, creating and caching it on first use.
+  def get_parser(path : Path) : PythonParser
+    if @parsers.has_key?(path.to_s)
+      return @parsers[path.to_s]
+    end
+
+    parser = create_parser(path)
+    @parsers[path.to_s] = parser
+    parser
+  end
+
+  # Scan the token stream for `import X` / `from X import Y [as Z]`
+  # statements, resolve each one to a file on disk, and pull the resolved
+  # module's global variables into @global_variables.
+  # Each collected statement is an Array(String) of dotted-path parts;
+  # a leading "." / ".." part marks a relative import, and an aliased
+  # final part is encoded as "name as alias" in a single element.
+  def parse_import_statements(tokens : Array(Token))
+    import_statements = Array(Array(String)).new
+    index = 0
+    while index < tokens.size
+      # Only consider tokens at the start of a logical line.
+      code_start = index == 0 || tokens[index - 1].type == :NEWLINE || tokens[index - 1].type == :INDENT
+      unless code_start
+        index += 1
+        next
+      end
+
+      from_strings = Array(String).new
+      if tokens[index].type == :FROM
+        index += 1
+        # Collect the dotted module path between `from` and `import`.
+        while tokens[index].type != :IMPORT && tokens[index].type != :EOF
+          if from_strings.size > 0
+            # After the first part, dots are just separators.
+            if tokens[index].type == :DOT
+              index += 1
+              next
+            end
+          else
+            # A leading dot marks a relative import ("." or "..").
+            if tokens[index].type == :DOT
+              if index + 1 < tokens.size && tokens[index + 1].type == :DOT
+                from_strings << ".."
+                index += 2
+              else
+                from_strings << "."
+                index += 1
+              end
+              next
+            end
+          end
+
+          from_strings << tokens[index].value
+          index += 1
+        end
+      end
+
+      if tokens[index].type == :IMPORT
+        index += 1
+        import_strings = from_strings.dup
+        # Each comma starts a fresh copy of the `from` prefix, so
+        # `from a import b, c` yields [a, b] and [a, c].
+        while tokens[index].type != :NEWLINE && tokens[index].type != :EOF
+          if tokens[index].type == :COMMA
+            import_strings = from_strings.dup
+            index += 1
+            next
+          elsif tokens[index].type == :DOT
+            index += 1
+            next
+          elsif tokens[index].type == :LPAREN
+            index += 1
+            next
+          elsif tokens[index].type == :RPAREN
+            index += 1
+            next
+          elsif tokens[index].type == :COMMENT
+            index += 1
+            next
+          end
+
+          # Check if the import statement has an alias (`... as name`).
+          import_name = tokens[index].value
+          if tokens[index + 1].type != :EOF && tokens[index + 1].type == :AS
+            as_name = tokens[index + 2].value
+            index += 2
+            # Alias is kept inline as "name as alias"; split back out below.
+            import_strings << import_name + " as " + as_name
+            if from_strings.size > 0
+              import_statements << import_strings
+            end
+          else
+            # No alias
+            import_strings << import_name
+            if from_strings.size > 0
+              import_statements << import_strings
+            end
+          end
+
+          index += 1
+        end
+
+        if from_strings.size == 0
+          # No from statement, so add the import statement
+          import_statements << import_strings
+        end
+      end
+
+      index += 1
+    end
+
+    # Resolve each collected statement against the filesystem.
+    import_statements.each do |import_statement|
+      name = import_statement[-1]
+
+      # Check if the name has an alias
+      as_name = nil
+      if name.includes?(" as ")
+        name, as_name = name.split(" as ") # Set as_name
+        import_statement[-1] = name # Remove the alias part
+      end
+
+      path = nil
+      pypath = nil
+      remain_import_parts = false
+      package_dir = @basedir
+      # A leading "." anchors resolution at this file's directory
+      # instead of the package root's parent.
+      if import_statement[0] == "."
+        package_dir = File.dirname(@path)
+        import_statement.shift
+      end
+
+      # Walk the dotted path: descend into directories until a .py file
+      # (or a package __init__.py) is found. pypath stays nil if nothing
+      # resolves (e.g. stdlib or third-party imports).
+      import_statement.each_with_index do |import_part, _index|
+        path = File.join(package_dir, import_part)
+        if import_part == ".."
+          path = File.dirname(package_dir)
+        end
+
+        # Order of checking is important: directory before module file,
+        # module file before package __init__.py.
+        if File.directory?(path)
+          package_dir = path
+        elsif File.exists?(path + ".py")
+          pypath = path + ".py"
+          # Parts left after the file are names inside the module.
+          if _index != import_statement.size - 1
+            remain_import_parts = true
+          end
+          break
+        elsif package_dir != @basedir && File.exists?(File.join(package_dir, "__init__.py"))
+          pypath = File.join(package_dir, "__init__.py")
+          if _index != import_statement.size - 1
+            remain_import_parts = true
+          end
+          break
+        else
+          pypath = nil
+          break
+        end
+      end
+
+      unless pypath.nil?
+        # Cycle guard: skip files already being parsed on this chain.
+        if @visited.includes?(pypath)
+          next
+        end
+        if name == "*"
+          # `from x import *`: merge every global as-is.
+          parser = get_parser(Path.new(pypath))
+          @global_variables.merge!(parser.@global_variables)
+        else
+          parser = get_parser(Path.new(pypath))
+          if !remain_import_parts && !pypath.ends_with?("__init__.py")
+            # import all global variables, namespaced as "module.var"
+            parser.@global_variables.each do |key, value|
+              @global_variables["#{name}.#{key}"] = value
+            end
+          elsif parser.@global_variables.has_key?(name)
+            # import specific global variable (stored under alias if given)
+            @global_variables[as_name.nil? ? name : as_name] = parser.@global_variables[name]
+          end
+        end
+      end
+    end
+  end
+
+  # Collect module-level assignments (`name = value` and the annotated
+  # form `name: type = value`) into @global_variables.
+  def parse_global_variables
+    index = 0
+    while index < tokens.size
+      # Only look at identifiers starting a logical line, with enough
+      # lookahead tokens available for the patterns below.
+      if (index == 0 || tokens[index - 1].type == :NEWLINE) && index + 3 < tokens.size
+        type = nil
+        if tokens[index].type == :IDENTIFIER && tokens[index + 1].type == :COLON && tokens[index + 3].type == :ASSIGN
+          # Annotated assignment: name : type = value
+          name = tokens[index].value
+          type = tokens[index + 2].value
+          value = extract_assign_data(index + 4)[1]
+        elsif tokens[index].type == :IDENTIFIER && tokens[index + 1].type == :ASSIGN
+          name = tokens[index].value
+          t = extract_assign_data(index + 2)
+          type, value = t[0], t[1]
+          # Check if the type is a direct function or class, then get the name of the function or class
+          # NOTE(review): this loop advances `index` while testing
+          # tokens[index + 2]; it looks like it could read past the end of
+          # @tokens on a trailing dotted name — confirm bounds are safe.
+          if type.nil? && tokens[index + 2].type == :IDENTIFIER
+            _type = ""
+            while tokens[index + 2].type == :IDENTIFIER || tokens[index + 2].type == :DOT
+              _type += tokens[index + 2].value
+              index += 1
+            end
+            if tokens[index + 2].type == :LPAREN
+              type = _type
+            end
+          end
+        else
+          index += 1
+          next
+        end
+
+        @global_variables[name] = GlobalVariables.new(name, type, value, path)
+      end
+      index += 1
+    end
+  end
+
+  # Normalize the string or fstring token at `index`: strips the quote
+  # characters and, for f-strings, substitutes `{name}` placeholders with
+  # known global variables. Returns the raw token value for other types.
+  def normallize(index) : String
+    if @tokens[index].type == :STRING
+      str = @tokens[index].value[1..-2]
+      return str
+    elsif @tokens[index].type == :FSTRING
+      str = @tokens[index].value[1..-2]
+      str = str.gsub(/\{[a-zA-Z_]\w*\}/) do |match|
+        key = match[1..-2]
+        # NOTE(review): this interpolates the GlobalVariables object
+        # (gsub stringifies the block result) rather than its `.value` —
+        # confirm whether `.value` was intended here.
+        @global_variables.has_key?(key) ? @global_variables[key] : match
+      end
+
+      return str
+    end
+
+    @tokens[index].value
+  end
+
+  # Read the right-hand side of an assignment starting at `index`.
+  # Returns {inferred_type_or_nil, raw_value}; the scan stops at a
+  # newline unless inside parentheses (to allow multi-line calls).
+  def extract_assign_data(index) : Tuple(String | Nil, String)
+    rawdata = ""
+    variable_type = nil # unknown
+    sindex = index
+    lparen = 0
+    while index < @tokens.size
+      token_type = @tokens[index].type
+      token_value = @tokens[index].value
+      if token_type == :LPAREN
+        lparen += 1
+        rawdata += token_value
+      elsif lparen > 0
+        # Inside parens: keep consuming (including across newlines)
+        # until the matching close.
+        rawdata += token_value
+        if token_type == :RPAREN
+          lparen -= 1
+          if lparen == 0
+            break
+          end
+        end
+      elsif token_type == :NEWLINE
+        break
+      elsif token_type == :COMMENT
+        index += 1
+        next
+      elsif sindex == index
+        # Start of the assignment: a literal string value short-circuits
+        # with type "str".
+        if token_type == :STRING || token_type == :FSTRING
+          return Tuple.new("str", normallize(index))
+        else
+          rawdata += token_value
+        end
+      else
+        rawdata += token_value
+      end
+      index += 1
+    end
+
+    rawdata = rawdata.strip
+    if @global_variables.has_key?(rawdata)
+      # Check if the rawdata is a global variable; propagate its type/value.
+      gv = @global_variables[rawdata]
+      return Tuple.new(gv.type, gv.value)
+    end
+
+    # Check if the rawdata is an instance variable (e.g. "obj.method(...)"):
+    # derive "KnownType.member" from a known global's type.
+    instance_parts = rawdata.split(".")
+    if instance_parts.size > 1
+      instance = instance_parts[0]
+      if @global_variables.has_key?(instance)
+        gv = @global_variables[instance]
+        gv_type = gv.type
+        if gv_type
+          variable_type = gv_type + "." + instance_parts[1].split("(")[0]
+        end
+      end
+    end
+
+    Tuple.new(variable_type, rawdata)
+  end
+
+  # Debug helper: print all tokens from `index` to the end of the line.
+  def print_line(index)
+    while index < @tokens.size
+      break if @tokens[index].type == :NEWLINE
+      print(@tokens[index].to_s, " ")
+      index += 1
+    end
+    puts ""
+  end
+
+  # Print `s` only when debug mode is enabled.
+  def debug_print(s)
+    if @debug
+      puts s
+    end
+  end
+
+  # Models a single parsed import statement: imported name, the file it
+  # resolved to (nil if unresolved), and an optional alias.
+  class ImportModel
+    property name : String
+    property path : String | Nil
+    property as_name : String | Nil
+
+    def initialize(@name : String, @path : String | Nil, @as_name : String | Nil)
+    end
+
+    def to_s
+      if @path.nil?
+        "#{@name} from {unknown}"
+      elsif @as_name.nil?
+        "#{@name} from #{@path}"
+      else
+        "#{@name} from #{@path} as #{@as_name}"
+      end
+    end
+  end
+
+  # A module-level variable: name, inferred type (nil if unknown), raw
+  # value text, and the file it was defined in.
+  class GlobalVariables
+    property name : String
+    property type : String | Nil
+    property value : String
+    property path : String
+
+    def initialize(@name : String, @type : String | Nil, @value : String, @path : String)
+    end
+
+    def to_s
+      if @type.nil?
+        "#{@name} = #{@value} (#{path})"
+      else
+        "#{@name} : #{@type} = #{@value} (#{path})"
+      end
+    end
+  end
+end
diff --git a/src/models/analyzer.cr b/src/models/analyzer.cr
index ae6d7b28..3db1be3d 100644
--- a/src/models/analyzer.cr
+++ b/src/models/analyzer.cr
@@ -10,16 +10,16 @@ class Analyzer
@is_debug : Bool
@is_color : Bool
@is_log : Bool
- @options : Hash(String, String)
+ @options : Hash(String, YAML::Any)
- def initialize(options : Hash(String, String))
- @base_path = options["base"]
- @url = options["url"]
+ def initialize(options : Hash(String, YAML::Any))
+ @base_path = options["base"].to_s
+ @url = options["url"].to_s
@result = [] of Endpoint
@endpoint_references = [] of EndpointReference
- @is_debug = str_to_bool(options["debug"])
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
+ @is_debug = any_to_bool(options["debug"])
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
@options = options
@logger = NoirLogger.new @is_debug, @is_color, @is_log
@@ -59,7 +59,7 @@ class FileAnalyzer < Analyzer
end
end
- @options["concurrency"].to_i.times do
+ @options["concurrency"].to_s.to_i.times do
spawn do
loop do
begin
diff --git a/src/models/code_locator.cr b/src/models/code_locator.cr
index da14ca4b..3b756919 100644
--- a/src/models/code_locator.cr
+++ b/src/models/code_locator.cr
@@ -10,9 +10,9 @@ class CodeLocator
def initialize
options = {"debug" => "true", "color" => "true", "nolog" => "false"}
- @is_debug = str_to_bool(options["debug"])
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
+ @is_debug = any_to_bool(options["debug"])
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
@logger = NoirLogger.new(@is_debug, @is_color, @is_log)
@s_map = Hash(String, String).new
diff --git a/src/models/deliver.cr b/src/models/deliver.cr
index 717d43e1..af124995 100644
--- a/src/models/deliver.cr
+++ b/src/models/deliver.cr
@@ -2,7 +2,7 @@ require "./logger"
class Deliver
@logger : NoirLogger
- @options : Hash(String, String)
+ @options : Hash(String, YAML::Any)
@is_debug : Bool
@is_color : Bool
@is_log : Bool
@@ -11,45 +11,43 @@ class Deliver
@matchers : Array(String) = [] of String
@filters : Array(String) = [] of String
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
@options = options
- @is_debug = str_to_bool(options["debug"])
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
- @proxy = options["send_proxy"]
+ @is_debug = any_to_bool(options["debug"])
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
+ @proxy = options["send_proxy"].to_s
@logger = NoirLogger.new @is_debug, @is_color, @is_log
- if options["send_with_headers"] != ""
- headers_tmp = options["send_with_headers"].split("::NOIR::HEADERS::SPLIT::")
- @logger.info "Setting headers from command line."
- headers_tmp.each do |header|
- if header.includes? ":"
- @logger.debug "Adding '#{header}' to headers."
- splited = header.split(":")
- value = ""
- begin
- if splited[1][0].to_s == " "
- value = splited[1][1..-1].to_s
- else
- value = splited[1].to_s
- end
- rescue
+ options["send_with_headers"].as_a.each do |set_header|
+ if set_header.to_s.includes? ":"
+ splited = set_header.to_s.split(":")
+ value = ""
+ begin
+ if splited[1][0].to_s == " "
+ value = splited[1][1..-1].to_s
+ else
value = splited[1].to_s
end
-
- @headers[splited[0]] = value
+ rescue
+ value = splited[1].to_s
end
+
+ @headers[splited[0]] = value
end
- @logger.sub "➔ #{@headers.size} headers added."
end
- @matchers = options["use_matchers"].split("::NOIR::MATCHER::SPLIT::")
+ options["use_matchers"].as_a.each do |matcher|
+ @matchers << matcher.to_s
+ end
@matchers.delete("")
if @matchers.size > 0
@logger.info "#{@matchers.size} matchers added."
end
- @filters = options["use_filters"].split("::NOIR::FILTER::SPLIT::")
+ options["use_filters"].as_a.each do |filter|
+ @filters << filter.to_s
+ end
@filters.delete("")
if @filters.size > 0
@logger.info "#{@filters.size} filters added."
diff --git a/src/models/detector.cr b/src/models/detector.cr
index 04a519ca..910177bf 100644
--- a/src/models/detector.cr
+++ b/src/models/detector.cr
@@ -1,4 +1,5 @@
require "./logger"
+require "yaml"
class Detector
@logger : NoirLogger
@@ -8,12 +9,12 @@ class Detector
@name : String
@base_path : String
- def initialize(options : Hash(String, String))
- @is_debug = str_to_bool(options["debug"])
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
+ def initialize(options : Hash(String, YAML::Any))
+ @is_debug = any_to_bool(options["debug"])
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
@name = ""
- @base_path = options["base"]
+ @base_path = options["base"].to_s
@logger = NoirLogger.new @is_debug, @is_color, @is_log
end
diff --git a/src/models/endpoint.cr b/src/models/endpoint.cr
index 8da408cd..0006b008 100644
--- a/src/models/endpoint.cr
+++ b/src/models/endpoint.cr
@@ -30,10 +30,11 @@ struct Endpoint
@tags = [] of Tag
end
- def set_details(@details : Details)
+ def details=(details : Details)
+ @details = details
end
- def set_protocol(protocol : String)
+ def protocol=(protocol : String)
@protocol = protocol
end
@@ -52,6 +53,7 @@ struct Endpoint
params_hash["form"] = {} of String => String
params_hash["header"] = {} of String => String
params_hash["cookie"] = {} of String => String
+ params_hash["path"] = {} of String => String
@params.each do |param|
params_hash[param.param_type][param.name] = param.value
@@ -94,6 +96,10 @@ struct Param
@name == other.name && @value == other.value && @param_type == other.param_type
end
+ def param_type=(value : String)
+ @param_type = value
+ end
+
def add_tag(tag : Tag)
@tags << tag
end
@@ -103,6 +109,7 @@ struct Details
include JSON::Serializable
include YAML::Serializable
property code_paths : Array(PathInfo) = [] of PathInfo
+ property status_code : Int32 | Nil
# + New details types to be added in the future..
@@ -117,7 +124,12 @@ struct Details
@code_paths << code_path
end
+ def status_code=(status_code : Int32)
+ @status_code = status_code
+ end
+
def ==(other : Details) : Bool
+ return false if @status_code != other.status_code
return false if @code_paths.size != other.code_paths.size
return false unless @code_paths.all? { |path| other.code_paths.any? { |other_path| path == other_path } }
true
diff --git a/src/models/logger.cr b/src/models/logger.cr
index 7a8a01e3..064cb0cf 100644
--- a/src/models/logger.cr
+++ b/src/models/logger.cr
@@ -11,6 +11,15 @@ class NoirLogger
STDOUT.puts message
end
+ def puts_sub(message)
+ STDOUT.puts " " + message
+ end
+
+ def heading(message)
+ prefix = "★".colorize(:yellow).toggle(@color_mode)
+ STDERR.puts "#{prefix} #{message}"
+ end
+
def info(message)
if @no_log
return
diff --git a/src/models/minilexer/minilexer.cr b/src/models/minilexer/minilexer.cr
index 85d5ae18..7e954dcd 100644
--- a/src/models/minilexer/minilexer.cr
+++ b/src/models/minilexer/minilexer.cr
@@ -50,6 +50,9 @@ class MiniLexer
if @mode == :persistent
@tokens = @tokens + results
+ else
+ @position = 0
+ @pos_line_array.clear
end
results
@@ -67,13 +70,27 @@ class MiniLexer
def trace
line_number = -1
lines = @input.split "\n"
- puts "line size: #{lines.size}, token number: #{tokens.size}"
+ puts "Line Size: #{lines.size}, Token Count: #{tokens.size}"
@tokens.each do |token|
- if line_number <= token.line
- puts "\nLine #{token.line}: " + lines[token.line - 1]
- line_number = token.line + 1
+ if line_number != token.line
+ line_number = token.line
+ puts "\nLine #{token.line}: " + lines[line_number - 1]
+ next if token.type == :NEWLINE # Skip newline token
end
+
puts token.to_s
end
end
+
+ def start_repl
+ loop do
+ print ">> "
+ input = gets
+ break if input.nil?
+ input = input.chomp
+ break if input == "exit"
+ tokenize(input)
+ trace
+ end
+ end
end
diff --git a/src/models/noir.cr b/src/models/noir.cr
index 0c4191da..14e4475c 100644
--- a/src/models/noir.cr
+++ b/src/models/noir.cr
@@ -1,26 +1,31 @@
require "../detector/detector.cr"
require "../analyzer/analyzer.cr"
require "../tagger/tagger.cr"
+require "../passive_scan/rules.cr"
require "../deliver/*"
require "../output_builder/*"
require "./endpoint.cr"
require "./logger.cr"
-require "../utils/string_extension.cr"
+require "../utils/*"
require "json"
+require "yaml"
class NoirRunner
- @options : Hash(String, String)
+ @options : Hash(String, YAML::Any)
@techs : Array(String)
@endpoints : Array(Endpoint)
@logger : NoirLogger
@send_proxy : String
- @send_req : String
+ @send_req : Bool
@send_es : String
@is_debug : Bool
@is_color : Bool
@is_log : Bool
@concurrency : Int32
@config_file : String
+ @noir_home : String
+ @passive_scans : Array(PassiveScan)
+ @passive_results : Array(PassiveScanResult)
macro define_getter_methods(names)
{% for name, index in names %}
@@ -30,48 +35,54 @@ class NoirRunner
{% end %}
end
- define_getter_methods [options, techs, endpoints, logger]
+ define_getter_methods [options, techs, endpoints, logger, passive_results]
def initialize(options)
@options = options
- @config_file = @options["config_file"]
+ @config_file = @options["config_file"].to_s
+ @noir_home = get_home
+ @passive_scans = [] of PassiveScan
+ @passive_results = [] of PassiveScanResult
if @config_file != ""
- config = YAML.parse(File.read(@config_file))
- @options.each do |key, _|
- string_key = key.to_s
- begin
- if config[string_key] != "" && string_key != "base"
- @options[key] = "yes" if config[string_key] == true
- @options[key] = "no" if config[string_key] == false
-
- @options[key] = config[string_key].as_s
- end
- rescue
- end
- end
+ config = YAML.parse(File.read(@config_file)).as_h
+ symbolized_hash = config.transform_keys(&.to_s)
+ @options = @options.merge(symbolized_hash) { |_, _, new_val| new_val }
end
@techs = [] of String
@endpoints = [] of Endpoint
- @send_proxy = @options["send_proxy"]
- @send_req = @options["send_req"]
- @send_es = @options["send_es"]
- @is_debug = str_to_bool(@options["debug"])
- @is_color = str_to_bool(@options["color"])
- @is_log = str_to_bool(@options["nolog"])
- @concurrency = @options["concurrency"].to_i
+ @send_proxy = @options["send_proxy"].to_s
+ @send_req = any_to_bool(@options["send_req"])
+ @send_es = @options["send_es"].to_s
+ @is_debug = any_to_bool(@options["debug"])
+ @is_color = any_to_bool(@options["color"])
+ @is_log = any_to_bool(@options["nolog"])
+ @concurrency = @options["concurrency"].to_s.to_i
@logger = NoirLogger.new @is_debug, @is_color, @is_log
- if @options["techs"].size > 0
- techs_tmp = @options["techs"].split(",")
+ if @options["techs"].to_s.size > 0
+ techs_tmp = @options["techs"].to_s.split(",")
@logger.success "Setting #{techs_tmp.size} techs from command line."
techs_tmp.each do |tech|
@techs << NoirTechs.similar_to_tech(tech)
@logger.debug "Added #{tech} to techs."
end
end
+
+ if any_to_bool(@options["passive_scan"])
+ @logger.info "Passive scanner enabled."
+ if @options["passive_scan_path"].as_a.size > 0
+ @logger.sub "├── Using custom passive rules."
+ @options["passive_scan_path"].as_a.each do |rule_path|
+ @passive_scans = NoirPassiveScan.load_rules rule_path.to_s, @logger
+ end
+ else
+ @logger.sub "├── Using default passive rules."
+ @passive_scans = NoirPassiveScan.load_rules "#{@noir_home}/passive_rules/", @logger
+ end
+ end
end
def run
@@ -79,12 +90,17 @@ class NoirRunner
end
def detect
- detected_techs = detect_techs options["base"], options, @logger
- @techs += detected_techs
+ detected_techs = detect_techs options["base"].to_s, options, @passive_scans, @logger
+ @techs = detected_techs[0]
+ @passive_results = detected_techs[1]
+
if @is_debug
@logger.debug("CodeLocator Table:")
locator = CodeLocator.instance
locator.show_table
+
+ @logger.debug("Detected Techs: #{@techs}")
+ @logger.debug("Passive Results: #{@passive_results}")
end
end
@@ -92,19 +108,25 @@ class NoirRunner
@endpoints = analysis_endpoints options, @techs, @logger
optimize_endpoints
combine_url_and_endpoints
+ add_path_parameters
+
+ # Set status code
+ if any_to_bool(@options["status_codes"]) == true || @options["exclude_codes"].to_s != ""
+ update_status_codes
+ end
# Run tagger
- if @options["all_taggers"] == "yes"
+ if any_to_bool(@options["all_taggers"]) == true
@logger.success "Running all taggers."
NoirTaggers.run_tagger @endpoints, @options, "all"
if @is_debug
- NoirTaggers.get_taggers.each do |tagger|
+ NoirTaggers.taggers.each do |tagger|
@logger.debug "Tagger: #{tagger}"
end
end
elsif @options["use_taggers"] != ""
@logger.success "Running #{@options["use_taggers"]} taggers."
- NoirTaggers.run_tagger @endpoints, @options, @options["use_taggers"]
+ NoirTaggers.run_tagger @endpoints, @options, @options["use_taggers"].to_s
end
# Run deliver
@@ -121,9 +143,7 @@ class NoirRunner
tiny_tmp.params = [] of Param
endpoint.params.each do |param|
if !param.name.includes? " "
- if @options["set_pvalue"] != ""
- param.value = @options["set_pvalue"]
- end
+ param.value = apply_pvalue(param.param_type, param.name, param.value).to_s
tiny_tmp.params << param
end
end
@@ -135,7 +155,7 @@ class NoirRunner
if dup.method == tiny_tmp.method && dup.url == tiny_tmp.url
is_new = false
tiny_tmp.params.each do |param|
- existing_param = dup.params.find { |p| p.name == param.name }
+ existing_param = dup.params.find { |dup_param| dup_param.name == param.name }
unless existing_param
dup.params << param
end
@@ -151,9 +171,57 @@ class NoirRunner
@endpoints = final
end
+ def apply_pvalue(param_type, param_name, param_value) : String
+ case param_type
+ when "query"
+ pvalue_target = @options["set_pvalue_query"]
+ when "json"
+ pvalue_target = @options["set_pvalue_json"]
+ when "form"
+ pvalue_target = @options["set_pvalue_form"]
+ when "header"
+ pvalue_target = @options["set_pvalue_header"]
+ when "cookie"
+ pvalue_target = @options["set_pvalue_cookie"]
+ when "path"
+ pvalue_target = @options["set_pvalue_path"]
+ else
+ pvalue_target = YAML::Any.new([] of YAML::Any)
+ end
+
+ # Merge with @options["set_pvalue"]
+ merged_pvalue_target = [] of YAML::Any
+ merged_pvalue_target.concat(pvalue_target.as_a)
+ merged_pvalue_target.concat(@options["set_pvalue"].as_a)
+
+ merged_pvalue_target.each do |pvalue|
+ pvalue_str = pvalue.to_s
+ if pvalue_str.includes?("=") || pvalue_str.includes?(":")
+ first_equal = pvalue_str.index("=")
+ first_colon = pvalue_str.index(":")
+
+ if first_equal && (!first_colon || first_equal < first_colon)
+ splited = pvalue_str.split("=", 2)
+ if splited[0] == param_name || splited[0] == "*"
+ return splited[1].to_s
+ end
+ elsif first_colon
+ splited = pvalue_str.split(":", 2)
+ if splited[0] == param_name || splited[0] == "*"
+ return splited[1].to_s
+ end
+ end
+ else
+ return pvalue_str
+ end
+ end
+
+ param_value.to_s
+ end
+
def combine_url_and_endpoints
tmp = [] of Endpoint
- target_url = @options["url"]
+ target_url = @options["url"].to_s
if target_url != ""
@logger.info "Combining url and endpoints."
@@ -180,6 +248,112 @@ class NoirRunner
end
end
+ def add_path_parameters
+ @logger.info "Adding path parameters by URL."
+ final = [] of Endpoint
+
+ @endpoints.each do |endpoint|
+ new_endpoint = endpoint
+
+ scans = endpoint.url.scan(/\/\{([^}]+)\}/).flatten
+ scans.each do |match|
+ param = match[1].split(":")[0]
+ new_value = apply_pvalue("path", param, "")
+ if new_value != ""
+ new_endpoint.url = new_endpoint.url.gsub("{#{match[1]}}", new_value)
+ end
+
+ new_endpoint.params << Param.new(param, "", "path")
+ end
+
+ scans = endpoint.url.scan(/\/:([^\/]+)/).flatten
+ scans.each do |match|
+ new_value = apply_pvalue("path", match[1], "")
+ if new_value != ""
+ new_endpoint.url = new_endpoint.url.gsub(":#{match[1]}", new_value)
+ end
+
+ new_endpoint.params << Param.new(match[1], "", "path")
+ end
+
+ scans = endpoint.url.scan(/\/<([^>]+)>/).flatten
+ scans.each do |match|
+ param = match[1].split(":")[-1]
+ new_value = apply_pvalue("path", param, "")
+ if new_value != ""
+ new_endpoint.url = new_endpoint.url.gsub("<#{match[1]}>", new_value)
+ end
+ new_endpoint.params << Param.new(param, "", "path")
+ end
+
+ final << new_endpoint
+ end
+
+ @endpoints = final
+ end
+
+ def update_status_codes
+ @logger.info "Updating status codes."
+ final = [] of Endpoint
+
+ exclude_codes = [] of Int32
+ if @options["exclude_codes"].to_s != ""
+ @options["exclude_codes"].to_s.split(",").each do |code|
+ exclude_codes << code.strip.to_i
+ end
+ end
+
+ @endpoints.each do |endpoint|
+ begin
+ if endpoint.params.size > 0
+ endpoint_hash = endpoint.params_to_hash
+ body = {} of String => String
+ is_json = false
+ if endpoint_hash["json"].size > 0
+ is_json = true
+ body = endpoint_hash["json"]
+ else
+ body = endpoint_hash["form"]
+ end
+
+ response = Crest::Request.execute(
+ method: get_symbol(endpoint.method),
+ url: endpoint.url,
+ tls: OpenSSL::SSL::Context::Client.insecure,
+ user_agent: "Noir/#{Noir::VERSION}",
+ params: endpoint_hash["query"],
+ form: body,
+ json: is_json,
+ handle_errors: false,
+ read_timeout: 5.second
+ )
+ endpoint.details.status_code = response.status_code
+ unless exclude_codes.includes?(response.status_code)
+ final << endpoint
+ end
+ else
+ response = Crest::Request.execute(
+ method: get_symbol(endpoint.method),
+ url: endpoint.url,
+ tls: OpenSSL::SSL::Context::Client.insecure,
+ user_agent: "Noir/#{Noir::VERSION}",
+ handle_errors: false,
+ read_timeout: 5.second
+ )
+ endpoint.details.status_code = response.status_code
+ unless exclude_codes.includes?(response.status_code)
+ final << endpoint
+ end
+ end
+ rescue e
+ @logger.error "Failed to get status code for #{endpoint.url} (#{e.message})."
+ final << endpoint
+ end
+ end
+
+ @endpoints = final
+ end
+
def deliver
if @send_proxy != ""
@logger.info "Sending requests with proxy #{@send_proxy}."
@@ -187,7 +361,7 @@ class NoirRunner
deliver.run(@endpoints)
end
- if @send_req != "no"
+ if @send_req != false
@logger.info "Sending requests without proxy."
deliver = SendReq.new(@options)
deliver.run(@endpoints)
@@ -217,9 +391,9 @@ class NoirRunner
def report
case options["format"]
when "yaml"
- puts @endpoints.to_yaml
+ puts({"endpoints" => @endpoints, "passive_results" => @passive_results}.to_yaml)
when "json"
- puts @endpoints.to_json
+ puts({"endpoints" => @endpoints, "passive_results" => @passive_results}.to_json)
when "jsonl"
builder = OutputBuilderJsonl.new @options
builder.print @endpoints
@@ -255,7 +429,20 @@ class NoirRunner
builder.print @endpoints
else
builder = OutputBuilderCommon.new @options
+
+ @logger.heading "Endpoint Results:"
builder.print @endpoints
+
+ print_passive_results
+ end
+ end
+
+ def print_passive_results
+ if @passive_results.size > 0
+ @logger.puts ""
+ @logger.heading "Passive Results:"
+ builder = OutputBuilderPassiveScan.new @options
+ builder.print @passive_results, @logger, @is_color
end
end
end
diff --git a/src/models/output_builder.cr b/src/models/output_builder.cr
index 1a2cfdb7..d54e7d15 100644
--- a/src/models/output_builder.cr
+++ b/src/models/output_builder.cr
@@ -2,18 +2,18 @@ require "./logger"
class OutputBuilder
@logger : NoirLogger
- @options : Hash(String, String)
+ @options : Hash(String, YAML::Any)
@is_debug : Bool
@is_color : Bool
@is_log : Bool
@output_file : String
- def initialize(options : Hash(String, String))
- @is_debug = str_to_bool(options["debug"])
+ def initialize(options : Hash(String, YAML::Any))
+ @is_debug = any_to_bool(options["debug"])
@options = options
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
- @output_file = options["output"]
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
+ @output_file = options["output"].to_s
@logger = NoirLogger.new @is_debug, @is_color, @is_log
end
@@ -36,6 +36,7 @@ class OutputBuilder
final_url = url
final_body = ""
+ final_path_params = [] of String
final_headers = [] of String
final_cookies = [] of String
final_tags = [] of String
@@ -69,6 +70,10 @@ class OutputBuilder
end
end
+ if param.param_type == "path"
+ final_path_params << "#{param.name}"
+ end
+
if param.param_type == "header"
final_headers << "#{param.name}: #{param.value}"
end
@@ -103,18 +108,20 @@ class OutputBuilder
@logger.debug "Baked endpoints"
@logger.debug " + Final URL: #{final_url}"
+ @logger.debug " + Path Params: #{final_path_params}"
@logger.debug " + Body: #{final_body}"
@logger.debug " + Headers: #{final_headers}"
@logger.debug " + Cookies: #{final_cookies}"
@logger.debug " + Tags: #{final_tags}"
{
- url: final_url,
- body: final_body,
- header: final_headers,
- cookie: final_cookies,
- tags: final_tags.uniq,
- body_type: is_json ? "json" : "form",
+ url: final_url,
+ body: final_body,
+ path_param: final_path_params,
+ header: final_headers,
+ cookie: final_cookies,
+ tags: final_tags.uniq,
+ body_type: is_json ? "json" : "form",
}
end
diff --git a/src/models/passive_scan.cr b/src/models/passive_scan.cr
new file mode 100644
index 00000000..96673953
--- /dev/null
+++ b/src/models/passive_scan.cr
@@ -0,0 +1,70 @@
+require "./logger"
+require "yaml"
+
+# A passive-scan rule loaded from a YAML rule file: identifying info,
+# one or more pattern matchers, and the techs/category it applies to.
+struct PassiveScan
+  # Rule metadata from the YAML `info` mapping.
+  struct Info
+    include JSON::Serializable
+    include YAML::Serializable
+    property name : String
+    property author : Array(YAML::Any)
+    property severity : String
+    property description : String
+    property reference : Array(YAML::Any)
+
+    # Builds from a parsed YAML node; raises if a required key is missing.
+    def initialize(yaml : YAML::Any)
+      @name = yaml["name"].as_s
+      @severity = yaml["severity"].as_s
+      @description = yaml["description"].as_s
+      @reference = yaml["reference"].as_a
+      @author = yaml["author"].as_a
+    end
+  end
+
+  # One matcher entry: a match type, its patterns, and how the patterns
+  # combine (the `condition` field).
+  struct Matcher
+    property type : String
+    property patterns : Array(YAML::Any)
+    property condition : String
+
+    def initialize(yaml : YAML::Any)
+      @type = yaml["type"].as_s
+      @patterns = yaml["patterns"].as_a
+      @condition = yaml["condition"].as_s
+    end
+  end
+
+  property id : String
+  property info : Info
+  property matchers_condition : String
+  property matchers : Array(Matcher)
+  property category : String
+  property techs : Array(YAML::Any)
+
+  def initialize(yaml : YAML::Any)
+    @id = yaml["id"].as_s
+    @info = Info.new(yaml["info"])
+    @matchers = yaml["matchers"].as_a.map { |matcher| Matcher.new(matcher) }
+    # `.to_s` (not `.as_s`) tolerates a non-string node here, but note
+    # that a missing "matchers-condition" key would still raise.
+    @matchers_condition = yaml["matchers-condition"].to_s
+    @category = yaml["category"].as_s
+    @techs = yaml["techs"].as_a
+  end
+
+  # Basic sanity check after loading.
+  # NOTE(review): `@info != ""` compares an Info struct to a String and
+  # so never rejects anything — confirm the intended check.
+  def valid?
+    @id != "" && @info != "" && @matchers.size > 0
+  end
+end
+
+# A single finding produced by a passive-scan rule: copies the rule's
+# identity fields and records where the match occurred.
+struct PassiveScanResult
+  include JSON::Serializable
+  include YAML::Serializable
+  property id, info, category, techs, file_path, line_number, extract
+
+  # passive_scan: the rule that matched; file_path/line_number: match
+  # location; extract: the matched line/snippet.
+  def initialize(passive_scan : PassiveScan, file_path : String, line_number : Int32, extract : String)
+    @id = passive_scan.id
+    @info = passive_scan.info
+    @category = passive_scan.category
+    @techs = passive_scan.techs
+    @file_path = file_path
+    @line_number = line_number
+    @extract = extract
+  end
+end
diff --git a/src/models/tagger.cr b/src/models/tagger.cr
index dc676f50..a9c2d8cd 100644
--- a/src/models/tagger.cr
+++ b/src/models/tagger.cr
@@ -2,17 +2,17 @@ require "./logger"
class Tagger
@logger : NoirLogger
- @options : Hash(String, String)
+ @options : Hash(String, YAML::Any)
@is_debug : Bool
@is_color : Bool
@is_log : Bool
@name : String
- def initialize(options : Hash(String, String))
- @is_debug = str_to_bool(options["debug"])
+ def initialize(options : Hash(String, YAML::Any))
+ @is_debug = any_to_bool(options["debug"])
@options = options
- @is_color = str_to_bool(options["color"])
- @is_log = str_to_bool(options["nolog"])
+ @is_color = any_to_bool(options["color"])
+ @is_log = any_to_bool(options["nolog"])
@name = ""
@logger = NoirLogger.new @is_debug, @is_color, @is_log
diff --git a/src/noir.cr b/src/noir.cr
index 6a90a635..cdbd7f46 100644
--- a/src/noir.cr
+++ b/src/noir.cr
@@ -6,7 +6,7 @@ require "./options.cr"
require "./techs/techs.cr"
module Noir
- VERSION = "0.17.0"
+ VERSION = "0.18.0"
end
# Print banner
@@ -23,6 +23,39 @@ if noir_options["base"] == ""
exit(1)
end
+if noir_options["url"] != "" && !noir_options["url"].to_s.includes?("://")
+ STDERR.puts "WARNING: The protocol (http or https) is missing in the URL '#{noir_options["url"]}'.".colorize(Colorize::Color256.new(208))
+ noir_options["url"] = YAML::Any.new("http://#{noir_options["url"]}")
+end
+
+# Check URL
+if noir_options["status_codes"] == true && noir_options["url"] == ""
+ STDERR.puts "ERROR: The --status-codes option requires the -u or --url flag to be specified.".colorize(:yellow)
+ STDERR.puts "Please use -u or --url to set the URL."
+ STDERR.puts "If you need help, use -h or --help."
+ exit(1)
+end
+
+# Check URL
+if noir_options["exclude_codes"] != ""
+ if noir_options["url"] == ""
+ STDERR.puts "ERROR: The --exclude-codes option requires the -u or --url flag to be specified.".colorize(:yellow)
+ STDERR.puts "Please use -u or --url to set the URL."
+ STDERR.puts "If you need help, use -h or --help."
+ exit(1)
+ end
+
+ noir_options["exclude_codes"].to_s.split(",").each do |code|
+ begin
+ code.strip.to_i
+ rescue
+ STDERR.puts "ERROR: Invalid --exclude-codes option: '#{code}'".colorize(:yellow)
+ STDERR.puts "Please use comma-separated numbers."
+ exit(1)
+ end
+ end
+end
+
# Run Noir
app = NoirRunner.new noir_options
start_time = Time.monotonic
@@ -43,8 +76,8 @@ app_diff = nil
if noir_options["diff"] != ""
# Diff mode
diff_options = noir_options.dup
- diff_options["base"] = noir_options["diff"].to_s
- diff_options["nolog"] = "yes"
+ diff_options["base"] = noir_options["diff"]
+ diff_options["nolog"] = YAML::Any.new(true)
app_diff = NoirRunner.new diff_options
app.logger.info "Running Noir with Diff mode."
@@ -60,6 +93,10 @@ if app.techs.size == 0
app.logger.sub "➔ Please check tech lists using the --list-techs flag."
if app.options["url"] != ""
app.logger.info "Start file-based analysis as the -u flag has been used."
+ elsif app.passive_results.size > 0
+ app.logger.info "Noir found #{app.passive_results.size} passive results."
+ app.report
+ exit(0)
else
exit(0)
end
diff --git a/src/options.cr b/src/options.cr
index 192a3667..25c08a57 100644
--- a/src/options.cr
+++ b/src/options.cr
@@ -1,5 +1,15 @@
require "./completions.cr"
require "./config_initializer.cr"
+require "yaml"
+
+macro append_to_yaml_array(hash, key, value)
+ tmp = [] of YAML::Any
+ {{hash.id}}[{{key.stringify}}].as_a.each do |item|
+ tmp << item
+ end
+ tmp << YAML::Any.new({{value}})
+ {{hash.id}}[{{key.stringify}}] = YAML::Any.new(tmp)
+end
def run_options_parser
# Check config file
@@ -10,29 +20,71 @@ def run_options_parser
parser.banner = "USAGE: noir \n"
parser.separator "FLAGS:"
parser.separator " BASE:".colorize(:blue)
- parser.on "-b PATH", "--base-path ./app", "(Required) Set base path" { |var| noir_options["base"] = var }
- parser.on "-u URL", "--url http://..", "Set base url for endpoints" { |var| noir_options["url"] = var }
+ parser.on "-b PATH", "--base-path ./app", "(Required) Set base path" { |var| noir_options["base"] = YAML::Any.new(var) }
+ parser.on "-u URL", "--url http://..", "Set base url for endpoints" { |var| noir_options["url"] = YAML::Any.new(var) }
parser.separator "\n OUTPUT:".colorize(:blue)
- parser.on "-f FORMAT", "--format json", "Set output format\n * plain yaml json jsonl markdown-table\n * curl httpie oas2 oas3\n * only-url only-param only-header only-cookie only-tag" { |var| noir_options["format"] = var }
- parser.on "-o PATH", "--output out.txt", "Write result to file" { |var| noir_options["output"] = var }
- parser.on "--set-pvalue VALUE", "Specifies the value of the identified parameter" { |var| noir_options["set_pvalue"] = var }
+ parser.on "-f FORMAT", "--format json", "Set output format\n * plain yaml json jsonl markdown-table\n * curl httpie oas2 oas3\n * only-url only-param only-header only-cookie only-tag" { |var| noir_options["format"] = YAML::Any.new(var) }
+ parser.on "-o PATH", "--output out.txt", "Write result to file" { |var| noir_options["output"] = YAML::Any.new(var) }
+ parser.on "--set-pvalue VALUE", "Specifies the value of the identified parameter for all types" do |var|
+ append_to_yaml_array(noir_options, set_pvalue, var)
+ end
+
+ parser.on "--set-pvalue-header VALUE", "Specifies the value of the identified parameter for headers" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_header, var)
+ end
+
+ parser.on "--set-pvalue-cookie VALUE", "Specifies the value of the identified parameter for cookies" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_cookie, var)
+ end
+
+ parser.on "--set-pvalue-query VALUE", "Specifies the value of the identified parameter for query parameters" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_query, var)
+ end
+
+ parser.on "--set-pvalue-form VALUE", "Specifies the value of the identified parameter for form data" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_form, var)
+ end
+
+ parser.on "--set-pvalue-json VALUE", "Specifies the value of the identified parameter for JSON data" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_json, var)
+ end
+
+ parser.on "--set-pvalue-path VALUE", "Specifies the value of the identified parameter for path parameters" do |var|
+ append_to_yaml_array(noir_options, set_pvalue_path, var)
+ end
+
+ parser.on "--status-codes", "Display HTTP status codes for discovered endpoints" do
+ noir_options["status_codes"] = YAML::Any.new(true)
+ end
+
+ parser.on "--exclude-codes 404,500", "Exclude specific HTTP response codes (comma-separated)" { |var| noir_options["exclude_codes"] = YAML::Any.new(var) }
+
parser.on "--include-path", "Include file path in the plain result" do
- noir_options["include_path"] = "yes"
+ noir_options["include_path"] = YAML::Any.new(true)
end
+
parser.on "--no-color", "Disable color output" do
- noir_options["color"] = "no"
+ noir_options["color"] = YAML::Any.new(false)
end
+
parser.on "--no-log", "Displaying only the results" do
- noir_options["nolog"] = "yes"
+ noir_options["nolog"] = YAML::Any.new(true)
+ end
+
+ parser.separator "\n PASSIVE SCAN:".colorize(:blue)
+
+ parser.on "-P", "--passive-scan", "Perform a passive scan for security issues using rules from the specified path" { |_| noir_options["passive_scan"] = YAML::Any.new(true) }
+ parser.on "--passive-scan-path PATH", "Specify the path for the rules used in the passive security scan" do |var|
+ append_to_yaml_array(noir_options, passive_scan_path, var)
end
parser.separator "\n TAGGER:".colorize(:blue)
- parser.on "-T", "--use-all-taggers", "Activates all taggers for full analysis coverage" { |_| noir_options["all_taggers"] = "yes" }
- parser.on "--use-taggers VALUES", "Activates specific taggers (e.g., --use-taggers hunt,oauth)" { |var| noir_options["use_taggers"] = var }
+ parser.on "-T", "--use-all-taggers", "Activates all taggers for full analysis coverage" { |_| noir_options["all_taggers"] = YAML::Any.new(true) }
+ parser.on "--use-taggers VALUES", "Activates specific taggers (e.g., --use-taggers hunt,oauth)" { |var| noir_options["use_taggers"] = YAML::Any.new(var) }
parser.on "--list-taggers", "Lists all available taggers" do
puts "Available taggers:"
- techs = NoirTaggers.get_taggers
+ techs = NoirTaggers.taggers
techs.each do |tagger, value|
puts " #{tagger.to_s.colorize(:green)}"
value.each do |k, v|
@@ -43,28 +95,28 @@ def run_options_parser
end
parser.separator "\n DELIVER:".colorize(:blue)
- parser.on "--send-req", "Send results to a web request" { |_| noir_options["send_req"] = "yes" }
- parser.on "--send-proxy http://proxy..", "Send results to a web request via an HTTP proxy" { |var| noir_options["send_proxy"] = var }
- parser.on "--send-es http://es..", "Send results to Elasticsearch" { |var| noir_options["send_es"] = var }
+ parser.on "--send-req", "Send results to a web request" { |_| noir_options["send_req"] = YAML::Any.new(true) }
+ parser.on "--send-proxy http://proxy..", "Send results to a web request via an HTTP proxy" { |var| noir_options["send_proxy"] = YAML::Any.new(var) }
+ parser.on "--send-es http://es..", "Send results to Elasticsearch" { |var| noir_options["send_es"] = YAML::Any.new(var) }
parser.on "--with-headers X-Header:Value", "Add custom headers to be included in the delivery" do |var|
- noir_options["send_with_headers"] += "#{var}::NOIR::HEADERS::SPLIT::"
+ append_to_yaml_array(noir_options, send_with_headers, var)
end
parser.on "--use-matchers string", "Send URLs that match specific conditions to the Deliver" do |var|
- noir_options["use_matchers"] += "#{var}::NOIR::MATCHER::SPLIT::"
+ append_to_yaml_array(noir_options, use_matchers, var)
end
parser.on "--use-filters string", "Exclude URLs that match specified conditions and send the rest to Deliver" do |var|
- noir_options["use_filters"] += "#{var}::NOIR::FILTER::SPLIT::"
+ append_to_yaml_array(noir_options, use_filters, var)
end
parser.separator "\n DIFF:".colorize(:blue)
- parser.on "--diff-path ./app2", "Specify the path to the old version of the source code for comparison" { |var| noir_options["diff"] = var }
+ parser.on "--diff-path ./app2", "Specify the path to the old version of the source code for comparison" { |var| noir_options["diff"] = YAML::Any.new(var) }
parser.separator "\n TECHNOLOGIES:".colorize(:blue)
- parser.on "-t TECHS", "--techs rails,php", "Specify the technologies to use" { |var| noir_options["techs"] = var }
- parser.on "--exclude-techs rails,php", "Specify the technologies to be excluded" { |var| noir_options["exclude_techs"] = var }
+ parser.on "-t TECHS", "--techs rails,php", "Specify the technologies to use" { |var| noir_options["techs"] = YAML::Any.new(var) }
+ parser.on "--exclude-techs rails,php", "Specify the technologies to be excluded" { |var| noir_options["exclude_techs"] = YAML::Any.new(var) }
parser.on "--list-techs", "Show all technologies" do
puts "Available technologies:"
- techs = NoirTechs.get_techs
+ techs = NoirTechs.techs
techs.each do |tech, value|
puts " #{tech.to_s.colorize(:green)}"
value.each do |k, v|
@@ -75,22 +127,24 @@ def run_options_parser
end
parser.separator "\n CONFIG:".colorize(:blue)
- parser.on "--config-file ./config.yaml", "Specify the path to a configuration file in YAML format" { |var| noir_options["config_file"] = var }
- parser.on "--concurrency 100", "Set concurrency" { |var| noir_options["concurrency"] = var }
- parser.on "--generate-completion zsh", "Generate Zsh/Bash completion script" do |var|
+ parser.on "--config-file ./config.yaml", "Specify the path to a configuration file in YAML format" { |var| noir_options["config_file"] = YAML::Any.new(var) }
+ parser.on "--concurrency 100", "Set concurrency" { |var| noir_options["concurrency"] = YAML::Any.new(var) }
+ parser.on "--generate-completion zsh", "Generate Zsh/Bash/Fish completion script" do |var|
case var
when "zsh"
puts generate_zsh_completion_script
- puts "\n"
- puts "> Instructions: Copy the content above and save it in the zsh-completion directory as _noir".colorize(:yellow)
+ STDERR.puts "\n> Instructions: Copy the content above and save it in the zsh-completion directory as _noir".colorize(:yellow)
when "bash"
puts generate_bash_completion_script
- puts "\n"
- puts "> Instructions: Copy the content above and save it in the .bashrc file as noir.".colorize(:yellow)
+ STDERR.puts "\n> Instructions: Copy the content above and save it in the .bashrc file as noir.".colorize(:yellow)
+ when "fish"
+ puts generate_fish_completion_script
+ STDERR.puts "\n> Instructions: Copy the content above and save it in the fish-completion directory as noir.fish".colorize(:yellow)
else
- puts "ERROR: Invalid completion type."
+ puts "ERROR: Invalid completion type.".colorize(:yellow)
puts "e.g., noir --generate-completion zsh"
puts "e.g., noir --generate-completion bash"
+ puts "e.g., noir --generate-completion fish"
end
exit
@@ -98,7 +152,7 @@ def run_options_parser
parser.separator "\n DEBUG:".colorize(:blue)
parser.on "-d", "--debug", "Show debug messages" do
- noir_options["debug"] = "yes"
+ noir_options["debug"] = YAML::Any.new(true)
end
parser.on "-v", "--version", "Show version" do
puts Noir::VERSION
@@ -126,12 +180,12 @@ def run_options_parser
exit
end
parser.invalid_option do |flag|
- STDERR.puts "ERROR: #{flag} is not a valid option."
+ STDERR.puts "ERROR: #{flag} is not a valid option.".colorize(:yellow)
STDERR.puts parser
exit(1)
end
parser.missing_option do |flag|
- STDERR.puts "ERROR: #{flag} is missing an argument."
+ STDERR.puts "ERROR: #{flag} is missing an argument.".colorize(:yellow)
exit(1)
end
end
diff --git a/src/output_builder/common.cr b/src/output_builder/common.cr
index 488feb81..88caa892 100644
--- a/src/output_builder/common.cr
+++ b/src/output_builder/common.cr
@@ -20,6 +20,25 @@ class OutputBuilderCommon < OutputBuilder
r_ws = ""
r_buffer = "\n#{r_method} #{r_url}"
+ if any_to_bool(@options["status_codes"]) == true || @options["exclude_codes"] != ""
+ status_color = :light_green
+ status_code = endpoint.details.status_code
+ if status_code
+ if status_code >= 500
+ status_color = :light_magenta
+ elsif status_code >= 400
+ status_color = :light_red
+ elsif status_code >= 300
+ status_color = :cyan
+ end
+ else
+ status_code = "error"
+ status_color = :light_red
+ end
+
+ r_buffer += " [#{status_code}]".to_s.colorize(status_color).toggle(@is_color).to_s
+ end
+
if endpoint.protocol == "ws"
r_ws = "[websocket]".colorize(:light_red).toggle(@is_color)
r_buffer += " #{r_ws}"
@@ -43,6 +62,11 @@ class OutputBuilderCommon < OutputBuilder
end
end
+ if baked[:path_param].size > 0
+ r_path_param = baked[:path_param].join(", ").colorize(:cyan).toggle(@is_color)
+ r_buffer += "\n ○ path: #{r_path_param}"
+ end
+
if baked[:body] != ""
r_body = baked[:body].colorize(:cyan).toggle(@is_color)
r_buffer += "\n ○ body: #{r_body}"
@@ -58,7 +82,7 @@ class OutputBuilderCommon < OutputBuilder
r_buffer += "\n ○ tags: #{r_tags}"
end
- if @options["include_path"] == "yes"
+ if any_to_bool(@options["include_path"]) == true
details = endpoint.details
if details.code_paths && details.code_paths.size > 0
details.code_paths.each do |code_path|
diff --git a/src/output_builder/passive_scan.cr b/src/output_builder/passive_scan.cr
new file mode 100644
index 00000000..7ef6ea41
--- /dev/null
+++ b/src/output_builder/passive_scan.cr
@@ -0,0 +1,33 @@
+require "../models/output_builder"
+require "../models/passive_scan"
+
+require "json"
+require "yaml"
+
+class OutputBuilderPassiveScan < OutputBuilder
+ def print(passive_results : Array(PassiveScanResult), logger : NoirLogger, is_color : Bool)
+ passive_results.each do |result|
+ logger.puts "[#{severity_color(result.info.severity)}][#{result.id.colorize(:light_blue).toggle(is_color)}][#{result.category.colorize(:light_yellow).toggle(is_color)}] #{result.info.name.colorize(:light_green).toggle(is_color)}"
+ logger.sub "├── extract: #{result.extract}"
+ logger.sub "└── file: #{result.file_path}:#{result.line_number}"
+ logger.puts ""
+ end
+ end
+
+ def severity_color(severity : String) : String
+ case severity
+ when "critical"
+ severity.colorize(:red).to_s
+ when "high"
+ severity.colorize(:light_red).to_s
+ when "medium"
+ severity.colorize(:yellow).to_s
+ when "low"
+ severity.colorize(:light_yellow).to_s
+ when "info"
+ severity.colorize(:light_blue).to_s
+ else
+ severity.colorize(:light_white).to_s
+ end
+ end
+end
diff --git a/src/passive_scan/detect.cr b/src/passive_scan/detect.cr
new file mode 100644
index 00000000..0862120f
--- /dev/null
+++ b/src/passive_scan/detect.cr
@@ -0,0 +1,64 @@
+require "../models/passive_scan"
+require "../models/logger"
+require "yaml"
+
+module NoirPassiveScan
+ def self.detect(file_path : String, file_content : String, rules : Array(PassiveScan), logger : NoirLogger) : Array(PassiveScanResult)
+ results = [] of PassiveScanResult
+
+ rules.each do |rule|
+ matchers = rule.matchers
+
+ if rule.matchers_condition == "and"
+ if matchers.all? { |matcher| match_content?(file_content, matcher) }
+ logger.sub "└── Detected: #{rule.info.name}"
+ index = 0
+ file_content.each_line do |line|
+ if matchers.all? { |matcher| match_content?(line, matcher) }
+ results << PassiveScanResult.new(rule, file_path, index + 1, line)
+ end
+ index += 1
+ end
+ end
+ else
+ matchers.each do |matcher|
+ index = 0
+ file_content.each_line do |line|
+ if match_content?(line, matcher)
+ logger.sub "└── Detected: #{rule.info.name}"
+ results << PassiveScanResult.new(rule, file_path, index + 1, line)
+ end
+ index += 1
+ end
+ end
+ end
+ end
+
+ results
+ end
+
+ private def self.match_content?(content : String, matcher : PassiveScan::Matcher) : (Array(YAML::Any) | Bool)
+ case matcher.type
+ when "word"
+ case matcher.condition
+ when "and"
+ matcher.patterns && matcher.patterns.all? { |pattern| content.includes?(pattern.to_s) }
+ when "or"
+ matcher.patterns && matcher.patterns.any? { |pattern| content.includes?(pattern.to_s) }
+ else
+ false
+ end
+ when "regex"
+ case matcher.condition
+ when "and"
+ matcher.patterns && matcher.patterns.all? { |pattern| content.match(Regex.new(pattern.to_s)) }
+ when "or"
+ matcher.patterns && matcher.patterns.any? { |pattern| content.match(Regex.new(pattern.to_s)) }
+ else
+ false
+ end
+ else
+ false
+ end
+ end
+end
diff --git a/src/passive_scan/rules.cr b/src/passive_scan/rules.cr
new file mode 100644
index 00000000..6d5c9aa8
--- /dev/null
+++ b/src/passive_scan/rules.cr
@@ -0,0 +1,30 @@
+require "../models/passive_scan"
+require "../models/logger"
+require "yaml"
+
+module NoirPassiveScan
+ def self.load_rules(path : String, logger : NoirLogger) : Array(PassiveScan)
+ rules = [] of PassiveScan
+
+ # Read all .yml and .yaml files from the specified path
+ Dir.glob("#{path}/**/*.{yml,yaml}").each do |file|
+ begin
+ # Deserialize each file into a PassiveScan object
+ yaml_rule = YAML.parse(File.read(file))
+ passive_rule = PassiveScan.new(yaml_rule)
+ if passive_rule.valid?
+ rules << passive_rule
+ else
+ logger.debug_sub "Invalid rule in #{file}"
+ end
+ rescue e : Exception
+ # Log or handle the error if deserialization fails
+ logger.debug_sub "Failed to load rule from #{file}: #{e.message}"
+ end
+ end
+
+ logger.sub "└── Loaded #{rules.size} valid passive scan rules."
+
+ rules
+ end
+end
diff --git a/src/tagger/tagger.cr b/src/tagger/tagger.cr
index d779f850..9e284e54 100644
--- a/src/tagger/tagger.cr
+++ b/src/tagger/tagger.cr
@@ -30,11 +30,11 @@ module NoirTaggers
},
}
- def self.get_taggers
+ def self.taggers
HasTaggers
end
- def self.run_tagger(endpoints : Array(Endpoint), options : Hash(String, String), use_taggers : String)
+ def self.run_tagger(endpoints : Array(Endpoint), options : Hash(String, YAML::Any), use_taggers : String)
tagger_list = [] of Tagger # This will hold instances of taggers
# Define taggers by creating instances
diff --git a/src/tagger/taggers/cors.cr b/src/tagger/taggers/cors.cr
index 78d2cebe..9fe7e971 100644
--- a/src/tagger/taggers/cors.cr
+++ b/src/tagger/taggers/cors.cr
@@ -4,7 +4,7 @@ require "../../models/endpoint"
class CorsTagger < Tagger
WORDS = ["origin", "access-control-allow-origin", "access-control-request-method"]
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
super
@name = "cors"
end
diff --git a/src/tagger/taggers/hunt_param.cr b/src/tagger/taggers/hunt_param.cr
index a93e5c49..fa13db43 100644
--- a/src/tagger/taggers/hunt_param.cr
+++ b/src/tagger/taggers/hunt_param.cr
@@ -16,7 +16,7 @@ class HuntParamTagger < Tagger
"description" => "This parameter may be vulnerable to SQL Injection attacks.",
},
"idor" => {
- "words" => ["id", "user", "account", "number", "order", "no", "doc", "key", "email", "group", "profile", "edit", "report"],
+ "words" => ["id", "user", "account", "number", "order", "no", "doc", "key", "email", "group", "profile", "edit", "report"],
"description" => "This parameter may be vulnerable to Insecure Direct Object Reference (IDOR) attacks.",
},
"file-inclusion" => {
@@ -33,7 +33,7 @@ class HuntParamTagger < Tagger
},
}
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
super
@name = "hunt"
end
diff --git a/src/tagger/taggers/oauth.cr b/src/tagger/taggers/oauth.cr
index 37065b82..a0647c9c 100644
--- a/src/tagger/taggers/oauth.cr
+++ b/src/tagger/taggers/oauth.cr
@@ -4,7 +4,7 @@ require "../../models/endpoint"
class OAuthTagger < Tagger
WORDS = ["grant_type", "code", "redirect_uri", "redirect_url", "client_id", "client_secret"]
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
super
@name = "oauth"
end
diff --git a/src/tagger/taggers/soap.cr b/src/tagger/taggers/soap.cr
index 98d87607..89a46a7f 100644
--- a/src/tagger/taggers/soap.cr
+++ b/src/tagger/taggers/soap.cr
@@ -4,7 +4,7 @@ require "../../models/endpoint"
class SoapTagger < Tagger
WORDS = ["soapaction"]
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
super
@name = "soap"
end
diff --git a/src/tagger/taggers/websocket.cr b/src/tagger/taggers/websocket.cr
index a2ebff4e..26c1f51b 100644
--- a/src/tagger/taggers/websocket.cr
+++ b/src/tagger/taggers/websocket.cr
@@ -4,7 +4,7 @@ require "../../models/endpoint"
class WebsocketTagger < Tagger
WORDS = ["sec-websocket-key", "sec-websocket-accept", "sec-websocket-version"]
- def initialize(options : Hash(String, String))
+ def initialize(options : Hash(String, YAML::Any))
super
@name = "websocket"
end
diff --git a/src/techs/techs.cr b/src/techs/techs.cr
index c3793c09..67adfde0 100644
--- a/src/techs/techs.cr
+++ b/src/techs/techs.cr
@@ -472,9 +472,27 @@ module NoirTechs
:websocket => false,
},
},
+ :rust_actix_web => {
+ :framework => "Actix Web",
+ :language => "Rust",
+ :similar => ["actix-web", "actix_web", "rust-actix-web", "rust_actix_web"],
+ :supported => {
+ :endpoint => true,
+ :method => true,
+ :params => {
+ :query => false,
+ :path => false,
+ :body => false,
+ :header => false,
+ :cookie => false,
+ },
+ :static_path => false,
+ :websocket => false,
+ },
+ },
}
- def self.get_techs
+ def self.techs
TECHS
end
diff --git a/src/utils/home.cr b/src/utils/home.cr
new file mode 100644
index 00000000..06a6cec1
--- /dev/null
+++ b/src/utils/home.cr
@@ -0,0 +1,16 @@
+def get_home
+ config_dir = ""
+
+ if ENV.has_key? "NOIR_HOME"
+ config_dir = ENV["NOIR_HOME"]
+ else
+ # Define the config directory and file based on the OS
+ {% if flag?(:windows) %}
+ config_dir = "#{ENV["APPDATA"]}\\noir"
+ {% else %}
+ config_dir = "#{ENV["HOME"]}/.config/noir"
+ {% end %}
+ end
+
+ config_dir
+end
diff --git a/src/utils/utils.cr b/src/utils/utils.cr
index bad30b1c..1481f6f7 100644
--- a/src/utils/utils.cr
+++ b/src/utils/utils.cr
@@ -24,9 +24,16 @@ def join_path(*segments : String) : String
path
end
-def str_to_bool(str)
- if str == "yes"
+def any_to_bool(any) : Bool
+ case any.to_s
+ when "false"
+ return false
+ when "true"
return true
+ when "yes"
+ return true
+ when "no"
+ return false
end
false