diff --git a/.github/workflows/snapcraft_publish.yml b/.github/workflows/snapcraft_publish.yml new file mode 100644 index 00000000..ce37d688 --- /dev/null +++ b/.github/workflows/snapcraft_publish.yml @@ -0,0 +1,36 @@ +name: Snapcraft tab Publish + +on: + release: + types: [published] + +jobs: + snapcraft-releaser: + runs-on: ubuntu-latest + name: snapcraft-releaser + strategy: + fail-fast: false + matrix: + platform: + - amd64 + - arm64 + steps: + - name: Check out Git repository + uses: actions/checkout@v3 + + - uses: diddlesnaps/snapcraft-multiarch-action@v1 + with: + path: stores/snapcraft/stable + architecture: ${{ matrix.platform }} + id: build + + - uses: diddlesnaps/snapcraft-review-action@v1 + with: + snap: ${{ steps.build.outputs.snap }} + + - uses: snapcore/action-publish@master + env: + SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_STORE_LOGIN }} + with: + snap: ${{ steps.build.outputs.snap }} + release: stable \ No newline at end of file diff --git a/shard.yml b/shard.yml index 3f10eae5..e72751c7 100644 --- a/shard.yml +++ b/shard.yml @@ -1,5 +1,5 @@ name: noir -version: 0.12.2 +version: 0.13.0 authors: - hahwul diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml new file mode 100644 index 00000000..4c82fbbb --- /dev/null +++ b/snap/snapcraft.yaml @@ -0,0 +1,41 @@ +name: noir +base: core20 +version: 0.13.0 +summary: Attack surface detector that identifies endpoints by static analysis. +description: | + Noir is your ally in the quest for digital fortification. + A cutting-edge attack surface detector, it unveils hidden endpoints through meticulous static analysis. + +grade: stable # must be 'stable' to release into candidate/stable channels +confinement: strict # use 'strict' once you have the right plugs and slots +license: MIT + +apps: + noir: + command: noir + +parts: + noir: + source: ./ + plugin: nil #crystal + #crystal-channel: latest/stable + override-build: | + curl -fsSL https://crystal-lang.org/install.sh | sudo bash + snapcraftctl pull + shards install + shards build --release + cp ./bin/noir $SNAPCRAFT_PART_INSTALL/ + snapcraftctl build + build-packages: + - git + - libssl-dev + - libxml2-dev + - libz-dev + - libyaml-dev + - libpcre2-dev + - libevent-dev + - libgmp-dev + stage-packages: + - libssl1.1 + - libxml2 + - libevent-2.1-7 diff --git a/spec/functional_test/fixtures/go_echo/server.go b/spec/functional_test/fixtures/go_echo/server.go index 2310e29a..d80707da 100644 --- a/spec/functional_test/fixtures/go_echo/server.go +++ b/spec/functional_test/fixtures/go_echo/server.go @@ -25,6 +25,16 @@ func main() { _ = c.FormValue("name") return c.String(http.StatusOK, "Hello, Pet!") }) + mygroup := e.Group("/admin") + mygroup.GET("/users", func(c echo.Context) error { + return c.String(http.StatusOK, "Hello, Pet!") + }) + + v1 := mygroup.Group("/v1") + v1.GET("/migration", func(c echo.Context) error { + return c.String(http.StatusOK, "Hello, Pet!") + }) + e.Static("/public", "public") e.Static("/public", "./public2") e.Static("/public", "/public3") diff --git a/spec/functional_test/fixtures/go_fiber/server.go b/spec/functional_test/fixtures/go_fiber/server.go index 9daa248b..57d4fa19 100644 --- a/spec/functional_test/fixtures/go_fiber/server.go +++ b/spec/functional_test/fixtures/go_fiber/server.go @@ -28,6 +28,16 @@ func main() { // Websocket logic })) + mygroup := app.Group("/admin") + mygroup.Get("/users", func(c *fiber.Ctx) error { + return c.SendString(msg) // => ✋ register + }) + + v1 := mygroup.Group("/v1") + v1.Get("/migration", func(c *fiber.Ctx) error { + 
return c.SendString(msg) // => ✋ register + }) + app.Static("/", "/public") log.Fatal(app.Listen(":3000")) diff --git a/spec/functional_test/fixtures/go_gin/server.go b/spec/functional_test/fixtures/go_gin/server.go index fb5e54f8..ad575a8e 100644 --- a/spec/functional_test/fixtures/go_gin/server.go +++ b/spec/functional_test/fixtures/go_gin/server.go @@ -27,6 +27,16 @@ func main() { c.String(http.StatusOK, "Submitted data: Username=%s, Password=%s, userAgent=%s", username, password, userAgent) }) + users := r.Group("/group") + users.GET("/users", func(c *gin.Context) { + c.JSON(http.StatusOK, "users") + }) + + v1 := users.Group("/v1") + v1.GET("/migration", func(c *gin.Context) { + c.JSON(http.StatusOK, "users") + }) + r.Static("/public", "public") r.Run() // listen and serve on 0.0.0.0:8080 (for windows "localhost:8080") } diff --git a/spec/functional_test/fixtures/java_spring/.gitignore b/spec/functional_test/fixtures/java_spring/.gitignore new file mode 100644 index 00000000..08a55c09 --- /dev/null +++ b/spec/functional_test/fixtures/java_spring/.gitignore @@ -0,0 +1 @@ +.gradle diff --git a/spec/functional_test/fixtures/java_spring/src/HttpServletRequest.java b/spec/functional_test/fixtures/java_spring/src/HttpServletRequest.java new file mode 100644 index 00000000..bf25b963 --- /dev/null +++ b/spec/functional_test/fixtures/java_spring/src/HttpServletRequest.java @@ -0,0 +1,22 @@ +package com.test; +import javax.servlet.http.HttpServletRequest; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class MyController { + + @GetMapping("/greet") + public String greet(HttpServletRequest request) { + String name = request.getParameter("name"); + if (name == null || name.isEmpty()) { + name = "World"; + } + + String header = request.getHeader("header"); + if (header == null || header.isEmpty()) { + header = "!"; + } + return "Hello, " + name + header; + } +} diff --git a/spec/functional_test/fixtures/java_spring/src/ItemController.java b/spec/functional_test/fixtures/java_spring/src/ItemController.java index 88ebfb54..0ad6063f 100644 --- a/spec/functional_test/fixtures/java_spring/src/ItemController.java +++ b/spec/functional_test/fixtures/java_spring/src/ItemController.java @@ -1,11 +1,14 @@ +package com.test; import org.springframework.web.bind.annotation.*; +import a.b.c.bind.annotation.*; +import org.springframework.c.d.e.*; @RestController @RequestMapping("/items") public class ItemController { @GetMapping("/{id}") - public Item getItem(@PathVariable Long id) { + public Item getItem(@PathVariable Long id) throws ItemNotFoundException { } @PostMapping @@ -23,4 +26,25 @@ public void deleteItem(@PathVariable Long id) { @GetMapping("/json/{id}", produces = [MediaType.APPLICATION_JSON_VALUE]) public void getItemJson(){ } +} + +class Item { + int id; + String name; + + public void setId(int _id) { + id = _id; + } + + public int getId() { + return id; + } + + public void setName(String _name) { + name = _name; + } + + public String getName() { + return name; + } } \ No newline at end of file diff --git a/spec/functional_test/fixtures/java_spring/src/RequestParam.java b/spec/functional_test/fixtures/java_spring/src/RequestParam.java new file mode 100644 index 00000000..83dfc19e --- /dev/null +++ b/spec/functional_test/fixtures/java_spring/src/RequestParam.java @@ -0,0 +1,13 @@ +package com.test; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class MyController { + + @GetMapping("/greet2") + public String greet2(@RequestParam("myname") String a, @RequestParam("b") int b, String name) { + return "Hello, " + a + b"!"; + } +} diff --git a/spec/functional_test/fixtures/kotlin_spring/.gitignore b/spec/functional_test/fixtures/kotlin_spring/.gitignore new file mode 100644 index 00000000..08a55c09 --- /dev/null +++ b/spec/functional_test/fixtures/kotlin_spring/.gitignore @@ -0,0 +1 @@ +.gradle diff --git a/spec/functional_test/testers/go_echo_spec.cr b/spec/functional_test/testers/go_echo_spec.cr index 292af232..681db7c3 100644 --- a/spec/functional_test/testers/go_echo_spec.cr +++ b/spec/functional_test/testers/go_echo_spec.cr @@ -17,9 +17,11 @@ extected_endpoints = [ Endpoint.new("/public/secret.html", "GET"), Endpoint.new("/public/mob.txt", "GET"), Endpoint.new("/public/coffee.txt", "GET"), + Endpoint.new("/admin/users", "GET"), + Endpoint.new("/admin/v1/migration", "GET"), ] FunctionalTester.new("fixtures/go_echo/", { :techs => 1, - :endpoints => 7, + :endpoints => 9, }, extected_endpoints).test_all diff --git a/spec/functional_test/testers/go_fiber_spec.cr b/spec/functional_test/testers/go_fiber_spec.cr index 6898bf02..02a26720 100644 --- a/spec/functional_test/testers/go_fiber_spec.cr +++ b/spec/functional_test/testers/go_fiber_spec.cr @@ -12,9 +12,11 @@ extected_endpoints = [ ]), Endpoint.new("/secret.html", "GET"), Endpoint.new("/ws", "GET"), + Endpoint.new("/admin/users", "GET"), + Endpoint.new("/admin/v1/migration", "GET"), ] FunctionalTester.new("fixtures/go_fiber/", { :techs => 1, - :endpoints => 4, + :endpoints => 6, }, extected_endpoints).test_all diff --git a/spec/functional_test/testers/go_gin_spec.cr b/spec/functional_test/testers/go_gin_spec.cr index 59d15d90..7487a32f 100644 --- a/spec/functional_test/testers/go_gin_spec.cr +++ b/spec/functional_test/testers/go_gin_spec.cr @@ -14,9 +14,11 @@ extected_endpoints = [ Param.new("User-Agent", "", "header"), ]), Endpoint.new("/public/secret.html", "GET"), + Endpoint.new("/group/users", "GET"), + Endpoint.new("/group/v1/migration", "GET"), ] FunctionalTester.new("fixtures/go_gin/", { :techs => 1, - :endpoints => 4, + :endpoints => 6, }, extected_endpoints).test_all diff --git a/spec/functional_test/testers/java_spring_spec.cr b/spec/functional_test/testers/java_spring_spec.cr index 0ee49789..04cfa8ed 100644 --- a/spec/functional_test/testers/java_spring_spec.cr +++ b/spec/functional_test/testers/java_spring_spec.cr @@ -16,12 +16,21 @@ extected_endpoints = [ # ItemController.java Endpoint.new("/items/{id}", "GET"), Endpoint.new("/items/json/{id}", "GET"), - Endpoint.new("/items", "POST"), - Endpoint.new("/items/update/{id}", "PUT"), + Endpoint.new("/items", "POST", [Param.new("id", "", "form"), Param.new("name", "", "form")]), + Endpoint.new("/items/update/{id}", "PUT", [Param.new("id", "", "json"), Param.new("name", "", "json")]), Endpoint.new("/items/delete/{id}", "DELETE"), + Endpoint.new("/greet", "GET", [ + Param.new("name", "", "query"), + Param.new("header", "", "header"), + ]), + Endpoint.new("/greet2", "GET", [ + Param.new("myname", "", "query"), + Param.new("b", "", "query"), + Param.new("name", "", "query"), + ]), ] FunctionalTester.new("fixtures/java_spring/", { :techs => 1, - :endpoints => 15, + :endpoints => 17, }, extected_endpoints).test_all diff --git a/spec/unit_test/analyzer/analyzer_go_echo_spec.cr 
b/spec/unit_test/analyzer/analyzer_go_echo_spec.cr index 4a30860e..9fb0bd51 100644 --- a/spec/unit_test/analyzer/analyzer_go_echo_spec.cr +++ b/spec/unit_test/analyzer/analyzer_go_echo_spec.cr @@ -4,33 +4,34 @@ require "../../../src/options" describe "analyzer_go_echo" do options = default_options() instance = AnalyzerGoEcho.new(options) + groups = [] of Hash(String, String) it "instance.get_route_path - GET" do - instance.get_route_path("e.GET(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.GET(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - POST" do - instance.get_route_path("e.POST(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.POST(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - PUT" do - instance.get_route_path("e.PUT(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.PUT(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - DELETE" do - instance.get_route_path("e.DELETE(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.DELETE(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - PATCH" do - instance.get_route_path("e.PATCH(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.PATCH(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - HEAD" do - instance.get_route_path("e.HEAD(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.HEAD(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - OPTIONS" do - instance.get_route_path("e.OPTIONS(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("e.OPTIONS(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - customContext1" do - instance.get_route_path("customEnv.OPTIONS(\"/\", func(c echo.Context) error {").should eq("/") + instance.get_route_path("customEnv.OPTIONS(\"/\", func(c echo.Context) error {", groups).should eq("/") end it "instance.get_route_path - customContext2" do - instance.get_route_path("customEnv.OPTIONS(\"/\", func(myContext echo.Context) error {").should eq("/") + instance.get_route_path("customEnv.OPTIONS(\"/\", func(myContext echo.Context) error {", groups).should eq("/") end it "instance.get_static_path - Static" do diff --git a/spec/unit_test/analyzer/analyzer_spring_spec.cr b/spec/unit_test/analyzer/analyzer_kotlin_spring_spec.cr similarity index 95% rename from spec/unit_test/analyzer/analyzer_spring_spec.cr rename to spec/unit_test/analyzer/analyzer_kotlin_spring_spec.cr index ac21d265..a84241b1 100644 --- a/spec/unit_test/analyzer/analyzer_spring_spec.cr +++ b/spec/unit_test/analyzer/analyzer_kotlin_spring_spec.cr @@ -1,9 +1,9 @@ -require "../../../src/analyzer/analyzers/analyzer_spring.cr" +require "../../../src/analyzer/analyzers/analyzer_kotlin_spring.cr" require "../../../src/options" describe "mapping_to_path" do options = default_options() - instance = AnalyzerSpring.new(options) + instance = AnalyzerKotlinSpring.new(options) it "mapping_to_path - GET" do instance.mapping_to_path("@GetMapping(\"/abcd\")").should eq(["/abcd"]) @@ -72,7 +72,7 @@ end describe "utils func" do options = default_options() - instance = AnalyzerSpring.new(options) + instance = AnalyzerKotlinSpring.new(options) it "is_bracket - 
true" do instance.is_bracket("{abcd=1234}").should eq(true) diff --git a/spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr b/spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr new file mode 100644 index 00000000..76746536 --- /dev/null +++ b/spec/unit_test/detector/detect_kotlin_spring_spe_spec.cr @@ -0,0 +1,10 @@ +require "../../../src/detector/detectors/*" + +describe "Detect Java Spring" do + options = default_options() + instance = DetectorKotlinSpring.new options + + it "build.gradle.kts" do + instance.detect("build.gradle.kts", "'org.springframework.boot' version '2.6.2'").should eq(true) + end +end diff --git a/spec/unit_test/minilexer/golang_spec.cr b/spec/unit_test/minilexer/golang_spec.cr new file mode 100644 index 00000000..95f23086 --- /dev/null +++ b/spec/unit_test/minilexer/golang_spec.cr @@ -0,0 +1,51 @@ +require "../../../src/minilexers/golang" + +describe "initialize" do + lexer = GolangLexer.new + + it "init" do + lexer.class.should eq(GolangLexer) + end + + it "default mode" do + lexer.mode.should eq(:normal) + end + + it "persistent mode" do + lexer.mode = :persistent + lexer.mode.should eq(:persistent) + end +end + +describe "tokenize" do + lexer = GolangLexer.new + + it "simple" do + output = lexer.tokenize(" + users := rg.Group(\"/users\") + + users.GET(\"/\", func(c *gin.Context) { + c.JSON(http.StatusOK, \"users\") + }) + ") + output[0].type.should eq(:newline) + output[1].type.should eq(:code) + output[2].type.should eq(:assign) + output[3].type.should eq(:code) + output[4].type.should eq(:string) + output[4].value.should eq("/users") + output[5].type.should eq(:code) + output[6].type.should eq(:newline) + output[7].type.should eq(:newline) + output[8].type.should eq(:code) + output[9].type.should eq(:string) + output[10].type.should eq(:code) + output[11].type.should eq(:newline) + output[12].type.should eq(:code) + output[13].type.should eq(:string) + output[14].type.should eq(:code) + output[15].type.should eq(:newline) + output[16].type.should eq(:code) + output[17].type.should eq(:newline) + end +end diff --git a/spec/unit_test/models/deliver_spec.cr b/spec/unit_test/models/deliver_spec.cr index d5dd0b1a..c833c869 100644 --- a/spec/unit_test/models/deliver_spec.cr +++ b/spec/unit_test/models/deliver_spec.cr @@ -18,6 +18,12 @@ describe "Initialize" do object.headers["X-API-Key"].should eq("abcdssss") end + it "Deliver with headers (bearer case)" do + options[:send_with_headers] = "Authorization: Bearer gAAAAABl3qwaQqol243Np" + object = Deliver.new options + object.headers["Authorization"].should eq("Bearer gAAAAABl3qwaQqol243Np") + end + it "Deliver with matchers" do options[:use_matchers] = "/admin" object = Deliver.new options diff --git a/src/analyzer/analyzer.cr b/src/analyzer/analyzer.cr index 155531fb..09a82483 100644 --- a/src/analyzer/analyzer.cr +++ b/src/analyzer/analyzer.cr @@ -15,9 +15,9 @@ def initialize_analyzers(logger : NoirLogger) analyzers["go_gin"] = ->analyzer_go_gin(Hash(Symbol, String)) analyzers["java_armeria"] = ->analyzer_armeria(Hash(Symbol, String)) analyzers["java_jsp"] = ->analyzer_jsp(Hash(Symbol, String)) - analyzers["java_spring"] = ->analyzer_spring(Hash(Symbol, String)) + analyzers["java_spring"] = ->analyzer_java_spring(Hash(Symbol, String)) analyzers["js_express"] = ->analyzer_express(Hash(Symbol, String)) - analyzers["kotlin_spring"] = ->analyzer_spring(Hash(Symbol, String)) + analyzers["kotlin_spring"] = ->analyzer_kotlin_spring(Hash(Symbol, String)) analyzers["oas2"] = ->analyzer_oas2(Hash(Symbol, String)) 
analyzers["oas3"] = ->analyzer_oas3(Hash(Symbol, String)) analyzers["php_pure"] = ->analyzer_php_pure(Hash(Symbol, String)) @@ -51,10 +51,6 @@ def analysis_endpoints(options : Hash(Symbol, String), techs, logger : NoirLogge logger.system "Analysis Started" logger.info_sub "Code Analyzer: #{techs.size} in use" - if (techs.includes? "java_spring") && (techs.includes? "kotlin_spring") - techs.delete("kotlin_spring") - end - techs.each do |tech| if analyzer.has_key?(tech) if NoirTechs.similar_to_tech(options[:exclude_techs]).includes?(tech) diff --git a/src/analyzer/analyzers/analyzer_go_echo.cr b/src/analyzer/analyzers/analyzer_go_echo.cr index fe293dcb..23e8389c 100644 --- a/src/analyzer/analyzers/analyzer_go_echo.cr +++ b/src/analyzer/analyzers/analyzer_go_echo.cr @@ -4,6 +4,8 @@ class AnalyzerGoEcho < Analyzer def analyze # Source Analysis public_dirs = [] of (Hash(String, String)) + groups = [] of Hash(String, String) + begin Dir.glob("#{base_path}/**/*") do |path| next if File.directory?(path) @@ -12,8 +14,41 @@ class AnalyzerGoEcho < Analyzer last_endpoint = Endpoint.new("", "") file.each_line.with_index do |line, index| details = Details.new(PathInfo.new(path, index + 1)) + lexer = GolangLexer.new + + if line.includes?(".Group(") + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + group_name = "" + group_path = "" + map.each do |token| + if token.type == :assign + group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "") + end + + if token.type == :string + group_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? key + group_path = value + group_path + end + end + end + end + + before = token + end + + if group_name.size > 0 && group_path.size > 0 + groups << { + group_name => group_path, + } + end + end + if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(") - get_route_path(line).tap do |route_path| + get_route_path(line, groups).tap do |route_path| if route_path.size > 0 new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details) result << new_endpoint @@ -126,14 +161,25 @@ class AnalyzerGoEcho < Analyzer } end - def get_route_path(line : String) : String - first = line.strip.split("(") - if first.size > 1 - second = first[1].split(",") - if second.size > 1 - route_path = second[0].gsub("\"", "") - return route_path + def get_route_path(line : String, groups : Array(Hash(String, String))) : String + lexer = GolangLexer.new + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + map.each do |token| + if token.type == :string + final_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? 
key + final_path = value + final_path + end + end + end + + return final_path end + + before = token end "" diff --git a/src/analyzer/analyzers/analyzer_go_fiber.cr b/src/analyzer/analyzers/analyzer_go_fiber.cr index f133ef2f..d98757cd 100644 --- a/src/analyzer/analyzers/analyzer_go_fiber.cr +++ b/src/analyzer/analyzers/analyzer_go_fiber.cr @@ -4,6 +4,8 @@ class AnalyzerGoFiber < Analyzer def analyze # Source Analysis public_dirs = [] of (Hash(String, String)) + groups = [] of Hash(String, String) + begin Dir.glob("#{base_path}/**/*") do |path| next if File.directory?(path) @@ -12,8 +14,41 @@ class AnalyzerGoFiber < Analyzer last_endpoint = Endpoint.new("", "") file.each_line.with_index do |line, index| details = Details.new(PathInfo.new(path, index + 1)) + lexer = GolangLexer.new + + if line.includes?(".Group(") + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + group_name = "" + group_path = "" + map.each do |token| + if token.type == :assign + group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "") + end + + if token.type == :string + group_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? key + group_path = value + group_path + end + end + end + end + + before = token + end + + if group_name.size > 0 && group_path.size > 0 + groups << { + group_name => group_path, + } + end + end + if line.includes?(".Get(") || line.includes?(".Post(") || line.includes?(".Put(") || line.includes?(".Delete(") - get_route_path(line).tap do |route_path| + get_route_path(line, groups).tap do |route_path| if route_path.size > 0 new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0].upcase, details) if line.includes?("websocket.New(") @@ -137,14 +172,25 @@ class AnalyzerGoFiber < Analyzer } end - def get_route_path(line : String) : String - first = line.strip.split("(") - if first.size > 1 - second = first[1].split(",") - if second.size > 1 - route_path = second[0].gsub("\"", "") - return route_path + def get_route_path(line : String, groups : Array(Hash(String, String))) : String + lexer = GolangLexer.new + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + map.each do |token| + if token.type == :string + final_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? 
key + final_path = value + final_path + end + end + end + + return final_path end + + before = token end "" diff --git a/src/analyzer/analyzers/analyzer_go_gin.cr b/src/analyzer/analyzers/analyzer_go_gin.cr index c462a95a..4501876b 100644 --- a/src/analyzer/analyzers/analyzer_go_gin.cr +++ b/src/analyzer/analyzers/analyzer_go_gin.cr @@ -1,9 +1,11 @@ require "../../models/analyzer" +require "../../minilexers/golang" class AnalyzerGoGin < Analyzer def analyze # Source Analysis public_dirs = [] of (Hash(String, String)) + groups = [] of Hash(String, String) begin Dir.glob("#{base_path}/**/*") do |path| next if File.directory?(path) @@ -12,8 +14,41 @@ class AnalyzerGoGin < Analyzer last_endpoint = Endpoint.new("", "") file.each_line.with_index do |line, index| details = Details.new(PathInfo.new(path, index + 1)) + lexer = GolangLexer.new + + if line.includes?(".Group(") + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + group_name = "" + group_path = "" + map.each do |token| + if token.type == :assign + group_name = before.value.to_s.gsub(":", "").gsub(/\s/, "") + end + + if token.type == :string + group_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? key + group_path = value + group_path + end + end + end + end + + before = token + end + + if group_name.size > 0 && group_path.size > 0 + groups << { + group_name => group_path, + } + end + end + if line.includes?(".GET(") || line.includes?(".POST(") || line.includes?(".PUT(") || line.includes?(".DELETE(") - get_route_path(line).tap do |route_path| + get_route_path(line, groups).tap do |route_path| if route_path.size > 0 new_endpoint = Endpoint.new("#{route_path}", line.split(".")[1].split("(")[0], details) result << new_endpoint @@ -128,14 +163,25 @@ class AnalyzerGoGin < Analyzer } end - def get_route_path(line : String) : String - first = line.strip.split("(") - if first.size > 1 - second = first[1].split(",") - if second.size > 1 - route_path = second[0].gsub("\"", "") - return route_path + def get_route_path(line : String, groups : Array(Hash(String, String))) : String + lexer = GolangLexer.new + map = lexer.tokenize(line) + before = Token.new(:unknown, "", 0) + map.each do |token| + if token.type == :string + final_path = token.value.to_s + groups.each do |group| + group.each do |key, value| + if before.value.to_s.includes? key + final_path = value + final_path + end + end + end + + return final_path end + + before = token end "" diff --git a/src/analyzer/analyzers/analyzer_java_spring.cr b/src/analyzer/analyzers/analyzer_java_spring.cr new file mode 100644 index 00000000..c1e2d6df --- /dev/null +++ b/src/analyzer/analyzers/analyzer_java_spring.cr @@ -0,0 +1,376 @@ +require "../../models/analyzer" +require "../../minilexers/java" +require "../../miniparsers/java" + +class AnalyzerJavaSpring < Analyzer + REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m + REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/ + + def analyze + parser_map = Hash(String, JavaParser).new + package_map = Hash(String, Hash(String, ClassModel)).new + Dir.glob("#{@base_path}/**/*") do |path| + next if File.directory?(path) + url = "" + if File.exists?(path) && path.ends_with?(".java") + content = File.read(path, encoding: "utf-8", invalid: :skip) + + # Spring MVC Router (Controller) + spring_web_bind_package = "org.springframework.web.bind.annotation." 
+ has_spring_web_bind_package_been_import = content.includes?(spring_web_bind_package) + if has_spring_web_bind_package_been_import + if parser_map.has_key?(path) + parser = parser_map[path] + tokens = parser.tokens + else + parser = get_parser(Path.new(path), content) + tokens = parser.tokens + parser_map[path] = parser + end + + package_name = parser.get_package_name(tokens) + next if package_name == "" + root_source_directory : Path = parser.get_root_source_directory(path, package_name) + package_directory = Path.new(path).dirname + + # Import packages + import_map = Hash(String, ClassModel).new + parser.import_statements.each do |import_statement| + import_path = import_statement.gsub(".", "/") + if import_path.ends_with?("/*") + import_directory = root_source_directory.join(import_path[..-3]) + if Dir.exists?(import_directory) + Dir.glob("#{import_directory}/*.java") do |_path| + next if path == _path + if !parser_map.has_key?(_path) + _parser = get_parser(Path.new(_path)) + else + _parser = parser_map[_path] + end + + _parser.classes.each do |package_class| + import_map[package_class.name] = package_class + end + end + end + else + source_path = root_source_directory.join(import_path + ".java") + next if source_path.dirname == package_directory || !File.exists?(source_path) + if !parser_map.has_key?(source_path.to_s) + _content = File.read(source_path.to_s, encoding: "utf-8", invalid: :skip) + _parser = get_parser(source_path, _content) + parser_map[source_path.to_s] = _parser + _parser.classes.each do |package_class| + import_map[package_class.name] = package_class + end + else + _parser = parser_map[source_path.to_s] + _parser.classes.each do |package_class| + import_map[package_class.name] = package_class + end + end + end + end + + # Packages in same directory + package_class_map = package_map[package_directory]? + if package_class_map.nil? + package_class_map = Hash(String, ClassModel).new + Dir.glob("#{package_directory}/*.java") do |_path| + next if path == _path + if !parser_map.has_key?(_path) + _parser = get_parser(Path.new(_path)) + else + _parser = parser_map[_path] + end + + _parser.classes.each do |package_class| + package_class_map[package_class.name] = package_class + end + + parser.classes.each do |package_class| + package_class_map[package_class.name] = package_class + end + + package_map[package_directory] = package_class_map + end + end + + class_map = package_class_map.merge(import_map) + parser.classes.each do |class_model| + class_annotation = class_model.annotations["RequestMapping"]? + if !class_annotation.nil? + next if class_annotation.params.size == 0 + class_path_token = class_annotation.params[0][-1] + if class_path_token.type == :STRING_LITERAL + url = class_path_token.value[1..-2] + if url.ends_with? "*" + url = url[0..-2] + end + end + end + + class_model.methods.values.each do |method| + method.annotations.values.each do |method_annotation| # multiline annotations + url_paths = Array(String).new + + # Spring MVC Decorator + request_method = nil + if method_annotation.name.ends_with? 
"Mapping" + parameter_format = nil + annotation_parameters = method_annotation.params + annotation_parameters.each do |annotation_parameter_tokens| + if annotation_parameter_tokens.size > 2 + annotation_parameter_key = annotation_parameter_tokens[0].value + annotation_parameter_value = annotation_parameter_tokens[-1].value + if annotation_parameter_key == "method" + request_method = annotation_parameter_value + elsif annotation_parameter_key == "consumes" + if annotation_parameter_value.ends_with? "APPLICATION_FORM_URLENCODED_VALUE" + parameter_format = "form" + elsif annotation_parameter_value.ends_with? "APPLICATION_JSON_VALUE" + parameter_format = "json" + end + end + end + end + + if method_annotation.name == "RequestMapping" + url_paths = [""] + if method_annotation.params.size > 0 + url_paths = get_mapping_path(parser, tokens, method_annotation.params) + end + + line = method_annotation.tokens[0].line + details = Details.new(PathInfo.new(path, line)) + + if request_method.nil? + # If the method is not annotated with @RequestMapping, then 5 methods are allowed + ["GET", "POST", "PUT", "DELETE", "PATCH"].each do |_request_method| + parameters = get_endpoint_parameters(parser, _request_method, method, parameter_format, class_map) + url_paths.each do |url_path| + @result << Endpoint.new("#{url}#{url_path}", _request_method, parameters, details) + end + end + else + url_paths.each do |url_path| + parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map) + @result << Endpoint.new("#{url}#{url_path}", request_method, parameters, details) + end + end + break + else + mapping_annotations = ["GetMapping", "PostMapping", "PutMapping", "DeleteMapping", "PatchMapping"] + mapping_index = mapping_annotations.index(method_annotation.name) + if !mapping_index.nil? + line = method_annotation.tokens[0].line + request_method = mapping_annotations[mapping_index][0..-8].upcase + if parameter_format.nil? && request_method == "POST" + parameter_format = "form" + end + parameters = get_endpoint_parameters(parser, request_method, method, parameter_format, class_map) + + url_paths = [""] + if method_annotation.params.size > 0 + url_paths = get_mapping_path(parser, tokens, method_annotation.params) + end + + details = Details.new(PathInfo.new(path, line)) + url_paths.each do |url_path| + @result << Endpoint.new("#{url}#{url_path}", request_method, parameters, details) + end + break + end + end + end + end + end + end + else + # Reactive Router + content.scan(REGEX_ROUTER_CODE_BLOCK) do |route_code| + method_code = route_code[0] + method_code.scan(REGEX_ROUTE_CODE_LINE) do |match| + next if match.size != 4 + method = match[2] + endpoint = match[3].gsub(/\n/, "") + details = Details.new(PathInfo.new(path)) + @result << Endpoint.new("#{url}#{endpoint}", method, details) + end + end + end + end + end + Fiber.yield + + @result + end + + def get_mapping_path(parser : JavaParser, tokens : Array(Token), method_params : Array(Array(Token))) + # 1. Search for the value of the @xxxxxMapping annotation + # 2. If the value is a string literal, return it + # 3. If the value is an array, return each element + # 4. 
Other case return empty array + url_paths = Array(String).new + if method_params[0].size != 0 + path_argument_index = 0 + method_params.each_with_index do |mapping_parameter, index| + if mapping_parameter[0].type == :IDENTIFIER && mapping_parameter[0].value == "value" + path_argument_index = index + end + end + + path_parameter_tokens = method_params[path_argument_index] + if path_parameter_tokens[-1].type == :STRING_LITERAL + # @GetMapping("/abc") or @GetMapping(value = "/abc") + url_paths << path_parameter_tokens[-1].value[1..-2] + elsif path_parameter_tokens[-1].type == :RBRACE + # @GetMapping({"/abc", "/def"}) or @GetMapping(value = {"/abc", "/def"}) + i = path_parameter_tokens.size - 2 + while i > 0 + parameter_token = path_parameter_tokens[i] + if parameter_token.type == :LBRACE + break + elsif parameter_token.type == :COMMA + i -= 1 + next + elsif parameter_token.type == :STRING_LITERAL + url_paths << parameter_token.value[1..-2] + else + break + end + + i -= 1 + end + end + end + + url_paths + end + + def get_endpoint_parameters(parser : JavaParser, request_method : String, method : MethodModel, parameter_format : String | Nil, package_class_map : Hash(String, ClassModel)) : Array(Param) + endpoint_parameters = Array(Param).new + method.params.each do |method_param_tokens| + next if method_param_tokens.size == 0 + if method_param_tokens[-1].type == :IDENTIFIER + if method_param_tokens[0].type == :AT + if method_param_tokens[1].value == "PathVariable" + next + elsif method_param_tokens[1].value == "RequestBody" + if parameter_format.nil? + parameter_format = "json" + end + elsif method_param_tokens[1].value == "RequestParam" + parameter_format = "query" + elsif method_param_tokens[1].value == "RequestHeader" + parameter_format = "header" + end + end + + if parameter_format.nil? + parameter_format = "query" + end + + default_value = nil + # @RequestParam(@RequestParam long time) -> time + parameter_name = method_param_tokens[-1].value + if method_param_tokens[-1].type != IDENTIFIER && method_param_tokens.size > 2 + if method_param_tokens[2].type == :LPAREN + request_parameters = parser.parse_formal_parameters(method_param_tokens, 2) + request_parameters.each do |request_parameter_tokens| + if request_parameter_tokens.size > 2 + request_param_name = request_parameter_tokens[0].value + request_param_value = request_parameter_tokens[-1].value + + if request_param_name == "value" + # abc(@RequestParam(value = "name") int xx) -> name + parameter_name = request_param_value[1..-2] + elsif request_param_name == "defaultValue" + # abc(@RequestParam(defaultValue = "defaultValue") String xx) -> defaultValue + default_value = request_param_value[1..-2] + end + end + end + if method_param_tokens[3].type == :STRING_LITERAL + # abc(@RequestParam("name") int xx) -> name + parameter_name_token = method_param_tokens[3] + parameter_name = parameter_name_token.value[1..-2] + end + end + end + + argument_name = method_param_tokens[-1].value + parameter_type = method_param_tokens[-2].value + if ["long", "int", "integer", "char", "boolean", "string", "multipartfile"].index(parameter_type.downcase) + param_default_value = default_value.nil? ? 
"" : default_value + endpoint_parameters << Param.new(parameter_name, param_default_value, parameter_format) + elsif parameter_type == "HttpServletRequest" + i = 0 + while i < method.body.size - 6 + if [:TAB, :WHITESPACE, :NEWLINE].index(method.body[i].type) + i += 1 + next + end + + next if method.body[i].type == :WHITESPACE + next if method.body[i].type == :NEWLINE + + if method.body[i].type == :IDENTIFIER && method.body[i].value == argument_name + if method.body[i + 1].type == :DOT + if method.body[i + 2].type == :IDENTIFIER && method.body[i + 3].type == :LPAREN + servlet_request_method_name = method.body[i + 2].value + if method.body[i + 4].type == :STRING_LITERAL + parameter_name = method.body[i + 4].value[1..-2] + if servlet_request_method_name == "getParameter" + unless endpoint_parameters.any? { |param| param.name == parameter_name } + endpoint_parameters << Param.new(parameter_name, "", parameter_format) + end + i += 6 + next + elsif servlet_request_method_name == "getHeader" + unless endpoint_parameters.any? { |param| param.name == parameter_name } + endpoint_parameters << Param.new(parameter_name, "", "header") + end + i += 6 + next + end + end + end + end + end + + i += 1 + end + else + # custom class" + if package_class_map.has_key?(parameter_type) + package_class = package_class_map[parameter_type] + package_class.fields.values.each do |field| + if field.access_modifier == "public" || field.has_setter? + param_default_value = default_value.nil? ? field.init_value : default_value + endpoint_parameters << Param.new(field.name, param_default_value, parameter_format) + end + end + end + end + end + end + + endpoint_parameters + end +end + +def get_parser(path : Path, content : String = "") + if content == "" + content = File.read(path, encoding: "utf-8", invalid: :skip) + end + lexer = JavaLexer.new + tokens = lexer.tokenize(content) + parser = JavaParser.new(path.to_s, tokens) + parser +end + +def analyzer_java_spring(options : Hash(Symbol, String)) + instance = AnalyzerJavaSpring.new(options) + instance.analyze +end diff --git a/src/analyzer/analyzers/analyzer_spring.cr b/src/analyzer/analyzers/analyzer_kotlin_spring.cr similarity index 77% rename from src/analyzer/analyzers/analyzer_spring.cr rename to src/analyzer/analyzers/analyzer_kotlin_spring.cr index 4e53ad09..32677b0a 100644 --- a/src/analyzer/analyzers/analyzer_spring.cr +++ b/src/analyzer/analyzers/analyzer_kotlin_spring.cr @@ -1,6 +1,6 @@ require "../../models/analyzer" -class AnalyzerSpring < Analyzer +class AnalyzerKotlinSpring < Analyzer REGEX_CLASS_DEFINITION = /^(((public|private|protected|default)\s+)|^)class\s+/ REGEX_ROUTER_CODE_BLOCK = /route\(\)?.*?\);/m REGEX_ROUTE_CODE_LINE = /((?:andRoute|route)\s*\(|\.)\s*(GET|POST|DELETE|PUT)\(\s*"([^"]*)/ @@ -12,8 +12,9 @@ class AnalyzerSpring < Analyzer next if File.directory?(path) url = "" - if File.exists?(path) && (path.ends_with?(".java") || path.ends_with?(".kt")) + if File.exists?(path) && path.ends_with?(".kt") content = File.read(path, encoding: "utf-8", invalid: :skip) + last_endpoint = Endpoint.new("", "") # Spring MVC has_class_been_imported = false @@ -41,7 +42,9 @@ class AnalyzerSpring < Analyzer if line.includes? 
"RequestMethod" define_requestmapping_handlers(["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS", "TRACE"]) else - @result << Endpoint.new("#{url}#{mapping_path}", "GET", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "GET", details) + last_endpoint = endpoint + @result << last_endpoint end end end @@ -50,31 +53,49 @@ class AnalyzerSpring < Analyzer if line.includes? "PostMapping" mapping_paths = mapping_to_path(line) mapping_paths.each do |mapping_path| - @result << Endpoint.new("#{url}#{mapping_path}", "POST", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "POST", details) + last_endpoint = endpoint + @result << last_endpoint end end if line.includes? "PutMapping" mapping_paths = mapping_to_path(line) mapping_paths.each do |mapping_path| - @result << Endpoint.new("#{url}#{mapping_path}", "PUT", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "PUT", details) + last_endpoint = endpoint + @result << last_endpoint end end if line.includes? "DeleteMapping" mapping_paths = mapping_to_path(line) mapping_paths.each do |mapping_path| - @result << Endpoint.new("#{url}#{mapping_path}", "DELETE", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "DELETE", details) + last_endpoint = endpoint + @result << last_endpoint end end if line.includes? "PatchMapping" mapping_paths = mapping_to_path(line) mapping_paths.each do |mapping_path| - @result << Endpoint.new("#{url}#{mapping_path}", "PATCH", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "PATCH", details) + last_endpoint = endpoint + @result << last_endpoint end end if line.includes? "GetMapping" mapping_paths = mapping_to_path(line) mapping_paths.each do |mapping_path| - @result << Endpoint.new("#{url}#{mapping_path}", "GET", details) + endpoint = Endpoint.new("#{url}#{mapping_path}", "GET", details) + last_endpoint = endpoint + @result << last_endpoint + end + end + + # Param Analysis + param = line_to_param(line) + if param.name != "" + if last_endpoint.method != "" + last_endpoint.push_param(param) end end end @@ -100,6 +121,20 @@ class AnalyzerSpring < Analyzer @result end + def line_to_param(line : String) : Param + if line.includes? "getParameter(\"" + param = line.split("getParameter(\"")[1].split("\"")[0] + return Param.new(param, "", "query") + end + + if line.includes? "@RequestParam(\"" + param = line.split("@RequestParam(\"")[1].split("\"")[0] + return Param.new(param, "", "query") + end + + Param.new("", "", "") + end + def mapping_to_path(line : String) unless line.includes? "(" # no path @@ -207,7 +242,7 @@ class AnalyzerSpring < Analyzer end end -def analyzer_spring(options : Hash(Symbol, String)) - instance = AnalyzerSpring.new(options) +def analyzer_kotlin_spring(options : Hash(Symbol, String)) + instance = AnalyzerKotlinSpring.new(options) instance.analyze end diff --git a/src/detector/detectors/java_spring.cr b/src/detector/detectors/java_spring.cr index 766e635b..49113004 100644 --- a/src/detector/detectors/java_spring.cr +++ b/src/detector/detectors/java_spring.cr @@ -2,13 +2,13 @@ require "../../models/detector" class DetectorJavaSpring < Detector def detect(filename : String, file_contents : String) : Bool - if ( - (filename.includes? "pom.xml") || (filename.ends_with? "build.gradle") - ) && (file_contents.includes? "org.springframework") - true - else - false + if (filename.ends_with? "build.gradle") && (file_contents.includes? "org.springframework") + return true + elsif (filename.ends_with? "pom.xml") && (file_contents.includes? 
"org.springframework") + return true end + + false end def set_name diff --git a/src/detector/detectors/kotlin_spring.cr b/src/detector/detectors/kotlin_spring.cr index 3ea4cffc..52f32abe 100644 --- a/src/detector/detectors/kotlin_spring.cr +++ b/src/detector/detectors/kotlin_spring.cr @@ -3,10 +3,12 @@ require "../../models/detector" class DetectorKotlinSpring < Detector def detect(filename : String, file_contents : String) : Bool if (filename.ends_with? "build.gradle.kts") && (file_contents.includes? "org.springframework") - true - else - false + return true + elsif (filename.ends_with? "pom.xml") && (file_contents.includes? "org.springframework") && (file_contents.includes? "org.jetbrains.kotlin") + return true end + + false end def set_name diff --git a/src/minilexers/golang.cr b/src/minilexers/golang.cr new file mode 100644 index 00000000..7e652825 --- /dev/null +++ b/src/minilexers/golang.cr @@ -0,0 +1,54 @@ +require "../models/minilexer/*" + +class GolangLexer < MiniLexer + @in_double_quote = false + @in_single_quote = false + @buffer = "" + @identifier = :code + + def initialize + super + end + + def tokenize_logic(input : String) : Array(Token) + results = [] of Token + + input.each_char_with_index do |char, index| + case char.bytes[0] + when 34 # double quote + if @in_double_quote + results << Token.new(:string, @buffer, index) + @in_double_quote = false + @identifier = :code + @buffer = "" + else + results << Token.new(@identifier, @buffer, index) + @in_double_quote = true + @identifier = :string + @buffer = "" + end + when 61 # assign + if @buffer != "" && !@in_double_quote + results << Token.new(@identifier, @buffer, index) + results << Token.new(:assign, "=", index) + @buffer = "" + end + when 10 # newline + if @buffer != "" + results << Token.new(@identifier, @buffer, index) + end + results << Token.new(:newline, "\n", index) + @buffer = "" + @identifier = :code + else + @buffer += char + end + + if index == input.size - 1 + results << Token.new(@identifier, @buffer, index) + end + end + + results + end +end diff --git a/src/minilexers/java.cr b/src/minilexers/java.cr new file mode 100644 index 00000000..e9926f17 --- /dev/null +++ b/src/minilexers/java.cr @@ -0,0 +1,377 @@ +require "../models/minilexer/*" + +# Keywords +ABSTRACT = "abstract" +ASSERT = "assert" +BOOLEAN = "boolean" +BREAK = "break" +BYTE = "byte" +CASE = "case" +CATCH = "catch" +CHAR = "char" +CLASS = "class" +CONST = "const" +CONTINUE = "continue" +DEFAULT = "default" +DO = "do" +DOUBLE = "double" +ELSE = "else" +ENUM = "enum" +EXTENDS = "extends" +FINAL = "final" +FINALLY = "finally" +FLOAT = "float" +FOR = "for" +IF = "if" +GOTO = "goto" +IMPLEMENTS = "implements" +IMPORT = "import" +INSTANCEOF = "instanceof" +INT = "int" +INTERFACE = "interface" +LONG = "long" +NATIVE = "native" +NEW = "new" +PACKAGE = "package" +PRIVATE = "private" +PROTECTED = "protected" +PUBLIC = "public" +RETURN = "return" +SHORT = "short" +STATIC = "static" +STRICTFP = "strictfp" +SUPER = "super" +SWITCH = "switch" +SYNCHRONIZED = "synchronized" +THIS = "this" +THROW = "throw" +THROWS = "throws" +TRANSIENT = "transient" +TRY = "try" +VOID = "void" +VOLATILE = "volatile" +WHILE = "while" + +# Module related keywords +MODULE = "module" +OPEN = "open" +REQUIRES = "requires" +EXPORTS = "exports" +OPENS = "opens" +TO = "to" +USES = "uses" +PROVIDES = "provides" +WITH = "with" +TRANSITIVE = "transitive" + +# Local Variable Type Inference +VAR = "var" # reserved type name + +# Switch Expressions +YIELD = "yield" # reserved type name from 
Java 14 + +# Records +RECORD = "record" + +# Sealed Classes +SEALED = "sealed" +PERMITS = "permits" +NON_SEALED = "non-sealed" + +# Literals +DECIMAL_LITERAL = /0|[1-9]([_\d]*\d)?[lL]?/ +HEX_LITERAL = /0[xX][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?[lL]?/ +OCT_LITERAL = /0[0-7]([0-7_]*[0-7])?[lL]?/ +BINARY_LITERAL = /0[bB][01]([01_]*[01])?[lL]?/ +FLOAT_LITERAL = /((\d+\.\d*|\.\d+)([eE][+-]?\d+)?|[+-]?\d+[eE][+-]?\d+)[fFdD]?/ +HEX_FLOAT_LITERAL = /0[xX]([0-9a-fA-F]+(\.[0-9a-fA-F]*)?|\.[0-9a-fA-F]+)[pP][+-]?\d+[fFdD]?/ +BOOL_LITERAL = /true|false/ +CHAR_LITERAL = /'([^'\\\r\n]|\\['"\\bfnrt]|\\u[0-9a-fA-F]{4}|\\[^'"\r\n])*'/ +STRING_LITERAL = /"([^"\\\r\n]|\\["\\bfnrt]|\\u[0-9a-fA-F]{4}|\\[^"\r\n])*"/ +TEXT_BLOCK = /"""\s*(.|\\["\\bfnrt])*?\s*"""/ +NULL_LITERAL = "null" + +# Separators +LPAREN = "(" +RPAREN = ")" +LBRACE = "{" +RBRACE = "}" +LBRACK = "[" +RBRACK = "]" +SEMI = ";" +COMMA = "," +DOT = "." + +# Operators +ASSIGN = "=" +GT = ">" +LT = "<" +BANG = "!" +TILDE = "~" +QUESTION = "?" +COLON = ":" +EQUAL = "==" +LE = "<=" +GE = ">=" +NOTEQUAL = "!=" +AND = "&&" +OR = "||" +INC = "++" +DEC = "--" +ADD = "+" +SUB = "-" +MUL = "*" +DIV = "/" +BITAND = "&" +BITOR = "|" +CARET = "^" +MOD = "%" + +ADD_ASSIGN = "+=" +SUB_ASSIGN = "-=" +MUL_ASSIGN = "*=" +DIV_ASSIGN = "/=" +AND_ASSIGN = "&=" +OR_ASSIGN = "|=" +XOR_ASSIGN = "^=" +MOD_ASSIGN = "%=" +LSHIFT_ASSIGN = "<<=" +RSHIFT_ASSIGN = ">>=" +URSHIFT_ASSIGN = ">>>=" + +# Java 8 tokens +ARROW = "->" +COLONCOLON = "::" + +# Additional symbols not defined in the lexical specification +AT = "@" +ELLIPSIS = "..." + +# Whitespace and comments +WS = /[ \t\r\n\x0C]+/ +COMMENT = /\/\*.*?\*\//m +LINE_COMMENT = /\/\/[^\r\n]*/ + +# Identifiers +IDENTIFIER = /[a-zA-Z$_][a-zA-Z\d$_]*/ + +# Fragment rules +ExponentPart = /[eE][+-]?\d+/ +EscapeSequence = /\\(?:u005c)?[btnfr"'\\]|\\u(?:[0-3]?[0-7])?[0-7]|\\u[0-9a-fA-F]{4}/ +HexDigits = /[0-9a-fA-F]([_0-9a-fA-F]*[0-9a-fA-F])?/ +HexDigit = /[0-9a-fA-F]/ +Digits = /\d([_\d]*\d)?/ +LetterOrDigit = /[a-zA-Z\d$_]/ +Letter = /[a-zA-Z$_]|[^[:ascii:]]/ + +class JavaLexer < MiniLexer + def initialize + super + end + + def tokenize(@input : String) : Array(Token) + super + end + + def tokenize_logic(@input : String) : Array(Token) + while @position < @input.size + before_skip_position = -1 + while before_skip_position < @position + skip_whitespace_and_comments + before_skip_position = @position + end + break if @position == @input.size + + case @input[@position] + when '0'..'9' + match_number + when 'a'..'z', 'A'..'Z', '$', '_' + match_identifier_or_keyword + when '\'' + match_char_literal + when '"' + match_string_literal_or_text_block + else + match_symbol_or_operator + end + end + + @tokens + end + + def skip_whitespace_and_comments + c = @input[@position] + if c == '\r' || c == '\t' + @position += 1 + elsif @position != @input.size - 1 + if c == '/' && @input[@position + 1] == '*' + @position += 2 + while @position < @input.size + if @input[@position] == '*' && @input[@position + 1] == '/' + @position += 2 + break + end + @position += 1 + end + elsif c == '/' && @input[@position + 1] == '/' + @position += 2 + while @position < @input.size + if @input[@position] == '\n' + @position += 1 + break + end + @position += 1 + end + end + end + end + + def match_number + if match = @input.match(/0[xX][0-9a-fA-F](_?[0-9a-fA-F])*[lL]?|\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?[fFdD]?/, @position) + literal = match[0] + self << case literal + when /^0[xX]/ + @position += literal.size + Tuple.new(:HEX_LITERAL, literal) + when /^0/ 
+ @position += literal.size + Tuple.new(:OCT_LITERAL, literal) + when /^[\d.]/ + @position += literal.size + Tuple.new(:DECIMAL_LITERAL, literal) + else + @position += 1 + Tuple.new(:IDENTIFIER, @input[@position].to_s) + end + else + self << Tuple.new(:IDENTIFIER, @input[@position].to_s) + @position += 1 + end + end + + def match_identifier_or_keyword + if match = @input.match(/[a-zA-Z$_][a-zA-Z\d$_]*/, @position) + type = case match[0] + when ABSTRACT then :ABSTRACT + when ASSERT then :ASSERT + when BOOLEAN then :BOOLEAN + when BREAK then :BREAK + when BYTE then :BYTE + when CASE then :CASE + when CATCH then :CATCH + when CHAR then :CHAR + when CLASS then :CLASS + when CONST then :CONST + when CONTINUE then :CONTINUE + when DEFAULT then :DEFAULT + when DO then :DO + when DOUBLE then :DOUBLE + when ELSE then :ELSE + when ENUM then :ENUM + when EXTENDS then :EXTENDS + when FINAL then :FINAL + when FINALLY then :FINALLY + when FLOAT then :FLOAT + when FOR then :FOR + when IF then :IF + when GOTO then :GOTO + when IMPLEMENTS then :IMPLEMENTS + when IMPORT then :IMPORT + when INSTANCEOF then :INSTANCEOF + when INT then :INT + when INTERFACE then :INTERFACE + when LONG then :LONG + when NATIVE then :NATIVE + when NEW then :NEW + when PACKAGE then :PACKAGE + when PRIVATE then :PRIVATE + when PROTECTED then :PROTECTED + when PUBLIC then :PUBLIC + when RETURN then :RETURN + when SHORT then :SHORT + when STATIC then :STATIC + when STRICTFP then :STRICTFP + when SUPER then :SUPER + when SWITCH then :SWITCH + when SYNCHRONIZED then :SYNCHRONIZED + when THIS then :THIS + when THROW then :THROW + when THROWS then :THROWS + when TRANSIENT then :TRANSIENT + when TRY then :TRY + when VOID then :VOID + when VOLATILE then :VOLATILE + when WHILE then :WHILE + when MODULE then :MODULE + when OPEN then :OPEN + when REQUIRES then :REQUIRES + when EXPORTS then :EXPORTS + when OPENS then :OPENS + when TO then :TO + when USES then :USES + when PROVIDES then :PROVIDES + when WITH then :WITH + when TRANSITIVE then :TRANSITIVE + when VAR then :VAR + when YIELD then :YIELD + when RECORD then :RECORD + when SEALED then :SEALED + when PERMITS then :PERMITS + when NON_SEALED then :NON_SEALED + else :IDENTIFIER + end + + self << Tuple.new(type, match[0]) + @position += match[0].size + else + self << Tuple.new(:IDENTIFIER, @input[@position].to_s) + @position += 1 + end + end + + def match_char_literal + if match = @input.match(/'([^'\\\r\n]|\\['"\\bfnrt]|\\u[0-9a-fA-F]{4}|\\[^'\r\n])*'/, @position) + self << Tuple.new(:CHAR_LITERAL, match[0]) + @position += match[0].size + else + # impossible to reach here + self << Tuple.new(:IDENTIFIER, @input[@position].to_s) + @position += 1 + end + end + + def match_string_literal_or_text_block + if match = @input.match(/"""[ \t]*[\r\n](.|\\["\\bfnrt])*?[\r\n][ \t]*"""/, @position) + self << Tuple.new(:TEXT_BLOCK, match[0]) + @position += match[0].size + elsif match = @input.match(/"[^"\\\r\n]*(\\["\\bfnrt][^"\\\r\n]*)*"/, @position) + self << Tuple.new(:STRING_LITERAL, match[0]) + @position += match[0].size + else + # impossible to reach here + self << Tuple.new(:IDENTIFIER, @input[@position].to_s) + @position += 1 + end + end + + def match_symbol_or_operator + case @input[@position] + when '(' then self << Tuple.new(:LPAREN, "(") + when ')' then self << Tuple.new(:RPAREN, ")") + when ' ' then self << Tuple.new(:WHITESPACE, " ") + when '.' 
then self << Tuple.new(:DOT, ".") + when ',' then self << Tuple.new(:COMMA, ",") + when '@' then self << Tuple.new(:AT, "@") + when '{' then self << Tuple.new(:LBRACE, "{") + when '}' then self << Tuple.new(:RBRACE, "}") + when '\t' then self << Tuple.new(:TAB, "\t") + when ';' then self << Tuple.new(:SEMI, ";") + when '=' then self << Tuple.new(:ASSIGN, "=") + when '\n' + self << Tuple.new(:NEWLINE, "\n") + else + self << Tuple.new(:IDENTIFIER, @input[@position].to_s) + end + @position += 1 + end +end diff --git a/src/miniparsers/java.cr b/src/miniparsers/java.cr new file mode 100644 index 00000000..6f1fbe27 --- /dev/null +++ b/src/miniparsers/java.cr @@ -0,0 +1,568 @@ +require "../minilexers/java" +require "../models/minilexer/token" + +class JavaParser + property classes_tokens : Array(Array(Token)) + property classes : Array(ClassModel) + property tokens : Array(Token) + property import_statements : Array(String) + property path : String + + def initialize(@path : String, @tokens : Array(Token)) + @import_statements = Array(String).new + @classes_tokens = Array(Array(Token)).new + @classes = Array(ClassModel).new + + parse() + end + + def parse + parse_import_statements(@tokens) + parse_classes(@tokens) + @classes_tokens.each do |class_tokens| + name = get_class_name(class_tokens) + methods = parse_methods(class_tokens) + annotations = parse_annotations(@tokens, class_tokens[0].index) + fields = parse_fields(class_tokens, methods, annotations) + @classes << ClassModel.new(annotations, name, fields, methods, class_tokens) + end + end + + def get_root_source_directory(path : String, package_name : String) + i = 0 + path = Path.new(path).parent + while i < package_name.split(".").size + path = path.parent + i += 1 + end + + path + end + + def get_package_name(tokens : Array(Token)) + package_start = false + tokens.each_with_index do |token, index| + if token.type == :PACKAGE + package_start = true + i = index + 1 + package_name = "" + while i < tokens.size + if tokens[i].type != :SEMI + if tokens[i].type == :IDENTIFIER || tokens[i].type == :DOT + package_name += tokens[i].value + end + else + return package_name + end + + i += 1 + end + break + end + end + + "" + end + + def parse_import_statements(tokens : Array(Token)) + import_tokens = tokens.select { |token| token.type == :IMPORT } + import_tokens.each do |import_token| + next_token_index = import_token.index + 2 + next_token = tokens[next_token_index] + + if next_token && next_token.type == :IDENTIFIER + import_statement = next_token.value + next_token_index += 1 + + while next_token_index < tokens.size && tokens[next_token_index].type == :DOT + next_token_index += 1 + identifier_token = tokens[next_token_index] + break if !identifier_token || identifier_token.type != :IDENTIFIER + + import_statement += ".#{identifier_token.value}" + next_token_index += 1 + end + + @import_statements << import_statement + end + end + end + + def parse_formal_parameters(tokens : Array(Token), param_start_index : Int32) + lparen_count = 0 + rparen_count = 0 + lbrace_count = 0 + rbrace_count = 0 + parameters = Array(Array(Token)).new + parameter_token = Array(Token).new + return parameters if tokens.size <= param_start_index + + while param_start_index < tokens.size + if tokens[param_start_index].type == :WHITESPACE + param_start_index += 1 + elsif tokens[param_start_index].type == :TAB + param_start_index += 1 + elsif tokens[param_start_index].type == :NEWLINE + param_start_index += 1 + elsif tokens[param_start_index].type == :LPAREN + break + else + 
return parameters + end + end + + cursor = param_start_index + while cursor < tokens.size + token = tokens[cursor] + if token.type == :LPAREN + lparen_count += 1 + if lparen_count > 1 + parameter_token << token + end + elsif token.type == :LBRACE + lbrace_count += 1 + parameter_token << token + elsif token.type == :RBRACE + rbrace_count += 1 + parameter_token << token + elsif lbrace_count == rbrace_count && lparen_count - 1 == rparen_count && token.type == :COMMA + parameters << parameter_token + parameter_token = Array(Token).new + elsif lparen_count > 0 + if token.type == :RPAREN + rparen_count += 1 + if lparen_count == rparen_count + parameters << parameter_token + break + else + parameter_token << token + end + else + unless token.type == :WHITESPACE || token.type == :TAB || token.type == :NEWLINE + parameter_token << token + end + end + end + + cursor += 1 + end + + parameters + end + + def parse_annotations(tokens : Array(Token), declare_token_index : Int32) + skip_line = 0 + annotation_tokens = Hash(String, AnnotationModel).new + + cursor = declare_token_index - 1 + last_newline_index = -1 + while cursor > 0 + if tokens[cursor].type == :NEWLINE + skip_line += 1 + if skip_line == 1 + last_newline_index = cursor + end + end + + if skip_line == 2 + # :NEWLINE(cursor) @RequestMapping + # :NEWLINE public class Controller(type param) + annotation_token_index = cursor + 1 + starts_with_at = while annotation_token_index < last_newline_index + if tokens[annotation_token_index].type == :AT + break true + elsif tokens[annotation_token_index].type == :WHITESPACE || tokens[annotation_token_index].type == :TAB || tokens[annotation_token_index].type == :WHITESPACE + annotation_token_index += 1 + next + else + break false + end + end + + if starts_with_at + annotation_name = tokens[annotation_token_index + 1].value + annotation_params = parse_formal_parameters(tokens, annotation_token_index + 2) + annotation_tokens[annotation_name] = AnnotationModel.new(annotation_name, annotation_params, tokens[annotation_token_index - 1..last_newline_index - 1]) + skip_line = 1 + last_newline_index = cursor + else + break + end + end + + cursor -= 1 + end + + annotation_tokens + end + + def parse_classes(tokens : Array(Token)) + start_token_parse = false + class_body = Array(Token).new + + lbrace = rbrace = 0 + tokens.each do |token| + if !start_token_parse && token.type == :CLASS && tokens[token.index + 1].type == :WHITESPACE + start_token_parse = true + class_body = Array(Token).new + lbrace = rbrace = 0 + end + + if start_token_parse + if token.type == :LBRACE + lbrace += 1 + elsif token.type == :RBRACE + rbrace += 1 + end + + class_body << token + if lbrace > 0 && lbrace == rbrace + @classes_tokens << class_body + start_token_parse = false + end + end + end + end + + def get_class_name(tokens : Array(Token)) + has_token = false + tokens.each do |token| + if token.index != 0 + if token.type == :CLASS + has_token = true + elsif has_token && token.type == :IDENTIFIER + return token.value + end + end + end + + "" + end + + def parse_fields(class_tokens : Array(Token), methods : Hash(String, MethodModel), annotations : Hash(String, AnnotationModel)) + fields = Hash(String, FieldModel).new + class_body_start = false + + lbrace = 0 + rbrace = 0 + semi_indexs = Array(Int32).new + class_tokens.each_with_index do |token, index| + if token.type == :LBRACE + lbrace += 1 + elsif token.type == :RBRACE + rbrace += 1 + end + + if lbrace == rbrace + 1 + class_body_start = true + elsif class_body_start && lbrace == rbrace + 
break + end + + if class_body_start && token.type == :SEMI && class_tokens[index + 1].type == :NEWLINE + semi_indexs << index + end + end + + semi_indexs.each do |semi_index| + is_method_token = false + methods.values.each do |method| + method_start = method.@tokens[0].index + method_end = method.@tokens[-1].index + is_method_token = method_start <= semi_index && semi_index <= method_end + end + + if !is_method_token + assign_index = nil + field_name = "" + field_index = semi_index + while 0 < field_index + field_index -= 1 + token = class_tokens[field_index] + if token.type == :ASSIGN + assign_index = field_index + elsif token.type == :NEWLINE + # [access_modifier] [static] [final] type name [= initial value] ; + + if assign_index.nil? + field_name = class_tokens[semi_index - 1].value + else + field_name = class_tokens[assign_index - 1].value + end + + line_tokens = Array(Token).new + class_tokens[field_index + 1..semi_index - 1].each do |line_token| + next if line_token.type == :WHITESPACE || line_token.type == :TAB + line_tokens << line_token + end + + step = 0 + next if line_tokens.size == step + + is_static = false + is_final = false + modifier = "default" + if [:PRIVATE, :PUBLIC, :PROTECTED, :DEFAULT].index(line_tokens[step].type) + modifier = line_tokens[0].value + step += 1 + next if line_tokens.size == step + end + + if line_tokens[step].type == :STATIC + is_static = true + step += 1 + next if line_tokens.size == step + end + + if line_tokens[step].type == :FINAL + is_final = true + step += 1 + next if line_tokens.size == step + end + + # Only support common variable types + if ["int", "integer", "long", "string", "char", "boolean"].index(line_tokens[step].value.downcase) + field_type = line_tokens[step].value + field_name = line_tokens[step + 1].value + init_value = "" + if step + 3 < line_tokens.size && line_tokens[step + 2].type == :ASSIGN + line_tokens[step + 3..semi_index - 1].each do |init_token| + init_value += init_token.value # [TODO] currently supports literal values only + end + end + + field = FieldModel.new(modifier, is_static, is_final, field_type, field_name, init_value) + + # getter, setter methods + has_getter = false + has_setter = false + pascal_field_name = field_name[0].upcase + field_name[1..] + if methods.has_key?("get" + pascal_field_name) + has_getter = true + end + if methods.has_key?("set" + pascal_field_name) + has_setter = true + end + + # lombok annotations + if annotations.has_key?("Data") + has_getter = true + has_setter = true + else + has_getter = has_getter || annotations.has_key?("Getter") + has_setter = has_setter || annotations.has_key?("Setter") + end + + field.has_getter = has_getter + field.has_setter = has_setter + fields[field.name] = field + end + + break + end + end + end + end + + fields + end + + def parse_methods(class_tokens : Array(Token)) + # 1. Skip first line (class declaration) + # 2. Search ":RPAREN :LBRACE" or ":RPAREN throws :IDENTIFIER :LBRACE" pattern (method body entry point) + # 3. Get method declaration from ":NEWLINE" to ":RPAREN" (method declaration) + # 4. Get method body from ":LBRACE" to ":RBRACE" (method body) + # 5.
Repeat 2-4 until end of class body + methods = Hash(String, MethodModel).new + method_tokens = Array(Token).new + + lbrace_count = rbrace_count = 0 + lparen_count = rparen_count = 0 + + method_name = nil + enter_class_body = false + enter_method_body = false + method_name_index = -1 + method_body_index = -1 + class_tokens.each_index do |index| + token = class_tokens[index] + if token.type == :NEWLINE && !enter_class_body + # 1. Skip first line (class declaration) + enter_class_body = true + elsif enter_class_body && !enter_method_body + lbrace_count = rbrace_count = 0 + lparen_count = rparen_count = 0 + if token.type == :LBRACE + # 2. Search ":RPAREN :LBRACE" or ":RPAREN throws :IDENTIFIER :LBRACE" pattern (method body entry point) + lbrace_count = 1 + rbrace_count = 0 + lparen_count = rparen_count = 0 + method_body_index = index + previous_token_index = index - 1 + has_exception = false + while 0 < previous_token_index + previous_token = class_tokens[previous_token_index] + if previous_token.type == :RPAREN + rparen_count = 1 + enter_method_body = true + + # 3. Get method declaration from ":NEWLINE" to ":RPAREN" (method declaration) + i = previous_token_index - 1 + while 0 < i + method_declaration_token = class_tokens[i] + if method_declaration_token.type == :RPAREN + rparen_count += 1 + elsif method_declaration_token.type == :LPAREN + lparen_count += 1 + elsif rparen_count == lparen_count + if method_name == nil && method_declaration_token.type == :IDENTIFIER + method_name = method_declaration_token.value + method_name_index = i + elsif method_declaration_token.type == :NEWLINE + method_tokens = class_tokens[i + 1..index] + break + end + end + i -= 1 + end + + break + elsif previous_token.type == :WHITESPACE || previous_token.type == :TAB || previous_token.type == :NEWLINE + previous_token_index -= 1 + next + elsif has_exception + break unless previous_token.type == :THROWS && previous_token.value == "throws" + elsif previous_token.type == :IDENTIFIER + has_exception = true + else + break + end + + previous_token_index -= 1 + end + end + elsif enter_method_body + # 4. Get method body from ":LBRACE" to ":RBRACE" (method body) + method_tokens << token + if token.type == :RBRACE + rbrace_count += 1 + if lbrace_count == rbrace_count + annotations = parse_annotations(class_tokens, method_name_index) + if !method_name.nil? 
+ method_params = parse_formal_parameters(class_tokens, method_name_index + 1) + method_body = class_tokens[method_body_index + 1..index - 1] + methods[method_name] = MethodModel.new(method_name, method_params, annotations, method_tokens, method_body) + end + + # reset + method_tokens = Array(Token).new + enter_method_body = false + method_name = nil + end + elsif token.type == :LBRACE + lbrace_count += 1 + end + end + end + + methods + end + + def print_tokens(tokens : Array(Token), id = "default", trace = false) + puts("\n================ #{id} ===================") + tokens.each do |token| + print(token.value) + if id == "error" + print("(#{token.type})") + end + end + + if trace + puts "" + tokens.each do |token| + print("#{token.value}(#{token.type})") + end + end + end +end + +class AnnotationModel + property name : String + property params : Array(Array(Token)) + property tokens : Array(Token) + + def initialize(@name : String, @params : Array(Array(Token)), @tokens : Array(Token)) + end +end + +class ClassModel + property name : String + property methods : Hash(String, MethodModel) + property fields : Hash(String, FieldModel) + property annotations : Hash(String, AnnotationModel) + property tokens : Array(Token) + + def initialize(@annotations, @name, @fields, @methods, @tokens : Array(Token)) + end +end + +class MethodModel + property name : String + property params : Array(Array(Token)) + property annotations : Hash(String, AnnotationModel) + property tokens : Array(Token) + property body : Array(Token) + + def initialize(@name, @params, @annotations, @tokens, @body) + end +end + +class FieldModel + property access_modifier : String + property? is_static : Bool + property? is_final : Bool + property type : String + property name : String + property init_value : String + property? has_getter : Bool + property? has_setter : Bool + + def initialize(@access_modifier, @is_static, @is_final, @type, @name, @init_value) + # [access_modifier] [static] [final] type name [= initial value] ; + @has_getter = false + @has_setter = false + end + + def has_getter=(value : Bool) + @has_getter = value + end + + def has_setter=(value : Bool) + @has_setter = value + end + + def to_s + l = @access_modifier + " " + if @is_static + l += "static " + end + + if @is_final + l += "final " + end + + l += "#{@type} #{@name}" + if @init_value != "" + l += " = \"#{@init_value}\"" + end + + if @has_getter + l += " (has_getter)" + end + if @has_setter + l += " (has_setter)" + end + + l + end +end diff --git a/src/models/deliver.cr b/src/models/deliver.cr index 1645decb..f47fa262 100644 --- a/src/models/deliver.cr +++ b/src/models/deliver.cr @@ -26,7 +26,18 @@ class Deliver if header.includes? ":" @logger.debug "Adding '#{header}' to headers." splited = header.split(":") - @headers[splited[0]] = splited[1].gsub(/\s/, "") + value = "" + begin + if splited[1][0].to_s == " " + value = splited[1][1..-1].to_s + else + value = splited[1].to_s + end + rescue + value = splited[1].to_s + end + + @headers[splited[0]] = value end end @logger.info_sub "#{@headers.size} headers added." diff --git a/src/models/endpoint.cr b/src/models/endpoint.cr index f104b2e1..a3838281 100644 --- a/src/models/endpoint.cr +++ b/src/models/endpoint.cr @@ -79,6 +79,12 @@ struct Details def add_path(code_path : PathInfo) @code_paths << code_path end + + def ==(other : Details) : Bool + return false if @code_paths.size != other.code_paths.size + return false unless @code_paths.all? { |path| other.code_paths.any? 
{ |other_path| path == other_path } } + true + end end struct PathInfo @@ -92,6 +98,10 @@ struct PathInfo def initialize(@path : String, @line : Int32 | Nil) end + + def ==(other : PathInfo) : Bool + @path == other.path && @line == other.line + end end struct EndpointReference diff --git a/src/models/minilexer/minilexer.cr b/src/models/minilexer/minilexer.cr index 85e4028c..237c7222 100644 --- a/src/models/minilexer/minilexer.cr +++ b/src/models/minilexer/minilexer.cr @@ -5,13 +5,39 @@ class MiniLexer def initialize @mode = :normal @tokens = [] of Token + @position = 0 + @input = "" + @pos_line_array = Array(Tuple(Int32, Int32)).new end def mode=(mode) @mode = mode end - def tokenize(input : String) : Array(Token) + def line : Int + pos_index = 0 + line_index = 1 + i = @pos_line_array.size - 1 + while 0 < i + pos = @pos_line_array[i][pos_index] + line = @pos_line_array[i][line_index] + if pos < @position + return line + @input[pos + 1..@position].count("\n") + end + i -= 1 + end + + line = @input[0..@position].count("\n") + 1 + @pos_line_array << Tuple.new(@position, line) + + line + end + + def <<(t : Tuple(Symbol, String)) + @tokens << Token.new(t[0], t[1], @tokens.size, @position, line()) + end + + def tokenize(@input : String) : Array(Token) results = tokenize_logic(input) if @mode == :persistent @@ -21,7 +47,7 @@ class MiniLexer results end - def tokenize_logic(input : String) : Array(Token) + def tokenize_logic(@input : String) : Array(Token) results = [] of Token results end @@ -29,4 +55,18 @@ class MiniLexer def find(token_type : Symbol) : Array(Token) @tokens.select { |token| token.type == token_type } end + + def trace + line_number = 1 + # source_line = "" + lines = @input.split "\n" + puts "line size: #{lines.size}, token number: #{tokens.size}" + @tokens.each do |token| + if token.line == line_number + puts "\nLine #{line_number}: " + lines[line_number - 1] + line_number += 1 + end + puts token.to_s + end + end end diff --git a/src/models/minilexer/token.cr b/src/models/minilexer/token.cr index 4bc55d1d..9556a057 100644 --- a/src/models/minilexer/token.cr +++ b/src/models/minilexer/token.cr @@ -1,9 +1,16 @@ class Token property type : Symbol property value : String - property position : Int64 + property index : Int32 + property position : Int32 + property line : Int32 - def initialize(@type, @value, @position) + def initialize(@type, @value, @index) + @position = 0 + @line = 0 + end + + def initialize(@type, @value, @index, @position, @line) end def is?(type) @@ -11,6 +18,11 @@ class Token end def to_s - "#{@type} #{@value}" + if @value == "\n" + @value = "\\n" + elsif @value == "\t" + @value = "\\t" + end + "#{@type} '#{@value}'" end end diff --git a/src/models/noir.cr b/src/models/noir.cr index 16453d22..fb0137b4 100644 --- a/src/models/noir.cr +++ b/src/models/noir.cr @@ -118,7 +118,10 @@ class NoirRunner if dup.method == tiny_tmp.method && dup.url == tiny_tmp.url is_new = false tiny_tmp.params.each do |param| - dup.params << param + existing_param = dup.params.find { |p| p.name == param.name } + unless existing_param + dup.params << param + end end end end diff --git a/src/noir.cr b/src/noir.cr index 77225cad..a1d09736 100644 --- a/src/noir.cr +++ b/src/noir.cr @@ -6,7 +6,7 @@ require "./options.cr" require "./techs/techs.cr" module Noir - VERSION = "0.12.2" + VERSION = "0.13.0" end noir_options = default_options() diff --git a/src/output_builder/common.cr b/src/output_builder/common.cr index 37c08c76..a0ee7475 100644 --- a/src/output_builder/common.cr +++ 
b/src/output_builder/common.cr @@ -38,7 +38,7 @@ class OutputBuilderCommon < OutputBuilder if code_path.line.nil? r_buffer += "\n ○ file: #{code_path.path}" else - r_buffer += "\n ○ file: #{code_path.path}##{code_path.line}" + r_buffer += "\n ○ file: #{code_path.path} (line #{code_path.line})" end end end
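
The new Java miniparser can be exercised end to end. The sketch below is illustrative only: it assumes the lexer class defined in src/minilexers/java.cr is named JavaLexer, that it is constructed with no arguments like MiniLexer, and that its tokenize method returns the token stream JavaParser expects; it is assumed to run from the repository root against an existing fixture.

# Minimal usage sketch for the Java miniparser (assumptions noted above).
require "./src/minilexers/java"
require "./src/miniparsers/java"

path   = "spec/functional_test/fixtures/java_spring/src/ItemController.java"
source = File.read(path)

lexer  = JavaLexer.new          # assumed name of the lexer in src/minilexers/java.cr
tokens = lexer.tokenize(source) # MiniLexer#tokenize(String) : Array(Token)

parser = JavaParser.new(path, tokens)
parser.classes.each do |klass|
  puts "class #{klass.name}"
  klass.methods.each do |name, method|
    # MethodModel carries its annotations (e.g. GetMapping) and parameter token groups.
    puts "  #{name}(#{method.params.size} params) [#{method.annotations.keys.join(", ")}]"
  end
  klass.fields.each_value do |field|
    puts "  field: #{field.to_s}"
  end
end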
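
For the header parsing change in src/models/deliver.cr, the standalone sketch below (not part of the patch) shows the intended effect: the value after the first ':' keeps its internal spacing and only a single leading space is trimmed, whereas the previous gsub(/\s/, "") would have collapsed "Bearer abc123" into "Bearerabc123".

# Standalone illustration of the new header-value handling.
header  = "Authorization: Bearer abc123"
splited = header.split(":")

value = begin
  if splited[1][0].to_s == " "
    splited[1][1..-1].to_s # drop the single space after the colon
  else
    splited[1].to_s
  end
rescue
  splited[1].to_s          # e.g. empty value after the colon
end

puts({splited[0] => value}) # => {"Authorization" => "Bearer abc123"}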
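
The duplicate-endpoint handling in src/models/noir.cr now merges a parameter only when its name is not already present on the existing endpoint. A minimal sketch of that rule, using a stand-in Param record that is assumed only to expose name the way the diff uses it:

# Stand-in record for illustration; the real Param model has more fields.
record Param, name : String, value : String

dup_params  = [Param.new("id", ""), Param.new("name", "")]
tiny_params = [Param.new("name", ""), Param.new("token", "")]

tiny_params.each do |param|
  existing_param = dup_params.find { |p| p.name == param.name }
  unless existing_param
    dup_params << param
  end
end

puts dup_params.map(&.name) # => ["id", "name", "token"]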