Skip to content

Commit

Permalink
Add support for pagination (#85)
Browse files Browse the repository at this point in the history
* fixes issues 26 & 46. Added support for pagination

* Clarified what getNextPath returns

Co-authored-by: Yevgeniy Brikman <[email protected]>

* Added Regex and test cases for getNextPath

Used Regex to parse Link headers instead of splits and index arithmetic.
getNextPath now returns `string` and `*FetchError`.

* Add .iml files to .gitignore

* Implement minor cleanup

* Refactor code and tests

* Make comment clearer

Co-authored-by: mbal <[email protected]>
  • Loading branch information
brikis98 and mbal authored Jan 29, 2021
1 parent 37e8f10 commit c6fdad3
Show file tree
Hide file tree
Showing 3 changed files with 99 additions and 25 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
.idea
*.iml

# Covers Visual Studio Code
.vscode
Expand Down
89 changes: 64 additions & 25 deletions github.go
Original file line number Diff line number Diff line change
Expand Up @@ -107,31 +107,37 @@ func FetchTags(githubRepoUrl string, githubToken string, instance GitHubInstance
return tagsString, wrapError(err)
}

url := createGitHubRepoUrlForPath(repo, "tags")
resp, err := callGitHubApi(repo, url, map[string]string{})
if err != nil {
return tagsString, err
}
// Set per_page to 100, which is the max, to reduce network calls
tagsUrl := formatUrl(repo, createGitHubRepoUrlForPath(repo, "tags?per_page=100"))
for tagsUrl != "" {
resp, err := callGitHubApiRaw(tagsUrl, "GET", repo.Token, map[string]string{})
if err != nil {
return tagsString, err
}

// Convert the response body to a byte array
buf := new(bytes.Buffer)
_, goErr := buf.ReadFrom(resp.Body)
if goErr != nil {
return tagsString, wrapError(goErr)
}
jsonResp := buf.Bytes()
// Convert the response body to a byte array
buf := new(bytes.Buffer)
_, goErr := buf.ReadFrom(resp.Body)
if goErr != nil {
return tagsString, wrapError(goErr)
}
jsonResp := buf.Bytes()

// Extract the JSON into our array of gitHubTagsCommitApiResponse's
var tags []GitHubTagsApiResponse
if err := json.Unmarshal(jsonResp, &tags); err != nil {
return tagsString, wrapError(err)
}
// Extract the JSON into our array of gitHubTagsCommitApiResponse's
var tags []GitHubTagsApiResponse
if err := json.Unmarshal(jsonResp, &tags); err != nil {
return tagsString, wrapError(err)
}

for _, tag := range tags {
// Skip tags that are not semantically versioned so that they don't cause errors. (issue #75)
if _, err := version.NewVersion(tag.Name); err == nil {
tagsString = append(tagsString, tag.Name)
for _, tag := range tags {
// Skip tags that are not semantically versioned so that they don't cause errors. (issue #75)
if _, err := version.NewVersion(tag.Name); err == nil {
tagsString = append(tagsString, tag.Name)
}
}

// Get paginated tags (issue #26 and #46)
tagsUrl = getNextUrl(resp.Header.Get("link"))
}

return tagsString, nil
Expand Down Expand Up @@ -203,17 +209,50 @@ func createGitHubRepoUrlForPath(repo GitHubRepo, path string) string {
return fmt.Sprintf("repos/%s/%s/%s", repo.Owner, repo.Name, path)
}

// nextLinkRegex matches one `<url>; rel="next"` entry in an RFC 8288 Link
// header. `[^>]+` keeps the match inside a single <...> pair, so the capture
// cannot bleed across entries and it tolerates URLs that contain commas.
var nextLinkRegex = regexp.MustCompile(`<([^>]+)>;\s*rel="next"`)

// Get the next page URL from the given link header returned by the GitHub API. If there is no next page, return an
// empty string. The link header is expected to be of the form:
//
//   <url>; rel="next", <url>; rel="last"
//
func getNextUrl(links string) string {
	// Match against the whole header rather than splitting on commas first:
	// splitting would sever a next-page URL whose query string itself contains
	// a comma, causing the regex to miss it entirely.
	urlMatches := nextLinkRegex.FindStringSubmatch(links)
	if len(urlMatches) == 2 {
		return strings.TrimSpace(urlMatches[1])
	}

	return ""
}

// Format a URL for calling the GitHub API for the given repo and path.
// The API host is passed as a format argument rather than concatenated into
// the format string, so a host that happens to contain '%' cannot be
// misinterpreted as a formatting verb and corrupt the resulting URL.
func formatUrl(repo GitHubRepo, path string) string {
	return fmt.Sprintf("https://%s/%s", repo.ApiUrl, path)
}

// Issue a GET request to the GitHub API for the given repo at the given path,
// forwarding the repo's auth token and any custom headers, and return the
// HTTP response.
func callGitHubApi(repo GitHubRepo, path string, customHeaders map[string]string) (*http.Response, *FetchError) {
	fullUrl := formatUrl(repo, path)
	return callGitHubApiRaw(fullUrl, "GET", repo.Token, customHeaders)
}

// Call the GitHub API at the given URL, using the given HTTP method, and passing the given token and headers, and
// return the response
func callGitHubApiRaw(url string, method string, token string, customHeaders map[string]string) (*http.Response, *FetchError) {
httpClient := &http.Client{}

request, err := http.NewRequest("GET", fmt.Sprintf("https://"+repo.ApiUrl+"/%s", path), nil)
request, err := http.NewRequest(method, url, nil)
if err != nil {
return nil, wrapError(err)
}

if repo.Token != "" {
request.Header.Set("Authorization", fmt.Sprintf("token %s", repo.Token))
if token != "" {
request.Header.Set("Authorization", fmt.Sprintf("token %s", token))
}

for headerName, headerValue := range customHeaders {
Expand All @@ -236,7 +275,7 @@ func callGitHubApi(repo GitHubRepo, path string, customHeaders map[string]string
respBody := buf.String()

// We leverage the HTTP Response Code as our ErrorCode here.
return nil, newError(resp.StatusCode, fmt.Sprintf("Received HTTP Response %d while fetching releases for GitHub URL %s. Full HTTP response: %s", resp.StatusCode, repo.Url, respBody))
return nil, newError(resp.StatusCode, fmt.Sprintf("Received HTTP Response %d while fetching releases for GitHub URL %s. Full HTTP response: %s", resp.StatusCode, url, respBody))
}

return resp, nil
Expand Down
34 changes: 34 additions & 0 deletions github_test.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package main

import (
"github.com/stretchr/testify/require"
"io/ioutil"
"os"
"reflect"
Expand Down Expand Up @@ -57,6 +58,39 @@ func TestGetListOfReleasesFromGitHubRepo(t *testing.T) {
}
}

func TestGetNextPath(t *testing.T) {
t.Parallel()

cases := []struct {
name string
links string
expectedUrl string
}{
{"next-and-last-urls", `<https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>; rel="next", <https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=3>; rel="last"`, "https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2"},
{"next-and-last-urls-no-whitespace", `<https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>;rel="next",<https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=3>;rel="last"`, "https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2"},
{"next-and-last-urls-extra-whitespace", ` <https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>; rel="next", <https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=3> ; rel="last"`, "https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2"},
{"first-and-next-urls", `<https://api.github.com/repos/123456789/example-repo/tags?page=1>; rel="first", <https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>; rel="next"`, "https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2"},
{"next-only", `<https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>; rel="next"`, "https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2"},
{"first-and-last-urls", `<https://api.github.com/repos/123456789/example-repo/tags?page=1>; rel="first", <https://api.github.com/repos/123456789/example-repo/tags?per_page=100&page=2>; rel="last"`, ""},
{"empty", ``, ""},
{"garbage", `junk not related to links header at all`, ""},
}

for _, tc := range cases {
// The following is necessary to make sure tc's values don't
// get updated due to concurrency within the scope of t.Run(..) below
tc := tc

t.Run(tc.name, func(t *testing.T) {
t.Parallel()

nextUrl := getNextUrl(tc.links)
require.Equal(t, tc.expectedUrl, nextUrl)
})
}

}

func TestParseUrlIntoGithubInstance(t *testing.T) {
t.Parallel()

Expand Down

0 comments on commit c6fdad3

Please sign in to comment.