Add config option to block user agents in robots.txt
jlelse committed Aug 9, 2023
1 parent 138612a · commit 5256540
Showing 4 changed files with 74 additions and 23 deletions.
config.go (5 changes: 5 additions & 0 deletions)
@@ -35,6 +35,7 @@ type config struct {
 	TTS *configTTS `mapstructure:"tts"`
 	Reactions *configReactions `mapstructure:"reactions"`
 	Pprof *configPprof `mapstructure:"pprof"`
+	RobotsTxt *configRobotsTxt `mapstructure:"robotstxt"`
 	Debug bool `mapstructure:"debug"`
 	initialized bool
 }
@@ -355,6 +356,10 @@ type configPlugin struct {
 	Config map[string]any `mapstructure:"config"`
 }
 
+type configRobotsTxt struct {
+	BlockedBots []string `mapstructure:"blockedBots"`
+}
+
 func (a *goBlog) loadConfigFile(file string) error {
 	// Use viper to load the config file
 	v := viper.New()
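For context on how the new section is picked up: the robotstxt key from the config file is decoded into configRobotsTxt via viper's mapstructure tags, which loadConfigFile above already relies on. The following standalone sketch is not code from the repository; the inline YAML string and the anonymous wrapper struct are only illustrative, but it shows the mapping:

// Standalone sketch, not GoBlog code: decode a "robotstxt" config section
// into the new configRobotsTxt struct using viper, the library the config
// loader above uses.
package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

type configRobotsTxt struct {
	BlockedBots []string `mapstructure:"blockedBots"`
}

func main() {
	v := viper.New()
	v.SetConfigType("yaml")
	// Illustrative inline YAML instead of reading a real config file.
	_ = v.ReadConfig(strings.NewReader("robotstxt:\n  blockedBots:\n    - GPTBot\n"))

	// Anonymous wrapper stands in for the real config struct above.
	var cfg struct {
		RobotsTxt *configRobotsTxt `mapstructure:"robotstxt"`
	}
	_ = v.Unmarshal(&cfg)

	fmt.Println(cfg.RobotsTxt.BlockedBots) // prints: [GPTBot]
}
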
example-config.yml (5 changes: 5 additions & 0 deletions)
@@ -178,6 +178,11 @@ tts:
 reactions:
   enabled: true # Enable reactions (default is false)
 
+# Block bots using the robots.txt
+robotstxt:
+  blockedBots: # List all bots that should be disallowed to crawl the site (default is empty)
+    - GPTBot
+
 # Blogs
 defaultBlog: en # Default blog (needed because you can define multiple blogs)
 blogs:
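For reference, with the robotstxt section above, a public address of https://example.com (the address used in the tests), and no blogs with their own sitemaps, the updated handler in robotstxt.go below serves a robots.txt with this body; it is exactly the string asserted in the new "Blocked bot" test:

User-agent: *
Allow: /

User-agent: GPTBot
Disallow: /

Sitemap: https://example.com/sitemap.xml
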
robotstxt.go (8 changes: 8 additions & 0 deletions)
@@ -14,6 +14,14 @@ func (a *goBlog) serveRobotsTXT(w http.ResponseWriter, _ *http.Request) {
 		return
 	}
 	_, _ = fmt.Fprint(w, "Allow: /\n\n")
+	if a.cfg.RobotsTxt != nil && len(a.cfg.RobotsTxt.BlockedBots) != 0 {
+		for _, bot := range a.cfg.RobotsTxt.BlockedBots {
+			_, _ = fmt.Fprint(w, "User-agent: ")
+			_, _ = fmt.Fprint(w, bot)
+			_, _ = fmt.Fprint(w, "\n")
+			_, _ = fmt.Fprint(w, "Disallow: /\n\n")
+		}
+	}
 	_, _ = fmt.Fprintf(w, "Sitemap: %s\n", a.getFullAddress(sitemapPath))
 	for _, bc := range a.cfg.Blogs {
 		_, _ = fmt.Fprintf(w, "Sitemap: %s\n", a.getFullAddress(bc.getRelativePath(sitemapBlogPath)))
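As a small aside, each blocked bot gets its own User-agent group with a site-wide Disallow; the four Fprint calls per bot could also be expressed as one formatted write. A hedged, standalone sketch of that equivalent formulation follows (writeBlockedBots and the second bot name are invented for illustration; this is not what the commit uses):

// Illustrative sketch only, not GoBlog code: emit one robots.txt group per
// blocked bot, producing the same bytes as the loop in serveRobotsTXT above.
package main

import (
	"fmt"
	"io"
	"os"
)

// writeBlockedBots is a hypothetical helper, not part of the repository.
func writeBlockedBots(w io.Writer, blockedBots []string) {
	for _, bot := range blockedBots {
		// One write per bot instead of four separate fmt.Fprint calls.
		_, _ = fmt.Fprintf(w, "User-agent: %s\nDisallow: /\n\n", bot)
	}
}

func main() {
	// "SomeOtherBot" is a placeholder user agent for this example.
	writeBlockedBots(os.Stdout, []string{"GPTBot", "SomeOtherBot"})
}
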
robotstxt_test.go (79 changes: 56 additions & 23 deletions)
@@ -10,29 +10,62 @@ import (
 
 func Test_robotsTXT(t *testing.T) {
 
-	app := &goBlog{
-		cfg: &config{
-			Server: &configServer{
-				PublicAddress: "https://example.com",
+	t.Run("Default", func(t *testing.T) {
+		app := &goBlog{
+			cfg: &config{
+				Server: &configServer{
+					PublicAddress: "https://example.com",
+				},
 			},
-		},
-	}
-
-	rec := httptest.NewRecorder()
-	req := httptest.NewRequest("GET", "/robots.txt", nil)
-	app.serveRobotsTXT(rec, req)
-	assert.Equal(t, http.StatusOK, rec.Code)
-	assert.Equal(t, "User-agent: *\nAllow: /\n\nSitemap: https://example.com/sitemap.xml\n", rec.Body.String())
-
-	app.cfg.PrivateMode = &configPrivateMode{
-		Enabled: true,
-	}
-	assert.True(t, app.isPrivate())
-
-	rec = httptest.NewRecorder()
-	req = httptest.NewRequest("GET", "/robots.txt", nil)
-	app.serveRobotsTXT(rec, req)
-	assert.Equal(t, http.StatusOK, rec.Code)
-	assert.Equal(t, "User-agent: *\nDisallow: /\n", rec.Body.String())
+		}
+
+		rec := httptest.NewRecorder()
+		req := httptest.NewRequest("GET", "/robots.txt", nil)
+		app.serveRobotsTXT(rec, req)
+		assert.Equal(t, http.StatusOK, rec.Code)
+		assert.Equal(t, "User-agent: *\nAllow: /\n\nSitemap: https://example.com/sitemap.xml\n", rec.Body.String())
+	})
+
+	t.Run("Private mode", func(t *testing.T) {
+		app := &goBlog{
+			cfg: &config{
+				Server: &configServer{
+					PublicAddress: "https://example.com",
+				},
+				PrivateMode: &configPrivateMode{
+					Enabled: true,
+				},
+			},
+		}
+
+		assert.True(t, app.isPrivate())
+
+		rec := httptest.NewRecorder()
+		req := httptest.NewRequest("GET", "/robots.txt", nil)
+		app.serveRobotsTXT(rec, req)
+		assert.Equal(t, http.StatusOK, rec.Code)
+		assert.Equal(t, "User-agent: *\nDisallow: /\n", rec.Body.String())
+	})
+
+	t.Run("Blocked bot", func(t *testing.T) {
+		app := &goBlog{
+			cfg: &config{
+				Server: &configServer{
+					PublicAddress: "https://example.com",
+				},
+				RobotsTxt: &configRobotsTxt{
+					BlockedBots: []string{
+						"GPTBot",
+					},
+				},
+			},
+		}
+
+		rec := httptest.NewRecorder()
+		req := httptest.NewRequest("GET", "/robots.txt", nil)
+		app.serveRobotsTXT(rec, req)
+		assert.Equal(t, http.StatusOK, rec.Code)
+		assert.Equal(t, "User-agent: *\nAllow: /\n\nUser-agent: GPTBot\nDisallow: /\n\nSitemap: https://example.com/sitemap.xml\n", rec.Body.String())
+	})
 
 }
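
The new subtests can be run on their own with the standard Go test runner, for example from the repository root:

go test -run Test_robotsTXT -v .

(-run filters tests by name; -v also prints each t.Run subtest as it passes.)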
