chore: disallow all crawlers with robots.txt (#833)
Signed-off-by: Tim Birkett <tim.birkett@sainsburys.co.uk>
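
This change registers a /robots.txt endpoint on the HTTPD server so that well-behaved crawlers skip the web admin and client UIs entirely. The served policy is the standard deny-all form:

User-agent: *
Disallow: /

"User-agent: *" matches every crawler, and "Disallow: /" excludes every path under the site root.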
@@ -86,6 +86,7 @@ const (
     providerEventsPath      = "/api/v2/events/provider"
     sharesPath              = "/api/v2/shares"
     healthzPath             = "/healthz"
+    robotsTxtPath           = "/robots.txt"
     webRootPathDefault      = "/"
     webBasePathDefault      = "/web"
     webBasePathAdminDefault = "/web/admin"
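
One way to verify the endpoint against a running instance is a plain HTTP GET. The sketch below is illustrative, not part of the commit, and assumes sftpgo's default HTTPD binding of 127.0.0.1:8080:

package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    // Fetch the crawler policy from a locally running sftpgo instance.
    // 127.0.0.1:8080 is an assumption: adjust to your httpd binding.
    resp, err := http.Get("http://127.0.0.1:8080/robots.txt")
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        panic(err)
    }
    // Expect 200 and the two-line deny-all policy.
    fmt.Printf("%d\n%s\n", resp.StatusCode, body)
}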
@@ -118,6 +118,7 @@ const (
     providerEventsPath = "/api/v2/events/provider"
     sharesPath         = "/api/v2/shares"
     healthzPath        = "/healthz"
+    robotsTxtPath      = "/robots.txt"
    webBasePath        = "/web"
     webBasePathAdmin   = "/web/admin"
     webAdminSetupPath  = "/web/admin/setup"
@@ -8910,6 +8911,13 @@ func TestHealthCheck(t *testing.T) {
     assert.Equal(t, "ok", rr.Body.String())
 }

+func TestRobotsTxtCheck(t *testing.T) {
+    req, _ := http.NewRequest(http.MethodGet, "/robots.txt", nil)
+    rr := executeRequest(req)
+    checkResponseCode(t, http.StatusOK, rr)
+    assert.Equal(t, "User-agent: *\nDisallow: /", rr.Body.String())
+}
+
 func TestGetWebRootMock(t *testing.T) {
     req, _ := http.NewRequest(http.MethodGet, "/", nil)
     rr := executeRequest(req)
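
The executeRequest and checkResponseCode helpers used above are defined elsewhere in the sftpgo test suite. A minimal sketch of equivalent helpers built on net/http/httptest, with testRouter standing in for whatever handler the test setup wires up, could look like this:

package httpd

import (
    "net/http"
    "net/http/httptest"
    "testing"

    "github.com/stretchr/testify/assert"
)

// testRouter is a placeholder: the real test setup initializes the
// httpd router before the tests run.
var testRouter http.Handler

// executeRequest runs req against the router and records the response.
func executeRequest(req *http.Request) *httptest.ResponseRecorder {
    rr := httptest.NewRecorder()
    testRouter.ServeHTTP(rr, req)
    return rr
}

// checkResponseCode asserts that the recorded status code matches the
// expected one.
func checkResponseCode(t *testing.T, expected int, rr *httptest.ResponseRecorder) {
    assert.Equal(t, expected, rr.Code)
}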
@@ -1137,6 +1137,10 @@ func (s *httpdServer) initializeRouter() {
         render.PlainText(w, r, "ok")
     })

+    s.router.Get(robotsTxtPath, func(w http.ResponseWriter, r *http.Request) {
+        render.PlainText(w, r, "User-agent: *\nDisallow: /")
+    })
+
     // share API exposed to external users
     s.router.Get(sharesPath+"/{id}", s.downloadFromShare)
     s.router.Post(sharesPath+"/{id}", s.uploadFilesToShare)
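
For context, here is the same pattern in self-contained form: a chi router serving the deny-all robots.txt as text/plain via go-chi/render, the libraries behind the s.router.Get and render.PlainText calls above. The package layout and listen address are illustrative, not sftpgo's actual structure:

package main

import (
    "log"
    "net/http"

    "github.com/go-chi/chi/v5"
    "github.com/go-chi/render"
)

const robotsTxtPath = "/robots.txt"

func main() {
    router := chi.NewRouter()

    // Ask every crawler to index nothing.
    router.Get(robotsTxtPath, func(w http.ResponseWriter, r *http.Request) {
        render.PlainText(w, r, "User-agent: *\nDisallow: /")
    })

    log.Fatal(http.ListenAndServe(":8080", router))
}

Running this and requesting /robots.txt returns the two-line policy with a 200 status, which is exactly what TestRobotsTxtCheck asserts above.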