chore: disallow all crawlers with robots.txt (#833)

Signed-off-by: Tim Birkett <tim.birkett@sainsburys.co.uk>
Author: Tim Birkett
Date: 2022-05-13 08:23:43 +01:00 (committed by GitHub)
parent 1e0b3a2a8c
commit 7b1a0d3cd3
3 changed files with 13 additions and 0 deletions


@@ -1137,6 +1137,10 @@ func (s *httpdServer) initializeRouter() {
 		render.PlainText(w, r, "ok")
 	})
 
+	s.router.Get(robotsTxtPath, func(w http.ResponseWriter, r *http.Request) {
+		render.PlainText(w, r, "User-agent: *\nDisallow: /")
+	})
+
 	// share API exposed to external users
 	s.router.Get(sharesPath+"/{id}", s.downloadFromShare)
 	s.router.Post(sharesPath+"/{id}", s.uploadFilesToShare)
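
For context, a minimal standalone sketch of the behavior this commit adds, written against only Go's standard library rather than the chi router and render helper the project uses; the "/robots.txt" path is an assumption standing in for whatever robotsTxtPath resolves to:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Register a handler equivalent to the one added in this commit,
	// assuming robotsTxtPath is "/robots.txt".
	mux := http.NewServeMux()
	mux.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
		fmt.Fprint(w, "User-agent: *\nDisallow: /")
	})

	// Exercise the handler in-process with httptest.
	srv := httptest.NewServer(mux)
	defer srv.Close()

	resp, err := http.Get(srv.URL + "/robots.txt")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("%s\n", body) // prints the robots.txt body returned by the handler
}

A "Disallow: /" rule under "User-agent: *" asks all well-behaved crawlers to skip the entire site; it is advisory only and does not block clients that ignore robots.txt.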