fix: introduce measures to avoid bots crawling and indexing activities (#5728)

* fix: 404 for robots.txt and meta robots tags

* fix: add unit tests for robots txt and tag

* fix: add meta tag robots none for login pages

* fix: weird format issue in header.go

* fix: add x-robots-tag=none to grpcwebserver

* fix: linting

---------

Co-authored-by: Silvan <silvan.reusser@gmail.com>
Co-authored-by: Livio Spring <livio.a@gmail.com>
This commit is contained in:
Miguel Cabrerizo
2023-05-05 10:25:02 +02:00
committed by GitHub
parent d224172a31
commit 3ca7147808
10 changed files with 99 additions and 0 deletions

View File

@@ -0,0 +1,19 @@
package robots_txt
import (
"fmt"
"net/http"
)
const (
	// HandlerPrefix is the URL path under which the robots.txt
	// handler is mounted by the HTTP server.
	HandlerPrefix = "/robots.txt"
)
func Start() (http.Handler, error) {
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
w.Header().Set("Content-Type", "application/text")
fmt.Fprintf(w, "User-agent: *\nDisallow: /\n")
})
return handler, nil
}