mirror of https://github.com/zitadel/zitadel.git synced 2024-12-30 11:37:49 +00:00
Miguel Cabrerizo 3ca7147808
fix: introduce measures to avoid bots crawling and indexing activities ()
* fix: 404 for robots.txt and meta robots tags

* fix: add unit tests for robots txt and tag

* fix: add meta tag robots none for login pages

* fix: weird format issue in header.go

* fix: add x-robots-tag=none to grpcwebserver

* fix: linting

---------

Co-authored-by: Silvan <silvan.reusser@gmail.com>
Co-authored-by: Livio Spring <livio.a@gmail.com>
2023-05-05 10:25:02 +02:00
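
The last two bullets refer to blocking indexing at the HTTP layer: a robots meta tag with content "none" on the login pages and an "X-Robots-Tag: none" response header on the gRPC-web server. The diff itself is not shown here, but a minimal sketch of such a header middleware in Go could look as follows; the name noIndexMiddleware and the surrounding main function are assumptions for illustration, not identifiers from the actual change.

package main

import (
	"fmt"
	"net/http"
)

// noIndexMiddleware wraps a handler and adds an "X-Robots-Tag: none" header
// to every response, telling crawlers not to index or follow the content.
// The name is hypothetical; the real commit wires this into the gRPC-web server.
func noIndexMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("X-Robots-Tag", "none")
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})
	// Every response served through the middleware now carries X-Robots-Tag: none.
	_ = http.ListenAndServe(":8080", noIndexMiddleware(mux))
}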


package robots_txt

import (
	"fmt"
	"net/http"
)

const (
	HandlerPrefix = "/robots.txt"
)

// Start returns an http.Handler that serves a robots.txt disallowing every
// path for all user agents, so crawlers do not index the instance.
func Start() (http.Handler, error) {
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Headers must be set before WriteHeader is called, otherwise they
		// are ignored; robots.txt is served as plain text.
		w.Header().Set("Content-Type", "text/plain")
		w.WriteHeader(http.StatusOK)
		fmt.Fprintf(w, "User-agent: *\nDisallow: /\n")
	})
	return handler, nil
}
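
As a test sketch in the spirit of the "unit tests for robots txt" mentioned in the commit message, a small httptest-based test could exercise the handler above. The test name and its checks are assumptions for illustration, not the test shipped with the commit.

package robots_txt

import (
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
)

// TestStartDisallowsAll issues a request to /robots.txt against the handler
// returned by Start and checks the status code and the Disallow rule.
// This is an illustrative sketch, not the actual test from the commit.
func TestStartDisallowsAll(t *testing.T) {
	handler, err := Start()
	if err != nil {
		t.Fatalf("Start() returned an error: %v", err)
	}

	req := httptest.NewRequest(http.MethodGet, HandlerPrefix, nil)
	rec := httptest.NewRecorder()
	handler.ServeHTTP(rec, req)

	if rec.Code != http.StatusOK {
		t.Fatalf("expected status %d, got %d", http.StatusOK, rec.Code)
	}
	if !strings.Contains(rec.Body.String(), "Disallow: /") {
		t.Errorf("expected body to contain \"Disallow: /\", got %q", rec.Body.String())
	}
}

In the server wiring, the handler returned by Start can be registered at HandlerPrefix on the HTTP mux so that requests to /robots.txt no longer return 404, which is the first bullet of the commit message.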