fix: introduce measures to avoid bots crawling and indexing activities (#5728)

* fix: 404 for robots.txt and meta robots tags

* fix: add unit tests for robots.txt and tag

* fix: add meta tag robots none for login pages

* fix: weird format issue in header.go

* fix: add x-robots-tag=none to grpcwebserver

* fix linting

---------

Co-authored-by: Silvan <silvan.reusser@gmail.com>
Co-authored-by: Livio Spring <livio.a@gmail.com>
This commit is contained in:
Miguel Cabrerizo
2023-05-05 10:25:02 +02:00
committed by GitHub
parent d224172a31
commit 3ca7147808
10 changed files with 99 additions and 0 deletions

View File

@@ -23,6 +23,7 @@ const (
XUserAgent = "x-user-agent"
XGrpcWeb = "x-grpc-web"
XRequestedWith = "x-requested-with"
XRobotsTag = "x-robots-tag"
IfNoneMatch = "If-None-Match"
LastModified = "Last-Modified"
Etag = "Etag"

View File

@@ -0,0 +1,14 @@
package middleware
import (
"net/http"
http_utils "github.com/zitadel/zitadel/internal/api/http"
)
// RobotsTagHandler returns a middleware that stamps every response with the
// X-Robots-Tag header set to "none" before delegating to next, so crawlers
// are told not to index or follow anything served through this chain.
func RobotsTagHandler(next http.Handler) http.Handler {
	fn := func(rw http.ResponseWriter, req *http.Request) {
		rw.Header().Set(http_utils.XRobotsTag, "none")
		next.ServeHTTP(rw, req)
	}
	return http.HandlerFunc(fn)
}

View File

@@ -0,0 +1,24 @@
package middleware
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
)
// Test_RobotsTagInterceptor verifies that RobotsTagHandler adds the
// "X-Robots-Tag: none" header to responses flowing through it.
func Test_RobotsTagInterceptor(t *testing.T) {
	noop := http.HandlerFunc(func(http.ResponseWriter, *http.Request) {})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/", nil)
	RobotsTagHandler(noop).ServeHTTP(rec, req)
	res := rec.Result()
	defer res.Body.Close()
	got := res.Header.Get("X-Robots-Tag")
	assert.Equal(t, "none", got)
}