zitadel/internal/api/robots_txt/robots_txt_test.go
Miguel Cabrerizo 3ca7147808
fix: introduce measures to avoid bots crawling and indexing activities (#5728)
* fix: 404 for robots.txt and meta robots tags

* fix: add unit tests for robots txt and tag

* fix: add meta tag robots none for login pages

* fix: weird format issue in header.go

* fix: add x-robots-tag=none to grpcwebserver

* fix linting

---------

Co-authored-by: Silvan <silvan.reusser@gmail.com>
Co-authored-by: Livio Spring <livio.a@gmail.com>
2023-05-05 10:25:02 +02:00
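
Among the measures listed in the commit message, an "X-Robots-Tag: none" response header is added to the gRPC-web server so that its responses are neither crawled nor indexed. The sketch below shows one way such a header can be attached as HTTP middleware; the noIndexMiddleware name and the sketch package are illustrative only, not taken from the zitadel codebase.

package sketch // hypothetical package, for illustration only

import "net/http"

// noIndexMiddleware wraps an existing handler and adds "X-Robots-Tag: none"
// to every response, telling crawlers not to index or follow anything served
// through it. This is a sketch, not zitadel's actual implementation.
func noIndexMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The header must be set before the wrapped handler writes the
		// response body, otherwise it would be silently dropped.
		w.Header().Set("X-Robots-Tag", "none")
		next.ServeHTTP(w, r)
	})
}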


package robots_txt

import (
	"io"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func Test_RobotsTxt(t *testing.T) {
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	recorder := httptest.NewRecorder()

	// The handler must start without an error before it can serve the request.
	handler, err := Start()
	require.NoError(t, err)
	handler.ServeHTTP(recorder, req)

	res := recorder.Result()
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	assert.NoError(t, err)

	// The served robots.txt disallows all user agents from crawling any path.
	assert.Equal(t, http.StatusOK, res.StatusCode)
	assert.Equal(t, "User-agent: *\nDisallow: /\n", string(body))
}
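
For context, a minimal Start() that would satisfy this test could look roughly like the sketch below; it is only an assumption about what the handler under test does, and the package's real implementation may differ in detail.

package robots_txt

import (
	"fmt"
	"net/http"
)

// Start returns a handler that serves a robots.txt disallowing all crawlers,
// which is exactly what the test above asserts. Sketch only; not necessarily
// the actual zitadel code.
func Start() (http.Handler, error) {
	mux := http.NewServeMux()
	mux.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/plain")
		fmt.Fprint(w, "User-agent: *\nDisallow: /\n")
	})
	return mux, nil
}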