fix: introduce measures to avoid bots crawling and indexing activities (#5728)

* fix: 404 for robots.txt and meta robots tags (see the sketches after this list)

* fix: add unit tests for the robots.txt handler and meta tag

* fix: add meta tag robots none for login pages

* fix: weird format issue in header.go

* fix: add x-robots-tag=none to grpcwebserver

* fix: linting
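
The robots.txt fix adds a handler that answers crawlers with a disallow-all policy. Below is a minimal sketch of such a handler using plain net/http; the route and response body reflect the intent of the commit, not ZITADEL's actual robots_txt package.

```go
// Sketch: serve a "block everything" robots.txt.
package main

import (
	"fmt"
	"net/http"
)

// robotsTxt answers GET /robots.txt with a rule that blocks all crawlers.
func robotsTxt(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/plain")
	// Disallow every path for every user agent.
	fmt.Fprint(w, "User-agent: *\nDisallow: /\n")
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/robots.txt", robotsTxt)
	// Error handling omitted in this sketch.
	_ = http.ListenAndServe(":8080", mux)
}
```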
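
For the login pages, the commit adds a robots meta tag so that pages a crawler still reaches are neither indexed nor followed. A sketch with Go's html/template follows; the template content is illustrative and not ZITADEL's real login templates.

```go
// Sketch: render a login page whose <head> carries a robots "none" meta tag.
package main

import (
	"html/template"
	"os"
)

const loginPage = `<!DOCTYPE html>
<html>
<head>
  <!-- "none" is shorthand for "noindex, nofollow" -->
  <meta name="robots" content="none">
  <title>{{.Title}}</title>
</head>
<body>Login form goes here.</body>
</html>`

func main() {
	tmpl := template.Must(template.New("login").Parse(loginPage))
	_ = tmpl.Execute(os.Stdout, struct{ Title string }{Title: "Login"})
}
```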
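
The gRPC-web server cannot rely on an HTML meta tag, so the commit sets the equivalent X-Robots-Tag response header to none. The sketch below shows middleware with that effect, assuming a generic net/http handler chain rather than ZITADEL's actual wiring.

```go
// Sketch: middleware that marks every response as non-indexable via the
// X-Robots-Tag header. Handler names and wiring are assumptions.
package main

import (
	"fmt"
	"net/http"
)

// noRobots wraps a handler and adds "X-Robots-Tag: none" to each response.
func noRobots(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("X-Robots-Tag", "none")
		next.ServeHTTP(w, r)
	})
}

func main() {
	api := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})
	// Error handling omitted in this sketch.
	_ = http.ListenAndServe(":8080", noRobots(api))
}
```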

---------

Co-authored-by: Silvan <silvan.reusser@gmail.com>
Co-authored-by: Livio Spring <livio.a@gmail.com>
Authored by Miguel Cabrerizo on 2023-05-05 10:25:02 +02:00, committed by GitHub
parent d224172a31
commit 3ca7147808
10 changed files with 99 additions and 0 deletions


@@ -39,6 +39,7 @@ import (
 	http_util "github.com/zitadel/zitadel/internal/api/http"
 	"github.com/zitadel/zitadel/internal/api/http/middleware"
 	"github.com/zitadel/zitadel/internal/api/oidc"
+	"github.com/zitadel/zitadel/internal/api/robots_txt"
 	"github.com/zitadel/zitadel/internal/api/saml"
 	"github.com/zitadel/zitadel/internal/api/ui/console"
 	"github.com/zitadel/zitadel/internal/api/ui/login"
@@ -305,6 +306,13 @@ func startAPIs(
 		return err
 	}
+	// robots.txt handler
+	robotsTxtHandler, err := robots_txt.Start()
+	if err != nil {
+		return fmt.Errorf("unable to start robots txt handler: %w", err)
+	}
+	apis.RegisterHandlerOnPrefix(robots_txt.HandlerPrefix, robotsTxtHandler)
 	// TODO: Record openapi access logs?
 	openAPIHandler, err := openapi.Start()
 	if err != nil {