feat: metrics (#1024)

* refactor: switch from opencensus to opentelemetry

* tempo does not work as designed yet

* fix: log traceids

* with grafana agent

* fix: http tracing

* fix: cleanup files

* chore: remove todo

* fix: bad test

* fix: ignore methods in grpc interceptors

* fix: remove test log

* clean up

* typo

* fix(config): configure tracing endpoint

* fix(span): add error id to span

* feat: metrics package

* feat: metrics package

* fix: counter

* fix: metric

* try metrics

* fix: counter metrics

* fix: active session counter

* fix: active session counter

* fix: change current Sequence table

* fix: change current Sequence table

* fix: current sequences

* fix: spooler div metrics

* fix: console view

* fix: merge master

* fix: last spooler run on search result instead of event timestamp

* fix: go mod

* Update console/src/assets/i18n/de.json

Co-authored-by: Livio Amstutz <livio.a@gmail.com>

* fix: pr review

* fix: map

* update oidc pkg

* fix: handlers

* fix: value observer

* fix: remove fmt

* fix: handlers

* fix: tests

* fix: handler minimum cycle duration 1s

* fix(spooler): handler channel buffer

* fix interceptors

Co-authored-by: adlerhurst <silvan.reusser@gmail.com>
Co-authored-by: Livio Amstutz <livio.a@gmail.com>
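
Several of the commits above concern the switch from OpenCensus to OpenTelemetry tracing (configuring the exporter endpoint, recording an error id on the span). As a rough orientation only, here is a minimal sketch of that pattern using today's otel-go API, which differs from the pre-1.0 API available when this change was written; the endpoint, tracer name, span name, and error id are placeholders, not values from this change.

package main

import (
    "context"
    "errors"

    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/codes"
    "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
    sdktrace "go.opentelemetry.io/otel/sdk/trace"
)

func main() {
    ctx := context.Background()

    // Exporter pointed at a collector endpoint; "localhost:4317" is a placeholder
    // for whatever endpoint the application is configured with.
    exp, err := otlptracegrpc.New(ctx,
        otlptracegrpc.WithEndpoint("localhost:4317"),
        otlptracegrpc.WithInsecure(),
    )
    if err != nil {
        panic(err)
    }

    tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exp))
    defer func() { _ = tp.Shutdown(ctx) }()
    otel.SetTracerProvider(tp)

    // Start a span and record an error on it, analogous to
    // "fix(span): add error id to span" above.
    tracer := otel.Tracer("zitadel/example")
    _, span := tracer.Start(ctx, "do-work")
    workErr := errors.New("ID-Example: something failed")
    span.RecordError(workErr)
    span.SetStatus(codes.Error, workErr.Error())
    span.End()
}
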
Author: Fabi
Committed by: GitHub
Date: 2020-12-02 08:50:59 +01:00
Parent: 723b6b5189
Commit: 6b3f5b984c
194 changed files with 2570 additions and 1096 deletions
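
The metrics-related commits (metrics package, counter fixes, active session counter, value observer) come down to registering OpenTelemetry instruments and updating them from the application code. Below is a minimal sketch of an active-session up/down counter, again using the current otel-go metric API rather than the 2020 one; the meter and instrument names are placeholders.

package main

import (
    "context"

    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/metric"
)

func main() {
    ctx := context.Background()

    // Without a configured MeterProvider this meter is a no-op; a real setup
    // registers a provider and exporter at startup.
    meter := otel.Meter("zitadel/example")

    activeSessions, err := meter.Int64UpDownCounter(
        "active_session_count", // placeholder instrument name
        metric.WithDescription("number of currently active sessions"),
    )
    if err != nil {
        panic(err)
    }

    activeSessions.Add(ctx, 1)  // session created
    activeSessions.Add(ctx, -1) // session terminated
}
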

View File

@@ -8,7 +8,7 @@ import (
     "github.com/caos/zitadel/internal/errors"
     "github.com/caos/zitadel/internal/eventstore/models"
     es_models "github.com/caos/zitadel/internal/eventstore/models"
-    "github.com/caos/zitadel/internal/tracing"
+    "github.com/caos/zitadel/internal/telemetry/tracing"
 )
 
 type Querier interface {

View File

@@ -8,7 +8,7 @@ import (
     "github.com/caos/logging"
     caos_errs "github.com/caos/zitadel/internal/errors"
     "github.com/caos/zitadel/internal/eventstore/models"
-    "github.com/caos/zitadel/internal/tracing"
+    "github.com/caos/zitadel/internal/telemetry/tracing"
     "github.com/cockroachdb/cockroach-go/v2/crdb"
 )

View File

@@ -11,6 +11,7 @@ type Handler interface {
     EventQuery() (*models.SearchQuery, error)
     Reduce(*models.Event) error
     OnError(event *models.Event, err error) error
+    OnSuccess() error
     MinimumCycleDuration() time.Duration
     QueryLimit() uint64
 }
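
This hunk extends the query Handler interface with OnSuccess, which the spooler (further below) calls after a cycle has processed all events without error. A minimal sketch of what an implementation might look like; the view type and table name are assumptions for illustration and do not appear in this diff.

package handler

// sessionView is a stand-in for a view that can persist the spooler run time.
type sessionView interface {
    UpdateSpoolerRunTimestamp(table string) error
}

type sessionHandler struct {
    view sessionView
}

// OnSuccess records when the spooler last ran successfully for this view, so
// the "last spooler run" can be reported from the view instead of relying on
// the timestamp of the last processed event.
func (h *sessionHandler) OnSuccess() error {
    return h.view.UpdateSpoolerRunTimestamp("user_sessions")
}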

View File

@@ -28,7 +28,7 @@ func (c *Config) New() *Spooler {
         lockID:     lockID,
         eventstore: c.Eventstore,
         locker:     c.Locker,
-        queue:      make(chan *spooledHandler),
+        queue:      make(chan *spooledHandler, len(c.ViewHandlers)),
         workers:    c.ConcurrentWorkers,
     }
 }
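
The queue is now buffered with capacity len(c.ViewHandlers), so the goroutine that enqueues every handler in Start (next file) can finish even while all workers are still busy. A small self-contained illustration of that behavior, with placeholder handler names:

package main

import "fmt"

func main() {
    handlers := []string{"user", "org", "project"}

    // Buffered to the number of handlers, mirroring the change above:
    // every send succeeds immediately, even before a worker receives.
    queue := make(chan string, len(handlers))
    for _, h := range handlers {
        queue <- h // would block on an unbuffered channel until a worker reads
    }
    close(queue)

    for h := range queue {
        fmt.Println("worker picked up:", h)
    }
}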

View File

@@ -9,7 +9,7 @@ import (
     "github.com/caos/zitadel/internal/eventstore"
     "github.com/caos/zitadel/internal/eventstore/models"
     "github.com/caos/zitadel/internal/eventstore/query"
-    "github.com/caos/zitadel/internal/tracing"
+    "github.com/caos/zitadel/internal/telemetry/tracing"
     "github.com/caos/zitadel/internal/view/repository"
     "time"
@@ -52,8 +52,7 @@ func (s *Spooler) Start() {
}
go func() {
for _, handler := range s.handlers {
handler := &spooledHandler{Handler: handler, locker: s.locker, queuedAt: time.Now(), eventstore: s.eventstore}
s.queue <- handler
s.queue <- &spooledHandler{Handler: handler, locker: s.locker, queuedAt: time.Now(), eventstore: s.eventstore}
}
}()
}
@@ -79,6 +78,7 @@ func (s *spooledHandler) load(workerID string) {
             errs <- s.process(ctx, events, workerID)
             logging.Log("SPOOL-0pV8o").WithField("view", s.ViewModel()).WithField("worker", workerID).WithField("traceID", tracing.TraceIDFromCtx(ctx)).Debug("process done")
         }
     }
     <-ctx.Done()
 }
@@ -103,7 +103,9 @@ func (s *spooledHandler) process(ctx context.Context, events []*models.Event, wo
             }
         }
     }
-    return nil
+    err := s.OnSuccess()
+    logging.LogWithFields("SPOOL-49ods", "view", s.ViewModel(), "worker", workerID, "traceID", tracing.TraceIDFromCtx(ctx)).OnError(err).Warn("could not process on success func")
+    return err
 }
 
 func (s *spooledHandler) query(ctx context.Context) ([]*models.Event, error) {
@@ -165,7 +167,7 @@ func (s *spooledHandler) lock(ctx context.Context, errs chan<- error, workerID s
 func HandleError(event *models.Event, failedErr error,
     latestFailedEvent func(sequence uint64) (*repository.FailedEvent, error),
     processFailedEvent func(*repository.FailedEvent) error,
-    processSequence func(uint64) error, errorCountUntilSkip uint64) error {
+    processSequence func(uint64, time.Time) error, errorCountUntilSkip uint64) error {
     failedEvent, err := latestFailedEvent(event.Sequence)
     if err != nil {
         return err
@@ -177,7 +179,11 @@ func HandleError(event *models.Event, failedErr error,
         return err
     }
     if errorCountUntilSkip <= failedEvent.FailureCount {
-        return processSequence(event.Sequence)
+        return processSequence(event.Sequence, event.CreationDate)
     }
     return nil
 }
+
+func HandleSuccess(updateSpoolerRunTimestamp func() error) error {
+    return updateSpoolerRunTimestamp()
+}
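
HandleError's processSequence callback now also receives the event's creation time, so a view can record the timestamp up to which it has caught up when a repeatedly failing event is skipped, and HandleSuccess simply forwards to a run-timestamp update. Below is a sketch of a call site; the handler struct, its callback fields, and the spooler import path are assumptions for illustration only.

package handler

import (
    "time"

    "github.com/caos/zitadel/internal/eventstore/models"
    "github.com/caos/zitadel/internal/eventstore/spooler" // import path assumed
    "github.com/caos/zitadel/internal/view/repository"
)

// sessionErrHandler is illustrative only; its fields are not part of this diff.
type sessionErrHandler struct {
    latestFailedEvent   func(sequence uint64) (*repository.FailedEvent, error)
    processFailedEvent  func(*repository.FailedEvent) error
    processedSequence   func(sequence uint64, eventTimestamp time.Time) error
    errorCountUntilSkip uint64
}

// OnError delegates to the updated HandleError; once the failure count reaches
// errorCountUntilSkip, the event is skipped and the view records both the
// sequence and the creation time of the skipped event.
func (h *sessionErrHandler) OnError(event *models.Event, err error) error {
    return spooler.HandleError(event, err,
        h.latestFailedEvent,
        h.processFailedEvent,
        h.processedSequence,
        h.errorCountUntilSkip,
    )
}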

View File

@@ -40,6 +40,9 @@ func (h *testHandler) Reduce(*models.Event) error {
 func (h *testHandler) OnError(event *models.Event, err error) error {
     return err
 }
+func (h *testHandler) OnSuccess() error {
+    return nil
+}
 func (h *testHandler) MinimumCycleDuration() time.Duration {
     return h.cycleDuration
 }
@@ -429,7 +432,7 @@ func TestHandleError(t *testing.T) {
             func(*repository.FailedEvent) error {
                 return nil
             },
-            func(uint64) error {
+            func(uint64, time.Time) error {
                 processedSequence = true
                 return nil
             },