diff --git a/.gitignore b/.gitignore index c073cdf..5ed5d18 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ # vendor/ artifacts/ __debug_bin +discovery diff --git a/Dockerfile b/Dockerfile index 95cca32..a6a0eaa 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.16 as build +FROM golang:1.17 as build WORKDIR /build diff --git a/cmd/kafmesh-discovery/main.go b/cmd/kafmesh-discovery/main.go index f5a3366..5871c46 100644 --- a/cmd/kafmesh-discovery/main.go +++ b/cmd/kafmesh-discovery/main.go @@ -66,7 +66,7 @@ func main() { group.Go(graphService.Run(ctx)) - eventChan := make(chan os.Signal) + eventChan := make(chan os.Signal, 1) signal.Notify(eventChan, syscall.SIGINT, syscall.SIGTERM) select { diff --git a/docs/protos/kafmesh/discovery/v1/topic_definition.proto b/docs/protos/kafmesh/discovery/v1/topic_definition.proto index 12cf7da..a945a82 100644 --- a/docs/protos/kafmesh/discovery/v1/topic_definition.proto +++ b/docs/protos/kafmesh/discovery/v1/topic_definition.proto @@ -20,4 +20,5 @@ message TopicDefinition { enum TopicType { TOPIC_TYPE_INVALID = 0; TOPIC_TYPE_PROTOBUF = 1; + TOPIC_TYPE_RAW = 2; } diff --git a/go.mod b/go.mod index d560ea5..359a021 100644 --- a/go.mod +++ b/go.mod @@ -1,65 +1,111 @@ module github.com/syncromatics/kafmesh -go 1.16 +go 1.17 require ( - github.com/99designs/gqlgen v0.13.0 - github.com/Microsoft/go-winio v0.5.0 // indirect + github.com/99designs/gqlgen v0.15.1 github.com/Shopify/sarama v1.27.0 - github.com/agnivade/levenshtein v1.1.0 // indirect - github.com/avast/retry-go v3.0.0+incompatible // indirect github.com/bsm/sarama-cluster v2.1.15+incompatible github.com/burdiyan/kafkautil v0.0.0-20190131162249-eaf83ed22d5b github.com/emicklei/proto v1.9.0 github.com/go-chi/chi v4.1.2+incompatible - github.com/go-redis/redis v6.15.9+incompatible // indirect - github.com/golang-migrate/migrate/v4 v4.14.1 // indirect github.com/golang/mock v1.5.0 github.com/golang/protobuf v1.5.2 - github.com/google/gofuzz 
v1.2.0 // indirect - github.com/googleapis/gnostic v0.5.5 // indirect github.com/gorilla/websocket v1.4.2 - github.com/hashicorp/errwrap v1.1.0 // indirect - github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/iancoleman/strcase v0.1.3 - github.com/imdario/mergo v0.3.12 // indirect - github.com/json-iterator/go v1.1.11 // indirect - github.com/klauspost/compress v1.12.2 // indirect github.com/lib/pq v1.10.1 github.com/lovoo/goka v1.0.6 - github.com/mitchellh/mapstructure v1.4.1 // indirect github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.10.0 - github.com/prometheus/common v0.23.0 // indirect github.com/rakyll/statik v0.1.7 github.com/rs/cors v1.7.0 - github.com/samuel/go-zookeeper v0.0.0-20201211165307-7117e9ea2414 // indirect github.com/satori/go.uuid v1.2.0 github.com/spf13/cobra v1.1.3 github.com/stretchr/testify v1.7.0 github.com/syncromatics/go-kit v1.5.1 github.com/syncromatics/proto-schema-registry v0.7.3 - github.com/vektah/dataloaden v0.3.0 // indirect - github.com/vektah/gqlparser/v2 v2.1.0 + github.com/vektah/gqlparser/v2 v2.2.0 github.com/yargevad/filepathx v0.0.0-20161019152617-907099cb5a62 + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c + google.golang.org/grpc v1.37.0 + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b + gotest.tools v2.2.0+incompatible + k8s.io/api v0.21.0 + k8s.io/apimachinery v0.21.0 + k8s.io/client-go v0.21.0 +) + +require ( + docker.io/go-docker v1.0.0 // indirect + github.com/Microsoft/go-winio v0.5.0 // indirect + github.com/agnivade/levenshtein v1.1.0 // indirect + github.com/avast/retry-go v3.0.0+incompatible // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bradleyjkemp/cupaloy v2.3.0+incompatible // indirect + github.com/cespare/xxhash/v2 v2.1.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/docker/distribution v2.7.1+incompatible // indirect + github.com/docker/go-connections v0.4.0 
// indirect + github.com/docker/go-units v0.4.0 // indirect + github.com/eapache/go-resiliency v1.2.0 // indirect + github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect + github.com/eapache/queue v1.1.0 // indirect + github.com/go-logr/logr v0.4.0 // indirect + github.com/go-redis/redis v6.15.9+incompatible // indirect + github.com/go-stack/stack v1.8.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-migrate/migrate/v4 v4.14.1 // indirect + github.com/golang/snappy v0.0.3 // indirect + github.com/google/go-cmp v0.5.5 // indirect + github.com/google/gofuzz v1.2.0 // indirect + github.com/googleapis/gnostic v0.5.5 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.2 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/imdario/mergo v0.3.12 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/jcmturner/gofork v1.0.0 // indirect + github.com/json-iterator/go v1.1.11 // indirect + github.com/klauspost/compress v1.12.2 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.1 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.0.1 // indirect + github.com/pierrec/lz4 v2.5.2+incompatible // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.23.0 // indirect + github.com/prometheus/procfs v0.6.0 // indirect + github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 // indirect + github.com/samuel/go-zookeeper v0.0.0-20201211165307-7117e9ea2414 // indirect + github.com/spf13/pflag v1.0.5 // indirect + 
github.com/syndtr/goleveldb v1.0.0 // indirect + github.com/wvanbergen/kazoo-go v0.0.0-20180202103751-f72d8611297a // indirect + go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect go.uber.org/zap v1.16.0 // indirect golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b // indirect - golang.org/x/mod v0.4.1 // indirect golang.org/x/net v0.0.0-20210428140749-89ef3d95e781 // indirect golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c // indirect - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect + golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d // indirect + golang.org/x/text v0.3.6 // indirect + golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab // indirect - google.golang.org/grpc v1.37.0 - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b - gotest.tools v2.2.0+incompatible + google.golang.org/protobuf v1.26.0 // indirect + gopkg.in/inf.v0 v0.9.1 // indirect + gopkg.in/jcmturner/aescts.v1 v1.0.1 // indirect + gopkg.in/jcmturner/dnsutils.v1 v1.0.1 // indirect + gopkg.in/jcmturner/gokrb5.v7 v7.5.0 // indirect + gopkg.in/jcmturner/rpc.v1 v1.1.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect honnef.co/go/tools v0.1.3 // indirect - k8s.io/api v0.21.0 - k8s.io/apimachinery v0.21.0 - k8s.io/client-go v0.21.0 + k8s.io/klog/v2 v2.8.0 // indirect k8s.io/utils v0.0.0-20210305010621-2afb4311ab10 // indirect sigs.k8s.io/structured-merge-diff/v4 v4.1.1 // indirect + sigs.k8s.io/yaml v1.2.0 // indirect ) diff --git a/go.sum b/go.sum index 50abc69..39ed619 100644 --- a/go.sum +++ b/go.sum @@ -38,8 +38,8 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= docker.io/go-docker v1.0.0 
h1:VdXS/aNYQxyA9wdLD5z8Q8Ro688/hG8HzKxYVEVbE6s= docker.io/go-docker v1.0.0/go.mod h1:7tiAn5a0LFmjbPDbyTPOaTTOuG1ZRNXdPA6RvKY+fpY= -github.com/99designs/gqlgen v0.13.0 h1:haLTcUp3Vwp80xMVEg5KRNwzfUrgFdRmtBY8fuB8scA= -github.com/99designs/gqlgen v0.13.0/go.mod h1:NV130r6f4tpRWuAI+zsrSdooO/eWUv+Gyyoi3rEfXIk= +github.com/99designs/gqlgen v0.15.1 h1:48bRXecwlCNTa/n2bMSp2rQsXNxwZ54QHbiULNf78ec= +github.com/99designs/gqlgen v0.15.1/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= @@ -73,7 +73,6 @@ github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMx github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= github.com/agnivade/levenshtein v1.1.0 h1:n6qGwyHG61v3ABce1rPVZklEYRT8NFpCMrpZdBUbYGM= github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -108,6 +107,8 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCS github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4= github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= github.com/bmizerany/assert 
v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnLTKNzo+bdmT/oH34c2Y= +github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk= github.com/bsm/sarama-cluster v2.1.15+incompatible h1:RkV6WiNRnqEEbp81druK8zYhmnIgdOjqSVi0+9Cnl2A= github.com/bsm/sarama-cluster v2.1.15+incompatible/go.mod h1:r7ao+4tTNXvWm+VRpRJchr2kQhqxgmAp2iEX5W96gMM= github.com/burdiyan/kafkautil v0.0.0-20190131162249-eaf83ed22d5b h1:gRFujk0F/KYFDEalhpaAbLIwmeiDH53ZgdllJ7UHxyQ= @@ -147,8 +148,8 @@ github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7 github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cznic/mathutil v0.0.0-20180504122225-ca4c9f2c1369/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= @@ -159,7 +160,6 @@ github.com/denisenkom/go-mssqldb v0.0.0-20190515213511-eb9f6a1743f3/go.mod h1:zA github.com/denisenkom/go-mssqldb v0.0.0-20200620013148-b91950f658ec/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 
v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+UbP35JkH8yB7MYb4q/qhBarqZE6g= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dhui/dktest v0.3.1/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc= @@ -215,7 +215,6 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4 github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk= github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -248,7 +247,6 @@ github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/me github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM= github.com/gocql/gocql v0.0.0-20190301043612-f6df8288f9b4/go.mod h1:4Fw1eo5iaEhDUs8XyuhSVCVy52Jq3L+/3GJgYkwc+/0= github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= -github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod 
h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= @@ -336,10 +334,8 @@ github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3i github.com/googleapis/gnostic v0.5.5 h1:9fHAtK0uDfpveeqqo1hkEZJcFvYXAiCN3UutL8F9xHw= github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= @@ -446,6 +442,7 @@ github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8 github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= +github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= @@ -477,7 +474,7 @@ github.com/lib/pq v1.10.1 h1:6VXZrLU0jHBYyAqrSPa+MgPfnSvTPuMgK+k0o5kVFWo= github.com/lib/pq 
v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= -github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= github.com/lovoo/goka v1.0.6 h1:iabSXOHwSGe0poXs65m7oPC1TekdFlcMFifZan1wjrU= github.com/lovoo/goka v1.0.6/go.mod h1:7wEa36YYw9O/uzQ/abhVhNV2B5URy7fyrW/XMYE1WiI= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= @@ -485,8 +482,7 @@ github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czP github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/markbates/pkger v0.15.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI= -github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007 h1:reVOUXwnhsYv/8UqjvhrMOu5CNT9UapHFLbQ2JcXsmg= -github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -509,9 +505,9 @@ github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eI github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= 
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v0.0.0-20180220230111-00c29f56e238/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= @@ -632,14 +628,13 @@ github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqn github.com/remyoudompheng/bigfft v0.0.0-20190728182440-6a916e37a237/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= -github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod 
h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= github.com/samuel/go-zookeeper v0.0.0-20201211165307-7117e9ea2414 h1:AJNDS0kP60X8wwWFvbLPwDuojxubj9pbfK7pjHw0vKg= @@ -650,10 +645,7 @@ github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= -github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -688,7 +680,6 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify 
v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -711,15 +702,8 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1 github.com/uber/jaeger-client-go v2.22.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.22.1 h1:+mkCCcOFKPnCmVYVcURKps1Xe+3zP90gSYGNfRkjoIY= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k= -github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= -github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= -github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/gqlparser/v2 v2.1.0 h1:uiKJ+T5HMGGQM2kRKQ8Pxw8+Zq9qhhZhz/lieYvCMns= -github.com/vektah/gqlparser/v2 v2.1.0/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/vektah/gqlparser/v2 v2.2.0 h1:bAc3slekAAJW6sZTi07aGq0OrfaCjj4jxARAaC7g2EM= github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= github.com/wvanbergen/kazoo-go v0.0.0-20180202103751-f72d8611297a h1:ILoU84rj4AQ3q6cjQvtb9jBjx4xzR/Riq/zYhmDQiOk= @@ -734,6 +718,7 @@ github.com/yuin/goldmark v1.1.25/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= @@ -816,8 +801,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -865,6 +850,7 @@ golang.org/x/net v0.0.0-20201029221708-28c70e62bb1d/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net 
v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781 h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -950,9 +936,9 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs= -golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= @@ -990,7 +976,6 @@ golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= 
golang.org/x/tools v0.0.0-20190425222832-ad9eeb80039a/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -1011,7 +996,6 @@ golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200114235610-7ae403b6b589/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1032,12 +1016,14 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200806022845-90696ccdc692/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200814230902-9882f1d1823d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools 
v0.0.0-20200815165600-90abf76919f3/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200817023811-d00afeaade8f/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200818005847-188abfa75333/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1183,6 +1169,7 @@ gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRN gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -1244,6 +1231,4 @@ sigs.k8s.io/structured-merge-diff/v4 v4.1.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK sigs.k8s.io/yaml v1.1.0/go.mod 
h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= -sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= -sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= diff --git a/internal/generator/.snapshots/generator_test-validateEmitter-validateEmitter b/internal/generator/.snapshots/generator_test-validateEmitter-validateEmitter new file mode 100644 index 0000000..e6e1d0c --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateEmitter-validateEmitter @@ -0,0 +1,124 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. + +package details + +import ( + "context" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "golang.org/x/sync/errgroup" + + "test/internal/kafmesh/models/testMesh/testSerial" +) + +type TestSerialDetails_Source interface { + Emit(message TestSerialDetails_Source_Message) error + EmitBulk(ctx context.Context, messages []TestSerialDetails_Source_Message) error + Delete(key string) error +} + +type TestSerialDetails_Source_impl struct { + context.Context + emitter *runner.Emitter + metrics *runner.Metrics +} + +type TestSerialDetails_Source_Message struct { + Key string + Value *testSerial.Details +} + +type impl_TestSerialDetails_Source_Message struct { + msg TestSerialDetails_Source_Message +} + +func (m *impl_TestSerialDetails_Source_Message) Key() string { + return m.msg.Key +} + +func (m *impl_TestSerialDetails_Source_Message) Value() interface{} { + return m.msg.Value +} + +func New_TestSerialDetails_Source(service *runner.Service) 
(*TestSerialDetails_Source_impl, func(context.Context) func() error, error) { + options := service.Options() + brokers := options.Brokers + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("testMesh.testSerial.details", &testSerial.Details{}) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create codec") + } + + emitter, err := goka.NewEmitter(brokers, + goka.Stream("testMesh.testSerial.details"), + codec, + goka.WithEmitterHasher(kafkautil.MurmurHasher)) + + if err != nil { + return nil, nil, errors.Wrap(err, "failed creating source") + } + + emitterCtx, emitterCancel := context.WithCancel(context.Background()) + e := &TestSerialDetails_Source_impl{ + emitterCtx, + runner.NewEmitter(emitter), + service.Metrics, + } + + return e, func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + grp.Go(func() error { + select { + case <-ctx.Done(): + emitterCancel() + return nil + } + }) + grp.Go(e.emitter.Watch(ctx)) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + +func (e *TestSerialDetails_Source_impl) Emit(message TestSerialDetails_Source_Message) error { + err := e.emitter.Emit(message.Key, message.Value) + if err != nil { + e.metrics.SourceError("testMesh", "details", "testMesh.testSerial.details") + return err + } + + e.metrics.SourceHit("testMesh", "details", "testMesh.testSerial.details", 1) + return nil +} + +func (e *TestSerialDetails_Source_impl) EmitBulk(ctx context.Context, messages []TestSerialDetails_Source_Message) error { + b := []runner.EmitMessage{} + for _, m := range messages { + b = append(b, &impl_TestSerialDetails_Source_Message{msg: m}) + } + err := e.emitter.EmitBulk(ctx, b) + if err != nil { + e.metrics.SourceError("testMesh", "details", "testMesh.testSerial.details") + return err + } + + e.metrics.SourceHit("testMesh", 
"details", "testMesh.testSerial.details", len(b)) + return nil +} + +func (e *TestSerialDetails_Source_impl) Delete(key string) error { + return e.emitter.Emit(key, nil) +} + diff --git a/internal/generator/.snapshots/generator_test-validateProcessors-validateProcessors b/internal/generator/.snapshots/generator_test-validateProcessors-validateProcessors new file mode 100644 index 0000000..591df80 --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateProcessors-validateProcessors @@ -0,0 +1,156 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. + +package details + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "time" + + "github.com/Shopify/sarama" + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syndtr/goleveldb/leveldb/opt" + + "github.com/syncromatics/kafmesh/pkg/runner" + + gokaCodecs "github.com/lovoo/goka/codec" + m0 "test/internal/kafmesh/models/testMesh/testId" + m1 "test/internal/kafmesh/models/testMesh/testSerial" +) + +type Enricher_ProcessorContext interface { + Key() string + Timestamp() time.Time + Output_TestSerialDetailsEnriched(key string, message *m1.DetailsEnriched) +} + +type Enricher_Processor interface { + HandleSomeMessage(ctx Enricher_ProcessorContext, message []byte) error + HandleTestIDTest2(ctx Enricher_ProcessorContext, message *m0.Test2) error +} + +type Enricher_ProcessorContext_Impl struct { + ctx goka.Context + processorContext *runner.ProcessorContext +} + +func new_Enricher_ProcessorContext_Impl(ctx goka.Context, pc *runner.ProcessorContext) *Enricher_ProcessorContext_Impl { + return &Enricher_ProcessorContext_Impl{ctx, pc} +} + +func (c *Enricher_ProcessorContext_Impl) Key() string { + return c.ctx.Key() +} + +func (c *Enricher_ProcessorContext_Impl) Timestamp() time.Time { + return c.ctx.Timestamp() +} + +func (c *Enricher_ProcessorContext_Impl) Output_TestSerialDetailsEnriched(key string, message 
*m1.DetailsEnriched) { + value, _ := json.Marshal(message) + c.processorContext.Output("testMesh.testSerial.detailsEnriched", "testSerial.detailsEnriched", key, string(value)) + c.ctx.Emit("testMesh.testSerial.detailsEnriched", key, message) +} + +func Register_Enricher_Processor(service *runner.Service, impl Enricher_Processor) (func(context.Context) func() error, error) { + options := service.Options() + brokers := options.Brokers + protoWrapper := options.ProtoWrapper + + config := sarama.NewConfig() + config.Version = sarama.MaxVersion + config.Consumer.Offsets.Initial = sarama.OffsetOldest + config.Consumer.Offsets.AutoCommit.Enable = true + config.Consumer.Offsets.CommitInterval = 1 * time.Second + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "processor", "testMesh.details.enricher") + + err := os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, errors.Wrap(err, "failed to create processor db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + + c0 := &gokaCodecs.Bytes{} + c1, err := protoWrapper.Codec("testMesh.testId.test2", &m0.Test2{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + c2, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &m1.DetailsEnriched{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + + edges := []goka.Edge{ + goka.Input(goka.Stream("externalTopic"), c0, func(ctx goka.Context, m interface{}) { + msg := m.([]byte) + + pc := service.ProcessorContext(ctx.Context(), "details", "enricher", ctx.Key()) + defer pc.Finish() + + v, err := json.Marshal(msg) + if err != nil { + ctx.Fail(err) + } + pc.Input("externalTopic", "someMessage", string(v)) + + w := new_Enricher_ProcessorContext_Impl(ctx, pc) + err = impl.HandleSomeMessage(w, msg) + if err != nil { + ctx.Fail(err) + } + }), + goka.Input(goka.Stream("testMesh.testId.test2"), c1, func(ctx 
goka.Context, m interface{}) { + msg := m.(*m0.Test2) + + pc := service.ProcessorContext(ctx.Context(), "details", "enricher", ctx.Key()) + defer pc.Finish() + + v, err := json.Marshal(msg) + if err != nil { + ctx.Fail(err) + } + pc.Input("testMesh.testId.test2", "testId.test2", string(v)) + + w := new_Enricher_ProcessorContext_Impl(ctx, pc) + err = impl.HandleTestIDTest2(w, msg) + if err != nil { + ctx.Fail(err) + } + }), + goka.Output(goka.Stream("testMesh.testSerial.detailsEnriched"), c2), + } + group := goka.DefineGroup(goka.Group("testMesh.details.enricher"), edges...) + + processor, err := goka.NewProcessor(brokers, + group, + goka.WithConsumerGroupBuilder(goka.ConsumerGroupBuilderWithConfig(config)), + goka.WithStorageBuilder(builder), + goka.WithHasher(kafkautil.MurmurHasher)) + if err != nil { + return nil, errors.Wrap(err, "failed to create goka processor") + } + + return func(ctx context.Context) func() error { + return func() error { + err := processor.Run(ctx) + if err != nil { + return errors.Wrap(err, "failed to run goka processor") + } + + return nil + } + }, nil +} + diff --git a/internal/generator/.snapshots/generator_test-validateService-validateService b/internal/generator/.snapshots/generator_test-validateService-validateService new file mode 100644 index 0000000..05f3a79 --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateService-validateService @@ -0,0 +1,146 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ +package kafmesh + +import ( + "time" + + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + + "test/internal/kafmesh/details" +) + +func Register_Details_Enricher_Processor(service *runner.Service, processor details.Enricher_Processor) error { + r, err := details.Register_Enricher_Processor(service, processor) + if err != nil { + return errors.Wrap(err, "failed to register processor") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_Enricher_Processor(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} + +func Register_Details_Enricher_Processor(service *runner.Service, processor details.Enricher_Processor) error { + r, err := details.Register_Enricher_Processor(service, processor) + if err != nil { + return errors.Wrap(err, "failed to register processor") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_Enricher_Processor(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} + +func New_Details_TestSerialDetails_Source(service *runner.Service) (details.TestSerialDetails_Source, error) { + e, r, err := details.New_TestSerialDetails_Source(service) + if err != nil { + return nil, err + } + + err = service.RegisterRunner(r) + if err != nil { + return nil, errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_TestSerialDetails_Source(service) + if err != nil { + return nil, errors.Wrap(err, "failed to register with discovery") + } + + return e, nil +} + +func New_Details_TestSerialDetailsEnriched_View(service *runner.Service) (details.TestSerialDetailsEnriched_View, error) { + v, r, err := details.New_TestSerialDetailsEnriched_View(service.Options()) + if err != nil { + return 
nil, err + } + + err = service.RegisterRunner(r) + if err != nil { + return nil, errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_TestSerialDetailsEnriched_View(service) + if err != nil { + return nil, errors.Wrap(err, "failed to register with discovery") + } + + return v, nil +} + +func Register_EnrichedDataPostgres_Sink(service *runner.Service, sink details.EnrichedDataPostgres_Sink, interval time.Duration, maxBufferSize int) error { + r, err := details.Register_EnrichedDataPostgres_Sink(service.Options(), sink, interval, maxBufferSize) + if err != nil { + return errors.Wrap(err, "failed to register sink") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_EnrichedDataPostgres_Sink(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} + +func Register_Details_TestToDatabase_ViewSource(service *runner.Service, viewSource details.TestToDatabase_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) error { + r, err := details.Register_TestToDatabase_ViewSource(service.Options(), viewSource, updateInterval, syncTimeout) + if err != nil { + return errors.Wrap(err, "failed to register viewSource") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_TestToDatabase_ViewSource(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} + +func Register_Details_TestToApi_ViewSink(service *runner.Service, viewSink details.TestToApi_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) error { + r, err := details.Register_TestToApi_ViewSink(service.Options(), viewSink, updateInterval, syncTimeout) + if err != nil { + return errors.Wrap(err, "failed to register viewSink") + } + + err = 
service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_Details_TestToApi_ViewSink(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} + diff --git a/internal/generator/.snapshots/generator_test-validateSink-validateSink b/internal/generator/.snapshots/generator_test-validateSink-validateSink new file mode 100644 index 0000000..aebeb5e --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateSink-validateSink @@ -0,0 +1,88 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. + +package details + +import ( + "context" + "time" + + "github.com/lovoo/goka" + "github.com/pkg/errors" + + "github.com/syncromatics/kafmesh/pkg/runner" + + "test/internal/kafmesh/models/testMesh/testSerial" +) + +type EnrichedDataPostgres_Sink interface { + Flush() error + Collect(ctx runner.MessageContext, key string, msg *testSerial.DetailsEnriched) error +} + +type impl_EnrichedDataPostgres_Sink struct { + sink EnrichedDataPostgres_Sink + codec goka.Codec + group string + topic string + maxBufferSize int + interval time.Duration +} + +func (s *impl_EnrichedDataPostgres_Sink) Codec() goka.Codec { + return s.codec +} + +func (s *impl_EnrichedDataPostgres_Sink) Group() string { + return s.group +} + +func (s *impl_EnrichedDataPostgres_Sink) Topic() string { + return s.topic +} + +func (s *impl_EnrichedDataPostgres_Sink) MaxBufferSize() int { + return s.maxBufferSize +} + +func (s *impl_EnrichedDataPostgres_Sink) Interval() time.Duration { + return s.interval +} + +func (s *impl_EnrichedDataPostgres_Sink) Flush() error { + return s.sink.Flush() +} + +func (s *impl_EnrichedDataPostgres_Sink) Collect(ctx runner.MessageContext, key string, msg interface{}) error { + m, ok := msg.(*testSerial.DetailsEnriched) + + if !ok { + return errors.Errorf("expecting message of type '*testSerial.DetailsEnriched' got type '%t'", msg) + } + + return 
s.sink.Collect(ctx, key, m) +} + +func Register_EnrichedDataPostgres_Sink(options runner.ServiceOptions, sink EnrichedDataPostgres_Sink, interval time.Duration, maxBufferSize int) (func(ctx context.Context) func() error, error) { + brokers := options.Brokers + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &testSerial.DetailsEnriched{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + + d := &impl_EnrichedDataPostgres_Sink{ + sink: sink, + codec: codec, + group: "testMesh.details.enricheddatapostgres-sink", + topic: "testMesh.testSerial.detailsEnriched", + maxBufferSize: maxBufferSize, + interval: interval, + } + + s := runner.NewSinkRunner(d, brokers) + + return func(ctx context.Context) func() error { + return s.Run(ctx) + }, nil +} + diff --git a/internal/generator/.snapshots/generator_test-validateTopic-validateTopic b/internal/generator/.snapshots/generator_test-validateTopic-validateTopic new file mode 100644 index 0000000..5e81200 --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateTopic-validateTopic @@ -0,0 +1,74 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ +package kafmesh + +import ( + "context" + "time" + + "github.com/syncromatics/kafmesh/pkg/runner" +) + +var ( + topics = []runner.Topic{ + runner.Topic { + Name: "externalTopic", + Partitions: 0, + Replicas: 0, + Compact: false, + Retention: 0 * time.Millisecond, + Segment: 0 * time.Millisecond, + Create: false, + }, + runner.Topic { + Name: "testMesh.details.enricher-table", + Partitions: 10, + Replicas: 1, + Compact: true, + Retention: 86400000 * time.Millisecond, + Segment: 43200000 * time.Millisecond, + Create: true, + }, + runner.Topic { + Name: "testMesh.testId.test", + Partitions: 10, + Replicas: 1, + Compact: true, + Retention: 86400000 * time.Millisecond, + Segment: 43200000 * time.Millisecond, + Create: true, + }, + runner.Topic { + Name: "testMesh.testId.test2", + Partitions: 0, + Replicas: 0, + Compact: false, + Retention: 0 * time.Millisecond, + Segment: 0 * time.Millisecond, + Create: false, + }, + runner.Topic { + Name: "testMesh.testSerial.details", + Partitions: 10, + Replicas: 1, + Compact: false, + Retention: 86400000 * time.Millisecond, + Segment: 43200000 * time.Millisecond, + Create: true, + }, + runner.Topic { + Name: "testMesh.testSerial.detailsEnriched", + Partitions: 10, + Replicas: 1, + Compact: false, + Retention: 86400000 * time.Millisecond, + Segment: 43200000 * time.Millisecond, + Create: true, + }, + } +) + +func ConfigureTopics(ctx context.Context, brokers []string) error { + return runner.ConfigureTopics(ctx, brokers, topics) +} + diff --git a/internal/generator/.snapshots/generator_test-validateView-validateView b/internal/generator/.snapshots/generator_test-validateView-validateView new file mode 100644 index 0000000..0e71b03 --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateView-validateView @@ -0,0 +1,137 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ +package details + +import ( + "context" + "os" + "path/filepath" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + "test/internal/kafmesh/models/testMesh/testSerial" +) + +type TestSerialDetailsEnriched_View interface { + Keys() []string + Get(key string) (*testSerial.DetailsEnriched, error) +} + +type TestSerialDetailsEnriched_View_impl struct { + context.Context + view *goka.View +} + +func New_TestSerialDetailsEnriched_View(options runner.ServiceOptions) (*TestSerialDetailsEnriched_View_impl, func(context.Context) func() error, error) { + brokers := options.Brokers + var err error + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &testSerial.DetailsEnriched{}) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create codec") + } + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "view", "testMesh.testSerial.detailsEnriched") + + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create view db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + + view, err := goka.NewView(brokers, + goka.Table("testMesh.testSerial.detailsEnriched"), + codec, + goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return nil, nil, errors.Wrap(err, "failed creating view") + } + + viewCtx, viewCancel := context.WithCancel(context.Background()) + v := &TestSerialDetailsEnriched_View_impl{ + viewCtx, + view, + } + + return v, func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := 
errgroup.WithContext(cancelableCtx) + + grp.Go(func() error { + select { + case <-ctx.Done(): + viewCancel() + return nil + } + }) + grp.Go(func() error { + return v.view.Run(ctx) + }) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + +func (v *TestSerialDetailsEnriched_View_impl) Keys() ([]string, error) { + select { + case <-v.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-v.view.WaitRunning(): + } + + it, err := v.view.Iterator() + if err != nil { + return nil, errors.Wrap(err, "failed to get iterator from view") + } + + keys := []string{} + for it.Next() { + keys = append(keys, it.Key()) + } + + return keys, nil +} +func (v *TestSerialDetailsEnriched_View_impl) Get(key string) (*testSerial.DetailsEnriched, error) { + select { + case <-v.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-v.view.WaitRunning(): + } + + m, err := v.view.Get(key) + if err != nil { + return nil, errors.Wrap(err, "failed to get value from view") + } + + if m == nil { + return nil, nil + } + msg, ok := m.(*testSerial.DetailsEnriched) + if !ok { + return nil, errors.Errorf("expecting message of type '*testSerial.DetailsEnriched' got type '%t'", m) + } + + return msg, nil +} + diff --git a/internal/generator/.snapshots/generator_test-validateViewSink-validateViewSink b/internal/generator/.snapshots/generator_test-validateViewSink-validateViewSink new file mode 100644 index 0000000..c72a37d --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateViewSink-validateViewSink @@ -0,0 +1,155 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ +package details + +import ( + "context" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + "test/internal/kafmesh/models/testMesh/testId" +) + +type TestToApi_ViewSink_Context interface { + Context() context.Context + Keys() ([]string, error) + Get(string) (*testId.Test, error) +} + +type TestToApi_ViewSink_Context_impl struct { + context context.Context + view *goka.View +} + +func (c *TestToApi_ViewSink_Context_impl) Context() context.Context { + return c.context +} + +func (c *TestToApi_ViewSink_Context_impl) Keys() ([]string, error) { + select { + case <-c.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-c.view.WaitRunning(): + } + + it, err := c.view.Iterator() + if err != nil { + return nil, errors.Wrap(err, "failed to get iterator") + } + keys := []string{} + for it.Next() { + keys = append(keys, it.Key()) + } + return keys, nil +} + +func (c *TestToApi_ViewSink_Context_impl) Get(key string) (*testId.Test, error) { + m, err := c.view.Get(key) + if err != nil { + return nil, errors.Wrap(err, "failed to get value from view") + } + if m == nil { + return nil, nil + } + msg, ok := m.(*testId.Test) + if !ok { + return nil, errors.Errorf("expecting message of type '*testId.Test' got type '%t'", m) + } + return msg, nil +} + +type TestToApi_ViewSink interface { + Sync(TestToApi_ViewSink_Context) error +} + +func Register_TestToApi_ViewSink(options runner.ServiceOptions, synchronizer TestToApi_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { + brokers := options.Brokers + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("testMesh.testId.test", &testId.Test{}) + if err != nil { + 
return nil, errors.Wrap(err, "failed to create codec") + } + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "viewSink", "testMesh.testId.test") + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, errors.Wrap(err, "failed to create view sink db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + view, err := goka.NewView(brokers, + goka.Table("testMesh.testId.test"), + codec, + goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return nil, errors.Wrap(err, "failed creating view sink view") + } + + return func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + timer := time.NewTimer(0) + grp.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case <-timer.C: + select { + case <-ctx.Done(): + return nil + case <-view.WaitRunning(): + } + + newContext, cancel := context.WithTimeout(ctx, syncTimeout) + c := &TestToApi_ViewSink_Context_impl{ + context: newContext, + view: view, + } + err := synchronizer.Sync(c) + if err != nil { + cancel() + fmt.Printf("sync error '%v'", err) + return err + } + cancel() + timer = time.NewTimer(updateInterval) + } + } + }) + + grp.Go(func() error { + return view.Run(ctx) + }) + + select { + case <- ctx.Done(): + return nil + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + diff --git a/internal/generator/.snapshots/generator_test-validateViewSource-validateViewSource b/internal/generator/.snapshots/generator_test-validateViewSource-validateViewSource new file mode 100644 index 0000000..a45e87c --- /dev/null +++ b/internal/generator/.snapshots/generator_test-validateViewSource-validateViewSource @@ -0,0 +1,140 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ +package details + +import ( + "context" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + "test/internal/kafmesh/models/testMesh/testId" +) + +type TestToDatabase_ViewSource_Context interface { + Context() context.Context + Update(string, *testId.Test) error +} + +type TestToDatabase_ViewSource interface { + Sync(TestToDatabase_ViewSource_Context) error +} + +type contextWrap_TestToDatabase struct { + context context.Context + job *runner.ProtoViewSourceJob +} + +func (c *contextWrap_TestToDatabase) Context() context.Context { + return c.context +} +func (c *contextWrap_TestToDatabase) Update(key string, msg *testId.Test) error { + return c.job.Update(key, msg) +} + +func Register_TestToDatabase_ViewSource(options runner.ServiceOptions, synchronizer TestToDatabase_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { + brokers := options.Brokers + var err error + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("testMesh.testId.test", &testId.Test{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "viewSource", "testMesh.testId.test") + + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, errors.Wrap(err, "failed to create view source db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + view, err := goka.NewView(brokers, + goka.Table("testMesh.testId.test"), + codec, + goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return nil, errors.Wrap(err, "failed creating synchronizer view") + 
} + + e, err := goka.NewEmitter(brokers, + goka.Stream("testMesh.testId.test"), + codec, + goka.WithEmitterHasher(kafkautil.MurmurHasher)) + + if err != nil { + return nil, errors.Wrap(err, "failed creating synchronizer emitter") + } + + emitter := runner.NewEmitter(e) + + return func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + timer := time.NewTimer(0) + grp.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case <-timer.C: + select { + case <-ctx.Done(): + return nil + case <-view.WaitRunning(): + } + + newContext, cancel := context.WithTimeout(ctx, syncTimeout) + c := runner.NewProtoViewSourceJob(newContext, view, emitter) + cw := &contextWrap_TestToDatabase{newContext, c} + err := synchronizer.Sync(cw) + if err != nil { + cancel() + fmt.Printf("sync error '%v'", err) + return err + } + err = c.Finish() + if err != nil { + cancel() + fmt.Printf("sync finish error '%v'", err) + return err + } + cancel() + timer = time.NewTimer(updateInterval) + } + } + }) + + grp.Go(emitter.Watch(ctx)) + grp.Go(func() error { + return view.Run(ctx) + }) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + diff --git a/internal/generator/discoverTemplate.go b/internal/generator/discoverTemplate.go index f2a8947..13d5c0f 100644 --- a/internal/generator/discoverTemplate.go +++ b/internal/generator/discoverTemplate.go @@ -5,6 +5,7 @@ import ( "io" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/syncromatics/kafmesh/pkg/runner" @@ -12,205 +13,7 @@ import ( ) var ( - discoverTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package {{ .Package }} - -import ( - "github.com/syncromatics/kafmesh/pkg/runner" -) - -{{ range .Processors }} -func discover_{{ .MethodName }}(service *runner.Service) error { - processor := runner.ProcessorDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - Name: "{{ .Name }}", - Description: "{{ .Description }}", - GroupName: "{{ .GroupName }}", - Inputs: []runner.InputDiscovery{ -{{- range .Inputs }} - { - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Message }}", - Topic: "{{ .Topic }}", - Type: {{ .Type }}, - }, - }, -{{- end }} - }, - Joins: []runner.JoinDiscovery{ -{{- range .Joins }} - { - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Message }}", - Topic: "{{ .Topic }}", - Type: {{ .Type }}, - }, - }, -{{- end }} - }, - Lookups: []runner.LookupDiscovery{ -{{- range .Lookups }} - { - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Message }}", - Topic: "{{ .Topic }}", - Type: {{ .Type }}, - }, - }, -{{- end }} - }, - Outputs: []runner.OutputDiscovery{ -{{- range .Outputs }} - runner.OutputDiscovery{ - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Message }}", - Topic: "{{ .Topic }}", - Type: {{ .Type }}, - }, - }, -{{- end }} - }, -{{- if .Persistence }} - Persistence: &runner.PersistentDiscovery{ - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Persistence.Message }}", - Topic: "{{ .Persistence.Topic }}", - Type: {{ .Persistence.Type }}, - }, - }, -{{- end }} - } - - return service.RegisterProcessor(processor) -} - -{{- end }} - -{{ range .Sources }} -func discover_{{ .MethodName }}(service *runner.Service) error { - source := runner.SourceDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - 
ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Source.Message }}", - Topic: "{{ .Source.Topic }}", - Type: {{ .Source.Type }}, - }, - } - - return service.RegisterSource(source) -} - -{{- end }} - -{{ range .Sinks }} -func discover_{{ .MethodName }}(service *runner.Service) error { - sink := runner.SinkDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Source.Message }}", - Topic: "{{ .Source.Topic }}", - Type: {{ .Source.Type }}, - }, - Name: "{{ .Name }}", - Description: "{{ .Description }}", - } - - return service.RegisterSink(sink) -} -{{- end }} - -{{ range .Views }} -func discover_{{ .MethodName }}(service *runner.Service) error { - view := runner.ViewDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .TopicDiscovery.Message }}", - Topic: "{{ .TopicDiscovery.Topic }}", - Type: {{ .TopicDiscovery.Type }}, - }, - } - - return service.RegisterView(view) -} -{{- end }} - -{{ range .ViewSinks }} -func discover_{{ .MethodName }}(service *runner.Service) error { - sink := runner.ViewSinkDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - TopicDiscovery: 
runner.TopicDiscovery{ - Message: "{{ .Source.Message }}", - Topic: "{{ .Source.Topic }}", - Type: {{ .Source.Type }}, - }, - Name: "{{ .Name }}", - Description: "{{ .Description }}", - } - - return service.RegisterViewSink(sink) -} -{{- end }} - -{{ range .ViewSources }} -func discover_{{ .MethodName }}(service *runner.Service) error { - source := runner.ViewSourceDiscovery{ - ServiceDiscovery : runner.ServiceDiscovery { - Name: "{{ .Service.Name}}", - Description: "{{ .Service.Description }}", - }, - ComponentDiscovery: runner.ComponentDiscovery{ - Name: "{{ .Component.Name}}", - Description: "{{ .Component.Description }}", - }, - TopicDiscovery: runner.TopicDiscovery{ - Message: "{{ .Source.Message }}", - Topic: "{{ .Source.Topic }}", - Type: {{ .Source.Type }}, - }, - Name: "{{ .Name }}", - Description: "{{ .Description }}", - } - - return service.RegisterViewSource(source) -} -{{- end }} -`)) + discoverTemplate = template.Must(template.New("").Parse(templates.Discover)) ) type serviceDiscoveryOptions struct { @@ -511,6 +314,8 @@ func getDiscoveryTopicType(service *models.Service, t *string) (runner.MessageTy switch messageType { case "protobuf": return runner.MessageTypeProtobuf, nil + case "raw": + return runner.MessageTypeRaw, nil default: return -1, errors.Errorf("unknown message type '%s'", messageType) diff --git a/internal/generator/gen_test.go b/internal/generator/gen_test.go index b7ec328..373c18e 100644 --- a/internal/generator/gen_test.go +++ b/internal/generator/gen_test.go @@ -80,6 +80,8 @@ message DetailsEnriched { string name = 1; }`), os.ModePerm) + rawType := "raw" + externalTopicName := "externalTopic" newPath := path.Join(tmpDir, "defin") options := generator.Options{ Service: &models.Service{ @@ -105,39 +107,39 @@ message DetailsEnriched { RootPath: newPath, DefinitionsPath: newPath, Components: []*models.Component{ - &models.Component{ + { Name: "details", Processors: []models.Processor{ - models.Processor{ + { Name: "enricher", Inputs: 
[]models.Input{ - models.Input{ + { TopicDefinition: models.TopicDefinition{ Message: "testId.test", }, }, - models.Input{ + { TopicDefinition: models.TopicDefinition{ Message: "testId.test2", }, }, }, Lookups: []models.Lookup{ - models.Lookup{ + { TopicDefinition: models.TopicDefinition{ Message: "testSerial.details", }, }, }, Joins: []models.Join{ - models.Join{ + { TopicDefinition: models.TopicDefinition{ Message: "testSerial.details", }, }, }, Outputs: []models.Output{ - models.Output{ + { TopicDefinition: models.TopicDefinition{ Message: "testSerial.detailsEnriched", }, @@ -149,16 +151,40 @@ message DetailsEnriched { }, }, }, + { + Name: "enricher", + Inputs: []models.Input{ + { + TopicDefinition: models.TopicDefinition{ + Message: "someMessage", + Type: &rawType, + Topic: &externalTopicName, + }, + }, + { + TopicDefinition: models.TopicDefinition{ + Message: "testId.test2", + }, + }, + }, + Outputs: []models.Output{ + { + TopicDefinition: models.TopicDefinition{ + Message: "testSerial.detailsEnriched", + }, + }, + }, + }, }, Sources: []models.Source{ - models.Source{ + { TopicDefinition: models.TopicDefinition{ Message: "testSerial.details", }, }, }, Sinks: []models.Sink{ - models.Sink{ + { Name: "Enriched Data Postgres", TopicDefinition: models.TopicDefinition{ Message: "testSerial.detailsEnriched", @@ -166,14 +192,14 @@ message DetailsEnriched { }, }, Views: []models.View{ - models.View{ + { TopicDefinition: models.TopicDefinition{ Message: "testSerial.detailsEnriched", }, }, }, ViewSources: []models.ViewSource{ - models.ViewSource{ + { Name: "test to database", TopicDefinition: models.TopicDefinition{ Message: "testId.test", @@ -181,7 +207,7 @@ message DetailsEnriched { }, }, ViewSinks: []models.ViewSink{ - models.ViewSink{ + { Name: "test to api", TopicDefinition: models.TopicDefinition{ Message: "testId.test", diff --git a/internal/generator/processorTemplate.go b/internal/generator/processorTemplate.go index b986e27..b82fe95 100644 --- 
a/internal/generator/processorTemplate.go +++ b/internal/generator/processorTemplate.go @@ -7,6 +7,7 @@ import ( "strings" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/iancoleman/strcase" @@ -14,227 +15,17 @@ import ( ) var ( - processorTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. - -package {{ .Package }} - -import ( - "context" - "encoding/json" - "os" - "path/filepath" - "time" - - "github.com/Shopify/sarama" - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syndtr/goleveldb/leveldb/opt" - - "github.com/syncromatics/kafmesh/pkg/runner" -{{ range .Imports }} - {{ . }} -{{- end }} -) - -{{ with .Context -}} -type {{ .Name }}_ProcessorContext interface { - Key() string - Timestamp() time.Time - {{- range .Methods }} - {{.Name}}({{ .Args }} -{{- end}} -} -{{- end }} - -{{ with .Interface -}} -type {{ .Name }}_Processor interface { - {{- range .Methods }} - {{.Name}}({{ .Args }}) error -{{- end}} -} -{{- end}} -{{ $impl := "" }} -{{ with .Context -}} -type {{ .Name }}_ProcessorContext_Impl struct { - ctx goka.Context - processorContext *runner.ProcessorContext -} - -func new_{{ .Name }}_ProcessorContext_Impl(ctx goka.Context, pc *runner.ProcessorContext) *{{ .Name }}_ProcessorContext_Impl { - return &{{ .Name }}_ProcessorContext_Impl{ctx, pc} -} -{{$c := .Name}} -func (c *{{$c}}_ProcessorContext_Impl) Key() string { - return c.ctx.Key() -} - -func (c *{{$c}}_ProcessorContext_Impl) Timestamp() time.Time { - return c.ctx.Timestamp() -} -{{ range .Methods }} -func (c *{{$c}}_ProcessorContext_Impl) {{.Name}}({{ .Args }} { -{{- $t := . 
-}} -{{- with (eq .Type "lookup" ) }} - v := c.ctx.Lookup("{{- $t.Topic -}}", key) - if v == nil { - c.processorContext.Lookup("{{$t.Topic}}", "{{$t.MessageTypeName}}", key, "") - return nil - } - - m := v.(*{{- $t.MessageType -}}) - value, _ := json.Marshal(m) - c.processorContext.Lookup("{{ $t.Topic }}", "{{$t.MessageTypeName}}", key, string(value)) - - return m -{{- end -}} -{{- with (eq .Type "join" ) }} - v := c.ctx.Join("{{- $t.Topic -}}") - if v == nil { - c.processorContext.Join("{{$t.Topic}}", "{{$t.MessageTypeName}}", "") - return nil - } - - m := v.(*{{- $t.MessageType -}}) - value, _ := json.Marshal(m) - c.processorContext.Join("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) - - return m -{{- end -}} -{{- with (eq .Type "output" ) }} - value, _ := json.Marshal(message) - c.processorContext.Output("{{ $t.Topic }}", "{{$t.MessageTypeName}}", key, string(value)) - c.ctx.Emit("{{- $t.Topic -}}", key, message) -{{- end -}} -{{- with (eq .Type "save") }} - value, _ := json.Marshal(state) - c.processorContext.SetState("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) - - c.ctx.SetValue(state) -{{- end -}} -{{- with (eq .Type "state") }} - v := c.ctx.Value() - var m *{{- $t.MessageType }} - if v == nil { - m = &{{- $t.MessageType -}}{} - } else { - m = v.(*{{- $t.MessageType -}}) - } - - value, _ := json.Marshal(m) - c.processorContext.GetState("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) - - return m -{{- end }} -} -{{ end}} -{{- end}} -{{ $c := .Context -}} -{{- $componentName := .Component -}} -{{- $processorName := .ProcessorName -}} -{{ with .Interface -}} -func Register_{{ .Name }}_Processor(service *runner.Service, impl {{ .Name }}_Processor) (func(context.Context) func() error, error) { -{{- end }} - options := service.Options() - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - config := sarama.NewConfig() - config.Version = sarama.MaxVersion - config.Consumer.Offsets.Initial = sarama.OffsetOldest 
- config.Consumer.Offsets.AutoCommit.Enable = true - config.Consumer.Offsets.CommitInterval = 1 * time.Second - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "processor", "{{ .Group }}") - - err := os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create processor db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - -{{ range .Codecs }} - c{{ .Index }}, err := protoWrapper.Codec("{{ .Topic }}", &{{ .Message }}{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } -{{ end }} - edges := []goka.Edge{ -{{- range .Edges -}} -{{ $e := . }} -{{- with (eq .Type "input" ) }} - goka.Input(goka.Stream("{{ $e.Topic }}"), c{{ $e.Codec }}, func(ctx goka.Context, m interface{}) { - msg := m.(*{{ $e.Message }}) - - pc := service.ProcessorContext(ctx.Context(), "{{$componentName}}", "{{$processorName}}", ctx.Key()) - defer pc.Finish() - - v, err := json.Marshal(msg) - if err != nil { - ctx.Fail(err) - } - pc.Input("{{ $e.Topic }}", "{{ $e.MessageType}}", string(v)) - - w := new_{{ $c.Name }}_ProcessorContext_Impl(ctx, pc) - err = impl.{{ $e.Func }}(w, msg) - if err != nil { - ctx.Fail(err) - } - }), -{{- end -}} -{{- with (eq .Type "lookup" ) }} - goka.Lookup(goka.Table("{{ $e.Topic }}"), c{{ $e.Codec }}), -{{- end -}} -{{- with (eq .Type "join" ) }} - goka.Join(goka.Table("{{ $e.Topic }}"), c{{ $e.Codec }}), -{{- end -}} -{{- with (eq .Type "output" ) }} - goka.Output(goka.Stream("{{ $e.Topic }}"), c{{ $e.Codec }}), -{{- end -}} -{{- with (eq .Type "state" ) }} - goka.Persist(c{{ $e.Codec }}), -{{- end -}} -{{ end }} - } - group := goka.DefineGroup(goka.Group("{{ .Group }}"), edges...) 
- - processor, err := goka.NewProcessor(brokers, - group, - goka.WithConsumerGroupBuilder(goka.ConsumerGroupBuilderWithConfig(config)), - goka.WithStorageBuilder(builder), - goka.WithHasher(kafkautil.MurmurHasher)) - if err != nil { - return nil, errors.Wrap(err, "failed to create goka processor") - } - - return func(ctx context.Context) func() error { - return func() error { - err := processor.Run(ctx) - if err != nil { - return errors.Wrap(err, "failed to run goka processor") - } - - return nil - } - }, nil -} -`)) + processorTemplate = template.Must(template.New("").Parse(templates.Processor)) ) type edge struct { - Type string - Topic string - Message string - MessageType string - Codec int - Func string + Type string + Topic string + Message string + MessageType string + Codec int + Func string + RequiresPointer bool } type processorInterface struct { @@ -253,6 +44,7 @@ type contextMethod struct { Topic string MessageType string MessageTypeName string + RequiresPointer bool } type processorContext struct { @@ -264,6 +56,7 @@ type codec struct { Index int Message string Topic string + Type string } type processorOptions struct { @@ -315,6 +108,8 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m Methods: []interfaceMethod{}, } + addGokaCodecs := false + for _, input := range processor.Inputs { var name strings.Builder name.WriteString("Handle") @@ -323,26 +118,46 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m name.WriteString(strcase.ToCamel(f)) } - var mPkg strings.Builder - for _, p := range nameFrags[:len(nameFrags)-1] { - mPkg.WriteString("/") - mPkg.WriteString(p) - } - - modulePackage := input.ToPackage(service) - - i, ok := imports[modulePackage] - if !ok { - imports[modulePackage] = importIndex - i = importIndex - - importIndex++ + topicType := "protobuf" + if input.TopicDefinition.Type != nil { + switch *input.TopicDefinition.Type { + case "raw": + topicType = "raw" + } } var args 
strings.Builder args.WriteString(fmt.Sprintf("ctx %s_ProcessorContext", options.Context.Name)) message := nameFrags[len(nameFrags)-1] - args.WriteString(fmt.Sprintf(", message *m%d.%s", i, strcase.ToCamel(message))) + + fullMessageTypeName := "" + requiresPointer := false + switch topicType { + case "protobuf": + var mPkg strings.Builder + for _, p := range nameFrags[:len(nameFrags)-1] { + mPkg.WriteString("/") + mPkg.WriteString(p) + } + + modulePackage := input.ToPackage(service) + + i, ok := imports[modulePackage] + if !ok { + imports[modulePackage] = importIndex + i = importIndex + + importIndex++ + } + + fullMessageTypeName = fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)) + args.WriteString(fmt.Sprintf(", message *%s", fullMessageTypeName)) + requiresPointer = true + case "raw": + addGokaCodecs = true + fullMessageTypeName = "[]byte" + args.WriteString(", message []byte") + } method := interfaceMethod{ Name: fmt.Sprintf("Handle%s", input.ToSafeMessageTypeName()), @@ -356,19 +171,21 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m c = codec{ Index: codecIndex, Topic: topic, - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + Message: fullMessageTypeName, + Type: topicType, } codecs[topic] = c codecIndex++ } options.Edges = append(options.Edges, edge{ - Type: "input", - Topic: topic, - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), - MessageType: input.Message, - Codec: c.Index, - Func: method.Name, + Type: "input", + Topic: topic, + Message: fullMessageTypeName, + MessageType: input.Message, + Codec: c.Index, + Func: method.Name, + RequiresPointer: requiresPointer, }) } options.Interface = intr @@ -382,25 +199,45 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m name.WriteString(strcase.ToCamel(f)) } - var mPkg strings.Builder - for _, p := range nameFrags[:len(nameFrags)-1] { - mPkg.WriteString("/") - mPkg.WriteString(p) + topicType := "protobuf" + if 
lookup.TopicDefinition.Type != nil { + switch *lookup.TopicDefinition.Type { + case "raw": + topicType = "raw" + } } - modulePackage := lookup.ToPackage(service) + var args strings.Builder + message := nameFrags[len(nameFrags)-1] - i, ok := imports[modulePackage] - if !ok { - imports[modulePackage] = importIndex - i = importIndex + fullMessageTypeName := "" + requiresPointer := false + switch topicType { + case "protobuf": + requiresPointer = true - importIndex++ - } + var mPkg strings.Builder + for _, p := range nameFrags[:len(nameFrags)-1] { + mPkg.WriteString("/") + mPkg.WriteString(p) + } - var args strings.Builder - message := nameFrags[len(nameFrags)-1] - args.WriteString(fmt.Sprintf("key string) *m%d.%s", i, strcase.ToCamel(message))) + modulePackage := lookup.ToPackage(service) + + i, ok := imports[modulePackage] + if !ok { + imports[modulePackage] = importIndex + i = importIndex + + importIndex++ + } + fullMessageTypeName = fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)) + args.WriteString(fmt.Sprintf("key string) *%s", fullMessageTypeName)) + case "raw": + addGokaCodecs = true + fullMessageTypeName = "[]byte" + args.WriteString("key string) []byte") + } m := contextMethod{ interfaceMethod: interfaceMethod{ @@ -408,8 +245,9 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m Args: args.String(), }, Type: "lookup", - MessageType: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + MessageType: fullMessageTypeName, MessageTypeName: lookup.Message, + RequiresPointer: requiresPointer, } m.Topic = lookup.ToTopicName(service) @@ -421,16 +259,18 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m c = codec{ Index: codecIndex, Topic: m.Topic, - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + Message: fullMessageTypeName, + Type: topicType, } codecs[m.Topic] = c codecIndex++ } options.Edges = append(options.Edges, edge{ - Type: "lookup", - Codec: c.Index, - Topic: m.Topic, + Type: 
"lookup", + Codec: c.Index, + Topic: m.Topic, + RequiresPointer: requiresPointer, }) } @@ -442,25 +282,45 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m name.WriteString(strcase.ToCamel(f)) } - var mPkg strings.Builder - for _, p := range nameFrags[:len(nameFrags)-1] { - mPkg.WriteString("/") - mPkg.WriteString(p) + topicType := "protobuf" + if join.TopicDefinition.Type != nil { + switch *join.TopicDefinition.Type { + case "raw": + topicType = "raw" + } } - modulePackage := join.ToPackage(service) + var args strings.Builder + message := nameFrags[len(nameFrags)-1] - i, ok := imports[modulePackage] - if !ok { - imports[modulePackage] = importIndex - i = importIndex + fullMessageTypeName := "" + requiresPointer := false + switch topicType { + case "protobuf": + requiresPointer = true - importIndex++ - } + var mPkg strings.Builder + for _, p := range nameFrags[:len(nameFrags)-1] { + mPkg.WriteString("/") + mPkg.WriteString(p) + } - var args strings.Builder - message := nameFrags[len(nameFrags)-1] - args.WriteString(fmt.Sprintf(") *m%d.%s", i, strcase.ToCamel(message))) + modulePackage := join.ToPackage(service) + + i, ok := imports[modulePackage] + if !ok { + imports[modulePackage] = importIndex + i = importIndex + + importIndex++ + } + fullMessageTypeName = fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)) + args.WriteString(fmt.Sprintf(") *%s", fullMessageTypeName)) + case "raw": + addGokaCodecs = true + fullMessageTypeName = "[]byte" + args.WriteString(") []byte") + } m := contextMethod{ interfaceMethod: interfaceMethod{ @@ -468,8 +328,9 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m Args: args.String(), }, Type: "join", - MessageType: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + MessageType: fullMessageTypeName, MessageTypeName: join.Message, + RequiresPointer: requiresPointer, } m.Topic = join.ToTopicName(service) @@ -481,16 +342,18 @@ func buildProcessorOptions(pkg string, mod 
string, modelsPath string, service *m c = codec{ Index: codecIndex, Topic: m.Topic, - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + Message: fullMessageTypeName, + Type: topicType, } codecs[m.Topic] = c codecIndex++ } options.Edges = append(options.Edges, edge{ - Type: "join", - Codec: c.Index, - Topic: m.Topic, + Type: "join", + Codec: c.Index, + Topic: m.Topic, + RequiresPointer: requiresPointer, }) } @@ -502,25 +365,44 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m name.WriteString(strcase.ToCamel(f)) } - var mPkg strings.Builder - for _, p := range nameFrags[:len(nameFrags)-1] { - mPkg.WriteString("/") - mPkg.WriteString(p) + topicType := "protobuf" + if output.TopicDefinition.Type != nil { + switch *output.TopicDefinition.Type { + case "raw": + topicType = "raw" + } } - modulePackage := output.ToPackage(service) + var args strings.Builder + message := nameFrags[len(nameFrags)-1] - i, ok := imports[modulePackage] - if !ok { - imports[modulePackage] = importIndex - i = importIndex + fullMessageTypeName := "" + requiresPointer := false + switch topicType { + case "protobuf": + requiresPointer = true + var mPkg strings.Builder + for _, p := range nameFrags[:len(nameFrags)-1] { + mPkg.WriteString("/") + mPkg.WriteString(p) + } - importIndex++ - } + modulePackage := output.ToPackage(service) - var args strings.Builder - message := nameFrags[len(nameFrags)-1] - args.WriteString(fmt.Sprintf("key string, message *m%d.%s)", i, strcase.ToCamel(message))) + i, ok := imports[modulePackage] + if !ok { + imports[modulePackage] = importIndex + i = importIndex + + importIndex++ + } + fullMessageTypeName = fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)) + args.WriteString(fmt.Sprintf("key string, message *%s)", fullMessageTypeName)) + case "raw": + addGokaCodecs = true + fullMessageTypeName = "[]byte" + args.WriteString("key string, message []byte)") + } m := contextMethod{ interfaceMethod: interfaceMethod{ @@ -530,6 
+412,7 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m Type: "output", MessageTypeName: output.Message, Topic: output.ToTopicName(service), + RequiresPointer: requiresPointer, } m.Topic = output.ToTopicName(service) @@ -541,43 +424,67 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m c = codec{ Index: codecIndex, Topic: m.Topic, - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + Message: fullMessageTypeName, + Type: topicType, } codecs[m.Topic] = c codecIndex++ } options.Edges = append(options.Edges, edge{ - Type: "output", - Codec: c.Index, - Topic: m.Topic, + Type: "output", + Codec: c.Index, + Topic: m.Topic, + RequiresPointer: requiresPointer, }) } if processor.Persistence != nil { nameFrags := strings.Split(processor.Persistence.Message, ".") - var mPkg strings.Builder - for _, p := range nameFrags[:len(nameFrags)-1] { - mPkg.WriteString("/") - mPkg.WriteString(p) + topicType := "protobuf" + if processor.Persistence.TopicDefinition.Type != nil { + switch *processor.Persistence.TopicDefinition.Type { + case "raw": + topicType = "raw" + } } - modulePackage := processor.Persistence.ToPackage(service) + fullMessageTypeName := "" + args := "" + argsReturn := "" + switch topicType { + case "protobuf": - i, ok := imports[modulePackage] - if !ok { - imports[modulePackage] = importIndex - i = importIndex + var mPkg strings.Builder + for _, p := range nameFrags[:len(nameFrags)-1] { + mPkg.WriteString("/") + mPkg.WriteString(p) + } + + modulePackage := processor.Persistence.ToPackage(service) - importIndex++ + i, ok := imports[modulePackage] + if !ok { + imports[modulePackage] = importIndex + i = importIndex + + importIndex++ + } + fullMessageTypeName = fmt.Sprintf("m%d.%s", i, nameFrags[len(nameFrags)-1]) + args = fmt.Sprintf("state *%s)", fullMessageTypeName) + argsReturn = fmt.Sprintf(") *%s", fullMessageTypeName) + + case "raw": + fullMessageTypeName = "[]byte" + args = 
fmt.Sprintf("state %s)", fullMessageTypeName) + argsReturn = ") []byte" } - message := nameFrags[len(nameFrags)-1] options.Context.Methods = append(options.Context.Methods, contextMethod{ interfaceMethod: interfaceMethod{ Name: "SaveState", - Args: fmt.Sprintf("state *m%d.%s)", i, strcase.ToCamel(message)), + Args: args, }, Type: "save", MessageTypeName: processor.Persistence.Message, @@ -587,10 +494,10 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m options.Context.Methods = append(options.Context.Methods, contextMethod{ interfaceMethod: interfaceMethod{ Name: "State", - Args: fmt.Sprintf(") *m%d.%s", i, strcase.ToCamel(message)), + Args: argsReturn, }, Type: "state", - MessageType: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + MessageType: fullMessageTypeName, MessageTypeName: processor.Persistence.Message, Topic: options.Group + "-table", }) @@ -600,7 +507,8 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m c = codec{ Index: codecIndex, Topic: options.Group + "-table", - Message: fmt.Sprintf("m%d.%s", i, strcase.ToCamel(message)), + Message: fullMessageTypeName, + Type: topicType, } codecs[options.Group+"-table"] = c codecIndex++ @@ -625,6 +533,10 @@ func buildProcessorOptions(pkg string, mod string, modelsPath string, service *m options.Imports = append(options.Imports, imp) } + if addGokaCodecs { + options.Imports = append(options.Imports, "gokaCodecs \"github.com/lovoo/goka/codec\"") + } + sort.Strings(options.Imports) options.Processor = processor diff --git a/internal/generator/processorTemplate_test.go b/internal/generator/processorTemplate_test.go index a6368b9..8b7c856 100644 --- a/internal/generator/processorTemplate_test.go +++ b/internal/generator/processorTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateProcessors(tmpDir string, t *testing.T) { @@ -14,236 +14,8 @@ func 
validateProcessors(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedDetailsProcessor, string(s)) -} - -var ( - expectedDetailsProcessor = `// Code generated by kafmesh-gen. DO NOT EDIT. - -package details - -import ( - "context" - "encoding/json" - "os" - "path/filepath" - "time" - - "github.com/Shopify/sarama" - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syndtr/goleveldb/leveldb/opt" - - "github.com/syncromatics/kafmesh/pkg/runner" - - m0 "test/internal/kafmesh/models/testMesh/testId" - m1 "test/internal/kafmesh/models/testMesh/testSerial" -) - -type Enricher_ProcessorContext interface { - Key() string - Timestamp() time.Time - Lookup_TestSerialDetails(key string) *m1.Details - Join_TestSerialDetails() *m1.Details - Output_TestSerialDetailsEnriched(key string, message *m1.DetailsEnriched) - SaveState(state *m1.DetailsState) - State() *m1.DetailsState -} - -type Enricher_Processor interface { - HandleTestIDTest(ctx Enricher_ProcessorContext, message *m0.Test) error - HandleTestIDTest2(ctx Enricher_ProcessorContext, message *m0.Test2) error -} - -type Enricher_ProcessorContext_Impl struct { - ctx goka.Context - processorContext *runner.ProcessorContext -} - -func new_Enricher_ProcessorContext_Impl(ctx goka.Context, pc *runner.ProcessorContext) *Enricher_ProcessorContext_Impl { - return &Enricher_ProcessorContext_Impl{ctx, pc} -} - -func (c *Enricher_ProcessorContext_Impl) Key() string { - return c.ctx.Key() -} - -func (c *Enricher_ProcessorContext_Impl) Timestamp() time.Time { - return c.ctx.Timestamp() -} - -func (c *Enricher_ProcessorContext_Impl) Lookup_TestSerialDetails(key string) *m1.Details { - v := c.ctx.Lookup("testMesh.testSerial.details", key) - if v == nil { - c.processorContext.Lookup("testMesh.testSerial.details", "testSerial.details", key, "") - return nil - } - - m := v.(*m1.Details) - value, _ := json.Marshal(m) - 
c.processorContext.Lookup("testMesh.testSerial.details", "testSerial.details", key, string(value)) - - return m -} - -func (c *Enricher_ProcessorContext_Impl) Join_TestSerialDetails() *m1.Details { - v := c.ctx.Join("testMesh.testSerial.details") - if v == nil { - c.processorContext.Join("testMesh.testSerial.details", "testSerial.details", "") - return nil - } - - m := v.(*m1.Details) - value, _ := json.Marshal(m) - c.processorContext.Join("testMesh.testSerial.details", "testSerial.details", string(value)) - - return m -} - -func (c *Enricher_ProcessorContext_Impl) Output_TestSerialDetailsEnriched(key string, message *m1.DetailsEnriched) { - value, _ := json.Marshal(message) - c.processorContext.Output("testMesh.testSerial.detailsEnriched", "testSerial.detailsEnriched", key, string(value)) - c.ctx.Emit("testMesh.testSerial.detailsEnriched", key, message) -} - -func (c *Enricher_ProcessorContext_Impl) SaveState(state *m1.DetailsState) { - value, _ := json.Marshal(state) - c.processorContext.SetState("testMesh.details.enricher-table", "testSerial.detailsState", string(value)) - - c.ctx.SetValue(state) -} - -func (c *Enricher_ProcessorContext_Impl) State() *m1.DetailsState { - v := c.ctx.Value() - var m *m1.DetailsState - if v == nil { - m = &m1.DetailsState{} - } else { - m = v.(*m1.DetailsState) - } - - value, _ := json.Marshal(m) - c.processorContext.GetState("testMesh.details.enricher-table", "testSerial.detailsState", string(value)) - - return m -} - -func Register_Enricher_Processor(service *runner.Service, impl Enricher_Processor) (func(context.Context) func() error, error) { - options := service.Options() - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - config := sarama.NewConfig() - config.Version = sarama.MaxVersion - config.Consumer.Offsets.Initial = sarama.OffsetOldest - config.Consumer.Offsets.AutoCommit.Enable = true - config.Consumer.Offsets.CommitInterval = 1 * time.Second - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 
1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "processor", "testMesh.details.enricher") - - err := os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create processor db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - - - c0, err := protoWrapper.Codec("testMesh.testId.test", &m0.Test{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - c1, err := protoWrapper.Codec("testMesh.testId.test2", &m0.Test2{}) + err = cupaloy.SnapshotMulti("validateProcessors", s) if err != nil { - return nil, errors.Wrap(err, "failed to create codec") + t.Fatalf("error: %s", err) } - - c2, err := protoWrapper.Codec("testMesh.testSerial.details", &m1.Details{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - c3, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &m1.DetailsEnriched{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - c4, err := protoWrapper.Codec("testMesh.details.enricher-table", &m1.DetailsState{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - edges := []goka.Edge{ - goka.Input(goka.Stream("testMesh.testId.test"), c0, func(ctx goka.Context, m interface{}) { - msg := m.(*m0.Test) - - pc := service.ProcessorContext(ctx.Context(), "details", "enricher", ctx.Key()) - defer pc.Finish() - - v, err := json.Marshal(msg) - if err != nil { - ctx.Fail(err) - } - pc.Input("testMesh.testId.test", "testId.test", string(v)) - - w := new_Enricher_ProcessorContext_Impl(ctx, pc) - err = impl.HandleTestIDTest(w, msg) - if err != nil { - ctx.Fail(err) - } - }), - goka.Input(goka.Stream("testMesh.testId.test2"), c1, func(ctx goka.Context, m interface{}) { - msg := m.(*m0.Test2) - - pc := service.ProcessorContext(ctx.Context(), "details", "enricher", ctx.Key()) - defer pc.Finish() - - v, err := json.Marshal(msg) - if err != nil { - ctx.Fail(err) - } 
- pc.Input("testMesh.testId.test2", "testId.test2", string(v)) - - w := new_Enricher_ProcessorContext_Impl(ctx, pc) - err = impl.HandleTestIDTest2(w, msg) - if err != nil { - ctx.Fail(err) - } - }), - goka.Lookup(goka.Table("testMesh.testSerial.details"), c2), - goka.Join(goka.Table("testMesh.testSerial.details"), c2), - goka.Output(goka.Stream("testMesh.testSerial.detailsEnriched"), c3), - goka.Persist(c4), - } - group := goka.DefineGroup(goka.Group("testMesh.details.enricher"), edges...) - - processor, err := goka.NewProcessor(brokers, - group, - goka.WithConsumerGroupBuilder(goka.ConsumerGroupBuilderWithConfig(config)), - goka.WithStorageBuilder(builder), - goka.WithHasher(kafkautil.MurmurHasher)) - if err != nil { - return nil, errors.Wrap(err, "failed to create goka processor") - } - - return func(ctx context.Context) func() error { - return func() error { - err := processor.Run(ctx) - if err != nil { - return errors.Wrap(err, "failed to run goka processor") - } - - return nil - } - }, nil } -` -) diff --git a/internal/generator/serviceTemplate.go b/internal/generator/serviceTemplate.go index d89cc38..f4b7d81 100644 --- a/internal/generator/serviceTemplate.go +++ b/internal/generator/serviceTemplate.go @@ -6,153 +6,14 @@ import ( "path" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/pkg/errors" ) var ( - serviceTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. - -package {{ .Package }} - -import ( -{{- $sinkLength := len .Sinks -}} {{- $syncLength := len .ViewSources -}} {{- if or (ne $sinkLength 0) (ne $syncLength 0) }} - "time" -{{- end }} - - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" -{{ range .Imports }} - {{ printf "%q" . 
}} -{{- end }} -) -{{ range .Processors }} -func Register_{{ .ExportName }}(service *runner.Service, processor {{ .Package }}.{{ .Name }}) error { - r, err := {{ .Package }}.Register_{{ .Name }}(service, processor) - if err != nil { - return errors.Wrap(err, "failed to register processor") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} -{{ end -}} - -{{ range .Sources }} -func New_{{ .ExportName }}_Source(service *runner.Service) ({{ .Package }}.{{ .Name }}_Source, error) { - e, r, err := {{ .Package }}.New_{{ .Name }}_Source(service) - if err != nil { - return nil, err - } - - err = service.RegisterRunner(r) - if err != nil { - return nil, errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}_Source(service) - if err != nil { - return nil, errors.Wrap(err, "failed to register with discovery") - } - - return e, nil -} -{{ end -}} - -{{ range .Views }} -func New_{{ .ExportName }}_View(service *runner.Service) ({{ .Package }}.{{ .Name }}_View, error) { - v, r, err := {{ .Package }}.New_{{ .Name }}_View(service.Options()) - if err != nil { - return nil, err - } - - err = service.RegisterRunner(r) - if err != nil { - return nil, errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}_View(service) - if err != nil { - return nil, errors.Wrap(err, "failed to register with discovery") - } - - return v, nil -} -{{ end -}} - -{{ range .Sinks }} -func Register_{{ .Name }}_Sink(service *runner.Service, sink {{ .Package }}.{{ .Name }}_Sink, interval time.Duration, maxBufferSize int) error { - r, err := {{ .Package }}.Register_{{ .Name }}_Sink(service.Options(), sink, interval, maxBufferSize) - if err != nil { - return errors.Wrap(err, "failed to register sink") 
- } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}_Sink(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} -{{ end -}} - -{{ range .ViewSources }} -func Register_{{ .ExportName }}_ViewSource(service *runner.Service, viewSource {{ .Package }}.{{ .Name }}_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) error { - r, err := {{ .Package }}.Register_{{ .Name }}_ViewSource(service.Options(), viewSource, updateInterval, syncTimeout) - if err != nil { - return errors.Wrap(err, "failed to register viewSource") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}_ViewSource(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} -{{ end -}} - -{{ range .ViewSinks }} -func Register_{{ .ExportName }}_ViewSink(service *runner.Service, viewSink {{ .Package }}.{{ .Name }}_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) error { - r, err := {{ .Package }}.Register_{{ .Name }}_ViewSink(service.Options(), viewSink, updateInterval, syncTimeout) - if err != nil { - return errors.Wrap(err, "failed to register viewSink") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_{{ .ExportName }}_ViewSink(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} -{{ end -}} -`)) + serviceTemplate = template.Must(template.New("").Parse(templates.Service)) ) type serviceProcessor struct { diff --git a/internal/generator/serviceTemplate_test.go b/internal/generator/serviceTemplate_test.go index c624315..b084e0d 100644 --- a/internal/generator/serviceTemplate_test.go +++ 
b/internal/generator/serviceTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateService(tmpDir string, t *testing.T) { @@ -14,135 +14,8 @@ func validateService(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedService, string(s)) -} - -var ( - expectedService = `// Code generated by kafmesh-gen. DO NOT EDIT. - -package kafmesh - -import ( - "time" - - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - - "test/internal/kafmesh/details" -) - -func Register_Details_Enricher_Processor(service *runner.Service, processor details.Enricher_Processor) error { - r, err := details.Register_Enricher_Processor(service, processor) - if err != nil { - return errors.Wrap(err, "failed to register processor") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_Details_Enricher_Processor(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} - -func New_Details_TestSerialDetails_Source(service *runner.Service) (details.TestSerialDetails_Source, error) { - e, r, err := details.New_TestSerialDetails_Source(service) - if err != nil { - return nil, err - } - - err = service.RegisterRunner(r) - if err != nil { - return nil, errors.Wrap(err, "failed to register runner with service") - } - - err = discover_Details_TestSerialDetails_Source(service) - if err != nil { - return nil, errors.Wrap(err, "failed to register with discovery") - } - - return e, nil -} - -func New_Details_TestSerialDetailsEnriched_View(service *runner.Service) (details.TestSerialDetailsEnriched_View, error) { - v, r, err := details.New_TestSerialDetailsEnriched_View(service.Options()) - if err != nil { - return nil, err - } - - err = service.RegisterRunner(r) - if err != nil { - return nil, errors.Wrap(err, "failed to register 
runner with service") - } - - err = discover_Details_TestSerialDetailsEnriched_View(service) - if err != nil { - return nil, errors.Wrap(err, "failed to register with discovery") - } - - return v, nil -} - -func Register_EnrichedDataPostgres_Sink(service *runner.Service, sink details.EnrichedDataPostgres_Sink, interval time.Duration, maxBufferSize int) error { - r, err := details.Register_EnrichedDataPostgres_Sink(service.Options(), sink, interval, maxBufferSize) - if err != nil { - return errors.Wrap(err, "failed to register sink") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_Details_EnrichedDataPostgres_Sink(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} - -func Register_Details_TestToDatabase_ViewSource(service *runner.Service, viewSource details.TestToDatabase_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) error { - r, err := details.Register_TestToDatabase_ViewSource(service.Options(), viewSource, updateInterval, syncTimeout) - if err != nil { - return errors.Wrap(err, "failed to register viewSource") - } - - err = service.RegisterRunner(r) - if err != nil { - return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_Details_TestToDatabase_ViewSource(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil -} - -func Register_Details_TestToApi_ViewSink(service *runner.Service, viewSink details.TestToApi_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) error { - r, err := details.Register_TestToApi_ViewSink(service.Options(), viewSink, updateInterval, syncTimeout) + err = cupaloy.SnapshotMulti("validateService", s) if err != nil { - return errors.Wrap(err, "failed to register viewSink") + t.Fatalf("error: %s", err) } - - err = service.RegisterRunner(r) - if err != nil { - 
return errors.Wrap(err, "failed to register runner with service") - } - - err = discover_Details_TestToApi_ViewSink(service) - if err != nil { - return errors.Wrap(err, "failed to register with discovery") - } - - return nil } -` -) diff --git a/internal/generator/sinkTemplate.go b/internal/generator/sinkTemplate.go index 5a88d96..093bfd0 100644 --- a/internal/generator/sinkTemplate.go +++ b/internal/generator/sinkTemplate.go @@ -6,100 +6,14 @@ import ( "strings" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/pkg/errors" ) var ( - sinkTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. - -package {{ .Package }} - -import ( - "context" - "time" - - "github.com/lovoo/goka" - "github.com/pkg/errors" - - "github.com/syncromatics/kafmesh/pkg/runner" - - "{{ .Import }}" -) - -type {{ .Name }}_Sink interface { - Flush() error - Collect(ctx runner.MessageContext, key string, msg *{{ .MessageType }}) error -} - -type impl_{{ .Name }}_Sink struct { - sink {{ .Name }}_Sink - codec goka.Codec - group string - topic string - maxBufferSize int - interval time.Duration -} - -func (s *impl_{{ .Name }}_Sink) Codec() goka.Codec { - return s.codec -} - -func (s *impl_{{ .Name }}_Sink) Group() string { - return s.group -} - -func (s *impl_{{ .Name }}_Sink) Topic() string { - return s.topic -} - -func (s *impl_{{ .Name }}_Sink) MaxBufferSize() int { - return s.maxBufferSize -} - -func (s *impl_{{ .Name }}_Sink) Interval() time.Duration { - return s.interval -} - -func (s *impl_{{ .Name }}_Sink) Flush() error { - return s.sink.Flush() -} - -func (s *impl_{{ .Name }}_Sink) Collect(ctx runner.MessageContext, key string, msg interface{}) error { - m, ok := msg.(*{{ .MessageType }}) - if !ok { - return errors.Errorf("expecting message of type '*{{ .MessageType }}' got type '%t'", msg) - } - - return s.sink.Collect(ctx, key, m) -} - -func Register_{{ 
.Name }}_Sink(options runner.ServiceOptions, sink {{ .Name }}_Sink, interval time.Duration, maxBufferSize int) (func(ctx context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("{{ .TopicName }}", &{{ .MessageType }}{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - d := &impl_{{ .Name }}_Sink{ - sink: sink, - codec: codec, - group: "{{ .GroupName }}", - topic: "{{ .TopicName }}", - maxBufferSize: maxBufferSize, - interval: interval, - } - - s := runner.NewSinkRunner(d, brokers) - - return func(ctx context.Context) func() error { - return s.Run(ctx) - }, nil -} -`)) + sinkTemplate = template.Must(template.New("").Parse(templates.Sink)) ) type sinkOptions struct { @@ -109,6 +23,7 @@ type sinkOptions struct { TopicName string MessageType string GroupName string + Type string } func generateSink(writer io.Writer, sink *sinkOptions) error { @@ -124,11 +39,27 @@ func buildSinkOptions(pkg string, mod string, modelsPath string, sink models.Sin Package: pkg, } + topicType := "protobuf" + if sink.TopicDefinition.Type != nil { + switch *sink.TopicDefinition.Type { + case "raw": + topicType = "raw" + } + } + + options.Type = topicType options.TopicName = sink.ToTopicName(service) options.Name = sink.ToSafeName() options.GroupName = fmt.Sprintf("%s.%s.%s-sink", service.Name, component.Name, strings.ToLower(options.Name)) - options.Import = sink.ToPackage(service) - options.MessageType = sink.ToMessageTypeWithPackage() + + switch topicType { + case "protobuf": + options.Import = fmt.Sprintf("\"%s\"", sink.ToPackage(service)) + options.MessageType = sink.ToMessageTypeWithPackage() + + case "raw": + options.Import = "gokaCodecs \"github.com/lovoo/goka/codec\"" + } return options, nil } diff --git a/internal/generator/sinkTemplate_test.go b/internal/generator/sinkTemplate_test.go index 7788737..3902fcf 100644 --- a/internal/generator/sinkTemplate_test.go +++ 
b/internal/generator/sinkTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateSink(tmpDir string, t *testing.T) { @@ -14,96 +14,8 @@ func validateSink(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedSink, string(s)) -} - -var ( - expectedSink = `// Code generated by kafmesh-gen. DO NOT EDIT. - -package details - -import ( - "context" - "time" - - "github.com/lovoo/goka" - "github.com/pkg/errors" - - "github.com/syncromatics/kafmesh/pkg/runner" - - "test/internal/kafmesh/models/testMesh/testSerial" -) - -type EnrichedDataPostgres_Sink interface { - Flush() error - Collect(ctx runner.MessageContext, key string, msg *testSerial.DetailsEnriched) error -} - -type impl_EnrichedDataPostgres_Sink struct { - sink EnrichedDataPostgres_Sink - codec goka.Codec - group string - topic string - maxBufferSize int - interval time.Duration -} - -func (s *impl_EnrichedDataPostgres_Sink) Codec() goka.Codec { - return s.codec -} - -func (s *impl_EnrichedDataPostgres_Sink) Group() string { - return s.group -} - -func (s *impl_EnrichedDataPostgres_Sink) Topic() string { - return s.topic -} - -func (s *impl_EnrichedDataPostgres_Sink) MaxBufferSize() int { - return s.maxBufferSize -} - -func (s *impl_EnrichedDataPostgres_Sink) Interval() time.Duration { - return s.interval -} - -func (s *impl_EnrichedDataPostgres_Sink) Flush() error { - return s.sink.Flush() -} - -func (s *impl_EnrichedDataPostgres_Sink) Collect(ctx runner.MessageContext, key string, msg interface{}) error { - m, ok := msg.(*testSerial.DetailsEnriched) - if !ok { - return errors.Errorf("expecting message of type '*testSerial.DetailsEnriched' got type '%t'", msg) - } - - return s.sink.Collect(ctx, key, m) -} - -func Register_EnrichedDataPostgres_Sink(options runner.ServiceOptions, sink EnrichedDataPostgres_Sink, interval time.Duration, maxBufferSize int) (func(ctx context.Context) func() error, error) { - 
brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &testSerial.DetailsEnriched{}) + err = cupaloy.SnapshotMulti("validateSink", s) if err != nil { - return nil, errors.Wrap(err, "failed to create codec") + t.Fatalf("error: %s", err) } - - d := &impl_EnrichedDataPostgres_Sink{ - sink: sink, - codec: codec, - group: "testMesh.details.enricheddatapostgres-sink", - topic: "testMesh.testSerial.detailsEnriched", - maxBufferSize: maxBufferSize, - interval: interval, - } - - s := runner.NewSinkRunner(d, brokers) - - return func(ctx context.Context) func() error { - return s.Run(ctx) - }, nil } -` -) diff --git a/internal/generator/sourceTemplate.go b/internal/generator/sourceTemplate.go index 168d912..cb16f4c 100644 --- a/internal/generator/sourceTemplate.go +++ b/internal/generator/sourceTemplate.go @@ -1,140 +1,18 @@ package generator import ( + "fmt" "io" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/pkg/errors" ) var ( - sourceTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package {{ .Package }} - -import ( - "context" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "golang.org/x/sync/errgroup" - - "{{ .Import }}" -) - -type {{ .Name }}_Source interface { - Emit(message {{ .Name }}_Source_Message) error - EmitBulk(ctx context.Context, messages []{{ .Name }}_Source_Message) error - Delete(key string) error -} - -type {{ .Name }}_Source_impl struct { - context.Context - emitter *runner.Emitter - metrics *runner.Metrics -} - -type {{ .Name }}_Source_Message struct { - Key string - Value *{{ .MessageType }} -} - -type impl_{{ .Name }}_Source_Message struct { - msg {{ .Name }}_Source_Message -} - -func (m *impl_{{ .Name }}_Source_Message) Key() string { - return m.msg.Key -} - -func (m *impl_{{ .Name }}_Source_Message) Value() interface{} { - return m.msg.Value -} - -func New_{{ .Name }}_Source(service *runner.Service) (*{{ .Name }}_Source_impl, func(context.Context) func() error, error) { - options := service.Options() - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("{{ .TopicName }}", &{{ .MessageType }}{}) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create codec") - } - - emitter, err := goka.NewEmitter(brokers, - goka.Stream("{{ .TopicName }}"), - codec, - goka.WithEmitterHasher(kafkautil.MurmurHasher)) - - if err != nil { - return nil, nil, errors.Wrap(err, "failed creating source") - } - - emitterCtx, emitterCancel := context.WithCancel(context.Background()) - e := &{{ .Name }}_Source_impl{ - emitterCtx, - runner.NewEmitter(emitter), - service.Metrics, - } - - return e, func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - grp.Go(func() error { - select { - case <-ctx.Done(): - emitterCancel() - return nil - } - }) - 
grp.Go(e.emitter.Watch(ctx)) - - select { - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} - -func (e *{{ .Name }}_Source_impl) Emit(message {{ .Name }}_Source_Message) error { - err := e.emitter.Emit(message.Key, message.Value) - if err != nil { - e.metrics.SourceError("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}") - return err - } - - e.metrics.SourceHit("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}", 1) - return nil -} - -func (e *{{ .Name }}_Source_impl) EmitBulk(ctx context.Context, messages []{{ .Name }}_Source_Message) error { - b := []runner.EmitMessage{} - for _, m := range messages { - b = append(b, &impl_{{ .Name }}_Source_Message{msg: m}) - } - err := e.emitter.EmitBulk(ctx, b) - if err != nil { - e.metrics.SourceError("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}") - return err - } - - e.metrics.SourceHit("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}", len(b)) - return nil -} - -func (e *{{ .Name }}_Source_impl) Delete(key string) error { - return e.emitter.Emit(key, nil) -} -`)) + sourceTemplate = template.Must(template.New("").Parse(templates.Source)) ) type sourceOptions struct { @@ -145,6 +23,7 @@ type sourceOptions struct { MessageType string ComponentName string ServiceName string + Type string } func generateSource(writer io.Writer, source *sourceOptions) error { @@ -162,10 +41,26 @@ func buildSourceOptions(pkg string, mod string, modelsPath string, service *mode options.TopicName = source.ToTopicName(service) options.Name = source.ToSafeMessageTypeName() - options.Import = source.ToPackage(service) - options.MessageType = source.ToMessageTypeWithPackage() - options.ComponentName = component.Name options.ServiceName = service.Name + options.ComponentName = component.Name + + topicType := "protobuf" + if source.TopicDefinition.Type != nil { + switch *source.TopicDefinition.Type { + case "raw": + topicType = "raw" + } + } + options.Type = topicType + 
+ switch topicType { + case "protobuf": + options.MessageType = source.ToMessageTypeWithPackage() + options.Import = fmt.Sprintf("\"%s\"", source.ToPackage(service)) + + case "raw": + options.Import = "gokaCodecs \"github.com/lovoo/goka/codec\"" + } return options, nil } diff --git a/internal/generator/sourceTemplate_test.go b/internal/generator/sourceTemplate_test.go index 802eb04..9887b3f 100644 --- a/internal/generator/sourceTemplate_test.go +++ b/internal/generator/sourceTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateEmitter(tmpDir string, t *testing.T) { @@ -14,133 +14,8 @@ func validateEmitter(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedEmitter, string(s)) -} - -var ( - expectedEmitter = `// Code generated by kafmesh-gen. DO NOT EDIT. - -package details - -import ( - "context" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "golang.org/x/sync/errgroup" - - "test/internal/kafmesh/models/testMesh/testSerial" -) - -type TestSerialDetails_Source interface { - Emit(message TestSerialDetails_Source_Message) error - EmitBulk(ctx context.Context, messages []TestSerialDetails_Source_Message) error - Delete(key string) error -} - -type TestSerialDetails_Source_impl struct { - context.Context - emitter *runner.Emitter - metrics *runner.Metrics -} - -type TestSerialDetails_Source_Message struct { - Key string - Value *testSerial.Details -} - -type impl_TestSerialDetails_Source_Message struct { - msg TestSerialDetails_Source_Message -} - -func (m *impl_TestSerialDetails_Source_Message) Key() string { - return m.msg.Key -} - -func (m *impl_TestSerialDetails_Source_Message) Value() interface{} { - return m.msg.Value -} - -func New_TestSerialDetails_Source(service *runner.Service) (*TestSerialDetails_Source_impl, func(context.Context) func() error, error) { - 
options := service.Options() - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("testMesh.testSerial.details", &testSerial.Details{}) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create codec") - } - - emitter, err := goka.NewEmitter(brokers, - goka.Stream("testMesh.testSerial.details"), - codec, - goka.WithEmitterHasher(kafkautil.MurmurHasher)) - - if err != nil { - return nil, nil, errors.Wrap(err, "failed creating source") - } - - emitterCtx, emitterCancel := context.WithCancel(context.Background()) - e := &TestSerialDetails_Source_impl{ - emitterCtx, - runner.NewEmitter(emitter), - service.Metrics, - } - - return e, func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - grp.Go(func() error { - select { - case <-ctx.Done(): - emitterCancel() - return nil - } - }) - grp.Go(e.emitter.Watch(ctx)) - - select { - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} - -func (e *TestSerialDetails_Source_impl) Emit(message TestSerialDetails_Source_Message) error { - err := e.emitter.Emit(message.Key, message.Value) + err = cupaloy.SnapshotMulti("validateEmitter", s) if err != nil { - e.metrics.SourceError("testMesh", "details", "testMesh.testSerial.details") - return err + t.Fatalf("error: %s", err) } - - e.metrics.SourceHit("testMesh", "details", "testMesh.testSerial.details", 1) - return nil } - -func (e *TestSerialDetails_Source_impl) EmitBulk(ctx context.Context, messages []TestSerialDetails_Source_Message) error { - b := []runner.EmitMessage{} - for _, m := range messages { - b = append(b, &impl_TestSerialDetails_Source_Message{msg: m}) - } - err := e.emitter.EmitBulk(ctx, b) - if err != nil { - e.metrics.SourceError("testMesh", "details", "testMesh.testSerial.details") - return err - } - - e.metrics.SourceHit("testMesh", "details", 
"testMesh.testSerial.details", len(b)) - return nil -} - -func (e *TestSerialDetails_Source_impl) Delete(key string) error { - return e.emitter.Emit(key, nil) -} -` -) diff --git a/internal/generator/templates/discover.go.tpl b/internal/generator/templates/discover.go.tpl new file mode 100644 index 0000000..c621b59 --- /dev/null +++ b/internal/generator/templates/discover.go.tpl @@ -0,0 +1,196 @@ +package {{ .Package }} + +import ( + "github.com/syncromatics/kafmesh/pkg/runner" +) + +{{ range .Processors }} +func discover_{{ .MethodName }}(service *runner.Service) error { + processor := runner.ProcessorDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + Name: "{{ .Name }}", + Description: "{{ .Description }}", + GroupName: "{{ .GroupName }}", + Inputs: []runner.InputDiscovery{ +{{- range .Inputs }} + { + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Message }}", + Topic: "{{ .Topic }}", + Type: {{ .Type }}, + }, + }, +{{- end }} + }, + Joins: []runner.JoinDiscovery{ +{{- range .Joins }} + { + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Message }}", + Topic: "{{ .Topic }}", + Type: {{ .Type }}, + }, + }, +{{- end }} + }, + Lookups: []runner.LookupDiscovery{ +{{- range .Lookups }} + { + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Message }}", + Topic: "{{ .Topic }}", + Type: {{ .Type }}, + }, + }, +{{- end }} + }, + Outputs: []runner.OutputDiscovery{ +{{- range .Outputs }} + runner.OutputDiscovery{ + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Message }}", + Topic: "{{ .Topic }}", + Type: {{ .Type }}, + }, + }, +{{- end }} + }, +{{- if .Persistence }} + Persistence: &runner.PersistentDiscovery{ + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Persistence.Message }}", + Topic: "{{ .Persistence.Topic 
}}", + Type: {{ .Persistence.Type }}, + }, + }, +{{- end }} + } + + return service.RegisterProcessor(processor) +} + +{{- end }} + +{{ range .Sources }} +func discover_{{ .MethodName }}(service *runner.Service) error { + source := runner.SourceDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Source.Message }}", + Topic: "{{ .Source.Topic }}", + Type: {{ .Source.Type }}, + }, + } + + return service.RegisterSource(source) +} + +{{- end }} + +{{ range .Sinks }} +func discover_{{ .MethodName }}(service *runner.Service) error { + sink := runner.SinkDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Source.Message }}", + Topic: "{{ .Source.Topic }}", + Type: {{ .Source.Type }}, + }, + Name: "{{ .Name }}", + Description: "{{ .Description }}", + } + + return service.RegisterSink(sink) +} +{{- end }} + +{{ range .Views }} +func discover_{{ .MethodName }}(service *runner.Service) error { + view := runner.ViewDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .TopicDiscovery.Message }}", + Topic: "{{ .TopicDiscovery.Topic }}", + Type: {{ .TopicDiscovery.Type }}, + }, + } + + return service.RegisterView(view) +} +{{- end }} + +{{ range .ViewSinks }} +func 
discover_{{ .MethodName }}(service *runner.Service) error { + sink := runner.ViewSinkDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Source.Message }}", + Topic: "{{ .Source.Topic }}", + Type: {{ .Source.Type }}, + }, + Name: "{{ .Name }}", + Description: "{{ .Description }}", + } + + return service.RegisterViewSink(sink) +} +{{- end }} + +{{ range .ViewSources }} +func discover_{{ .MethodName }}(service *runner.Service) error { + source := runner.ViewSourceDiscovery{ + ServiceDiscovery : runner.ServiceDiscovery { + Name: "{{ .Service.Name}}", + Description: "{{ .Service.Description }}", + }, + ComponentDiscovery: runner.ComponentDiscovery{ + Name: "{{ .Component.Name}}", + Description: "{{ .Component.Description }}", + }, + TopicDiscovery: runner.TopicDiscovery{ + Message: "{{ .Source.Message }}", + Topic: "{{ .Source.Topic }}", + Type: {{ .Source.Type }}, + }, + Name: "{{ .Name }}", + Description: "{{ .Description }}", + } + + return service.RegisterViewSource(source) +} +{{- end }} diff --git a/internal/generator/templates/header.go.tpl b/internal/generator/templates/header.go.tpl new file mode 100644 index 0000000..c2d1fce --- /dev/null +++ b/internal/generator/templates/header.go.tpl @@ -0,0 +1,2 @@ +// Code generated by kafmesh-gen. DO NOT EDIT. 
+ diff --git a/internal/generator/templates/processor.go.tpl b/internal/generator/templates/processor.go.tpl new file mode 100644 index 0000000..0b55f55 --- /dev/null +++ b/internal/generator/templates/processor.go.tpl @@ -0,0 +1,233 @@ +package {{ .Package }} + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "time" + + "github.com/Shopify/sarama" + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syndtr/goleveldb/leveldb/opt" + + "github.com/syncromatics/kafmesh/pkg/runner" +{{ range .Imports }} + {{ . }} +{{- end }} +) + +{{ with .Context -}} +type {{ .Name }}_ProcessorContext interface { + Key() string + Timestamp() time.Time + {{- range .Methods }} + {{.Name}}({{ .Args }} +{{- end}} +} +{{- end }} + +{{ with .Interface -}} +type {{ .Name }}_Processor interface { + {{- range .Methods }} + {{.Name}}({{ .Args }}) error +{{- end}} +} +{{- end}} +{{ $impl := "" }} +{{ with .Context -}} +type {{ .Name }}_ProcessorContext_Impl struct { + ctx goka.Context + processorContext *runner.ProcessorContext +} + +func new_{{ .Name }}_ProcessorContext_Impl(ctx goka.Context, pc *runner.ProcessorContext) *{{ .Name }}_ProcessorContext_Impl { + return &{{ .Name }}_ProcessorContext_Impl{ctx, pc} +} +{{$c := .Name}} +func (c *{{$c}}_ProcessorContext_Impl) Key() string { + return c.ctx.Key() +} + +func (c *{{$c}}_ProcessorContext_Impl) Timestamp() time.Time { + return c.ctx.Timestamp() +} +{{ range .Methods }} +func (c *{{$c}}_ProcessorContext_Impl) {{.Name}}({{ .Args }} { +{{- $t := . 
-}} +{{- with (eq .Type "lookup" ) }} + v := c.ctx.Lookup("{{- $t.Topic -}}", key) + if v == nil { + c.processorContext.Lookup("{{$t.Topic}}", "{{$t.MessageTypeName}}", key, "") + return nil + } + + {{ with (eq $t.RequiresPointer true) -}} + m := v.(*{{ $t.MessageType }}) + {{- end -}} + {{- with (eq $t.RequiresPointer false) -}} + m := v.({{ $t.MessageType }}) + {{- end }} + + value, _ := json.Marshal(m) + c.processorContext.Lookup("{{ $t.Topic }}", "{{$t.MessageTypeName}}", key, string(value)) + + return m +{{- end -}} +{{- with (eq .Type "join" ) }} + v := c.ctx.Join("{{- $t.Topic -}}") + if v == nil { + c.processorContext.Join("{{$t.Topic}}", "{{$t.MessageTypeName}}", "") + return nil + } + + {{ with (eq $t.RequiresPointer true) -}} + m := v.(*{{ $t.MessageType }}) + {{- end -}} + {{- with (eq $t.RequiresPointer false) -}} + m := v.({{ $t.MessageType }}) + {{- end }} + + value, _ := json.Marshal(m) + c.processorContext.Join("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) + + return m +{{- end -}} +{{- with (eq .Type "output" ) }} + value, _ := json.Marshal(message) + c.processorContext.Output("{{ $t.Topic }}", "{{$t.MessageTypeName}}", key, string(value)) + c.ctx.Emit("{{- $t.Topic -}}", key, message) +{{- end -}} +{{- with (eq .Type "save") }} + value, _ := json.Marshal(state) + c.processorContext.SetState("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) + + c.ctx.SetValue(state) +{{- end -}} +{{- with (eq .Type "state") }} + v := c.ctx.Value() + var m *{{- $t.MessageType }} + if v == nil { + m = &{{- $t.MessageType -}}{} + } else { + m = v.(*{{- $t.MessageType -}}) + } + + value, _ := json.Marshal(m) + c.processorContext.GetState("{{ $t.Topic }}", "{{$t.MessageTypeName}}", string(value)) + + return m +{{- end }} +} +{{ end}} +{{- end}} +{{ $c := .Context -}} +{{- $componentName := .Component -}} +{{- $processorName := .ProcessorName -}} +{{ with .Interface -}} +func Register_{{ .Name }}_Processor(service *runner.Service, impl {{ .Name 
}}_Processor) (func(context.Context) func() error, error) { +{{- end }} + options := service.Options() + brokers := options.Brokers + protoWrapper := options.ProtoWrapper + + config := sarama.NewConfig() + config.Version = sarama.MaxVersion + config.Consumer.Offsets.Initial = sarama.OffsetOldest + config.Consumer.Offsets.AutoCommit.Enable = true + config.Consumer.Offsets.CommitInterval = 1 * time.Second + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "processor", "{{ .Group }}") + + err := os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, errors.Wrap(err, "failed to create processor db directory") + } + + builder := storage.BuilderWithOptions(path, opts) +{{ range .Codecs }} + {{- $cd := . -}} + {{- with (eq .Type "protobuf") }} + c{{ $cd.Index }}, err := protoWrapper.Codec("{{ $cd.Topic }}", &{{ $cd.Message }}{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + c{{ $cd.Index }} := &gokaCodecs.Bytes{} + {{- end -}} +{{ end }} + + edges := []goka.Edge{ +{{- range .Edges -}} +{{ $e := . }} +{{- with (eq .Type "input" ) }} + goka.Input(goka.Stream("{{ $e.Topic }}"), c{{ $e.Codec }}, func(ctx goka.Context, m interface{}) { + {{- $mt := . 
-}} + {{- with (eq $e.RequiresPointer true) }} + msg := m.(*{{ $e.Message }}) + {{- end }} + {{- with (eq $e.RequiresPointer false) }} + msg := m.({{ $e.Message }}) + {{- end }} + + pc := service.ProcessorContext(ctx.Context(), "{{$componentName}}", "{{$processorName}}", ctx.Key()) + defer pc.Finish() + + v, err := json.Marshal(msg) + if err != nil { + ctx.Fail(err) + } + pc.Input("{{ $e.Topic }}", "{{ $e.MessageType}}", string(v)) + + w := new_{{ $c.Name }}_ProcessorContext_Impl(ctx, pc) + err = impl.{{ $e.Func }}(w, msg) + if err != nil { + ctx.Fail(err) + } + }), +{{- end -}} +{{- with (eq .Type "lookup" ) }} + goka.Lookup(goka.Table("{{ $e.Topic }}"), c{{ $e.Codec }}), +{{- end -}} +{{- with (eq .Type "join" ) }} + goka.Join(goka.Table("{{ $e.Topic }}"), c{{ $e.Codec }}), +{{- end -}} +{{- with (eq .Type "output" ) }} + goka.Output(goka.Stream("{{ $e.Topic }}"), c{{ $e.Codec }}), +{{- end -}} +{{- with (eq .Type "state" ) }} + goka.Persist(c{{ $e.Codec }}), +{{- end -}} +{{ end }} + } + group := goka.DefineGroup(goka.Group("{{ .Group }}"), edges...) 
+ + processor, err := goka.NewProcessor(brokers, + group, + goka.WithConsumerGroupBuilder(goka.ConsumerGroupBuilderWithConfig(config)), + goka.WithStorageBuilder(builder), + goka.WithHasher(kafkautil.MurmurHasher)) + if err != nil { + return nil, errors.Wrap(err, "failed to create goka processor") + } + + return func(ctx context.Context) func() error { + return func() error { + err := processor.Run(ctx) + if err != nil { + return errors.Wrap(err, "failed to run goka processor") + } + + return nil + } + }, nil +} diff --git a/internal/generator/templates/service.go.tpl b/internal/generator/templates/service.go.tpl new file mode 100644 index 0000000..7943368 --- /dev/null +++ b/internal/generator/templates/service.go.tpl @@ -0,0 +1,138 @@ +package {{ .Package }} + +import ( +{{- $sinkLength := len .Sinks -}} {{- $syncLength := len .ViewSources -}} {{- if or (ne $sinkLength 0) (ne $syncLength 0) }} + "time" +{{- end }} + + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" +{{ range .Imports }} + {{ printf "%q" . 
}} +{{- end }} +) +{{ range .Processors }} +func Register_{{ .ExportName }}(service *runner.Service, processor {{ .Package }}.{{ .Name }}) error { + r, err := {{ .Package }}.Register_{{ .Name }}(service, processor) + if err != nil { + return errors.Wrap(err, "failed to register processor") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} +{{ end -}} + +{{ range .Sources }} +func New_{{ .ExportName }}_Source(service *runner.Service) ({{ .Package }}.{{ .Name }}_Source, error) { + e, r, err := {{ .Package }}.New_{{ .Name }}_Source(service) + if err != nil { + return nil, err + } + + err = service.RegisterRunner(r) + if err != nil { + return nil, errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}_Source(service) + if err != nil { + return nil, errors.Wrap(err, "failed to register with discovery") + } + + return e, nil +} +{{ end -}} + +{{ range .Views }} +func New_{{ .ExportName }}_View(service *runner.Service) ({{ .Package }}.{{ .Name }}_View, error) { + v, r, err := {{ .Package }}.New_{{ .Name }}_View(service.Options()) + if err != nil { + return nil, err + } + + err = service.RegisterRunner(r) + if err != nil { + return nil, errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}_View(service) + if err != nil { + return nil, errors.Wrap(err, "failed to register with discovery") + } + + return v, nil +} +{{ end -}} + +{{ range .Sinks }} +func Register_{{ .Name }}_Sink(service *runner.Service, sink {{ .Package }}.{{ .Name }}_Sink, interval time.Duration, maxBufferSize int) error { + r, err := {{ .Package }}.Register_{{ .Name }}_Sink(service.Options(), sink, interval, maxBufferSize) + if err != nil { + return errors.Wrap(err, "failed to register sink") 
+ } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}_Sink(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} +{{ end -}} + +{{ range .ViewSources }} +func Register_{{ .ExportName }}_ViewSource(service *runner.Service, viewSource {{ .Package }}.{{ .Name }}_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) error { + r, err := {{ .Package }}.Register_{{ .Name }}_ViewSource(service.Options(), viewSource, updateInterval, syncTimeout) + if err != nil { + return errors.Wrap(err, "failed to register viewSource") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}_ViewSource(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} +{{ end -}} + +{{ range .ViewSinks }} +func Register_{{ .ExportName }}_ViewSink(service *runner.Service, viewSink {{ .Package }}.{{ .Name }}_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) error { + r, err := {{ .Package }}.Register_{{ .Name }}_ViewSink(service.Options(), viewSink, updateInterval, syncTimeout) + if err != nil { + return errors.Wrap(err, "failed to register viewSink") + } + + err = service.RegisterRunner(r) + if err != nil { + return errors.Wrap(err, "failed to register runner with service") + } + + err = discover_{{ .ExportName }}_ViewSink(service) + if err != nil { + return errors.Wrap(err, "failed to register with discovery") + } + + return nil +} +{{ end -}} diff --git a/internal/generator/templates/sink.go.tpl b/internal/generator/templates/sink.go.tpl new file mode 100644 index 0000000..f19a8ff --- /dev/null +++ b/internal/generator/templates/sink.go.tpl @@ -0,0 +1,102 @@ +package {{ .Package }} + +import ( + "context" + "time" + + 
"github.com/lovoo/goka" + "github.com/pkg/errors" + + "github.com/syncromatics/kafmesh/pkg/runner" + + {{ .Import }} +) + +{{ $t := . -}} +type {{ .Name }}_Sink interface { + Flush() error + {{- with (eq .Type "protobuf") }} + Collect(ctx runner.MessageContext, key string, msg *{{ $t.MessageType }}) error + {{- end -}} + {{- with (eq .Type "raw") }} + Collect(ctx runner.MessageContext, key string, msg []byte) error + {{- end }} +} + +type impl_{{ .Name }}_Sink struct { + sink {{ .Name }}_Sink + codec goka.Codec + group string + topic string + maxBufferSize int + interval time.Duration +} + +func (s *impl_{{ .Name }}_Sink) Codec() goka.Codec { + return s.codec +} + +func (s *impl_{{ .Name }}_Sink) Group() string { + return s.group +} + +func (s *impl_{{ .Name }}_Sink) Topic() string { + return s.topic +} + +func (s *impl_{{ .Name }}_Sink) MaxBufferSize() int { + return s.maxBufferSize +} + +func (s *impl_{{ .Name }}_Sink) Interval() time.Duration { + return s.interval +} + +func (s *impl_{{ .Name }}_Sink) Flush() error { + return s.sink.Flush() +} + +func (s *impl_{{ .Name }}_Sink) Collect(ctx runner.MessageContext, key string, msg interface{}) error { + {{- with (eq .Type "protobuf") }} + m, ok := msg.(*{{ $t.MessageType }}) + {{- end -}} + {{- with (eq .Type "raw") }} + m, ok := msg.([]byte) + {{- end }} + + if !ok { + return errors.Errorf("expecting message of type '*{{ .MessageType }}' got type '%t'", msg) + } + + return s.sink.Collect(ctx, key, m) +} + +func Register_{{ .Name }}_Sink(options runner.ServiceOptions, sink {{ .Name }}_Sink, interval time.Duration, maxBufferSize int) (func(ctx context.Context) func() error, error) { + brokers := options.Brokers + + {{- with (eq .Type "protobuf") }} + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("{{ $t.TopicName }}", &{{ $t.MessageType }}{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + codec := 
&gokaCodecs.Bytes{} + {{- end }} + + d := &impl_{{ .Name }}_Sink{ + sink: sink, + codec: codec, + group: "{{ .GroupName }}", + topic: "{{ .TopicName }}", + maxBufferSize: maxBufferSize, + interval: interval, + } + + s := runner.NewSinkRunner(d, brokers) + + return func(ctx context.Context) func() error { + return s.Run(ctx) + }, nil +} diff --git a/internal/generator/templates/source.go.tpl b/internal/generator/templates/source.go.tpl new file mode 100644 index 0000000..0860c6d --- /dev/null +++ b/internal/generator/templates/source.go.tpl @@ -0,0 +1,133 @@ +package {{ .Package }} + +import ( + "context" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "golang.org/x/sync/errgroup" + + {{ .Import }} +) + +{{ $t := . -}} +type {{ .Name }}_Source interface { + Emit(message {{ .Name }}_Source_Message) error + EmitBulk(ctx context.Context, messages []{{ .Name }}_Source_Message) error + Delete(key string) error +} + +type {{ .Name }}_Source_impl struct { + context.Context + emitter *runner.Emitter + metrics *runner.Metrics +} + +type {{ .Name }}_Source_Message struct { + Key string + {{- with (eq .Type "protobuf") }} + Value *{{ $t.MessageType }} + {{- end -}} + {{- with (eq .Type "raw") }} + Value []byte + {{- end }} +} + +type impl_{{ .Name }}_Source_Message struct { + msg {{ .Name }}_Source_Message +} + +func (m *impl_{{ .Name }}_Source_Message) Key() string { + return m.msg.Key +} + +func (m *impl_{{ .Name }}_Source_Message) Value() interface{} { + return m.msg.Value +} + +func New_{{ .Name }}_Source(service *runner.Service) (*{{ .Name }}_Source_impl, func(context.Context) func() error, error) { + options := service.Options() + brokers := options.Brokers + + {{- with (eq .Type "protobuf") }} + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("{{ $t.TopicName }}", &{{ $t.MessageType }}{}) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create 
codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + codec := &gokaCodecs.Bytes{} + {{- end }} + + emitter, err := goka.NewEmitter(brokers, + goka.Stream("{{ .TopicName }}"), + codec, + goka.WithEmitterHasher(kafkautil.MurmurHasher)) + + if err != nil { + return nil, nil, errors.Wrap(err, "failed creating source") + } + + emitterCtx, emitterCancel := context.WithCancel(context.Background()) + e := &{{ .Name }}_Source_impl{ + emitterCtx, + runner.NewEmitter(emitter), + service.Metrics, + } + + return e, func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + grp.Go(func() error { + select { + case <-ctx.Done(): + emitterCancel() + return nil + } + }) + grp.Go(e.emitter.Watch(ctx)) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + +func (e *{{ .Name }}_Source_impl) Emit(message {{ .Name }}_Source_Message) error { + err := e.emitter.Emit(message.Key, message.Value) + if err != nil { + e.metrics.SourceError("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}") + return err + } + + e.metrics.SourceHit("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}", 1) + return nil +} + +func (e *{{ .Name }}_Source_impl) EmitBulk(ctx context.Context, messages []{{ .Name }}_Source_Message) error { + b := []runner.EmitMessage{} + for _, m := range messages { + b = append(b, &impl_{{ .Name }}_Source_Message{msg: m}) + } + err := e.emitter.EmitBulk(ctx, b) + if err != nil { + e.metrics.SourceError("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}") + return err + } + + e.metrics.SourceHit("{{ .ServiceName }}", "{{ .ComponentName }}", "{{ .TopicName }}", len(b)) + return nil +} + +func (e *{{ .Name }}_Source_impl) Delete(key string) error { + return e.emitter.Emit(key, nil) +} diff --git a/internal/generator/templates/templates.go 
b/internal/generator/templates/templates.go new file mode 100644 index 0000000..ee80f1a --- /dev/null +++ b/internal/generator/templates/templates.go @@ -0,0 +1,60 @@ +package templates + +import _ "embed" // required to use embeds + +//go:embed header.go.tpl +var header string + +//go:embed processor.go.tpl +var processor string + +// Processor template +var Processor = header + processor + +//go:embed service.go.tpl +var service string + +// Service template +var Service = header + service + +//go:embed sink.go.tpl +var sink string + +// Sink template +var Sink = header + sink + +//go:embed source.go.tpl +var source string + +// Source template +var Source = header + source + +//go:embed topic.go.tpl +var topic string + +// Topic template +var Topic = header + topic + +//go:embed viewSink.go.tpl +var viewSink string + +// ViewSink template +var ViewSink = header + viewSink + +//go:embed viewSource.go.tpl +var viewSource string + +// ViewSource template +var ViewSource = header + viewSource + +//go:embed view.go.tpl +var view string + +// View template +var View = header + view + +//go:embed discover.go.tpl +var discover string + +// Discover template +var Discover = header + discover diff --git a/internal/generator/templates/topic.go.tpl b/internal/generator/templates/topic.go.tpl new file mode 100644 index 0000000..8cf8ad8 --- /dev/null +++ b/internal/generator/templates/topic.go.tpl @@ -0,0 +1,28 @@ +package {{ .Package }} + +import ( + "context" + "time" + + "github.com/syncromatics/kafmesh/pkg/runner" +) + +var ( + topics = []runner.Topic{ + {{- range .Topics }} + runner.Topic { + Name: "{{ .Name }}", + Partitions: {{ .Partitions}}, + Replicas: {{ .Replicas }}, + Compact: {{ .Compact }}, + Retention: {{ .Retention.Milliseconds }} * time.Millisecond, + Segment: {{ .Segment.Milliseconds }} * time.Millisecond, + Create: {{ .Create }}, + }, + {{- end }} + } +) + +func ConfigureTopics(ctx context.Context, brokers []string) error { + return 
runner.ConfigureTopics(ctx, brokers, topics) +} diff --git a/internal/generator/templates/view.go.tpl b/internal/generator/templates/view.go.tpl new file mode 100644 index 0000000..e515a01 --- /dev/null +++ b/internal/generator/templates/view.go.tpl @@ -0,0 +1,163 @@ +package {{ .Package }} + +import ( + "context" + "os" + "path/filepath" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + {{ .Import }} +) + +{{ $t := . -}} +type {{ .Name }}_View interface { + Keys() ([]string, error) + {{- with (eq .Type "protobuf") }} + Get(key string) (*{{ $t.MessageType }}, error) + {{- end -}} + {{- with (eq .Type "raw") }} + Get(key string) ([]byte, error) + {{- end }} +} + +type {{ .Name }}_View_impl struct { + context.Context + view *goka.View +} + +func New_{{ .Name }}_View(options runner.ServiceOptions) (*{{ .Name }}_View_impl, func(context.Context) func() error, error) { + brokers := options.Brokers + var err error + + {{- with (eq .Type "protobuf") }} + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("{{ $t.TopicName }}", &{{ $t.MessageType }}{}) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + codec := &gokaCodecs.Bytes{} + {{- end }} + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "view", "{{ .TopicName }}") + + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to create view db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + + view, err := goka.NewView(brokers, + goka.Table("{{ .TopicName }}"), + codec, + goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return 
nil, nil, errors.Wrap(err, "failed creating view") + } + + viewCtx, viewCancel := context.WithCancel(context.Background()) + v := &{{ .Name }}_View_impl{ + viewCtx, + view, + } + + return v, func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + grp.Go(func() error { + select { + case <-ctx.Done(): + viewCancel() + return nil + } + }) + grp.Go(func() error { + return v.view.Run(ctx) + }) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} + +func (v *{{ .Name }}_View_impl) Keys() ([]string, error) { + select { + case <-v.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-v.view.WaitRunning(): + } + + it, err := v.view.Iterator() + if err != nil { + return nil, errors.Wrap(err, "failed to get iterator from view") + } + + keys := []string{} + for it.Next() { + keys = append(keys, it.Key()) + } + + return keys, nil +} + +{{- with (eq .Type "protobuf") }} +func (v *{{ $t.Name }}_View_impl) Get(key string) (*{{ $t.MessageType }}, error) { +{{- end -}} +{{- with (eq .Type "raw") }} +func (v *{{ $t.Name }}_View_impl) Get(key string) ([]byte, error) { +{{- end }} + select { + case <-v.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-v.view.WaitRunning(): + } + + m, err := v.view.Get(key) + if err != nil { + return nil, errors.Wrap(err, "failed to get value from view") + } + + if m == nil { + return nil, nil + } + + {{- with (eq .Type "protobuf") }} + msg, ok := m.(*{{ $t.MessageType }}) + {{- end -}} + {{- with (eq .Type "raw") }} + msg, ok := m.([]byte) + {{- end }} + if !ok { + {{- with (eq .Type "protobuf") }} + return nil, errors.Errorf("expecting message of type '*{{ $t.MessageType }}' got type '%t'", m) + {{- end -}} + {{- with (eq .Type "raw") }} + return nil, 
errors.Errorf("expecting message of type '[]byte' got type '%T'", m) + {{- end }} + } + + return msg, nil +} diff --git a/internal/generator/templates/viewSink.go.tpl b/internal/generator/templates/viewSink.go.tpl new file mode 100644 index 0000000..c1e7352 --- /dev/null +++ b/internal/generator/templates/viewSink.go.tpl @@ -0,0 +1,185 @@ +package {{ .Package }} + +import ( + "context" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + {{ .Import }} +) + +{{ $t := . -}} +type {{ .Name }}_ViewSink_Context interface { + Context() context.Context + Keys() ([]string, error) + {{- with (eq .Type "protobuf") }} + Get(string) (*{{ $t.MessageType }}, error) + {{- end -}} + {{- with (eq .Type "raw") }} + Get(string) ([]byte, error) + {{- end }} +} + +type {{ .Name }}_ViewSink_Context_impl struct { + context context.Context + view *goka.View +} + +func (c *{{ .Name }}_ViewSink_Context_impl) Context() context.Context { + return c.context +} + +func (c *{{ .Name }}_ViewSink_Context_impl) Keys() ([]string, error) { + select { + case <-c.context.Done(): + return nil, errors.New("context cancelled while waiting for partition to become running") + case <-c.view.WaitRunning(): + } + + it, err := c.view.Iterator() + if err != nil { + return nil, errors.Wrap(err, "failed to get iterator") + } + keys := []string{} + for it.Next() { + keys = append(keys, it.Key()) + } + return keys, nil +} + +{{ with (eq .Type "protobuf") -}} +func (c *{{ $t.Name }}_ViewSink_Context_impl) Get(key string) (*{{ $t.MessageType }}, error) { +{{- end -}} +{{- with (eq .Type "raw") -}} +func (c *{{ $t.Name }}_ViewSink_Context_impl) Get(key string) ([]byte, error) { +{{- end }} + m, err := c.view.Get(key) + if err != nil { + return nil, errors.Wrap(err, "failed to get value from 
view") + } + if m == nil { + return nil, nil + } + {{- with (eq .Type "protobuf") }} + msg, ok := m.(*{{ $t.MessageType }}) + {{- end -}} + {{- with (eq .Type "raw") }} + msg, ok := m.([]byte) + {{- end }} + if !ok { + {{- with (eq .Type "protobuf") }} + return nil, errors.Errorf("expecting message of type '*{{ $t.MessageType }}' got type '%t'", m) + {{- end -}} + {{- with (eq .Type "raw") }} + return nil, errors.Errorf("expecting message of type '[]byte' got type '%t'", m) + {{- end }} + } + return msg, nil +} + +type {{ .Name }}_ViewSink interface { + Sync({{ .Name }}_ViewSink_Context) error +} + +func Register_{{ .Name }}_ViewSink(options runner.ServiceOptions, synchronizer {{ .Name }}_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { + brokers := options.Brokers + + {{- with (eq .Type "protobuf") }} + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("{{ $t.TopicName }}", &{{ $t.MessageType }}{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + codec := &gokaCodecs.Bytes{} + {{- end }} + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "viewSink", "{{ .TopicName }}") + + {{- with (eq .Type "protobuf") }} + err = os.MkdirAll(path, os.ModePerm) + {{- end -}} + {{- with (eq .Type "raw") }} + err := os.MkdirAll(path, os.ModePerm) + {{- end }} + if err != nil { + return nil, errors.Wrap(err, "failed to create view sink db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + view, err := goka.NewView(brokers, + goka.Table("{{ .TopicName }}"), + codec, + goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return nil, errors.Wrap(err, "failed creating view sink view") + } + + return func(outerCtx context.Context) func() error { + return func() error { + 
cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + timer := time.NewTimer(0) + grp.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case <-timer.C: + select { + case <-ctx.Done(): + return nil + case <-view.WaitRunning(): + } + + newContext, cancel := context.WithTimeout(ctx, syncTimeout) + c := &{{ .Name }}_ViewSink_Context_impl{ + context: newContext, + view: view, + } + err := synchronizer.Sync(c) + if err != nil { + cancel() + fmt.Printf("sync error '%v'", err) + return err + } + cancel() + timer = time.NewTimer(updateInterval) + } + } + }) + + grp.Go(func() error { + return view.Run(ctx) + }) + + select { + case <- ctx.Done(): + return grp.Wait() + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} diff --git a/internal/generator/templates/viewSource.go.tpl b/internal/generator/templates/viewSource.go.tpl new file mode 100644 index 0000000..b77123d --- /dev/null +++ b/internal/generator/templates/viewSource.go.tpl @@ -0,0 +1,165 @@ +package {{ .Package }} + +import ( + "context" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/burdiyan/kafkautil" + "github.com/lovoo/goka" + "github.com/lovoo/goka/storage" + "github.com/pkg/errors" + "github.com/syncromatics/kafmesh/pkg/runner" + "github.com/syndtr/goleveldb/leveldb/opt" + "golang.org/x/sync/errgroup" + + {{ .Import }} +) + +{{ $t := . 
-}} +type {{ .Name }}_ViewSource_Context interface { + Context() context.Context + {{- with (eq .Type "protobuf") }} + Update(string, *{{ $t.MessageType }}) error + {{- end -}} + {{- with (eq .Type "raw") }} + Update(string, []byte) error + {{- end }} +} + +type {{ .Name }}_ViewSource interface { + Sync({{ .Name }}_ViewSource_Context) error +} + +type contextWrap_{{ .Name }} struct { + context context.Context + {{- with (eq .Type "protobuf") }} + job *runner.ProtoViewSourceJob + {{- end -}} + {{- with (eq .Type "raw") }} + job *runner.RawViewSourceJob + {{- end }} +} + +func (c *contextWrap_{{ $t.Name }}) Context() context.Context { + return c.context +} + +{{- with (eq .Type "protobuf") }} +func (c *contextWrap_{{ $t.Name }}) Update(key string, msg *{{ $t.MessageType }}) error { +{{- end -}} +{{- with (eq .Type "raw") }} +func (c *contextWrap_{{ $t.Name }}) Update(key string, msg []byte) error { +{{- end }} + return c.job.Update(key, msg) +} + +func Register_{{ .Name }}_ViewSource(options runner.ServiceOptions, synchronizer {{ .Name }}_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { + brokers := options.Brokers + var err error + + {{- with (eq .Type "protobuf") }} + protoWrapper := options.ProtoWrapper + codec, err := protoWrapper.Codec("{{ $t.TopicName }}", &{{ $t.MessageType }}{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create codec") + } + {{- end -}} + {{- with (eq .Type "raw") }} + codec := &gokaCodecs.Bytes{} + {{- end }} + + opts := &opt.Options{ + BlockCacheCapacity: opt.MiB * 1, + WriteBuffer: opt.MiB * 1, + } + + path := filepath.Join("/tmp/storage", "viewSource", "{{ .TopicName }}") + + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, errors.Wrap(err, "failed to create view source db directory") + } + + builder := storage.BuilderWithOptions(path, opts) + view, err := goka.NewView(brokers, + goka.Table("{{ .TopicName }}"), + codec, + 
goka.WithViewStorageBuilder(builder), + goka.WithViewHasher(kafkautil.MurmurHasher), + ) + if err != nil { + return nil, errors.Wrap(err, "failed creating synchronizer view") + } + + e, err := goka.NewEmitter(brokers, + goka.Stream("{{ .TopicName }}"), + codec, + goka.WithEmitterHasher(kafkautil.MurmurHasher)) + + if err != nil { + return nil, errors.Wrap(err, "failed creating synchronizer emitter") + } + + emitter := runner.NewEmitter(e) + + return func(outerCtx context.Context) func() error { + return func() error { + cancelableCtx, cancel := context.WithCancel(outerCtx) + defer cancel() + grp, ctx := errgroup.WithContext(cancelableCtx) + + timer := time.NewTimer(0) + grp.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case <-timer.C: + select { + case <-ctx.Done(): + return nil + case <-view.WaitRunning(): + } + + newContext, cancel := context.WithTimeout(ctx, syncTimeout) + {{- with (eq .Type "protobuf") }} + c := runner.NewProtoViewSourceJob(newContext, view, emitter) + {{- end -}} + {{- with (eq .Type "raw") }} + c := runner.NewRawViewSourceJob(newContext, view, emitter) + {{- end }} + cw := &contextWrap_{{ .Name }}{newContext, c} + err := synchronizer.Sync(cw) + if err != nil { + cancel() + fmt.Printf("sync error '%v'", err) + return err + } + err = c.Finish() + if err != nil { + cancel() + fmt.Printf("sync finish error '%v'", err) + return err + } + cancel() + timer = time.NewTimer(updateInterval) + } + } + }) + + grp.Go(emitter.Watch(ctx)) + grp.Go(func() error { + return view.Run(ctx) + }) + + select { + case <- ctx.Done(): + err := grp.Wait() + return err + } + } + }, nil +} diff --git a/internal/generator/topicTemplate.go b/internal/generator/topicTemplate.go index a5d787b..573b1d8 100644 --- a/internal/generator/topicTemplate.go +++ b/internal/generator/topicTemplate.go @@ -6,6 +6,7 @@ import ( "text/template" "time" + "github.com/syncromatics/kafmesh/internal/generator/templates" 
"github.com/syncromatics/kafmesh/internal/models" "github.com/syncromatics/kafmesh/pkg/runner" @@ -13,37 +14,7 @@ import ( ) var ( - topicTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. - -package {{ .Package }} - -import ( - "context" - "time" - - "github.com/syncromatics/kafmesh/pkg/runner" -) - -var ( - topics = []runner.Topic{ - {{- range .Topics }} - runner.Topic { - Name: "{{ .Name }}", - Partitions: {{ .Partitions}}, - Replicas: {{ .Replicas }}, - Compact: {{ .Compact }}, - Retention: {{ .Retention.Milliseconds }} * time.Millisecond, - Segment: {{ .Segment.Milliseconds }} * time.Millisecond, - Create: {{ .Create }}, - }, - {{- end }} - } -) - -func ConfigureTopics(ctx context.Context, brokers []string) error { - return runner.ConfigureTopics(ctx, brokers, topics) -} -`)) + topicTemplate = template.Must(template.New("").Parse(templates.Topic)) ) type topicDefinition struct { diff --git a/internal/generator/topicTemplate_test.go b/internal/generator/topicTemplate_test.go index e1cc907..0039be6 100644 --- a/internal/generator/topicTemplate_test.go +++ b/internal/generator/topicTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateTopic(tmpDir string, t *testing.T) { @@ -14,73 +14,8 @@ func validateTopic(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedTopics, string(s)) -} - -var ( - expectedTopics = `// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package kafmesh - -import ( - "context" - "time" - - "github.com/syncromatics/kafmesh/pkg/runner" -) - -var ( - topics = []runner.Topic{ - runner.Topic { - Name: "testMesh.details.enricher-table", - Partitions: 10, - Replicas: 1, - Compact: true, - Retention: 86400000 * time.Millisecond, - Segment: 43200000 * time.Millisecond, - Create: true, - }, - runner.Topic { - Name: "testMesh.testId.test", - Partitions: 10, - Replicas: 1, - Compact: true, - Retention: 86400000 * time.Millisecond, - Segment: 43200000 * time.Millisecond, - Create: true, - }, - runner.Topic { - Name: "testMesh.testId.test2", - Partitions: 0, - Replicas: 0, - Compact: false, - Retention: 0 * time.Millisecond, - Segment: 0 * time.Millisecond, - Create: false, - }, - runner.Topic { - Name: "testMesh.testSerial.details", - Partitions: 10, - Replicas: 1, - Compact: false, - Retention: 86400000 * time.Millisecond, - Segment: 43200000 * time.Millisecond, - Create: true, - }, - runner.Topic { - Name: "testMesh.testSerial.detailsEnriched", - Partitions: 10, - Replicas: 1, - Compact: false, - Retention: 86400000 * time.Millisecond, - Segment: 43200000 * time.Millisecond, - Create: true, - }, + err = cupaloy.SnapshotMulti("validateTopic", s) + if err != nil { + t.Fatalf("error: %s", err) } -) - -func ConfigureTopics(ctx context.Context, brokers []string) error { - return runner.ConfigureTopics(ctx, brokers, topics) } -` -) diff --git a/internal/generator/viewSinkTemplate.go b/internal/generator/viewSinkTemplate.go index 3bdab18..b6f2075 100644 --- a/internal/generator/viewSinkTemplate.go +++ b/internal/generator/viewSinkTemplate.go @@ -1,10 +1,12 @@ package generator import ( + "fmt" "io" "strings" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/iancoleman/strcase" @@ -12,165 +14,7 @@ import ( ) var ( - viewSinkTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package {{ .Package }} - -import ( - "context" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "{{ .Import }}" -) - -type {{ .Name }}_ViewSink_Context interface { - context.Context - Keys() ([]string, error) - Get(string) (*{{ .MessageType }}, error) -} - -type {{ .Name }}_ViewSink_Context_impl struct { - context.Context - view *goka.View -} - -func (c *{{ .Name }}_ViewSink_Context_impl) Keys() ([]string, error) { - select { - case <-c.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-c.view.WaitRunning(): - } - - it, err := c.view.Iterator() - if err != nil { - return nil, errors.Wrap(err, "failed to get iterator") - } - keys := []string{} - for it.Next() { - keys = append(keys, it.Key()) - } - return keys, nil -} - -func (c *{{ .Name }}_ViewSink_Context_impl) Get(key string) (*{{ .MessageType }}, error) { - select { - case <-c.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-c.view.WaitRunning(): - } - - m, err := c.view.Get(key) - if err != nil { - return nil, errors.Wrap(err, "failed to get value from view") - } - if m == nil { - return nil, nil - } - msg, ok := m.(*{{ .MessageType }}) - if !ok { - return nil, errors.Errorf("expecting message of type '*{{ .MessageType }}' got type '%t'", m) - } - return msg, nil -} - -type {{ .Name }}_ViewSink interface { - Sync({{ .Name }}_ViewSink_Context) error -} - -func Register_{{ .Name }}_ViewSink(options runner.ServiceOptions, synchronizer {{ .Name }}_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := 
protoWrapper.Codec("{{ .TopicName }}", &{{ .MessageType }}{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "viewSink", "{{ .TopicName }}") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create view sink db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - view, err := goka.NewView(brokers, - goka.Table("{{ .TopicName }}"), - codec, - goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) - if err != nil { - return nil, errors.Wrap(err, "failed creating view sink view") - } - - return func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - timer := time.NewTimer(0) - grp.Go(func() error { - for { - select { - case <-ctx.Done(): - return nil - case <-timer.C: - select { - case <-ctx.Done(): - return nil - case <-view.WaitRunning(): - } - - newContext, cancel := context.WithTimeout(ctx, syncTimeout) - c := &{{ .Name }}_ViewSink_Context_impl{ - Context: newContext, - view: view, - } - err := synchronizer.Sync(c) - if err != nil { - cancel() - fmt.Printf("sync error '%v'", err) - return err - } - cancel() - timer = time.NewTimer(updateInterval) - } - } - }) - - grp.Go(func() error { - return view.Run(ctx) - }) - - select { - case <- ctx.Done(): - return nil - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} -`)) + viewSinkTemplate = template.Must(template.New("").Parse(templates.ViewSink)) ) type viewSinkOptions struct { @@ -179,6 +23,7 @@ type viewSinkOptions struct { Name string TopicName string MessageType string + Type string } func generateViewSink(writer io.Writer, viewSink *viewSinkOptions) error { @@ -202,8 +47,24 @@ func 
buildViewSinkOptions(pkg string, mod string, modelsPath string, service *mo } options.TopicName = viewSink.ToTopicName(service) - options.Import = viewSink.ToPackage(service) - options.MessageType = nameFrags[len(nameFrags)-2] + "." + strcase.ToCamel(nameFrags[len(nameFrags)-1]) + + topicType := "protobuf" + if viewSink.TopicDefinition.Type != nil { + switch *viewSink.TopicDefinition.Type { + case "raw": + topicType = "raw" + } + } + options.Type = topicType + + switch topicType { + case "protobuf": + options.Import = fmt.Sprintf("\"%s\"", viewSink.ToPackage(service)) + options.MessageType = nameFrags[len(nameFrags)-2] + "." + strcase.ToCamel(nameFrags[len(nameFrags)-1]) + + case "raw": + options.Import = "gokaCodecs \"github.com/lovoo/goka/codec\"" + } return options, nil } diff --git a/internal/generator/viewSinkTemplate_test.go b/internal/generator/viewSinkTemplate_test.go index bd80818..905dc08 100644 --- a/internal/generator/viewSinkTemplate_test.go +++ b/internal/generator/viewSinkTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateViewSink(tmpDir string, t *testing.T) { @@ -14,167 +14,8 @@ func validateViewSink(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedViewSink, string(s)) -} - -var ( - expectedViewSink = `// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package details - -import ( - "context" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "test/internal/kafmesh/models/testMesh/testId" -) - -type TestToApi_ViewSink_Context interface { - context.Context - Keys() ([]string, error) - Get(string) (*testId.Test, error) -} - -type TestToApi_ViewSink_Context_impl struct { - context.Context - view *goka.View -} - -func (c *TestToApi_ViewSink_Context_impl) Keys() ([]string, error) { - select { - case <-c.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-c.view.WaitRunning(): - } - - it, err := c.view.Iterator() + err = cupaloy.SnapshotMulti("validateViewSink", s) if err != nil { - return nil, errors.Wrap(err, "failed to get iterator") + t.Fatalf("error: %s", err) } - keys := []string{} - for it.Next() { - keys = append(keys, it.Key()) - } - return keys, nil } - -func (c *TestToApi_ViewSink_Context_impl) Get(key string) (*testId.Test, error) { - select { - case <-c.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-c.view.WaitRunning(): - } - - m, err := c.view.Get(key) - if err != nil { - return nil, errors.Wrap(err, "failed to get value from view") - } - if m == nil { - return nil, nil - } - msg, ok := m.(*testId.Test) - if !ok { - return nil, errors.Errorf("expecting message of type '*testId.Test' got type '%t'", m) - } - return msg, nil -} - -type TestToApi_ViewSink interface { - Sync(TestToApi_ViewSink_Context) error -} - -func Register_TestToApi_ViewSink(options runner.ServiceOptions, synchronizer TestToApi_ViewSink, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper 
:= options.ProtoWrapper - - codec, err := protoWrapper.Codec("testMesh.testId.test", &testId.Test{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "viewSink", "testMesh.testId.test") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create view sink db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - view, err := goka.NewView(brokers, - goka.Table("testMesh.testId.test"), - codec, - goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) - if err != nil { - return nil, errors.Wrap(err, "failed creating view sink view") - } - - return func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - timer := time.NewTimer(0) - grp.Go(func() error { - for { - select { - case <-ctx.Done(): - return nil - case <-timer.C: - select { - case <-ctx.Done(): - return nil - case <-view.WaitRunning(): - } - - newContext, cancel := context.WithTimeout(ctx, syncTimeout) - c := &TestToApi_ViewSink_Context_impl{ - Context: newContext, - view: view, - } - err := synchronizer.Sync(c) - if err != nil { - cancel() - fmt.Printf("sync error '%v'", err) - return err - } - cancel() - timer = time.NewTimer(updateInterval) - } - } - }) - - grp.Go(func() error { - return view.Run(ctx) - }) - - select { - case <- ctx.Done(): - return nil - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} -` -) diff --git a/internal/generator/viewSourceTemplate.go b/internal/generator/viewSourceTemplate.go index 3c688b8..1313a86 100644 --- a/internal/generator/viewSourceTemplate.go +++ b/internal/generator/viewSourceTemplate.go @@ -1,10 +1,12 @@ package generator import ( + "fmt" "io" 
"strings" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/iancoleman/strcase" @@ -12,143 +14,7 @@ import ( ) var ( - viewSourceTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. - -package {{ .Package }} - -import ( - "context" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "{{ .Import }}" -) - -type {{ .Name }}_ViewSource_Context interface { - context.Context - Update(string, *{{ .MessageType }}) error -} - -type {{ .Name }}_ViewSource interface { - Sync({{ .Name }}_ViewSource_Context) error -} - -type contextWrap_{{ .Name }} struct { - context.Context - job *runner.ProtoViewSourceJob -} - -func (c *contextWrap_{{ .Name }}) Update(key string, msg *{{ .MessageType }}) error { - return c.job.Update(key, msg) -} - -func Register_{{ .Name }}_ViewSource(options runner.ServiceOptions, synchronizer {{ .Name }}_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("{{ .TopicName }}", &{{ .MessageType }}{}) - if err != nil { - return nil, errors.Wrap(err, "failed to create codec") - } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "viewSource", "{{ .TopicName }}") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create view source db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - view, err := goka.NewView(brokers, - goka.Table("{{ .TopicName }}"), - codec, - 
goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) - if err != nil { - return nil, errors.Wrap(err, "failed creating synchronizer view") - } - - e, err := goka.NewEmitter(brokers, - goka.Stream("{{ .TopicName }}"), - codec, - goka.WithEmitterHasher(kafkautil.MurmurHasher)) - - if err != nil { - return nil, errors.Wrap(err, "failed creating synchronizer emitter") - } - - emitter := runner.NewEmitter(e) - - return func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - timer := time.NewTimer(0) - grp.Go(func() error { - for { - select { - case <-ctx.Done(): - return nil - case <-timer.C: - select { - case <-ctx.Done(): - return nil - case <-view.WaitRunning(): - } - - newContext, cancel := context.WithTimeout(ctx, syncTimeout) - c := runner.NewProtoViewSourceJob(newContext, view, emitter) - cw := &contextWrap_{{ .Name }}{newContext, c} - err := synchronizer.Sync(cw) - if err != nil { - cancel() - fmt.Printf("sync error '%v'", err) - return err - } - err = c.Finish() - if err != nil { - cancel() - fmt.Printf("sync finish error '%v'", err) - return err - } - cancel() - timer = time.NewTimer(updateInterval) - } - } - }) - - grp.Go(emitter.Watch(ctx)) - grp.Go(func() error { - return view.Run(ctx) - }) - - select { - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} -`)) + viewSourceTemplate = template.Must(template.New("").Parse(templates.ViewSource)) ) type viewSourceOptions struct { @@ -157,6 +23,7 @@ type viewSourceOptions struct { Name string TopicName string MessageType string + Type string } func generateViewSource(writer io.Writer, viewSource *viewSourceOptions) error { @@ -173,15 +40,27 @@ func buildViewSourceOptions(pkg string, mod string, modelsPath string, service * Name: viewSource.ToSafeName(), } - var name strings.Builder nameFrags := 
strings.Split(viewSource.Message, ".") - for _, f := range nameFrags[1:] { - name.WriteString(strcase.ToCamel(f)) + + topicType := "protobuf" + if viewSource.TopicDefinition.Type != nil { + switch *viewSource.TopicDefinition.Type { + case "raw": + topicType = "raw" + } + } + options.Type = topicType + + switch topicType { + case "protobuf": + options.Import = fmt.Sprintf("\"%s\"", viewSource.ToPackage(service)) + options.MessageType = nameFrags[len(nameFrags)-2] + "." + strcase.ToCamel(nameFrags[len(nameFrags)-1]) + + case "raw": + options.Import = "gokaCodecs \"github.com/lovoo/goka/codec\"" } options.TopicName = viewSource.ToTopicName(service) - options.Import = viewSource.ToPackage(service) - options.MessageType = nameFrags[len(nameFrags)-2] + "." + strcase.ToCamel(nameFrags[len(nameFrags)-1]) return options, nil } diff --git a/internal/generator/viewSourceTemplate_test.go b/internal/generator/viewSourceTemplate_test.go index 2de24a6..557dd09 100644 --- a/internal/generator/viewSourceTemplate_test.go +++ b/internal/generator/viewSourceTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateViewSource(tmpDir string, t *testing.T) { @@ -14,145 +14,8 @@ func validateViewSource(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedViewSource, string(s)) -} - -var ( - expectedViewSource = `// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package details - -import ( - "context" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "test/internal/kafmesh/models/testMesh/testId" -) - -type TestToDatabase_ViewSource_Context interface { - context.Context - Update(string, *testId.Test) error -} - -type TestToDatabase_ViewSource interface { - Sync(TestToDatabase_ViewSource_Context) error -} - -type contextWrap_TestToDatabase struct { - context.Context - job *runner.ProtoViewSourceJob -} - -func (c *contextWrap_TestToDatabase) Update(key string, msg *testId.Test) error { - return c.job.Update(key, msg) -} - -func Register_TestToDatabase_ViewSource(options runner.ServiceOptions, synchronizer TestToDatabase_ViewSource, updateInterval time.Duration, syncTimeout time.Duration) (func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("testMesh.testId.test", &testId.Test{}) + err = cupaloy.SnapshotMulti("validateViewSource", s) if err != nil { - return nil, errors.Wrap(err, "failed to create codec") + t.Fatalf("error: %s", err) } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "viewSource", "testMesh.testId.test") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, errors.Wrap(err, "failed to create view source db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - view, err := goka.NewView(brokers, - goka.Table("testMesh.testId.test"), - codec, - goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) - if err != nil { - return nil, errors.Wrap(err, "failed creating synchronizer view") - } - - e, err := 
goka.NewEmitter(brokers, - goka.Stream("testMesh.testId.test"), - codec, - goka.WithEmitterHasher(kafkautil.MurmurHasher)) - - if err != nil { - return nil, errors.Wrap(err, "failed creating synchronizer emitter") - } - - emitter := runner.NewEmitter(e) - - return func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - timer := time.NewTimer(0) - grp.Go(func() error { - for { - select { - case <-ctx.Done(): - return nil - case <-timer.C: - select { - case <-ctx.Done(): - return nil - case <-view.WaitRunning(): - } - - newContext, cancel := context.WithTimeout(ctx, syncTimeout) - c := runner.NewProtoViewSourceJob(newContext, view, emitter) - cw := &contextWrap_TestToDatabase{newContext, c} - err := synchronizer.Sync(cw) - if err != nil { - cancel() - fmt.Printf("sync error '%v'", err) - return err - } - err = c.Finish() - if err != nil { - cancel() - fmt.Printf("sync finish error '%v'", err) - return err - } - cancel() - timer = time.NewTimer(updateInterval) - } - } - }) - - grp.Go(emitter.Watch(ctx)) - grp.Go(func() error { - return view.Run(ctx) - }) - - select { - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil } -` -) diff --git a/internal/generator/viewTemplate.go b/internal/generator/viewTemplate.go index a9cef75..59232b0 100644 --- a/internal/generator/viewTemplate.go +++ b/internal/generator/viewTemplate.go @@ -1,154 +1,18 @@ package generator import ( + "fmt" "io" "text/template" + "github.com/syncromatics/kafmesh/internal/generator/templates" "github.com/syncromatics/kafmesh/internal/models" "github.com/pkg/errors" ) var ( - viewTemplate = template.Must(template.New("").Parse(`// Code generated by kafmesh-gen. DO NOT EDIT. 
- -package {{ .Package }} - -import ( - "context" - "os" - "path/filepath" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "{{ .Import }}" -) - -type {{ .Name }}_View interface { - Keys() ([]string, error) - Get(key string) (*{{ .MessageType }}, error) -} - -type {{ .Name }}_View_impl struct { - context.Context - view *goka.View -} - -func New_{{ .Name }}_View(options runner.ServiceOptions) (*{{ .Name }}_View_impl, func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("{{ .TopicName }}", &{{ .MessageType }}{}) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create codec") - } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "view", "{{ .TopicName }}") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create view db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - - view, err := goka.NewView(brokers, - goka.Table("{{ .TopicName }}"), - codec, - goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) - if err != nil { - return nil, nil, errors.Wrap(err, "failed creating view") - } - - viewCtx, viewCancel := context.WithCancel(context.Background()) - v := &{{ .Name }}_View_impl{ - viewCtx, - view, - } - - return v, func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - grp.Go(func() error { - select { - case <-ctx.Done(): - viewCancel() - return nil - } - }) - grp.Go(func() error { - return v.view.Run(ctx) - }) - - select { - 
case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} - -func (v *{{ .Name }}_View_impl) Keys() ([]string, error) { - select { - case <-v.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-v.view.WaitRunning(): - } - - it, err := v.view.Iterator() - if err != nil { - return nil, errors.Wrap(err, "failed to get iterator from view") - } - - keys := []string{} - for it.Next() { - keys = append(keys, it.Key()) - } - - return keys, nil -} - -func (v *{{ .Name }}_View_impl) Get(key string) (*{{ .MessageType }}, error) { - select { - case <-v.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-v.view.WaitRunning(): - } - - m, err := v.view.Get(key) - if err != nil { - return nil, errors.Wrap(err, "failed to get value from view") - } - - if m == nil { - return nil, nil - } - - msg, ok := m.(*{{ .MessageType }}) - if !ok { - return nil, errors.Errorf("expecting message of type '*{{ .MessageType }}' got type '%t'", m) - } - - return msg, nil -} -`)) + viewTemplate = template.Must(template.New("").Parse(templates.View)) ) type viewOptions struct { @@ -157,6 +21,7 @@ type viewOptions struct { Name string TopicName string MessageType string + Type string } func generateView(writer io.Writer, view *viewOptions) error { @@ -174,8 +39,24 @@ func buildViewOptions(pkg string, mod string, modelsPath string, service *models options.TopicName = view.ToTopicName(service) options.Name = view.ToSafeMessageTypeName() - options.Import = view.ToPackage(service) - options.MessageType = view.ToMessageTypeWithPackage() + + topicType := "protobuf" + if view.TopicDefinition.Type != nil { + switch *view.TopicDefinition.Type { + case "raw": + topicType = "raw" + } + } + options.Type = topicType + + switch topicType { + case "protobuf": + options.Import = fmt.Sprintf("\"%s\"", view.ToPackage(service)) + options.MessageType = view.ToMessageTypeWithPackage() + + case 
"raw": + options.Import = "gokaCodecs \"github.com/lovoo/goka/codec\"" + } return options, nil } diff --git a/internal/generator/viewTemplate_test.go b/internal/generator/viewTemplate_test.go index 77142c5..fd65f0f 100644 --- a/internal/generator/viewTemplate_test.go +++ b/internal/generator/viewTemplate_test.go @@ -5,7 +5,7 @@ import ( "path" "testing" - "github.com/stretchr/testify/assert" + "github.com/bradleyjkemp/cupaloy" ) func validateView(tmpDir string, t *testing.T) { @@ -14,147 +14,8 @@ func validateView(tmpDir string, t *testing.T) { t.Fatal(err) } - assert.Equal(t, expectedView, string(s)) -} - -var ( - expectedView = `// Code generated by kafmesh-gen. DO NOT EDIT. - -package details - -import ( - "context" - "os" - "path/filepath" - - "github.com/burdiyan/kafkautil" - "github.com/lovoo/goka" - "github.com/lovoo/goka/storage" - "github.com/pkg/errors" - "github.com/syncromatics/kafmesh/pkg/runner" - "github.com/syndtr/goleveldb/leveldb/opt" - "golang.org/x/sync/errgroup" - - "test/internal/kafmesh/models/testMesh/testSerial" -) - -type TestSerialDetailsEnriched_View interface { - Keys() ([]string, error) - Get(key string) (*testSerial.DetailsEnriched, error) -} - -type TestSerialDetailsEnriched_View_impl struct { - context.Context - view *goka.View -} - -func New_TestSerialDetailsEnriched_View(options runner.ServiceOptions) (*TestSerialDetailsEnriched_View_impl, func(context.Context) func() error, error) { - brokers := options.Brokers - protoWrapper := options.ProtoWrapper - - codec, err := protoWrapper.Codec("testMesh.testSerial.detailsEnriched", &testSerial.DetailsEnriched{}) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create codec") - } - - opts := &opt.Options{ - BlockCacheCapacity: opt.MiB * 1, - WriteBuffer: opt.MiB * 1, - } - - path := filepath.Join("/tmp/storage", "view", "testMesh.testSerial.detailsEnriched") - - err = os.MkdirAll(path, os.ModePerm) - if err != nil { - return nil, nil, errors.Wrap(err, "failed to create 
view db directory") - } - - builder := storage.BuilderWithOptions(path, opts) - - view, err := goka.NewView(brokers, - goka.Table("testMesh.testSerial.detailsEnriched"), - codec, - goka.WithViewStorageBuilder(builder), - goka.WithViewHasher(kafkautil.MurmurHasher), - ) + err = cupaloy.SnapshotMulti("validateView", s) if err != nil { - return nil, nil, errors.Wrap(err, "failed creating view") + t.Fatalf("error: %s", err) } - - viewCtx, viewCancel := context.WithCancel(context.Background()) - v := &TestSerialDetailsEnriched_View_impl{ - viewCtx, - view, - } - - return v, func(outerCtx context.Context) func() error { - return func() error { - cancelableCtx, cancel := context.WithCancel(outerCtx) - defer cancel() - grp, ctx := errgroup.WithContext(cancelableCtx) - - grp.Go(func() error { - select { - case <-ctx.Done(): - viewCancel() - return nil - } - }) - grp.Go(func() error { - return v.view.Run(ctx) - }) - - select { - case <- ctx.Done(): - err := grp.Wait() - return err - } - } - }, nil -} - -func (v *TestSerialDetailsEnriched_View_impl) Keys() ([]string, error) { - select { - case <-v.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-v.view.WaitRunning(): - } - - it, err := v.view.Iterator() - if err != nil { - return nil, errors.Wrap(err, "failed to get iterator from view") - } - - keys := []string{} - for it.Next() { - keys = append(keys, it.Key()) - } - - return keys, nil } - -func (v *TestSerialDetailsEnriched_View_impl) Get(key string) (*testSerial.DetailsEnriched, error) { - select { - case <-v.Done(): - return nil, errors.New("context cancelled while waiting for partition to become running") - case <-v.view.WaitRunning(): - } - - m, err := v.view.Get(key) - if err != nil { - return nil, errors.Wrap(err, "failed to get value from view") - } - - if m == nil { - return nil, nil - } - - msg, ok := m.(*testSerial.DetailsEnriched) - if !ok { - return nil, errors.Errorf("expecting message of type 
'*testSerial.DetailsEnriched' got type '%t'", m) - } - - return msg, nil -} -` -) diff --git a/internal/graph/generated/generated.go b/internal/graph/generated/generated.go index f4225fc..29cf84b 100644 --- a/internal/graph/generated/generated.go +++ b/internal/graph/generated/generated.go @@ -5857,6 +5857,41 @@ func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res) } +func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsRepeatable, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -6809,7 +6844,10 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co func (ec *executionContext) unmarshalInputWatchProcessorInput(ctx context.Context, obj interface{}) (model.WatchProcessorInput, error) { var it model.WatchProcessorInput - var asMap = obj.(map[string]interface{}) + asMap := 
map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -6891,7 +6929,6 @@ var componentImplementors = []string{"Component"} func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet, obj *model.Component) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, componentImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -6899,13 +6936,19 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("Component") case "id": - out.Values[i] = ec._Component_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Component_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "service": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6916,20 +6959,36 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "name": - out.Values[i] = ec._Component_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Component_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Component_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Component_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "processors": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6940,10 +6999,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6954,10 +7019,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6968,10 +7039,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6982,10 +7059,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -6996,10 +7079,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "views": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7010,10 +7099,16 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "dependsOn": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7024,6 +7119,11 @@ func (ec *executionContext) _Component(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7040,7 +7140,6 @@ var getStateImplementors = []string{"GetState", "Action"} func (ec *executionContext) _GetState(ctx context.Context, sel ast.SelectionSet, obj *model.GetState) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, getStateImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7048,17 +7147,32 @@ func (ec *executionContext) _GetState(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("GetState") case "topic": - out.Values[i] = ec._GetState_topic(ctx, field, obj) + innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { + return ec._GetState_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._GetState_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._GetState_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._GetState_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._GetState_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7077,7 +7191,6 @@ var inputImplementors = []string{"Input"} func (ec *executionContext) _Input(ctx context.Context, sel ast.SelectionSet, obj *model.Input) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, inputImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7085,17 +7198,32 @@ func (ec *executionContext) _Input(ctx context.Context, sel ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Input") case "topic": - out.Values[i] = ec._Input_topic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Input_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._Input_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Input_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._Input_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Input_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { invalids++ } @@ -7114,7 +7242,6 @@ var joinImplementors = []string{"Join", "Action"} func (ec *executionContext) _Join(ctx context.Context, sel ast.SelectionSet, obj *model.Join) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, joinImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7122,17 +7249,32 @@ func (ec *executionContext) _Join(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Join") case "topic": - out.Values[i] = ec._Join_topic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Join_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._Join_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Join_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._Join_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Join_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7151,7 +7293,6 @@ var lookupImplementors = []string{"Lookup", "Action"} func (ec *executionContext) _Lookup(ctx context.Context, sel ast.SelectionSet, obj *model.Lookup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, lookupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7159,22 +7300,42 @@ func (ec *executionContext) _Lookup(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Lookup") case "topic": - out.Values[i] = ec._Lookup_topic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec._Lookup_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._Lookup_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Lookup_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._Lookup_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Lookup_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "key": - out.Values[i] = ec._Lookup_key(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Lookup_key(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7193,7 +7354,6 @@ var operationImplementors = []string{"Operation"} func (ec *executionContext) _Operation(ctx context.Context, sel ast.SelectionSet, obj *model.Operation) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, operationImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7201,22 +7361,42 @@ func (ec *executionContext) _Operation(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("Operation") case "input": - out.Values[i] = ec._Operation_input(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Operation_input(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "startTime": - out.Values[i] = ec._Operation_startTime(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Operation_startTime(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { invalids++ } case "endTime": - out.Values[i] = ec._Operation_endTime(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Operation_endTime(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "actions": - out.Values[i] = ec._Operation_actions(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Operation_actions(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7235,7 +7415,6 @@ var outputImplementors = []string{"Output", "Action"} func (ec *executionContext) _Output(ctx context.Context, sel ast.SelectionSet, obj *model.Output) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, outputImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7243,22 +7422,42 @@ func (ec *executionContext) _Output(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Output") case "topic": - out.Values[i] = ec._Output_topic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Output_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._Output_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Output_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._Output_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Output_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "key": - out.Values[i] = ec._Output_key(ctx, field, obj) + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Output_key(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7277,7 +7476,6 @@ var podImplementors = []string{"Pod"} func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj *model.Pod) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, podImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7285,18 +7483,29 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Pod") case "id": - out.Values[i] = ec._Pod_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Pod_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Pod_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Pod_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "processors": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7307,10 +7516,16 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7321,10 +7536,16 @@ func (ec *executionContext) 
_Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7335,10 +7556,16 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7349,10 +7576,16 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7363,10 +7596,16 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "views": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7377,6 +7616,11 @@ func (ec *executionContext) _Pod(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7393,7 +7637,6 @@ var processorImplementors = []string{"Processor"} func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet, obj *model.Processor) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, processorImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7401,13 +7644,19 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("Processor") case "id": - out.Values[i] = ec._Processor_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Processor_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7418,25 +7667,46 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "name": - out.Values[i] = ec._Processor_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Processor_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Processor_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Processor_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } 
case "groupName": - out.Values[i] = ec._Processor_groupName(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Processor_groupName(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "persistence": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7444,10 +7714,16 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet }() res = ec._Processor_persistence(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7458,10 +7734,16 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "inputs": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7472,10 +7754,16 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "joins": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7486,10 +7774,16 @@ func (ec 
*executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "lookups": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7500,10 +7794,16 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "outputs": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7514,6 +7814,11 @@ func (ec *executionContext) _Processor(ctx context.Context, sel ast.SelectionSet atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7530,7 +7835,6 @@ var processorInputImplementors = []string{"ProcessorInput"} func (ec *executionContext) _ProcessorInput(ctx context.Context, sel ast.SelectionSet, obj *model.ProcessorInput) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, processorInputImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7538,13 +7842,19 @@ func (ec *executionContext) _ProcessorInput(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("ProcessorInput") case "id": - out.Values[i] = ec._ProcessorInput_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProcessorInput_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "processor": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7555,10 +7865,16 @@ func (ec *executionContext) _ProcessorInput(ctx context.Context, sel ast.Selecti atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7569,6 +7885,11 @@ func (ec *executionContext) _ProcessorInput(ctx context.Context, sel ast.Selecti atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7585,7 +7906,6 @@ var processorJoinImplementors = []string{"ProcessorJoin"} func (ec *executionContext) _ProcessorJoin(ctx context.Context, sel ast.SelectionSet, obj *model.ProcessorJoin) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, processorJoinImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7593,13 +7913,19 @@ func (ec *executionContext) _ProcessorJoin(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("ProcessorJoin") case "id": - out.Values[i] = ec._ProcessorJoin_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProcessorJoin_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "processor": field := field - out.Concurrently(i, func() (res graphql.Marshaler) 
{ + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7610,10 +7936,16 @@ func (ec *executionContext) _ProcessorJoin(ctx context.Context, sel ast.Selectio atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7624,6 +7956,11 @@ func (ec *executionContext) _ProcessorJoin(ctx context.Context, sel ast.Selectio atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7640,7 +7977,6 @@ var processorLookupImplementors = []string{"ProcessorLookup"} func (ec *executionContext) _ProcessorLookup(ctx context.Context, sel ast.SelectionSet, obj *model.ProcessorLookup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, processorLookupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7648,13 +7984,19 @@ func (ec *executionContext) _ProcessorLookup(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("ProcessorLookup") case "id": - out.Values[i] = ec._ProcessorLookup_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProcessorLookup_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "processor": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -7665,10 +8007,16 @@ func (ec *executionContext) _ProcessorLookup(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7679,6 +8027,11 @@ func (ec *executionContext) _ProcessorLookup(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7695,7 +8048,6 @@ var processorOutputImplementors = []string{"ProcessorOutput"} func (ec *executionContext) _ProcessorOutput(ctx context.Context, sel ast.SelectionSet, obj *model.ProcessorOutput) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, processorOutputImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7703,13 +8055,19 @@ func (ec *executionContext) _ProcessorOutput(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("ProcessorOutput") case "id": - out.Values[i] = ec._ProcessorOutput_id(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProcessorOutput_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) } case "processor": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7720,10 +8078,16 @@ func (ec 
*executionContext) _ProcessorOutput(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7734,6 +8098,11 @@ func (ec *executionContext) _ProcessorOutput(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7750,7 +8119,6 @@ var queryImplementors = []string{"Query"} func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors) - ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ Object: "Query", }) @@ -7758,12 +8126,18 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("Query") case "services": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7774,10 +8148,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return rrm(innerCtx) }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7788,10 +8171,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "topics": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7802,10 +8194,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "serviceById": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7813,10 +8214,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_serviceById(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "componentById": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { 
defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7824,11 +8234,29 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_componentById(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "__type": - out.Values[i] = ec._Query___type(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "__schema": - out.Values[i] = ec._Query___schema(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___schema(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -7844,7 +8272,6 @@ var serviceImplementors = []string{"Service"} func (ec *executionContext) _Service(ctx context.Context, sel ast.SelectionSet, obj *model.Service) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, serviceImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7852,23 +8279,39 @@ func (ec *executionContext) _Service(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Service") case "id": - out.Values[i] = ec._Service_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Service_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Service_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Service_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Service_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Service_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "components": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7879,10 +8322,16 @@ func (ec *executionContext) _Service(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "dependsOn": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7893,6 +8342,11 @@ func (ec *executionContext) _Service(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -7909,7 +8363,6 @@ var setStateImplementors = []string{"SetState", "Action"} func (ec *executionContext) _SetState(ctx context.Context, sel ast.SelectionSet, obj *model.SetState) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, setStateImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7917,17 +8370,32 @@ func (ec *executionContext) _SetState(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("SetState") case 
"topic": - out.Values[i] = ec._SetState_topic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SetState_topic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "message": - out.Values[i] = ec._SetState_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SetState_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._SetState_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SetState_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -7946,7 +8414,6 @@ var sinkImplementors = []string{"Sink"} func (ec *executionContext) _Sink(ctx context.Context, sel ast.SelectionSet, obj *model.Sink) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sinkImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -7954,13 +8421,19 @@ func (ec *executionContext) _Sink(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Sink") case "id": - out.Values[i] = ec._Sink_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Sink_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7971,20 +8444,36 @@ func (ec *executionContext) _Sink(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + 
out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "name": - out.Values[i] = ec._Sink_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Sink_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Sink_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Sink_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -7995,10 +8484,16 @@ func (ec *executionContext) _Sink(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8009,6 +8504,11 @@ func (ec *executionContext) _Sink(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8025,7 +8525,6 @@ var sourceImplementors = []string{"Source"} func (ec *executionContext) _Source(ctx context.Context, sel ast.SelectionSet, obj *model.Source) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sourceImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ 
-8033,13 +8532,19 @@ func (ec *executionContext) _Source(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Source") case "id": - out.Values[i] = ec._Source_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Source_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8050,10 +8555,16 @@ func (ec *executionContext) _Source(ctx context.Context, sel ast.SelectionSet, o atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8064,10 +8575,16 @@ func (ec *executionContext) _Source(ctx context.Context, sel ast.SelectionSet, o atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8078,6 +8595,11 @@ func (ec *executionContext) _Source(ctx context.Context, sel ast.SelectionSet, o atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8114,7 +8636,6 @@ var topicImplementors = []string{"Topic"} func (ec *executionContext) 
_Topic(ctx context.Context, sel ast.SelectionSet, obj *model.Topic) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, topicImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8122,23 +8643,39 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Topic") case "id": - out.Values[i] = ec._Topic_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Topic_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Topic_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Topic_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "message": - out.Values[i] = ec._Topic_message(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Topic_message(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "processorInputs": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8149,10 +8686,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "processorJoins": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8163,10 
+8706,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "processorLookups": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8177,10 +8726,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "processorOutputs": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8191,10 +8746,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "processorPersistences": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8205,10 +8766,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8219,10 +8786,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob 
atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8233,10 +8806,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSinks": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8247,10 +8826,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "viewSources": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8261,10 +8846,16 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "views": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8275,6 +8866,11 @@ func (ec *executionContext) _Topic(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) 
default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8291,7 +8887,6 @@ var viewImplementors = []string{"View"} func (ec *executionContext) _View(ctx context.Context, sel ast.SelectionSet, obj *model.View) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, viewImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8299,13 +8894,19 @@ func (ec *executionContext) _View(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("View") case "id": - out.Values[i] = ec._View_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._View_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8316,10 +8917,16 @@ func (ec *executionContext) _View(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8330,10 +8937,16 @@ func (ec *executionContext) _View(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -8344,6 +8957,11 @@ func (ec *executionContext) _View(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8360,7 +8978,6 @@ var viewSinkImplementors = []string{"ViewSink"} func (ec *executionContext) _ViewSink(ctx context.Context, sel ast.SelectionSet, obj *model.ViewSink) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, viewSinkImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8368,13 +8985,19 @@ func (ec *executionContext) _ViewSink(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("ViewSink") case "id": - out.Values[i] = ec._ViewSink_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ViewSink_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8385,20 +9008,36 @@ func (ec *executionContext) _ViewSink(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "name": - out.Values[i] = ec._ViewSink_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ViewSink_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._ViewSink_description(ctx, field, obj) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._ViewSink_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8409,10 +9048,16 @@ func (ec *executionContext) _ViewSink(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8423,6 +9068,11 @@ func (ec *executionContext) _ViewSink(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8439,7 +9089,6 @@ var viewSourceImplementors = []string{"ViewSource"} func (ec *executionContext) _ViewSource(ctx context.Context, sel ast.SelectionSet, obj *model.ViewSource) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, viewSourceImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8447,13 +9096,19 @@ func (ec *executionContext) _ViewSource(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("ViewSource") case "id": - out.Values[i] = ec._ViewSource_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ViewSource_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "component": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8464,20 +9119,36 @@ func (ec *executionContext) _ViewSource(ctx context.Context, sel ast.SelectionSe atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "description": - out.Values[i] = ec._ViewSource_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ViewSource_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._ViewSource_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ViewSource_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "topic": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8488,10 +9159,16 @@ func (ec *executionContext) _ViewSource(ctx context.Context, sel ast.SelectionSe atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "pods": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8502,6 +9179,11 @@ func (ec *executionContext) _ViewSource(ctx context.Context, sel ast.SelectionSe atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler 
{ + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -8518,7 +9200,6 @@ var __DirectiveImplementors = []string{"__Directive"} func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8526,19 +9207,49 @@ func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__Directive") case "name": - out.Values[i] = ec.___Directive_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___Directive_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "locations": - out.Values[i] = ec.___Directive_locations(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_locations(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "args": - out.Values[i] = ec.___Directive_args(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_args(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "isRepeatable": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_isRepeatable(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -8557,7 +9268,6 @@ var 
__EnumValueImplementors = []string{"__EnumValue"} func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8565,19 +9275,39 @@ func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__EnumValue") case "name": - out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "isDeprecated": - out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_isDeprecated(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "deprecationReason": - out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_deprecationReason(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -8593,7 +9323,6 @@ var __FieldImplementors = []string{"__Field"} func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) - out := graphql.NewFieldSet(fields) var invalids 
uint32 for i, field := range fields { @@ -8601,29 +9330,59 @@ func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("__Field") case "name": - out.Values[i] = ec.___Field_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___Field_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "args": - out.Values[i] = ec.___Field_args(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_args(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "type": - out.Values[i] = ec.___Field_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "isDeprecated": - out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_isDeprecated(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "deprecationReason": - out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_deprecationReason(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -8639,7 +9398,6 @@ var __InputValueImplementors = []string{"__InputValue"} func (ec *executionContext) ___InputValue(ctx context.Context, sel 
ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8647,19 +9405,39 @@ func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("__InputValue") case "name": - out.Values[i] = ec.___InputValue_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___InputValue_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "type": - out.Values[i] = ec.___InputValue_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "defaultValue": - out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_defaultValue(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -8675,7 +9453,6 @@ var __SchemaImplementors = []string{"__Schema"} func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8683,21 +9460,46 @@ func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, 
case "__typename": out.Values[i] = graphql.MarshalString("__Schema") case "types": - out.Values[i] = ec.___Schema_types(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_types(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "queryType": - out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_queryType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "mutationType": - out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_mutationType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "subscriptionType": - out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_subscriptionType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "directives": - out.Values[i] = ec.___Schema_directives(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_directives(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -8716,7 +9518,6 @@ var __TypeImplementors = []string{"__Type"} func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -8724,26 +9525,71 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("__Type") case "kind": - out.Values[i] = ec.___Type_kind(ctx, field, obj) + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_kind(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "name": - out.Values[i] = ec.___Type_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "description": - out.Values[i] = ec.___Type_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fields": - out.Values[i] = ec.___Type_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "interfaces": - out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_interfaces(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "possibleTypes": - out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_possibleTypes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "enumValues": - out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_enumValues(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "inputFields": - out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_inputFields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "ofType": - out.Values[i] = ec.___Type_ofType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_ofType(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -8803,6 +9649,13 @@ func (ec *executionContext) marshalNAction2ᚕgithubᚗcomᚋsyncromaticsᚋkafm } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -8859,6 +9712,13 @@ func (ec *executionContext) marshalNComponent2ᚕᚖgithubᚗcomᚋsyncromatics } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -8960,6 +9820,13 @@ func (ec *executionContext) marshalNPod2ᚕᚖgithubᚗcomᚋsyncromaticsᚋkafm } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9011,6 +9878,13 @@ func (ec *executionContext) marshalNProcessor2ᚕᚖgithubᚗcomᚋsyncromatics } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9058,6 +9932,13 @@ func (ec *executionContext) marshalNProcessorInput2ᚕᚖgithubᚗcomᚋsyncroma } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9105,6 +9986,13 @@ func (ec *executionContext) marshalNProcessorJoin2ᚕᚖgithubᚗcomᚋsyncromat } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9152,6 +10040,13 @@ func (ec *executionContext) marshalNProcessorLookup2ᚕᚖgithubᚗcomᚋsyncrom } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9199,6 +10094,13 @@ func (ec *executionContext) marshalNProcessorOutput2ᚕᚖgithubᚗcomᚋsyncrom } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9250,6 +10152,13 @@ func (ec *executionContext) marshalNService2ᚕᚖgithubᚗcomᚋsyncromaticsᚋ } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9297,6 +10206,13 @@ func (ec *executionContext) 
marshalNSink2ᚕᚖgithubᚗcomᚋsyncromaticsᚋkaf } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9344,6 +10260,13 @@ func (ec *executionContext) marshalNSource2ᚕᚖgithubᚗcomᚋsyncromaticsᚋk } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9410,6 +10333,13 @@ func (ec *executionContext) marshalNTopic2ᚕᚖgithubᚗcomᚋsyncromaticsᚋka } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9457,6 +10387,13 @@ func (ec *executionContext) marshalNView2ᚕᚖgithubᚗcomᚋsyncromaticsᚋkaf } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9504,6 +10441,13 @@ func (ec *executionContext) marshalNViewSink2ᚕᚖgithubᚗcomᚋsyncromatics } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9551,6 +10495,13 @@ func (ec *executionContext) marshalNViewSource2ᚕᚖgithubᚗcomᚋsyncromatics } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9602,6 +10553,13 @@ func (ec *executionContext) marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgq } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9623,11 +10581,7 @@ func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Conte func (ec *executionContext) unmarshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) { var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error res := make([]string, len(vSlice)) @@ -9675,6 +10629,13 @@ func (ec *executionContext) marshalN__DirectiveLocation2ᚕstringᚄ(ctx context } wg.Wait() + + for _, e := range ret { + if e == 
graphql.Null { + return graphql.Null + } + } + return ret } @@ -9724,6 +10685,13 @@ func (ec *executionContext) marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋg } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9765,6 +10733,13 @@ func (ec *executionContext) marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9799,7 +10774,8 @@ func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interf } func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { - return graphql.MarshalBoolean(v) + res := graphql.MarshalBoolean(v) + return res } func (ec *executionContext) unmarshalOBoolean2ᚖbool(ctx context.Context, v interface{}) (*bool, error) { @@ -9814,7 +10790,8 @@ func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast if v == nil { return graphql.Null } - return graphql.MarshalBoolean(*v) + res := graphql.MarshalBoolean(*v) + return res } func (ec *executionContext) marshalOComponent2ᚖgithubᚗcomᚋsyncromaticsᚋkafmeshᚋinternalᚋgraphᚋmodelᚐComponent(ctx context.Context, sel ast.SelectionSet, v *model.Component) graphql.Marshaler { @@ -9837,7 +10814,8 @@ func (ec *executionContext) unmarshalOString2string(ctx context.Context, v inter } func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { - return graphql.MarshalString(v) + res := graphql.MarshalString(v) + return res } func (ec *executionContext) unmarshalOString2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) { @@ -9846,11 +10824,7 @@ func (ec *executionContext) unmarshalOString2ᚕstringᚄ(ctx context.Context, v } var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = 
graphql.CoerceList(v) } var err error res := make([]string, len(vSlice)) @@ -9873,6 +10847,12 @@ func (ec *executionContext) marshalOString2ᚕstringᚄ(ctx context.Context, sel ret[i] = ec.marshalNString2string(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9888,7 +10868,8 @@ func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel as if v == nil { return graphql.Null } - return graphql.MarshalString(*v) + res := graphql.MarshalString(*v) + return res } func (ec *executionContext) marshalOTopic2ᚖgithubᚗcomᚋsyncromaticsᚋkafmeshᚋinternalᚋgraphᚋmodelᚐTopic(ctx context.Context, sel ast.SelectionSet, v *model.Topic) graphql.Marshaler { @@ -9943,6 +10924,13 @@ func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgq } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -9983,6 +10971,13 @@ func (ec *executionContext) marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -10023,6 +11018,13 @@ func (ec *executionContext) marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋg } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -10070,6 +11072,13 @@ func (ec *executionContext) marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } diff --git a/internal/protos/kafmesh/discovery/v1/topic_definition.pb.go b/internal/protos/kafmesh/discovery/v1/topic_definition.pb.go index 96305f0..5f04250 100644 --- a/internal/protos/kafmesh/discovery/v1/topic_definition.pb.go +++ b/internal/protos/kafmesh/discovery/v1/topic_definition.pb.go @@ -26,16 +26,19 @@ type TopicType int32 const ( TopicType_TOPIC_TYPE_INVALID TopicType = 0 TopicType_TOPIC_TYPE_PROTOBUF TopicType = 1 + 
TopicType_TOPIC_TYPE_RAW TopicType = 2 ) var TopicType_name = map[int32]string{ 0: "TOPIC_TYPE_INVALID", 1: "TOPIC_TYPE_PROTOBUF", + 2: "TOPIC_TYPE_RAW", } var TopicType_value = map[string]int32{ "TOPIC_TYPE_INVALID": 0, "TOPIC_TYPE_PROTOBUF": 1, + "TOPIC_TYPE_RAW": 2, } func (x TopicType) String() string { @@ -112,7 +115,7 @@ func init() { } var fileDescriptor_8bae9ce406235a0f = []byte{ - // 244 bytes of a gzipped FileDescriptorProto + // 255 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0xce, 0x4e, 0x4c, 0xcb, 0x4d, 0x2d, 0xce, 0xd0, 0x4f, 0xc9, 0x2c, 0x4e, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0xd4, 0x2f, 0x33, 0xd4, 0x2f, 0xc9, 0x2f, 0xc8, 0x4c, 0x8e, 0x4f, 0x49, 0x4d, 0xcb, 0xcc, 0xcb, 0x2c, 0xc9, 0xcc, @@ -121,12 +124,12 @@ var fileDescriptor_8bae9ce406235a0f = []byte{ 0x1b, 0x21, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x04, 0xe1, 0x08, 0x49, 0x70, 0xb1, 0xe7, 0xa6, 0x16, 0x17, 0x27, 0xa6, 0xa7, 0x4a, 0x30, 0x81, 0xc5, 0x61, 0x5c, 0x21, 0x63, 0x2e, 0x96, 0x92, 0xca, 0x82, 0x54, 0x09, 0x66, 0x05, 0x46, 0x0d, 0x3e, 0x23, 0x79, 0x3d, 0x6c, 0xf6, 0xe8, 0x81, - 0x2d, 0x09, 0xa9, 0x2c, 0x48, 0x0d, 0x02, 0x2b, 0xd6, 0xb2, 0xe1, 0xe2, 0x84, 0x0b, 0x09, 0x89, + 0x2d, 0x09, 0xa9, 0x2c, 0x48, 0x0d, 0x02, 0x2b, 0xd6, 0x0a, 0xe0, 0xe2, 0x84, 0x0b, 0x09, 0x89, 0x71, 0x09, 0x85, 0xf8, 0x07, 0x78, 0x3a, 0xc7, 0x87, 0x44, 0x06, 0xb8, 0xc6, 0x7b, 0xfa, 0x85, 0x39, 0xfa, 0x78, 0xba, 0x08, 0x30, 0x08, 0x89, 0x73, 0x09, 0x23, 0x89, 0x07, 0x04, 0xf9, 0x87, - 0xf8, 0x3b, 0x85, 0xba, 0x09, 0x30, 0x3a, 0xc5, 0x70, 0x49, 0x24, 0xe7, 0xe7, 0x62, 0xb5, 0xc9, - 0x49, 0x04, 0xcd, 0x3f, 0x01, 0x20, 0xdf, 0x07, 0x30, 0x46, 0x71, 0xc3, 0x55, 0x95, 0x19, 0x2e, - 0x62, 0x62, 0xf6, 0x76, 0x89, 0x58, 0xc5, 0x24, 0xe2, 0x0d, 0x35, 0xc1, 0x05, 0x6e, 0x42, 0x98, - 0x61, 0x12, 0x1b, 0x38, 0xc0, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x22, 0xb1, 0x8c, 0x14, - 0x5f, 0x01, 0x00, 0x00, + 0xf8, 0x3b, 0x85, 0xba, 0x09, 0x30, 0x0a, 0x09, 0x71, 0xf1, 
0x21, 0x49, 0x04, 0x39, 0x86, 0x0b, + 0x30, 0x39, 0xc5, 0x70, 0x49, 0x24, 0xe7, 0xe7, 0x62, 0xb5, 0xdd, 0x49, 0x04, 0xcd, 0x8f, 0x01, + 0xa0, 0x10, 0x09, 0x60, 0x8c, 0xe2, 0x86, 0xab, 0x2a, 0x33, 0x5c, 0xc4, 0xc4, 0xec, 0xed, 0x12, + 0xb1, 0x8a, 0x49, 0xc4, 0x1b, 0x6a, 0x82, 0x0b, 0xdc, 0x84, 0x30, 0xc3, 0x24, 0x36, 0x70, 0x20, + 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x8e, 0x71, 0x48, 0x7f, 0x73, 0x01, 0x00, 0x00, } diff --git a/internal/storage/statik/statik.go b/internal/storage/statik/statik.go index eda7562..4875472 100644 --- a/internal/storage/statik/statik.go +++ b/internal/storage/statik/statik.go @@ -8,7 +8,7 @@ import ( func init() { - data := "PK\x03\x04\x14\x00\x08\x00\x08\x00\x02!\xf2P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00 \x001_initialize_schema.up.sqlUT\x05\x00\x01\xa4u\x12_\xec\x96\xcb\x8e\x9b0\x14\x86\xf7<\x85\x97\x89\xc4\x1btE#WE\xa5\xb4\xe3\x90\x91\xb2B\x11X\x91'\x13\x1f\x0bC\xe6\xf5G\\\x02\xb9\xd8\xd8&Q4\x8a\x86M\x16\x9c\xcb\xff\xfd\xc7\x9cxAp\x90`\x94\x04?#\x8cJ\x10,\x93h\xe6!\x84\x10\xcbQ\xf3,1 \x83\x08\xfd'\xe1\xdf\x80\xac\xd1\x1f\xbc\xf6\x9b\xf7|\xb3\xa7\xf5\xefk@\x16\xbf\x03\x82\xe2\x7f \x8aWQ\xd4\xbe\xddS)7[\xaay\xbb\x8a\xc3\x97\x15\x9e\xd5%\xe6\xde\xfc\x87\xe7\x9d\xc9\x90\xb48\xb0\x8c^\n\xb1\x11\xa3\x17\x94S\x99\x15L\x94\x0c\xf8$Q\x19\xec\x05p\xca\xcb\xfb\xca\xeaX\x9b\x880N\x10\xc1\xbf0\xc1\xf1\x02/{\x1bf,\x9f\xdf\x02\xe3\x1f+\xa9\xac\x86\xaaps\xba\xf7\xe1Z\xf0`\x91Brs\xb6\xba\xda\x17y\xed\xb1S\xe4t\x10}]\xbf\x0dUp\x88\x022*%\x14.(\xe6\xe9\x98}\x9ej\xc7\xb6\x80J\xa4\xad\x02ueA\x0b\xc9dIyF\xf5\x86)\x86\xdd\xb7\x1d\xb3)e\\TNG\xb9O\xbd\x06\x1d\xcc\xbf\xef\xdc\xfb\xba\x16sO\xdf\x80\xf1g\xe2y\x07\xd8U\xe2\x99\x88\xa0*\x9f\xe2\xcc\x1d\x18\xfdp\xa1\x98\xba\"n\xa0\xb0\xd8\x98\x92\xf1\xdd#(\xccKv\n\xa7y1_;\xa1\xf9c\xaf\xc7\x99~\xbbq\xe6\xc6\xc3n\x05_\xde\x0f\x01\xf9}.\xc4\xe3\xb7K\x01y:\xe9\x0e#`\x08\xba\\\x91\x90\xab\xcc\x99\xbaT\x8f\x0b\x12r\x7f\x08\xd4\xa0\xb8\x1f 
w\x8e\xb6\x872\xa3ko\x80h\xa34\x04\xaeK\xde]\x7f\xddA\x93\xd147\xa8\xafct\xee;.3w\xedu\x07MF\xd3\xdc\xe4<\xe3\xbb\x11\xdf\xb5\x00\xb6\x10\xf6 };u\xd6\xa0\xc6b\x1a\xa9\x11K\xfbU\x1c\x1f[8\x85\xd4Q\xc0\x93OE\x07i\xf5\xc9\x9c\x846\xa0\x9f\x01\x00\x00\xff\xffPK\x07\x08\xeb\xbf\xa0\xce\xd3\x01\x00\x00>\x10\x00\x00PK\x03\x04\x14\x00\x08\x00\x08\x00\x02!\xf2P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00 \x002_add_service_topics.up.sqlUT\x05\x00\x01\xa4u\x12_\xcc\x94An\xc20\x10E\xf7>\xc5\x1c\xa0\xe2\x02\x15\x0bT\xb1\xe8\xb6\xad\xdae\x16f\xaaN 3\x96\xc7\x86\xebW\x18;!\x12I\xa5\x84\xa0\xb0\"\x13\xff?O?\xc9\x7fy\xdbn>\xb6\xf0\xf9\xba\xfd\x02E\x7f$\x8bU\x10G\xb6\xda\xa1C\xde![B\x85\xcd\xbb\x01\x00P\xac\xd1\x86\xf4\xf7\xfc\xb3rp\xc2\xc8AWY\xfb\xd4\xdc;\x12\x9et\x95\xac\xd2\xec\xdb\xcb\xe1\x860\x8d\x88\x19=\xfc\nqW\x0e\xc2\xd9\xa7\x11\xac\xafv\xd2\xce\xa4\xf3\x91I\xd8\x8c\x01\xac\x94x?\x91\xf2\xe2QP\xb3\xe3\x0c\xbc\x13Q\x1b\xcaa\xc0r|\x02\xa8\xf3bQU|E\xecb\x98\xc0\xdc8%\xf0\xf6j\x90~\xc8'\x13u\xdc\ne3X_-\xbaw\"\xb5\xc8>\xbaEE\x92\x91\xba\x99\x14\xce\xbeP\xee\x12\xc6\x19hQQ$\xa0n\x10\x17\xc6\x9e\x18\x9e\x8d\xe9\xefO\x95\xe8\xed\xa8\xea\xcc\xca\xf1\xc9\x94\xd5\xe7\xaf={\xcdPH\x97\xba\x1b\xc9\n7m\xda\x16\x9d\x0f\xbb}\xb6\x12\xc3\xd2\xfa)#u\xdf\xc1\xc2\xf9\x88\x86\xd2\x95C\xaf\xa4\x01\xd9\xe2\xc329\xfd\xa0\xc7\x7f`\x80\x14X\x02p\xack\xf3\x17\x00\x00\xff\xffPK\x07\x08\xf0l\xe0\x18-\x01\x00\x00\xb7\x08\x00\x00PK\x03\x04\x14\x00\x08\x00\x08\x00\x02!\xf2P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1d\x00 \x003_add_component_topics.up.sqlUT\x05\x00\x01\xa4u\x12_\xcc\x94Aj\xf30\x10\x85\xf7:\xc5\x1c\xe0'\x17\xf8\xc9\"\x94,\xbamK\xbb4E\x9eR5\xce\x8c\xd0H\xcd\xf5\x8beI\xae q!\x8e\x83\xbd\xb2\x87\x99\xa7o\x9e\xc5{x\xda\xef^\xf6\xf0\xfa\xb8\x7f\x03\xcdG\xcb\x84\xe4\x1b\xcf\xd6\xe8\xa6E\x8b\xd4\"i\x83\x02\xbbg\x05\x00 
\xd8\xa1\xf6\xf1\xb5\x7f\xca\x88lL\x0b\xef2\x16\xfe\x95\x9eo\x83'\xd9D\xc9X\xfbp|<#\x10K\x86\x08\x1d|\xb1\xa1z\x1c\x98\x92N\x19\xd8Vg\xab\xd8\x1f\xc80\xa99\xa0\x8d\x18:\xcc\xa4\x1d42rR\\\x90{&r\xa1\x9d\x06\xcd\xed7\x00\xb6\x8e5\x8a\xb0k\x0c\xd9\xe0g\xb0\x17\xa5\xb8\xc0\xf85\xb9\xc5\x94N\"\xaa\xd42e)l\x7f\x1d\xb4\x943\x1d\xf3!\xd8UY\x93\x90jo2\xe7%snjJ\x0f\xb6*K\"Pm\xc8\xc0x\xc1\x8e\xffJM%\xaeppzV\xd8&\x85\xeb=\xca\x08}\x1e$\xad\x05\xa3k\x08\xc8+\x99\xe1\xac\xcc\x98\xbb\xcb\xe3\x8f\x7f\x9d\x83_[\x92%\xa4\xfavf\xce{f\x99l,:1\xe2\x914\xf6\xcd\xf75\xe9\xf4\x89\x0e\xff\xa22\x02\xc4\x1e(t\x9d\xfa \x00\x00\xff\xffPK\x07\x08\x9a\xb8\xcd\xd5/\x01\x00\x00\x14 \x00\x00PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x02!\xf2P\xeb\xbf\xa0\xce\xd3\x01\x00\x00>\x10\x00\x00\x1a\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x001_initialize_schema.up.sqlUT\x05\x00\x01\xa4u\x12_PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x02!\xf2P\xf0l\xe0\x18-\x01\x00\x00\xb7\x08\x00\x00\x1b\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81$\x02\x00\x002_add_service_topics.up.sqlUT\x05\x00\x01\xa4u\x12_PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x02!\xf2P\x9a\xb8\xcd\xd5/\x01\x00\x00\x14 \x00\x00\x1d\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\xa3\x03\x00\x003_add_component_topics.up.sqlUT\x05\x00\x01\xa4u\x12_PK\x05\x06\x00\x00\x00\x00\x03\x00\x03\x00\xf7\x00\x00\x00&\x05\x00\x00\x00\x00" + data := "PK\x03\x04\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00 \x001_initialize_schema.up.sqlUT\x05\x00\x01\xc6U\x8c`\xec\x96\xcb\x8e\x9b0\x14\x86\xf7<\x85\x97\x89\xc4\x1btE#WE\xa5\xb4\xe3\x90\x91\xb2B\x11X\x91'\x13\x1f\x0bC\xe6\xf5G\\\x02\xb9\xd8\xd8&Q4\x8a\x86M\x16\x9c\xcb\xff\xfd\xc7\x9cxAp\x90`\x94\x04?#\x8cJ\x10,\x93h\xe6!\x84\x10\xcbQ\xf3,1 \x83\x08\xfd'\xe1\xdf\x80\xac\xd1\x1f\xbc\xf6\x9b\xf7|\xb3\xa7\xf5\xefk@\x16\xbf\x03\x82\xe2\x7f 
\x8aWQ\xd4\xbe\xddS)7[\xaay\xbb\x8a\xc3\x97\x15\x9e\xd5%\xe6\xde\xfc\x87\xe7\x9d\xc9\x90\xb48\xb0\x8c^\n\xb1\x11\xa3\x17\x94S\x99\x15L\x94\x0c\xf8$Q\x19\xec\x05p\xca\xcb\xfb\xca\xeaX\x9b\x880N\x10\xc1\xbf0\xc1\xf1\x02/{\x1bf,\x9f\xdf\x02\xe3\x1f+\xa9\xac\x86\xaaps\xba\xf7\xe1Z\xf0`\x91Brs\xb6\xba\xda\x17y\xed\xb1S\xe4t\x10}]\xbf\x0dUp\x88\x022*%\x14.(\xe6\xe9\x98}\x9ej\xc7\xb6\x80J\xa4\xad\x02ueA\x0b\xc9dIyF\xf5\x86)\x86\xdd\xb7\x1d\xb3)e\\TNG\xb9O\xbd\x06\x1d\xcc\xbf\xef\xdc\xfb\xba\x16sO\xdf\x80\xf1g\xe2y\x07\xd8U\xe2\x99\x88\xa0*\x9f\xe2\xcc\x1d\x18\xfdp\xa1\x98\xba\"n\xa0\xb0\xd8\x98\x92\xf1\xdd#(\xccKv\n\xa7y1_;\xa1\xf9c\xaf\xc7\x99~\xbbq\xe6\xc6\xc3n\x05_\xde\x0f\x01\xf9}.\xc4\xe3\xb7K\x01y:\xe9\x0e#`\x08\xba\\\x91\x90\xab\xcc\x99\xbaT\x8f\x0b\x12r\x7f\x08\xd4\xa0\xb8\x1f w\x8e\xb6\x872\xa3ko\x80h\xa34\x04\xaeK\xde]\x7f\xddA\x93\xd147\xa8\xafct\xee;.3w\xedu\x07MF\xd3\xdc\xe4<\xe3\xbb\x11\xdf\xb5\x00\xb6\x10\xf6 };u\xd6\xa0\xc6b\x1a\xa9\x11K\xfbU\x1c\x1f[8\x85\xd4Q\xc0\x93OE\x07i\xf5\xc9\x9c\x846\xa0\x9f\x01\x00\x00\xff\xffPK\x07\x08\xeb\xbf\xa0\xce\xd3\x01\x00\x00>\x10\x00\x00PK\x03\x04\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00 \x002_add_service_topics.up.sqlUT\x05\x00\x01\xc6U\x8c`\xcc\x94An\xc20\x10E\xf7>\xc5\x1c\xa0\xe2\x02\x15\x0bT\xb1\xe8\xb6\xad\xdae\x16f\xaaN 
3\x96\xc7\x86\xebW\x18;!\x12I\xa5\x84\xa0\xb0\"\x13\xff?O?\xc9\x7fy\xdbn>\xb6\xf0\xf9\xba\xfd\x02E\x7f$\x8bU\x10G\xb6\xda\xa1C\xde![B\x85\xcd\xbb\x01\x00P\xac\xd1\x86\xf4\xf7\xfc\xb3rp\xc2\xc8AWY\xfb\xd4\xdc;\x12\x9et\x95\xac\xd2\xec\xdb\xcb\xe1\x860\x8d\x88\x19=\xfc\nqW\x0e\xc2\xd9\xa7\x11\xac\xafv\xd2\xce\xa4\xf3\x91I\xd8\x8c\x01\xac\x94x?\x91\xf2\xe2QP\xb3\xe3\x0c\xbc\x13Q\x1b\xcaa\xc0r|\x02\xa8\xf3bQU|E\xecb\x98\xc0\xdc8%\xf0\xf6j\x90~\xc8'\x13u\xdc\ne3X_-\xbaw\"\xb5\xc8>\xbaEE\x92\x91\xba\x99\x14\xce\xbeP\xee\x12\xc6\x19hQQ$\xa0n\x10\x17\xc6\x9e\x18\x9e\x8d\xe9\xefO\x95\xe8\xed\xa8\xea\xcc\xca\xf1\xc9\x94\xd5\xe7\xaf={\xcdPH\x97\xba\x1b\xc9\n7m\xda\x16\x9d\x0f\xbb}\xb6\x12\xc3\xd2\xfa)#u\xdf\xc1\xc2\xf9\x88\x86\xd2\x95C\xaf\xa4\x01\xd9\xe2\xc329\xfd\xa0\xc7\x7f`\x80\x14X\x02p\xack\xf3\x17\x00\x00\xff\xffPK\x07\x08\xf0l\xe0\x18-\x01\x00\x00\xb7\x08\x00\x00PK\x03\x04\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1d\x00 \x003_add_component_topics.up.sqlUT\x05\x00\x01\xc6U\x8c`\xcc\x94Aj\xf30\x10\x85\xf7:\xc5\x1c\xe0'\x17\xf8\xc9\"\x94,\xbamK\xbb4E\x9eR5\xce\x8c\xd0H\xcd\xf5\x8beI\xae q!\x8e\x83\xbd\xb2\x87\x99\xa7o\x9e\xc5{x\xda\xef^\xf6\xf0\xfa\xb8\x7f\x03\xcdG\xcb\x84\xe4\x1b\xcf\xd6\xe8\xa6E\x8b\xd4\"i\x83\x02\xbbg\x05\x00 \xd8\xa1\xf6\xf1\xb5\x7f\xca\x88lL\x0b\xef2\x16\xfe\x95\x9eo\x83'\xd9D\xc9X\xfbp|<#\x10K\x86\x08\x1d|\xb1\xa1z\x1c\x98\x92N\x19\xd8Vg\xab\xd8\x1f\xc80\xa99\xa0\x8d\x18:\xcc\xa4\x1d42rR\\\x90{&r\xa1\x9d\x06\xcd\xed7\x00\xb6\x8e5\x8a\xb0k\x0c\xd9\xe0g\xb0\x17\xa5\xb8\xc0\xf85\xb9\xc5\x94N\"\xaa\xd42e)l\x7f\x1d\xb4\x943\x1d\xf3!\xd8UY\x93\x90jo2\xe7%snjJ\x0f\xb6*K\"Pm\xc8\xc0x\xc1\x8e\xffJM%\xaeppzV\xd8&\x85\xeb=\xca\x08}\x1e$\xad\x05\xa3k\x08\xc8+\x99\xe1\xac\xcc\x98\xbb\xcb\xe3\x8f\x7f\x9d\x83_[\x92%\xa4\xfavf\xce{f\x99l,:1\xe2\x914\xf6\xcd\xf75\xe9\xf4\x89\x0e\xff\xa22\x02\xc4\x1e(t\x9d\xfa \x00\x00\xff\xffPK\x07\x08\x9a\xb8\xcd\xd5/\x01\x00\x00\x14 
\x00\x00PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\xeb\xbf\xa0\xce\xd3\x01\x00\x00>\x10\x00\x00\x1a\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x001_initialize_schema.up.sqlUT\x05\x00\x01\xc6U\x8c`PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\xf0l\xe0\x18-\x01\x00\x00\xb7\x08\x00\x00\x1b\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81$\x02\x00\x002_add_service_topics.up.sqlUT\x05\x00\x01\xc6U\x8c`PK\x01\x02\x14\x03\x14\x00\x08\x00\x08\x00\x1b\x99\x9eR\x9a\xb8\xcd\xd5/\x01\x00\x00\x14 \x00\x00\x1d\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\xa3\x03\x00\x003_add_component_topics.up.sqlUT\x05\x00\x01\xc6U\x8c`PK\x05\x06\x00\x00\x00\x00\x03\x00\x03\x00\xf7\x00\x00\x00&\x05\x00\x00\x00\x00" fs.Register(data) } \ No newline at end of file diff --git a/pkg/runner/discovery.go b/pkg/runner/discovery.go index 16b7329..05623b1 100644 --- a/pkg/runner/discovery.go +++ b/pkg/runner/discovery.go @@ -30,6 +30,8 @@ type MessageType int const ( // MessageTypeProtobuf uses protobuf serialization MessageTypeProtobuf MessageType = iota + // MessageTypeRaw uses no serialization and will return the raw byte slice + MessageTypeRaw ) // TopicDiscovery provides topic information for discovery @@ -368,6 +370,8 @@ func convertMessageType(messageType MessageType) (discoveryv1.TopicType, error) switch messageType { case MessageTypeProtobuf: return discoveryv1.TopicType_TOPIC_TYPE_PROTOBUF, nil + case MessageTypeRaw: + return discoveryv1.TopicType_TOPIC_TYPE_RAW, nil } return discoveryv1.TopicType_TOPIC_TYPE_INVALID, errors.Errorf("unknown message type '%d'", messageType) diff --git a/pkg/runner/rawSynchronizer.go b/pkg/runner/rawSynchronizer.go new file mode 100644 index 0000000..11bb786 --- /dev/null +++ b/pkg/runner/rawSynchronizer.go @@ -0,0 +1,102 @@ +package runner + +import ( + "context" + + "github.com/lovoo/goka" + "github.com/pkg/errors" +) + +// RawViewSourceJob executes a raw bytes synchronize +type RawViewSourceJob struct { + 
context.Context + view *goka.View + emitter *Emitter + keysSeen map[string]struct{} +} + +// NewRawViewSourceJob creates a new raw bytes view source job +func NewRawViewSourceJob(ctx context.Context, view *goka.View, emitter *Emitter) *RawViewSourceJob { + keysSeen := map[string]struct{}{} + return &RawViewSourceJob{ + ctx, + view, + emitter, + keysSeen, + } +} + +// Update adds a key/value pair to the job +func (s *RawViewSourceJob) Update(key string, msg []byte) error { + s.keysSeen[key] = struct{}{} + + current, err := s.view.Get(key) + if err != nil { + return errors.Wrap(err, "failed to get object") + } + + var shouldUpdate = false + + if current == nil { + shouldUpdate = true + } else { + c := current.([]byte) + if len(c) != len(msg) { + shouldUpdate = true + } else { + for i, b := range c { + if msg[i] != b { + shouldUpdate = true + break + } + } + } + } + + if !shouldUpdate { + return nil + } + + err = s.emitter.Emit(key, msg) + if err != nil { + return errors.Wrap(err, "failed to emit update") + } + + return nil +} + +// Finish the job and run deletes +func (s *RawViewSourceJob) Finish() error { + currentKeys, err := s.keys() + if err != nil { + return errors.Wrap(err, "failed to get current keys") + } + + for _, k := range currentKeys { + _, ok := s.keysSeen[k] + if ok { + continue + } + + err = s.emitter.Delete(k) + if err != nil { + return errors.Wrap(err, "failed to delete key") + } + } + + return nil +} + +func (s *RawViewSourceJob) keys() ([]string, error) { + it, err := s.view.Iterator() + if err != nil { + return nil, errors.Wrap(err, "failed to get iterator") + } + + keys := []string{} + for it.Next() { + keys = append(keys, it.Key()) + } + + return keys, nil +}