diff --git a/control-plane/pkg/client/clientset/versioned/fake/clientset_generated.go b/control-plane/pkg/client/clientset/versioned/fake/clientset_generated.go index a756c4f938..15b43cdf62 100644 --- a/control-plane/pkg/client/clientset/versioned/fake/clientset_generated.go +++ b/control-plane/pkg/client/clientset/versioned/fake/clientset_generated.go @@ -43,8 +43,12 @@ import ( // NewSimpleClientset returns a clientset that will respond with the provided objects. // It's backed by a very simple object tracker that processes creates, updates and deletions as-is, -// without applying any validations and/or defaults. It shouldn't be considered a replacement +// without applying any field management, validations and/or defaults. It shouldn't be considered a replacement // for a real clientset and is mostly useful in simple unit tests. +// +// DEPRECATED: NewClientset replaces this with support for field management, which significantly improves +// server side apply testing. NewClientset is only available when apply configurations are generated (e.g. +// via --with-applyconfig). func NewSimpleClientset(objects ...runtime.Object) *Clientset { o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder()) for _, obj := range objects { diff --git a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/fake/fake_kafkabinding.go b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/fake/fake_kafkabinding.go index e5a92aa147..c3716bf665 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/fake/fake_kafkabinding.go +++ b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/fake/fake_kafkabinding.go @@ -41,22 +41,24 @@ var kafkabindingsKind = v1.SchemeGroupVersion.WithKind("KafkaBinding") // Get takes name of the kafkaBinding, and returns the corresponding kafkaBinding object, and an error if there is any. 
func (c *FakeKafkaBindings) Get(ctx context.Context, name string, options metav1.GetOptions) (result *v1.KafkaBinding, err error) { + emptyResult := &v1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkabindingsResource, c.ns, name), &v1.KafkaBinding{}) + Invokes(testing.NewGetActionWithOptions(kafkabindingsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaBinding), err } // List takes label and field selectors, and returns the list of KafkaBindings that match those selectors. func (c *FakeKafkaBindings) List(ctx context.Context, opts metav1.ListOptions) (result *v1.KafkaBindingList, err error) { + emptyResult := &v1.KafkaBindingList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkabindingsResource, kafkabindingsKind, c.ns, opts), &v1.KafkaBindingList{}) + Invokes(testing.NewListActionWithOptions(kafkabindingsResource, kafkabindingsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeKafkaBindings) List(ctx context.Context, opts metav1.ListOptions) ( // Watch returns a watch.Interface that watches the requested kafkaBindings. func (c *FakeKafkaBindings) Watch(ctx context.Context, opts metav1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkabindingsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkabindingsResource, c.ns, opts)) } // Create takes the representation of a kafkaBinding and creates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. func (c *FakeKafkaBindings) Create(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.CreateOptions) (result *v1.KafkaBinding, err error) { + emptyResult := &v1.KafkaBinding{} obj, err := c.Fake. 
- Invokes(testing.NewCreateAction(kafkabindingsResource, c.ns, kafkaBinding), &v1.KafkaBinding{}) + Invokes(testing.NewCreateActionWithOptions(kafkabindingsResource, c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaBinding), err } // Update takes the representation of a kafkaBinding and updates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. func (c *FakeKafkaBindings) Update(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (result *v1.KafkaBinding, err error) { + emptyResult := &v1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkabindingsResource, c.ns, kafkaBinding), &v1.KafkaBinding{}) + Invokes(testing.NewUpdateActionWithOptions(kafkabindingsResource, c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaBinding), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeKafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (*v1.KafkaBinding, error) { +func (c *FakeKafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (result *v1.KafkaBinding, err error) { + emptyResult := &v1.KafkaBinding{} obj, err := c.Fake. 
- Invokes(testing.NewUpdateSubresourceAction(kafkabindingsResource, "status", c.ns, kafkaBinding), &v1.KafkaBinding{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkabindingsResource, "status", c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaBinding), err } @@ -123,7 +128,7 @@ func (c *FakeKafkaBindings) Delete(ctx context.Context, name string, opts metav1 // DeleteCollection deletes a collection of objects. func (c *FakeKafkaBindings) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkabindingsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkabindingsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1.KafkaBindingList{}) return err @@ -131,11 +136,12 @@ func (c *FakeKafkaBindings) DeleteCollection(ctx context.Context, opts metav1.De // Patch applies the patch and returns the patched kafkaBinding. func (c *FakeKafkaBindings) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts metav1.PatchOptions, subresources ...string) (result *v1.KafkaBinding, err error) { + emptyResult := &v1.KafkaBinding{} obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(kafkabindingsResource, c.ns, name, pt, data, subresources...), &v1.KafkaBinding{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkabindingsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaBinding), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/kafkabinding.go b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/kafkabinding.go index 7204c155df..5c5943a8d2 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/kafkabinding.go +++ b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1/kafkabinding.go @@ -20,12 +20,11 @@ package v1 import ( "context" - "time" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type KafkaBindingsGetter interface { type KafkaBindingInterface interface { Create(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.CreateOptions) (*v1.KafkaBinding, error) Update(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (*v1.KafkaBinding, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (*v1.KafkaBinding, error) Delete(ctx context.Context, name string, opts metav1.DeleteOptions) error DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error @@ -52,144 +52,18 @@ type KafkaBindingInterface interface { // kafkaBindings implements KafkaBindingInterface type kafkaBindings struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1.KafkaBinding, *v1.KafkaBindingList] } // newKafkaBindings returns a KafkaBindings func newKafkaBindings(c *BindingsV1Client, namespace string) *kafkaBindings { return &kafkaBindings{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1.KafkaBinding, *v1.KafkaBindingList]( + "kafkabindings", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1.KafkaBinding { return &v1.KafkaBinding{} }, + func() *v1.KafkaBindingList { return &v1.KafkaBindingList{} }), } } - -// Get takes name of the kafkaBinding, and returns the corresponding kafkaBinding object, and an error if there is any. -func (c *kafkaBindings) Get(ctx context.Context, name string, options metav1.GetOptions) (result *v1.KafkaBinding, err error) { - result = &v1.KafkaBinding{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaBindings that match those selectors. -func (c *kafkaBindings) List(ctx context.Context, opts metav1.ListOptions) (result *v1.KafkaBindingList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1.KafkaBindingList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). 
- Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaBindings. -func (c *kafkaBindings) Watch(ctx context.Context, opts metav1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaBinding and creates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. -func (c *kafkaBindings) Create(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.CreateOptions) (result *v1.KafkaBinding, err error) { - result = &v1.KafkaBinding{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaBinding and updates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. -func (c *kafkaBindings) Update(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (result *v1.KafkaBinding, err error) { - result = &v1.KafkaBinding{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(kafkaBinding.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *kafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1.KafkaBinding, opts metav1.UpdateOptions) (result *v1.KafkaBinding, err error) { - result = &v1.KafkaBinding{} - err = c.client.Put(). - Namespace(c.ns). 
- Resource("kafkabindings"). - Name(kafkaBinding.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaBinding and deletes it. Returns an error if one occurs. -func (c *kafkaBindings) Delete(ctx context.Context, name string, opts metav1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaBindings) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaBinding. -func (c *kafkaBindings) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts metav1.PatchOptions, subresources ...string) (result *v1.KafkaBinding, err error) { - result = &v1.KafkaBinding{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/fake/fake_kafkabinding.go b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/fake/fake_kafkabinding.go index a1302849f7..40b0508a19 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/fake/fake_kafkabinding.go +++ b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/fake/fake_kafkabinding.go @@ -41,22 +41,24 @@ var kafkabindingsKind = v1beta1.SchemeGroupVersion.WithKind("KafkaBinding") // Get takes name of the kafkaBinding, and returns the corresponding kafkaBinding object, and an error if there is any. func (c *FakeKafkaBindings) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaBinding, err error) { + emptyResult := &v1beta1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkabindingsResource, c.ns, name), &v1beta1.KafkaBinding{}) + Invokes(testing.NewGetActionWithOptions(kafkabindingsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaBinding), err } // List takes label and field selectors, and returns the list of KafkaBindings that match those selectors. func (c *FakeKafkaBindings) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaBindingList, err error) { + emptyResult := &v1beta1.KafkaBindingList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkabindingsResource, kafkabindingsKind, c.ns, opts), &v1beta1.KafkaBindingList{}) + Invokes(testing.NewListActionWithOptions(kafkabindingsResource, kafkabindingsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeKafkaBindings) List(ctx context.Context, opts v1.ListOptions) (resu // Watch returns a watch.Interface that watches the requested kafkaBindings. 
func (c *FakeKafkaBindings) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkabindingsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkabindingsResource, c.ns, opts)) } // Create takes the representation of a kafkaBinding and creates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. func (c *FakeKafkaBindings) Create(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.CreateOptions) (result *v1beta1.KafkaBinding, err error) { + emptyResult := &v1beta1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(kafkabindingsResource, c.ns, kafkaBinding), &v1beta1.KafkaBinding{}) + Invokes(testing.NewCreateActionWithOptions(kafkabindingsResource, c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaBinding), err } // Update takes the representation of a kafkaBinding and updates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. func (c *FakeKafkaBindings) Update(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (result *v1beta1.KafkaBinding, err error) { + emptyResult := &v1beta1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkabindingsResource, c.ns, kafkaBinding), &v1beta1.KafkaBinding{}) + Invokes(testing.NewUpdateActionWithOptions(kafkabindingsResource, c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaBinding), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *FakeKafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (*v1beta1.KafkaBinding, error) { +func (c *FakeKafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (result *v1beta1.KafkaBinding, err error) { + emptyResult := &v1beta1.KafkaBinding{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(kafkabindingsResource, "status", c.ns, kafkaBinding), &v1beta1.KafkaBinding{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkabindingsResource, "status", c.ns, kafkaBinding, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaBinding), err } @@ -123,7 +128,7 @@ func (c *FakeKafkaBindings) Delete(ctx context.Context, name string, opts v1.Del // DeleteCollection deletes a collection of objects. func (c *FakeKafkaBindings) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkabindingsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkabindingsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1beta1.KafkaBindingList{}) return err @@ -131,11 +136,12 @@ func (c *FakeKafkaBindings) DeleteCollection(ctx context.Context, opts v1.Delete // Patch applies the patch and returns the patched kafkaBinding. func (c *FakeKafkaBindings) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaBinding, err error) { + emptyResult := &v1beta1.KafkaBinding{} obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(kafkabindingsResource, c.ns, name, pt, data, subresources...), &v1beta1.KafkaBinding{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkabindingsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaBinding), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/kafkabinding.go b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/kafkabinding.go index 2b602ea7f8..6b8dc0a2ea 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/kafkabinding.go +++ b/control-plane/pkg/client/clientset/versioned/typed/bindings/v1beta1/kafkabinding.go @@ -20,12 +20,11 @@ package v1beta1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1beta1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type KafkaBindingsGetter interface { type KafkaBindingInterface interface { Create(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.CreateOptions) (*v1beta1.KafkaBinding, error) Update(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (*v1beta1.KafkaBinding, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (*v1beta1.KafkaBinding, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type KafkaBindingInterface interface { // kafkaBindings implements KafkaBindingInterface type kafkaBindings struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1beta1.KafkaBinding, *v1beta1.KafkaBindingList] } // newKafkaBindings returns a KafkaBindings func newKafkaBindings(c *BindingsV1beta1Client, namespace string) *kafkaBindings { return &kafkaBindings{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1beta1.KafkaBinding, *v1beta1.KafkaBindingList]( + "kafkabindings", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1beta1.KafkaBinding { return &v1beta1.KafkaBinding{} }, + func() *v1beta1.KafkaBindingList { return &v1beta1.KafkaBindingList{} }), } } - -// Get takes name of the kafkaBinding, and returns the corresponding kafkaBinding object, and an error if there is any. -func (c *kafkaBindings) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaBinding, err error) { - result = &v1beta1.KafkaBinding{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaBindings that match those selectors. -func (c *kafkaBindings) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaBindingList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1beta1.KafkaBindingList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). 
- VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaBindings. -func (c *kafkaBindings) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaBinding and creates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. -func (c *kafkaBindings) Create(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.CreateOptions) (result *v1beta1.KafkaBinding, err error) { - result = &v1beta1.KafkaBinding{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaBinding and updates it. Returns the server's representation of the kafkaBinding, and an error, if there is any. -func (c *kafkaBindings) Update(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (result *v1beta1.KafkaBinding, err error) { - result = &v1beta1.KafkaBinding{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(kafkaBinding.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *kafkaBindings) UpdateStatus(ctx context.Context, kafkaBinding *v1beta1.KafkaBinding, opts v1.UpdateOptions) (result *v1beta1.KafkaBinding, err error) { - result = &v1beta1.KafkaBinding{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(kafkaBinding.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaBinding). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaBinding and deletes it. Returns an error if one occurs. -func (c *kafkaBindings) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaBindings) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkabindings"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaBinding. -func (c *kafkaBindings) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaBinding, err error) { - result = &v1beta1.KafkaBinding{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkabindings"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/fake/fake_kafkasink.go b/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/fake/fake_kafkasink.go index 86c0995a1e..0d5f13a581 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/fake/fake_kafkasink.go +++ b/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/fake/fake_kafkasink.go @@ -41,22 +41,24 @@ var kafkasinksKind = v1alpha1.SchemeGroupVersion.WithKind("KafkaSink") // Get takes name of the kafkaSink, and returns the corresponding kafkaSink object, and an error if there is any. func (c *FakeKafkaSinks) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.KafkaSink, err error) { + emptyResult := &v1alpha1.KafkaSink{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkasinksResource, c.ns, name), &v1alpha1.KafkaSink{}) + Invokes(testing.NewGetActionWithOptions(kafkasinksResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.KafkaSink), err } // List takes label and field selectors, and returns the list of KafkaSinks that match those selectors. func (c *FakeKafkaSinks) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.KafkaSinkList, err error) { + emptyResult := &v1alpha1.KafkaSinkList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkasinksResource, kafkasinksKind, c.ns, opts), &v1alpha1.KafkaSinkList{}) + Invokes(testing.NewListActionWithOptions(kafkasinksResource, kafkasinksKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeKafkaSinks) List(ctx context.Context, opts v1.ListOptions) (result // Watch returns a watch.Interface that watches the requested kafkaSinks. 
func (c *FakeKafkaSinks) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkasinksResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkasinksResource, c.ns, opts)) } // Create takes the representation of a kafkaSink and creates it. Returns the server's representation of the kafkaSink, and an error, if there is any. func (c *FakeKafkaSinks) Create(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.CreateOptions) (result *v1alpha1.KafkaSink, err error) { + emptyResult := &v1alpha1.KafkaSink{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(kafkasinksResource, c.ns, kafkaSink), &v1alpha1.KafkaSink{}) + Invokes(testing.NewCreateActionWithOptions(kafkasinksResource, c.ns, kafkaSink, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.KafkaSink), err } // Update takes the representation of a kafkaSink and updates it. Returns the server's representation of the kafkaSink, and an error, if there is any. func (c *FakeKafkaSinks) Update(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (result *v1alpha1.KafkaSink, err error) { + emptyResult := &v1alpha1.KafkaSink{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkasinksResource, c.ns, kafkaSink), &v1alpha1.KafkaSink{}) + Invokes(testing.NewUpdateActionWithOptions(kafkasinksResource, c.ns, kafkaSink, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.KafkaSink), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *FakeKafkaSinks) UpdateStatus(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (*v1alpha1.KafkaSink, error) { +func (c *FakeKafkaSinks) UpdateStatus(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (result *v1alpha1.KafkaSink, err error) { + emptyResult := &v1alpha1.KafkaSink{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(kafkasinksResource, "status", c.ns, kafkaSink), &v1alpha1.KafkaSink{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkasinksResource, "status", c.ns, kafkaSink, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.KafkaSink), err } @@ -123,7 +128,7 @@ func (c *FakeKafkaSinks) Delete(ctx context.Context, name string, opts v1.Delete // DeleteCollection deletes a collection of objects. func (c *FakeKafkaSinks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkasinksResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkasinksResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.KafkaSinkList{}) return err @@ -131,11 +136,12 @@ func (c *FakeKafkaSinks) DeleteCollection(ctx context.Context, opts v1.DeleteOpt // Patch applies the patch and returns the patched kafkaSink. func (c *FakeKafkaSinks) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.KafkaSink, err error) { + emptyResult := &v1alpha1.KafkaSink{} obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(kafkasinksResource, c.ns, name, pt, data, subresources...), &v1alpha1.KafkaSink{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkasinksResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.KafkaSink), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/kafkasink.go b/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/kafkasink.go index b0c483ae13..92352190cf 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/kafkasink.go +++ b/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1/kafkasink.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/eventing/v1alpha1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type KafkaSinksGetter interface { type KafkaSinkInterface interface { Create(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.CreateOptions) (*v1alpha1.KafkaSink, error) Update(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (*v1alpha1.KafkaSink, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (*v1alpha1.KafkaSink, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type KafkaSinkInterface interface { // kafkaSinks implements KafkaSinkInterface type kafkaSinks struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.KafkaSink, *v1alpha1.KafkaSinkList] } // newKafkaSinks returns a KafkaSinks func newKafkaSinks(c *EventingV1alpha1Client, namespace string) *kafkaSinks { return &kafkaSinks{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.KafkaSink, *v1alpha1.KafkaSinkList]( + "kafkasinks", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.KafkaSink { return &v1alpha1.KafkaSink{} }, + func() *v1alpha1.KafkaSinkList { return &v1alpha1.KafkaSinkList{} }), } } - -// Get takes name of the kafkaSink, and returns the corresponding kafkaSink object, and an error if there is any. -func (c *kafkaSinks) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.KafkaSink, err error) { - result = &v1alpha1.KafkaSink{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasinks"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaSinks that match those selectors. -func (c *kafkaSinks) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.KafkaSinkList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.KafkaSinkList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasinks"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). 
- Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaSinks. -func (c *kafkaSinks) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkasinks"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaSink and creates it. Returns the server's representation of the kafkaSink, and an error, if there is any. -func (c *kafkaSinks) Create(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.CreateOptions) (result *v1alpha1.KafkaSink, err error) { - result = &v1alpha1.KafkaSink{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkasinks"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSink). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaSink and updates it. Returns the server's representation of the kafkaSink, and an error, if there is any. -func (c *kafkaSinks) Update(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (result *v1alpha1.KafkaSink, err error) { - result = &v1alpha1.KafkaSink{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkasinks"). - Name(kafkaSink.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSink). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *kafkaSinks) UpdateStatus(ctx context.Context, kafkaSink *v1alpha1.KafkaSink, opts v1.UpdateOptions) (result *v1alpha1.KafkaSink, err error) { - result = &v1alpha1.KafkaSink{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkasinks"). - Name(kafkaSink.Name). 
- SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSink). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaSink and deletes it. Returns an error if one occurs. -func (c *kafkaSinks) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasinks"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaSinks) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasinks"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaSink. -func (c *kafkaSinks) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.KafkaSink, err error) { - result = &v1alpha1.KafkaSink{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkasinks"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumer.go b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumer.go index d4cd8a7348..4f6928f89f 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumer.go +++ b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumer.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/internalskafkaeventing/v1alpha1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type ConsumersGetter interface { type ConsumerInterface interface { Create(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.CreateOptions) (*v1alpha1.Consumer, error) Update(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (*v1alpha1.Consumer, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (*v1alpha1.Consumer, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type ConsumerInterface interface { // consumers implements ConsumerInterface type consumers struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.Consumer, *v1alpha1.ConsumerList] } // newConsumers returns a Consumers func newConsumers(c *InternalV1alpha1Client, namespace string) *consumers { return &consumers{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.Consumer, *v1alpha1.ConsumerList]( + "consumers", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.Consumer { return &v1alpha1.Consumer{} }, + func() *v1alpha1.ConsumerList { return &v1alpha1.ConsumerList{} }), } } - -// Get takes name of the consumer, and returns the corresponding consumer object, and an error if there is any. -func (c *consumers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.Consumer, err error) { - result = &v1alpha1.Consumer{} - err = c.client.Get(). - Namespace(c.ns). - Resource("consumers"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of Consumers that match those selectors. -func (c *consumers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ConsumerList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.ConsumerList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("consumers"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). 
- Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested consumers. -func (c *consumers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("consumers"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a consumer and creates it. Returns the server's representation of the consumer, and an error, if there is any. -func (c *consumers) Create(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.CreateOptions) (result *v1alpha1.Consumer, err error) { - result = &v1alpha1.Consumer{} - err = c.client.Post(). - Namespace(c.ns). - Resource("consumers"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(consumer). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a consumer and updates it. Returns the server's representation of the consumer, and an error, if there is any. -func (c *consumers) Update(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (result *v1alpha1.Consumer, err error) { - result = &v1alpha1.Consumer{} - err = c.client.Put(). - Namespace(c.ns). - Resource("consumers"). - Name(consumer.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(consumer). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *consumers) UpdateStatus(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (result *v1alpha1.Consumer, err error) { - result = &v1alpha1.Consumer{} - err = c.client.Put(). - Namespace(c.ns). - Resource("consumers"). - Name(consumer.Name). - SubResource("status"). 
- VersionedParams(&opts, scheme.ParameterCodec). - Body(consumer). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the consumer and deletes it. Returns an error if one occurs. -func (c *consumers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("consumers"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *consumers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("consumers"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched consumer. -func (c *consumers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.Consumer, err error) { - result = &v1alpha1.Consumer{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("consumers"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumergroup.go b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumergroup.go index 28356d49ed..2db11861da 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumergroup.go +++ b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/consumergroup.go @@ -20,13 +20,12 @@ package v1alpha1 import ( "context" - "time" autoscalingv1 "k8s.io/api/autoscaling/v1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/internalskafkaeventing/v1alpha1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -41,6 +40,7 @@ type ConsumerGroupsGetter interface { type ConsumerGroupInterface interface { Create(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.CreateOptions) (*v1alpha1.ConsumerGroup, error) Update(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (*v1alpha1.ConsumerGroup, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (*v1alpha1.ConsumerGroup, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -56,153 +56,27 @@ type ConsumerGroupInterface interface { // consumerGroups implements ConsumerGroupInterface type consumerGroups struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.ConsumerGroup, *v1alpha1.ConsumerGroupList] } // newConsumerGroups returns a ConsumerGroups func newConsumerGroups(c *InternalV1alpha1Client, namespace string) *consumerGroups { return &consumerGroups{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.ConsumerGroup, *v1alpha1.ConsumerGroupList]( + "consumergroups", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.ConsumerGroup { return &v1alpha1.ConsumerGroup{} }, + func() *v1alpha1.ConsumerGroupList { return &v1alpha1.ConsumerGroupList{} }), } } -// Get takes name of the consumerGroup, and returns the corresponding consumerGroup object, and an error if there is any. -func (c *consumerGroups) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ConsumerGroup, err error) { - result = &v1alpha1.ConsumerGroup{} - err = c.client.Get(). - Namespace(c.ns). - Resource("consumergroups"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of ConsumerGroups that match those selectors. -func (c *consumerGroups) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ConsumerGroupList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.ConsumerGroupList{} - err = c.client.Get(). - Namespace(c.ns). 
- Resource("consumergroups"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested consumerGroups. -func (c *consumerGroups) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("consumergroups"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a consumerGroup and creates it. Returns the server's representation of the consumerGroup, and an error, if there is any. -func (c *consumerGroups) Create(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.CreateOptions) (result *v1alpha1.ConsumerGroup, err error) { - result = &v1alpha1.ConsumerGroup{} - err = c.client.Post(). - Namespace(c.ns). - Resource("consumergroups"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(consumerGroup). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a consumerGroup and updates it. Returns the server's representation of the consumerGroup, and an error, if there is any. -func (c *consumerGroups) Update(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (result *v1alpha1.ConsumerGroup, err error) { - result = &v1alpha1.ConsumerGroup{} - err = c.client.Put(). - Namespace(c.ns). - Resource("consumergroups"). - Name(consumerGroup.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(consumerGroup). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *consumerGroups) UpdateStatus(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (result *v1alpha1.ConsumerGroup, err error) { - result = &v1alpha1.ConsumerGroup{} - err = c.client.Put(). - Namespace(c.ns). - Resource("consumergroups"). - Name(consumerGroup.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(consumerGroup). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the consumerGroup and deletes it. Returns an error if one occurs. -func (c *consumerGroups) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("consumergroups"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *consumerGroups) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("consumergroups"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched consumerGroup. -func (c *consumerGroups) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ConsumerGroup, err error) { - result = &v1alpha1.ConsumerGroup{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("consumergroups"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). - Into(result) - return -} - // GetScale takes name of the consumerGroup, and returns the corresponding autoscalingv1.Scale object, and an error if there is any. 
func (c *consumerGroups) GetScale(ctx context.Context, consumerGroupName string, options v1.GetOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Get(). - Namespace(c.ns). + err = c.GetClient().Get(). + Namespace(c.GetNamespace()). Resource("consumergroups"). Name(consumerGroupName). SubResource("scale"). @@ -215,8 +89,8 @@ func (c *consumerGroups) GetScale(ctx context.Context, consumerGroupName string, // UpdateScale takes the top resource name and the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *consumerGroups) UpdateScale(ctx context.Context, consumerGroupName string, scale *autoscalingv1.Scale, opts v1.UpdateOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Put(). - Namespace(c.ns). + err = c.GetClient().Put(). + Namespace(c.GetNamespace()). Resource("consumergroups"). Name(consumerGroupName). SubResource("scale"). diff --git a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumer.go b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumer.go index ec3f968be6..5660fa6c30 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumer.go +++ b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumer.go @@ -41,22 +41,24 @@ var consumersKind = v1alpha1.SchemeGroupVersion.WithKind("Consumer") // Get takes name of the consumer, and returns the corresponding consumer object, and an error if there is any. func (c *FakeConsumers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.Consumer, err error) { + emptyResult := &v1alpha1.Consumer{} obj, err := c.Fake. 
- Invokes(testing.NewGetAction(consumersResource, c.ns, name), &v1alpha1.Consumer{}) + Invokes(testing.NewGetActionWithOptions(consumersResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.Consumer), err } // List takes label and field selectors, and returns the list of Consumers that match those selectors. func (c *FakeConsumers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ConsumerList, err error) { + emptyResult := &v1alpha1.ConsumerList{} obj, err := c.Fake. - Invokes(testing.NewListAction(consumersResource, consumersKind, c.ns, opts), &v1alpha1.ConsumerList{}) + Invokes(testing.NewListActionWithOptions(consumersResource, consumersKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeConsumers) List(ctx context.Context, opts v1.ListOptions) (result * // Watch returns a watch.Interface that watches the requested consumers. func (c *FakeConsumers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(consumersResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(consumersResource, c.ns, opts)) } // Create takes the representation of a consumer and creates it. Returns the server's representation of the consumer, and an error, if there is any. func (c *FakeConsumers) Create(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.CreateOptions) (result *v1alpha1.Consumer, err error) { + emptyResult := &v1alpha1.Consumer{} obj, err := c.Fake. 
- Invokes(testing.NewCreateAction(consumersResource, c.ns, consumer), &v1alpha1.Consumer{}) + Invokes(testing.NewCreateActionWithOptions(consumersResource, c.ns, consumer, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.Consumer), err } // Update takes the representation of a consumer and updates it. Returns the server's representation of the consumer, and an error, if there is any. func (c *FakeConsumers) Update(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (result *v1alpha1.Consumer, err error) { + emptyResult := &v1alpha1.Consumer{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(consumersResource, c.ns, consumer), &v1alpha1.Consumer{}) + Invokes(testing.NewUpdateActionWithOptions(consumersResource, c.ns, consumer, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.Consumer), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeConsumers) UpdateStatus(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (*v1alpha1.Consumer, error) { +func (c *FakeConsumers) UpdateStatus(ctx context.Context, consumer *v1alpha1.Consumer, opts v1.UpdateOptions) (result *v1alpha1.Consumer, err error) { + emptyResult := &v1alpha1.Consumer{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(consumersResource, "status", c.ns, consumer), &v1alpha1.Consumer{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(consumersResource, "status", c.ns, consumer, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.Consumer), err } @@ -123,7 +128,7 @@ func (c *FakeConsumers) Delete(ctx context.Context, name string, opts v1.DeleteO // DeleteCollection deletes a collection of objects. 
func (c *FakeConsumers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(consumersResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(consumersResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.ConsumerList{}) return err @@ -131,11 +136,12 @@ func (c *FakeConsumers) DeleteCollection(ctx context.Context, opts v1.DeleteOpti // Patch applies the patch and returns the patched consumer. func (c *FakeConsumers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.Consumer, err error) { + emptyResult := &v1alpha1.Consumer{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(consumersResource, c.ns, name, pt, data, subresources...), &v1alpha1.Consumer{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(consumersResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.Consumer), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumergroup.go b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumergroup.go index 493118e429..cb39647326 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumergroup.go +++ b/control-plane/pkg/client/clientset/versioned/typed/internalskafkaeventing/v1alpha1/fake/fake_consumergroup.go @@ -42,22 +42,24 @@ var consumergroupsKind = v1alpha1.SchemeGroupVersion.WithKind("ConsumerGroup") // Get takes name of the consumerGroup, and returns the corresponding consumerGroup object, and an error if there is any. 
func (c *FakeConsumerGroups) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ConsumerGroup, err error) { + emptyResult := &v1alpha1.ConsumerGroup{} obj, err := c.Fake. - Invokes(testing.NewGetAction(consumergroupsResource, c.ns, name), &v1alpha1.ConsumerGroup{}) + Invokes(testing.NewGetActionWithOptions(consumergroupsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ConsumerGroup), err } // List takes label and field selectors, and returns the list of ConsumerGroups that match those selectors. func (c *FakeConsumerGroups) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ConsumerGroupList, err error) { + emptyResult := &v1alpha1.ConsumerGroupList{} obj, err := c.Fake. - Invokes(testing.NewListAction(consumergroupsResource, consumergroupsKind, c.ns, opts), &v1alpha1.ConsumerGroupList{}) + Invokes(testing.NewListActionWithOptions(consumergroupsResource, consumergroupsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -76,40 +78,43 @@ func (c *FakeConsumerGroups) List(ctx context.Context, opts v1.ListOptions) (res // Watch returns a watch.Interface that watches the requested consumerGroups. func (c *FakeConsumerGroups) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(consumergroupsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(consumergroupsResource, c.ns, opts)) } // Create takes the representation of a consumerGroup and creates it. Returns the server's representation of the consumerGroup, and an error, if there is any. func (c *FakeConsumerGroups) Create(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.CreateOptions) (result *v1alpha1.ConsumerGroup, err error) { + emptyResult := &v1alpha1.ConsumerGroup{} obj, err := c.Fake. 
- Invokes(testing.NewCreateAction(consumergroupsResource, c.ns, consumerGroup), &v1alpha1.ConsumerGroup{}) + Invokes(testing.NewCreateActionWithOptions(consumergroupsResource, c.ns, consumerGroup, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ConsumerGroup), err } // Update takes the representation of a consumerGroup and updates it. Returns the server's representation of the consumerGroup, and an error, if there is any. func (c *FakeConsumerGroups) Update(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (result *v1alpha1.ConsumerGroup, err error) { + emptyResult := &v1alpha1.ConsumerGroup{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(consumergroupsResource, c.ns, consumerGroup), &v1alpha1.ConsumerGroup{}) + Invokes(testing.NewUpdateActionWithOptions(consumergroupsResource, c.ns, consumerGroup, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ConsumerGroup), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeConsumerGroups) UpdateStatus(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (*v1alpha1.ConsumerGroup, error) { +func (c *FakeConsumerGroups) UpdateStatus(ctx context.Context, consumerGroup *v1alpha1.ConsumerGroup, opts v1.UpdateOptions) (result *v1alpha1.ConsumerGroup, err error) { + emptyResult := &v1alpha1.ConsumerGroup{} obj, err := c.Fake. 
- Invokes(testing.NewUpdateSubresourceAction(consumergroupsResource, "status", c.ns, consumerGroup), &v1alpha1.ConsumerGroup{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(consumergroupsResource, "status", c.ns, consumerGroup, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ConsumerGroup), err } @@ -124,7 +129,7 @@ func (c *FakeConsumerGroups) Delete(ctx context.Context, name string, opts v1.De // DeleteCollection deletes a collection of objects. func (c *FakeConsumerGroups) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(consumergroupsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(consumergroupsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.ConsumerGroupList{}) return err @@ -132,33 +137,36 @@ func (c *FakeConsumerGroups) DeleteCollection(ctx context.Context, opts v1.Delet // Patch applies the patch and returns the patched consumerGroup. func (c *FakeConsumerGroups) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ConsumerGroup, err error) { + emptyResult := &v1alpha1.ConsumerGroup{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(consumergroupsResource, c.ns, name, pt, data, subresources...), &v1alpha1.ConsumerGroup{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(consumergroupsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ConsumerGroup), err } // GetScale takes name of the consumerGroup, and returns the corresponding scale object, and an error if there is any. 
func (c *FakeConsumerGroups) GetScale(ctx context.Context, consumerGroupName string, options v1.GetOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. - Invokes(testing.NewGetSubresourceAction(consumergroupsResource, c.ns, "scale", consumerGroupName), &autoscalingv1.Scale{}) + Invokes(testing.NewGetSubresourceActionWithOptions(consumergroupsResource, c.ns, "scale", consumerGroupName, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } // UpdateScale takes the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *FakeConsumerGroups) UpdateScale(ctx context.Context, consumerGroupName string, scale *autoscalingv1.Scale, opts v1.UpdateOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(consumergroupsResource, "scale", c.ns, scale), &autoscalingv1.Scale{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(consumergroupsResource, "scale", c.ns, scale, opts), &autoscalingv1.Scale{}) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/fake/fake_kafkachannel.go b/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/fake/fake_kafkachannel.go index 0e52c85dc9..4315fd05c5 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/fake/fake_kafkachannel.go +++ b/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/fake/fake_kafkachannel.go @@ -41,22 +41,24 @@ var kafkachannelsKind = v1beta1.SchemeGroupVersion.WithKind("KafkaChannel") // Get takes name of the kafkaChannel, and returns the corresponding kafkaChannel object, and an error if there is any. 
func (c *FakeKafkaChannels) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaChannel, err error) { + emptyResult := &v1beta1.KafkaChannel{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkachannelsResource, c.ns, name), &v1beta1.KafkaChannel{}) + Invokes(testing.NewGetActionWithOptions(kafkachannelsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaChannel), err } // List takes label and field selectors, and returns the list of KafkaChannels that match those selectors. func (c *FakeKafkaChannels) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaChannelList, err error) { + emptyResult := &v1beta1.KafkaChannelList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkachannelsResource, kafkachannelsKind, c.ns, opts), &v1beta1.KafkaChannelList{}) + Invokes(testing.NewListActionWithOptions(kafkachannelsResource, kafkachannelsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeKafkaChannels) List(ctx context.Context, opts v1.ListOptions) (resu // Watch returns a watch.Interface that watches the requested kafkaChannels. func (c *FakeKafkaChannels) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkachannelsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkachannelsResource, c.ns, opts)) } // Create takes the representation of a kafkaChannel and creates it. Returns the server's representation of the kafkaChannel, and an error, if there is any. func (c *FakeKafkaChannels) Create(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.CreateOptions) (result *v1beta1.KafkaChannel, err error) { + emptyResult := &v1beta1.KafkaChannel{} obj, err := c.Fake. 
- Invokes(testing.NewCreateAction(kafkachannelsResource, c.ns, kafkaChannel), &v1beta1.KafkaChannel{}) + Invokes(testing.NewCreateActionWithOptions(kafkachannelsResource, c.ns, kafkaChannel, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaChannel), err } // Update takes the representation of a kafkaChannel and updates it. Returns the server's representation of the kafkaChannel, and an error, if there is any. func (c *FakeKafkaChannels) Update(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (result *v1beta1.KafkaChannel, err error) { + emptyResult := &v1beta1.KafkaChannel{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkachannelsResource, c.ns, kafkaChannel), &v1beta1.KafkaChannel{}) + Invokes(testing.NewUpdateActionWithOptions(kafkachannelsResource, c.ns, kafkaChannel, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaChannel), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeKafkaChannels) UpdateStatus(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (*v1beta1.KafkaChannel, error) { +func (c *FakeKafkaChannels) UpdateStatus(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (result *v1beta1.KafkaChannel, err error) { + emptyResult := &v1beta1.KafkaChannel{} obj, err := c.Fake. 
- Invokes(testing.NewUpdateSubresourceAction(kafkachannelsResource, "status", c.ns, kafkaChannel), &v1beta1.KafkaChannel{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkachannelsResource, "status", c.ns, kafkaChannel, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaChannel), err } @@ -123,7 +128,7 @@ func (c *FakeKafkaChannels) Delete(ctx context.Context, name string, opts v1.Del // DeleteCollection deletes a collection of objects. func (c *FakeKafkaChannels) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkachannelsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkachannelsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1beta1.KafkaChannelList{}) return err @@ -131,11 +136,12 @@ func (c *FakeKafkaChannels) DeleteCollection(ctx context.Context, opts v1.Delete // Patch applies the patch and returns the patched kafkaChannel. func (c *FakeKafkaChannels) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaChannel, err error) { + emptyResult := &v1beta1.KafkaChannel{} obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(kafkachannelsResource, c.ns, name, pt, data, subresources...), &v1beta1.KafkaChannel{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkachannelsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaChannel), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/kafkachannel.go b/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/kafkachannel.go index f1575a1cbf..8105d74639 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/kafkachannel.go +++ b/control-plane/pkg/client/clientset/versioned/typed/messaging/v1beta1/kafkachannel.go @@ -20,12 +20,11 @@ package v1beta1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/messaging/v1beta1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type KafkaChannelsGetter interface { type KafkaChannelInterface interface { Create(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.CreateOptions) (*v1beta1.KafkaChannel, error) Update(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (*v1beta1.KafkaChannel, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (*v1beta1.KafkaChannel, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type KafkaChannelInterface interface { // kafkaChannels implements KafkaChannelInterface type kafkaChannels struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1beta1.KafkaChannel, *v1beta1.KafkaChannelList] } // newKafkaChannels returns a KafkaChannels func newKafkaChannels(c *MessagingV1beta1Client, namespace string) *kafkaChannels { return &kafkaChannels{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1beta1.KafkaChannel, *v1beta1.KafkaChannelList]( + "kafkachannels", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1beta1.KafkaChannel { return &v1beta1.KafkaChannel{} }, + func() *v1beta1.KafkaChannelList { return &v1beta1.KafkaChannelList{} }), } } - -// Get takes name of the kafkaChannel, and returns the corresponding kafkaChannel object, and an error if there is any. -func (c *kafkaChannels) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaChannel, err error) { - result = &v1beta1.KafkaChannel{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkachannels"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaChannels that match those selectors. -func (c *kafkaChannels) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaChannelList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1beta1.KafkaChannelList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkachannels"). 
- VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaChannels. -func (c *kafkaChannels) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkachannels"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaChannel and creates it. Returns the server's representation of the kafkaChannel, and an error, if there is any. -func (c *kafkaChannels) Create(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.CreateOptions) (result *v1beta1.KafkaChannel, err error) { - result = &v1beta1.KafkaChannel{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkachannels"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaChannel). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaChannel and updates it. Returns the server's representation of the kafkaChannel, and an error, if there is any. -func (c *kafkaChannels) Update(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (result *v1beta1.KafkaChannel, err error) { - result = &v1beta1.KafkaChannel{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkachannels"). - Name(kafkaChannel.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaChannel). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *kafkaChannels) UpdateStatus(ctx context.Context, kafkaChannel *v1beta1.KafkaChannel, opts v1.UpdateOptions) (result *v1beta1.KafkaChannel, err error) { - result = &v1beta1.KafkaChannel{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkachannels"). - Name(kafkaChannel.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaChannel). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaChannel and deletes it. Returns an error if one occurs. -func (c *kafkaChannels) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkachannels"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaChannels) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkachannels"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaChannel. -func (c *kafkaChannels) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaChannel, err error) { - result = &v1beta1.KafkaChannel{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkachannels"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/control-plane/pkg/client/clientset/versioned/typed/sources/v1/fake/fake_kafkasource.go b/control-plane/pkg/client/clientset/versioned/typed/sources/v1/fake/fake_kafkasource.go index 0b86b35ae6..1007198638 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/sources/v1/fake/fake_kafkasource.go +++ b/control-plane/pkg/client/clientset/versioned/typed/sources/v1/fake/fake_kafkasource.go @@ -42,22 +42,24 @@ var kafkasourcesKind = v1.SchemeGroupVersion.WithKind("KafkaSource") // Get takes name of the kafkaSource, and returns the corresponding kafkaSource object, and an error if there is any. func (c *FakeKafkaSources) Get(ctx context.Context, name string, options metav1.GetOptions) (result *v1.KafkaSource, err error) { + emptyResult := &v1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkasourcesResource, c.ns, name), &v1.KafkaSource{}) + Invokes(testing.NewGetActionWithOptions(kafkasourcesResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaSource), err } // List takes label and field selectors, and returns the list of KafkaSources that match those selectors. func (c *FakeKafkaSources) List(ctx context.Context, opts metav1.ListOptions) (result *v1.KafkaSourceList, err error) { + emptyResult := &v1.KafkaSourceList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkasourcesResource, kafkasourcesKind, c.ns, opts), &v1.KafkaSourceList{}) + Invokes(testing.NewListActionWithOptions(kafkasourcesResource, kafkasourcesKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -76,40 +78,43 @@ func (c *FakeKafkaSources) List(ctx context.Context, opts metav1.ListOptions) (r // Watch returns a watch.Interface that watches the requested kafkaSources. 
func (c *FakeKafkaSources) Watch(ctx context.Context, opts metav1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkasourcesResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkasourcesResource, c.ns, opts)) } // Create takes the representation of a kafkaSource and creates it. Returns the server's representation of the kafkaSource, and an error, if there is any. func (c *FakeKafkaSources) Create(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.CreateOptions) (result *v1.KafkaSource, err error) { + emptyResult := &v1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(kafkasourcesResource, c.ns, kafkaSource), &v1.KafkaSource{}) + Invokes(testing.NewCreateActionWithOptions(kafkasourcesResource, c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaSource), err } // Update takes the representation of a kafkaSource and updates it. Returns the server's representation of the kafkaSource, and an error, if there is any. func (c *FakeKafkaSources) Update(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (result *v1.KafkaSource, err error) { + emptyResult := &v1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkasourcesResource, c.ns, kafkaSource), &v1.KafkaSource{}) + Invokes(testing.NewUpdateActionWithOptions(kafkasourcesResource, c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaSource), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *FakeKafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (*v1.KafkaSource, error) { +func (c *FakeKafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (result *v1.KafkaSource, err error) { + emptyResult := &v1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(kafkasourcesResource, "status", c.ns, kafkaSource), &v1.KafkaSource{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkasourcesResource, "status", c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaSource), err } @@ -124,7 +129,7 @@ func (c *FakeKafkaSources) Delete(ctx context.Context, name string, opts metav1. // DeleteCollection deletes a collection of objects. func (c *FakeKafkaSources) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkasourcesResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkasourcesResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1.KafkaSourceList{}) return err @@ -132,33 +137,36 @@ func (c *FakeKafkaSources) DeleteCollection(ctx context.Context, opts metav1.Del // Patch applies the patch and returns the patched kafkaSource. func (c *FakeKafkaSources) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts metav1.PatchOptions, subresources ...string) (result *v1.KafkaSource, err error) { + emptyResult := &v1.KafkaSource{} obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(kafkasourcesResource, c.ns, name, pt, data, subresources...), &v1.KafkaSource{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkasourcesResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1.KafkaSource), err } // GetScale takes name of the kafkaSource, and returns the corresponding scale object, and an error if there is any. func (c *FakeKafkaSources) GetScale(ctx context.Context, kafkaSourceName string, options metav1.GetOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. - Invokes(testing.NewGetSubresourceAction(kafkasourcesResource, c.ns, "scale", kafkaSourceName), &autoscalingv1.Scale{}) + Invokes(testing.NewGetSubresourceActionWithOptions(kafkasourcesResource, c.ns, "scale", kafkaSourceName, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } // UpdateScale takes the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *FakeKafkaSources) UpdateScale(ctx context.Context, kafkaSourceName string, scale *autoscalingv1.Scale, opts metav1.UpdateOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. 
- Invokes(testing.NewUpdateSubresourceAction(kafkasourcesResource, "scale", c.ns, scale), &autoscalingv1.Scale{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkasourcesResource, "scale", c.ns, scale, opts), &autoscalingv1.Scale{}) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/sources/v1/kafkasource.go b/control-plane/pkg/client/clientset/versioned/typed/sources/v1/kafkasource.go index eeb8572132..45706813ef 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/sources/v1/kafkasource.go +++ b/control-plane/pkg/client/clientset/versioned/typed/sources/v1/kafkasource.go @@ -20,13 +20,12 @@ package v1 import ( "context" - "time" autoscalingv1 "k8s.io/api/autoscaling/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -41,6 +40,7 @@ type KafkaSourcesGetter interface { type KafkaSourceInterface interface { Create(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.CreateOptions) (*v1.KafkaSource, error) Update(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (*v1.KafkaSource, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (*v1.KafkaSource, error) Delete(ctx context.Context, name string, opts metav1.DeleteOptions) error DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error @@ -56,153 +56,27 @@ type KafkaSourceInterface interface { // kafkaSources implements KafkaSourceInterface type kafkaSources struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1.KafkaSource, *v1.KafkaSourceList] } // newKafkaSources returns a KafkaSources func newKafkaSources(c *SourcesV1Client, namespace string) *kafkaSources { return &kafkaSources{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1.KafkaSource, *v1.KafkaSourceList]( + "kafkasources", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1.KafkaSource { return &v1.KafkaSource{} }, + func() *v1.KafkaSourceList { return &v1.KafkaSourceList{} }), } } -// Get takes name of the kafkaSource, and returns the corresponding kafkaSource object, and an error if there is any. -func (c *kafkaSources) Get(ctx context.Context, name string, options metav1.GetOptions) (result *v1.KafkaSource, err error) { - result = &v1.KafkaSource{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaSources that match those selectors. -func (c *kafkaSources) List(ctx context.Context, opts metav1.ListOptions) (result *v1.KafkaSourceList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1.KafkaSourceList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). 
- Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaSources. -func (c *kafkaSources) Watch(ctx context.Context, opts metav1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaSource and creates it. Returns the server's representation of the kafkaSource, and an error, if there is any. -func (c *kafkaSources) Create(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.CreateOptions) (result *v1.KafkaSource, err error) { - result = &v1.KafkaSource{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaSource and updates it. Returns the server's representation of the kafkaSource, and an error, if there is any. -func (c *kafkaSources) Update(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (result *v1.KafkaSource, err error) { - result = &v1.KafkaSource{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkasources"). - Name(kafkaSource.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *kafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1.KafkaSource, opts metav1.UpdateOptions) (result *v1.KafkaSource, err error) { - result = &v1.KafkaSource{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkasources"). 
- Name(kafkaSource.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaSource and deletes it. Returns an error if one occurs. -func (c *kafkaSources) Delete(ctx context.Context, name string, opts metav1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaSources) DeleteCollection(ctx context.Context, opts metav1.DeleteOptions, listOpts metav1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaSource. -func (c *kafkaSources) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts metav1.PatchOptions, subresources ...string) (result *v1.KafkaSource, err error) { - result = &v1.KafkaSource{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). - Into(result) - return -} - // GetScale takes name of the kafkaSource, and returns the corresponding autoscalingv1.Scale object, and an error if there is any. func (c *kafkaSources) GetScale(ctx context.Context, kafkaSourceName string, options metav1.GetOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Get(). - Namespace(c.ns). + err = c.GetClient().Get(). + Namespace(c.GetNamespace()). Resource("kafkasources"). Name(kafkaSourceName). SubResource("scale"). 
@@ -215,8 +89,8 @@ func (c *kafkaSources) GetScale(ctx context.Context, kafkaSourceName string, opt // UpdateScale takes the top resource name and the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *kafkaSources) UpdateScale(ctx context.Context, kafkaSourceName string, scale *autoscalingv1.Scale, opts metav1.UpdateOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Put(). - Namespace(c.ns). + err = c.GetClient().Put(). + Namespace(c.GetNamespace()). Resource("kafkasources"). Name(kafkaSourceName). SubResource("scale"). diff --git a/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/fake/fake_kafkasource.go b/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/fake/fake_kafkasource.go index 72c1f198df..8b190e3cc3 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/fake/fake_kafkasource.go +++ b/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/fake/fake_kafkasource.go @@ -42,22 +42,24 @@ var kafkasourcesKind = v1beta1.SchemeGroupVersion.WithKind("KafkaSource") // Get takes name of the kafkaSource, and returns the corresponding kafkaSource object, and an error if there is any. func (c *FakeKafkaSources) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaSource, err error) { + emptyResult := &v1beta1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewGetAction(kafkasourcesResource, c.ns, name), &v1beta1.KafkaSource{}) + Invokes(testing.NewGetActionWithOptions(kafkasourcesResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaSource), err } // List takes label and field selectors, and returns the list of KafkaSources that match those selectors. 
func (c *FakeKafkaSources) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaSourceList, err error) { + emptyResult := &v1beta1.KafkaSourceList{} obj, err := c.Fake. - Invokes(testing.NewListAction(kafkasourcesResource, kafkasourcesKind, c.ns, opts), &v1beta1.KafkaSourceList{}) + Invokes(testing.NewListActionWithOptions(kafkasourcesResource, kafkasourcesKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -76,40 +78,43 @@ func (c *FakeKafkaSources) List(ctx context.Context, opts v1.ListOptions) (resul // Watch returns a watch.Interface that watches the requested kafkaSources. func (c *FakeKafkaSources) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(kafkasourcesResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(kafkasourcesResource, c.ns, opts)) } // Create takes the representation of a kafkaSource and creates it. Returns the server's representation of the kafkaSource, and an error, if there is any. func (c *FakeKafkaSources) Create(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.CreateOptions) (result *v1beta1.KafkaSource, err error) { + emptyResult := &v1beta1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(kafkasourcesResource, c.ns, kafkaSource), &v1beta1.KafkaSource{}) + Invokes(testing.NewCreateActionWithOptions(kafkasourcesResource, c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaSource), err } // Update takes the representation of a kafkaSource and updates it. Returns the server's representation of the kafkaSource, and an error, if there is any. 
func (c *FakeKafkaSources) Update(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (result *v1beta1.KafkaSource, err error) { + emptyResult := &v1beta1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(kafkasourcesResource, c.ns, kafkaSource), &v1beta1.KafkaSource{}) + Invokes(testing.NewUpdateActionWithOptions(kafkasourcesResource, c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaSource), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeKafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (*v1beta1.KafkaSource, error) { +func (c *FakeKafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (result *v1beta1.KafkaSource, err error) { + emptyResult := &v1beta1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(kafkasourcesResource, "status", c.ns, kafkaSource), &v1beta1.KafkaSource{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkasourcesResource, "status", c.ns, kafkaSource, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaSource), err } @@ -124,7 +129,7 @@ func (c *FakeKafkaSources) Delete(ctx context.Context, name string, opts v1.Dele // DeleteCollection deletes a collection of objects. 
func (c *FakeKafkaSources) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(kafkasourcesResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(kafkasourcesResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1beta1.KafkaSourceList{}) return err @@ -132,33 +137,36 @@ func (c *FakeKafkaSources) DeleteCollection(ctx context.Context, opts v1.DeleteO // Patch applies the patch and returns the patched kafkaSource. func (c *FakeKafkaSources) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaSource, err error) { + emptyResult := &v1beta1.KafkaSource{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(kafkasourcesResource, c.ns, name, pt, data, subresources...), &v1beta1.KafkaSource{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(kafkasourcesResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1beta1.KafkaSource), err } // GetScale takes name of the kafkaSource, and returns the corresponding scale object, and an error if there is any. func (c *FakeKafkaSources) GetScale(ctx context.Context, kafkaSourceName string, options v1.GetOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. - Invokes(testing.NewGetSubresourceAction(kafkasourcesResource, c.ns, "scale", kafkaSourceName), &autoscalingv1.Scale{}) + Invokes(testing.NewGetSubresourceActionWithOptions(kafkasourcesResource, c.ns, "scale", kafkaSourceName, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } // UpdateScale takes the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. 
func (c *FakeKafkaSources) UpdateScale(ctx context.Context, kafkaSourceName string, scale *autoscalingv1.Scale, opts v1.UpdateOptions) (result *autoscalingv1.Scale, err error) { + emptyResult := &autoscalingv1.Scale{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(kafkasourcesResource, "scale", c.ns, scale), &autoscalingv1.Scale{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(kafkasourcesResource, "scale", c.ns, scale, opts), &autoscalingv1.Scale{}) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*autoscalingv1.Scale), err } diff --git a/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/kafkasource.go b/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/kafkasource.go index 6b9bfd37ec..5272b3b111 100644 --- a/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/kafkasource.go +++ b/control-plane/pkg/client/clientset/versioned/typed/sources/v1beta1/kafkasource.go @@ -20,13 +20,12 @@ package v1beta1 import ( "context" - "time" autoscalingv1 "k8s.io/api/autoscaling/v1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1" scheme "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/scheme" ) @@ -41,6 +40,7 @@ type KafkaSourcesGetter interface { type KafkaSourceInterface interface { Create(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.CreateOptions) (*v1beta1.KafkaSource, error) Update(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (*v1beta1.KafkaSource, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (*v1beta1.KafkaSource, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -56,153 +56,27 @@ type KafkaSourceInterface interface { // kafkaSources implements KafkaSourceInterface type kafkaSources struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1beta1.KafkaSource, *v1beta1.KafkaSourceList] } // newKafkaSources returns a KafkaSources func newKafkaSources(c *SourcesV1beta1Client, namespace string) *kafkaSources { return &kafkaSources{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1beta1.KafkaSource, *v1beta1.KafkaSourceList]( + "kafkasources", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1beta1.KafkaSource { return &v1beta1.KafkaSource{} }, + func() *v1beta1.KafkaSourceList { return &v1beta1.KafkaSourceList{} }), } } -// Get takes name of the kafkaSource, and returns the corresponding kafkaSource object, and an error if there is any. -func (c *kafkaSources) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.KafkaSource, err error) { - result = &v1beta1.KafkaSource{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of KafkaSources that match those selectors. -func (c *kafkaSources) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.KafkaSourceList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1beta1.KafkaSourceList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). 
- Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested kafkaSources. -func (c *kafkaSources) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a kafkaSource and creates it. Returns the server's representation of the kafkaSource, and an error, if there is any. -func (c *kafkaSources) Create(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.CreateOptions) (result *v1beta1.KafkaSource, err error) { - result = &v1beta1.KafkaSource{} - err = c.client.Post(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a kafkaSource and updates it. Returns the server's representation of the kafkaSource, and an error, if there is any. -func (c *kafkaSources) Update(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (result *v1beta1.KafkaSource, err error) { - result = &v1beta1.KafkaSource{} - err = c.client.Put(). - Namespace(c.ns). - Resource("kafkasources"). - Name(kafkaSource.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *kafkaSources) UpdateStatus(ctx context.Context, kafkaSource *v1beta1.KafkaSource, opts v1.UpdateOptions) (result *v1beta1.KafkaSource, err error) { - result = &v1beta1.KafkaSource{} - err = c.client.Put(). - Namespace(c.ns). 
- Resource("kafkasources"). - Name(kafkaSource.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(kafkaSource). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the kafkaSource and deletes it. Returns an error if one occurs. -func (c *kafkaSources) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *kafkaSources) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("kafkasources"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched kafkaSource. -func (c *kafkaSources) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.KafkaSource, err error) { - result = &v1beta1.KafkaSource{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("kafkasources"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). - Into(result) - return -} - // GetScale takes name of the kafkaSource, and returns the corresponding autoscalingv1.Scale object, and an error if there is any. func (c *kafkaSources) GetScale(ctx context.Context, kafkaSourceName string, options v1.GetOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Get(). - Namespace(c.ns). + err = c.GetClient().Get(). + Namespace(c.GetNamespace()). Resource("kafkasources"). Name(kafkaSourceName). SubResource("scale"). 
@@ -215,8 +89,8 @@ func (c *kafkaSources) GetScale(ctx context.Context, kafkaSourceName string, opt // UpdateScale takes the top resource name and the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *kafkaSources) UpdateScale(ctx context.Context, kafkaSourceName string, scale *autoscalingv1.Scale, opts v1.UpdateOptions) (result *autoscalingv1.Scale, err error) { result = &autoscalingv1.Scale{} - err = c.client.Put(). - Namespace(c.ns). + err = c.GetClient().Put(). + Namespace(c.GetNamespace()). Resource("kafkasources"). Name(kafkaSourceName). SubResource("scale"). diff --git a/control-plane/pkg/client/informers/externalversions/factory.go b/control-plane/pkg/client/informers/externalversions/factory.go index 79c27e15be..8735eae4e3 100644 --- a/control-plane/pkg/client/informers/externalversions/factory.go +++ b/control-plane/pkg/client/informers/externalversions/factory.go @@ -232,6 +232,7 @@ type SharedInformerFactory interface { // Start initializes all requested informers. They are handled in goroutines // which run until the stop channel gets closed. + // Warning: Start does not block. When run in a go-routine, it will race with a later WaitForCacheSync. Start(stopCh <-chan struct{}) // Shutdown marks a factory as shutting down. 
At that point no new diff --git a/control-plane/pkg/client/listers/bindings/v1/kafkabinding.go b/control-plane/pkg/client/listers/bindings/v1/kafkabinding.go index 8c9a81a05c..19d6cbac14 100644 --- a/control-plane/pkg/client/listers/bindings/v1/kafkabinding.go +++ b/control-plane/pkg/client/listers/bindings/v1/kafkabinding.go @@ -19,8 +19,8 @@ package v1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1" ) @@ -38,25 +38,17 @@ type KafkaBindingLister interface { // kafkaBindingLister implements the KafkaBindingLister interface. type kafkaBindingLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1.KafkaBinding] } // NewKafkaBindingLister returns a new KafkaBindingLister. func NewKafkaBindingLister(indexer cache.Indexer) KafkaBindingLister { - return &kafkaBindingLister{indexer: indexer} -} - -// List lists all KafkaBindings in the indexer. -func (s *kafkaBindingLister) List(selector labels.Selector) (ret []*v1.KafkaBinding, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1.KafkaBinding)) - }) - return ret, err + return &kafkaBindingLister{listers.New[*v1.KafkaBinding](indexer, v1.Resource("kafkabinding"))} } // KafkaBindings returns an object that can list and get KafkaBindings. func (s *kafkaBindingLister) KafkaBindings(namespace string) KafkaBindingNamespaceLister { - return kafkaBindingNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaBindingNamespaceLister{listers.NewNamespaced[*v1.KafkaBinding](s.ResourceIndexer, namespace)} } // KafkaBindingNamespaceLister helps list and get KafkaBindings. @@ -74,26 +66,5 @@ type KafkaBindingNamespaceLister interface { // kafkaBindingNamespaceLister implements the KafkaBindingNamespaceLister // interface. 
type kafkaBindingNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaBindings in the indexer for a given namespace. -func (s kafkaBindingNamespaceLister) List(selector labels.Selector) (ret []*v1.KafkaBinding, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1.KafkaBinding)) - }) - return ret, err -} - -// Get retrieves the KafkaBinding from the indexer for a given namespace and name. -func (s kafkaBindingNamespaceLister) Get(name string) (*v1.KafkaBinding, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1.Resource("kafkabinding"), name) - } - return obj.(*v1.KafkaBinding), nil + listers.ResourceIndexer[*v1.KafkaBinding] } diff --git a/control-plane/pkg/client/listers/bindings/v1beta1/kafkabinding.go b/control-plane/pkg/client/listers/bindings/v1beta1/kafkabinding.go index 930337e7a1..fe72b9e20f 100644 --- a/control-plane/pkg/client/listers/bindings/v1beta1/kafkabinding.go +++ b/control-plane/pkg/client/listers/bindings/v1beta1/kafkabinding.go @@ -19,8 +19,8 @@ package v1beta1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1beta1" ) @@ -38,25 +38,17 @@ type KafkaBindingLister interface { // kafkaBindingLister implements the KafkaBindingLister interface. type kafkaBindingLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1beta1.KafkaBinding] } // NewKafkaBindingLister returns a new KafkaBindingLister. func NewKafkaBindingLister(indexer cache.Indexer) KafkaBindingLister { - return &kafkaBindingLister{indexer: indexer} -} - -// List lists all KafkaBindings in the indexer. 
-func (s *kafkaBindingLister) List(selector labels.Selector) (ret []*v1beta1.KafkaBinding, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaBinding)) - }) - return ret, err + return &kafkaBindingLister{listers.New[*v1beta1.KafkaBinding](indexer, v1beta1.Resource("kafkabinding"))} } // KafkaBindings returns an object that can list and get KafkaBindings. func (s *kafkaBindingLister) KafkaBindings(namespace string) KafkaBindingNamespaceLister { - return kafkaBindingNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaBindingNamespaceLister{listers.NewNamespaced[*v1beta1.KafkaBinding](s.ResourceIndexer, namespace)} } // KafkaBindingNamespaceLister helps list and get KafkaBindings. @@ -74,26 +66,5 @@ type KafkaBindingNamespaceLister interface { // kafkaBindingNamespaceLister implements the KafkaBindingNamespaceLister // interface. type kafkaBindingNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaBindings in the indexer for a given namespace. -func (s kafkaBindingNamespaceLister) List(selector labels.Selector) (ret []*v1beta1.KafkaBinding, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaBinding)) - }) - return ret, err -} - -// Get retrieves the KafkaBinding from the indexer for a given namespace and name. 
-func (s kafkaBindingNamespaceLister) Get(name string) (*v1beta1.KafkaBinding, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1beta1.Resource("kafkabinding"), name) - } - return obj.(*v1beta1.KafkaBinding), nil + listers.ResourceIndexer[*v1beta1.KafkaBinding] } diff --git a/control-plane/pkg/client/listers/eventing/v1alpha1/kafkasink.go b/control-plane/pkg/client/listers/eventing/v1alpha1/kafkasink.go index 101f1922a6..aa75bb3590 100644 --- a/control-plane/pkg/client/listers/eventing/v1alpha1/kafkasink.go +++ b/control-plane/pkg/client/listers/eventing/v1alpha1/kafkasink.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/eventing/v1alpha1" ) @@ -38,25 +38,17 @@ type KafkaSinkLister interface { // kafkaSinkLister implements the KafkaSinkLister interface. type kafkaSinkLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.KafkaSink] } // NewKafkaSinkLister returns a new KafkaSinkLister. func NewKafkaSinkLister(indexer cache.Indexer) KafkaSinkLister { - return &kafkaSinkLister{indexer: indexer} -} - -// List lists all KafkaSinks in the indexer. -func (s *kafkaSinkLister) List(selector labels.Selector) (ret []*v1alpha1.KafkaSink, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.KafkaSink)) - }) - return ret, err + return &kafkaSinkLister{listers.New[*v1alpha1.KafkaSink](indexer, v1alpha1.Resource("kafkasink"))} } // KafkaSinks returns an object that can list and get KafkaSinks. 
func (s *kafkaSinkLister) KafkaSinks(namespace string) KafkaSinkNamespaceLister { - return kafkaSinkNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaSinkNamespaceLister{listers.NewNamespaced[*v1alpha1.KafkaSink](s.ResourceIndexer, namespace)} } // KafkaSinkNamespaceLister helps list and get KafkaSinks. @@ -74,26 +66,5 @@ type KafkaSinkNamespaceLister interface { // kafkaSinkNamespaceLister implements the KafkaSinkNamespaceLister // interface. type kafkaSinkNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaSinks in the indexer for a given namespace. -func (s kafkaSinkNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.KafkaSink, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.KafkaSink)) - }) - return ret, err -} - -// Get retrieves the KafkaSink from the indexer for a given namespace and name. -func (s kafkaSinkNamespaceLister) Get(name string) (*v1alpha1.KafkaSink, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("kafkasink"), name) - } - return obj.(*v1alpha1.KafkaSink), nil + listers.ResourceIndexer[*v1alpha1.KafkaSink] } diff --git a/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumer.go b/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumer.go index 932cbabee6..58fac5a684 100644 --- a/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumer.go +++ b/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumer.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/internalskafkaeventing/v1alpha1" ) @@ 
-38,25 +38,17 @@ type ConsumerLister interface { // consumerLister implements the ConsumerLister interface. type consumerLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.Consumer] } // NewConsumerLister returns a new ConsumerLister. func NewConsumerLister(indexer cache.Indexer) ConsumerLister { - return &consumerLister{indexer: indexer} -} - -// List lists all Consumers in the indexer. -func (s *consumerLister) List(selector labels.Selector) (ret []*v1alpha1.Consumer, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.Consumer)) - }) - return ret, err + return &consumerLister{listers.New[*v1alpha1.Consumer](indexer, v1alpha1.Resource("consumer"))} } // Consumers returns an object that can list and get Consumers. func (s *consumerLister) Consumers(namespace string) ConsumerNamespaceLister { - return consumerNamespaceLister{indexer: s.indexer, namespace: namespace} + return consumerNamespaceLister{listers.NewNamespaced[*v1alpha1.Consumer](s.ResourceIndexer, namespace)} } // ConsumerNamespaceLister helps list and get Consumers. @@ -74,26 +66,5 @@ type ConsumerNamespaceLister interface { // consumerNamespaceLister implements the ConsumerNamespaceLister // interface. type consumerNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all Consumers in the indexer for a given namespace. -func (s consumerNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.Consumer, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.Consumer)) - }) - return ret, err -} - -// Get retrieves the Consumer from the indexer for a given namespace and name. 
-func (s consumerNamespaceLister) Get(name string) (*v1alpha1.Consumer, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("consumer"), name) - } - return obj.(*v1alpha1.Consumer), nil + listers.ResourceIndexer[*v1alpha1.Consumer] } diff --git a/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumergroup.go b/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumergroup.go index 883623c43e..b0f96d558e 100644 --- a/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumergroup.go +++ b/control-plane/pkg/client/listers/internalskafkaeventing/v1alpha1/consumergroup.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/internalskafkaeventing/v1alpha1" ) @@ -38,25 +38,17 @@ type ConsumerGroupLister interface { // consumerGroupLister implements the ConsumerGroupLister interface. type consumerGroupLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.ConsumerGroup] } // NewConsumerGroupLister returns a new ConsumerGroupLister. func NewConsumerGroupLister(indexer cache.Indexer) ConsumerGroupLister { - return &consumerGroupLister{indexer: indexer} -} - -// List lists all ConsumerGroups in the indexer. -func (s *consumerGroupLister) List(selector labels.Selector) (ret []*v1alpha1.ConsumerGroup, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ConsumerGroup)) - }) - return ret, err + return &consumerGroupLister{listers.New[*v1alpha1.ConsumerGroup](indexer, v1alpha1.Resource("consumergroup"))} } // ConsumerGroups returns an object that can list and get ConsumerGroups. 
func (s *consumerGroupLister) ConsumerGroups(namespace string) ConsumerGroupNamespaceLister { - return consumerGroupNamespaceLister{indexer: s.indexer, namespace: namespace} + return consumerGroupNamespaceLister{listers.NewNamespaced[*v1alpha1.ConsumerGroup](s.ResourceIndexer, namespace)} } // ConsumerGroupNamespaceLister helps list and get ConsumerGroups. @@ -74,26 +66,5 @@ type ConsumerGroupNamespaceLister interface { // consumerGroupNamespaceLister implements the ConsumerGroupNamespaceLister // interface. type consumerGroupNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all ConsumerGroups in the indexer for a given namespace. -func (s consumerGroupNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.ConsumerGroup, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ConsumerGroup)) - }) - return ret, err -} - -// Get retrieves the ConsumerGroup from the indexer for a given namespace and name. 
-func (s consumerGroupNamespaceLister) Get(name string) (*v1alpha1.ConsumerGroup, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("consumergroup"), name) - } - return obj.(*v1alpha1.ConsumerGroup), nil + listers.ResourceIndexer[*v1alpha1.ConsumerGroup] } diff --git a/control-plane/pkg/client/listers/messaging/v1beta1/kafkachannel.go b/control-plane/pkg/client/listers/messaging/v1beta1/kafkachannel.go index 0bf135c644..2ea27fc14d 100644 --- a/control-plane/pkg/client/listers/messaging/v1beta1/kafkachannel.go +++ b/control-plane/pkg/client/listers/messaging/v1beta1/kafkachannel.go @@ -19,8 +19,8 @@ package v1beta1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/messaging/v1beta1" ) @@ -38,25 +38,17 @@ type KafkaChannelLister interface { // kafkaChannelLister implements the KafkaChannelLister interface. type kafkaChannelLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1beta1.KafkaChannel] } // NewKafkaChannelLister returns a new KafkaChannelLister. func NewKafkaChannelLister(indexer cache.Indexer) KafkaChannelLister { - return &kafkaChannelLister{indexer: indexer} -} - -// List lists all KafkaChannels in the indexer. -func (s *kafkaChannelLister) List(selector labels.Selector) (ret []*v1beta1.KafkaChannel, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaChannel)) - }) - return ret, err + return &kafkaChannelLister{listers.New[*v1beta1.KafkaChannel](indexer, v1beta1.Resource("kafkachannel"))} } // KafkaChannels returns an object that can list and get KafkaChannels. 
func (s *kafkaChannelLister) KafkaChannels(namespace string) KafkaChannelNamespaceLister { - return kafkaChannelNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaChannelNamespaceLister{listers.NewNamespaced[*v1beta1.KafkaChannel](s.ResourceIndexer, namespace)} } // KafkaChannelNamespaceLister helps list and get KafkaChannels. @@ -74,26 +66,5 @@ type KafkaChannelNamespaceLister interface { // kafkaChannelNamespaceLister implements the KafkaChannelNamespaceLister // interface. type kafkaChannelNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaChannels in the indexer for a given namespace. -func (s kafkaChannelNamespaceLister) List(selector labels.Selector) (ret []*v1beta1.KafkaChannel, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaChannel)) - }) - return ret, err -} - -// Get retrieves the KafkaChannel from the indexer for a given namespace and name. 
-func (s kafkaChannelNamespaceLister) Get(name string) (*v1beta1.KafkaChannel, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1beta1.Resource("kafkachannel"), name) - } - return obj.(*v1beta1.KafkaChannel), nil + listers.ResourceIndexer[*v1beta1.KafkaChannel] } diff --git a/control-plane/pkg/client/listers/sources/v1/kafkasource.go b/control-plane/pkg/client/listers/sources/v1/kafkasource.go index c36f19e088..e51326c6fe 100644 --- a/control-plane/pkg/client/listers/sources/v1/kafkasource.go +++ b/control-plane/pkg/client/listers/sources/v1/kafkasource.go @@ -19,8 +19,8 @@ package v1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1" ) @@ -38,25 +38,17 @@ type KafkaSourceLister interface { // kafkaSourceLister implements the KafkaSourceLister interface. type kafkaSourceLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1.KafkaSource] } // NewKafkaSourceLister returns a new KafkaSourceLister. func NewKafkaSourceLister(indexer cache.Indexer) KafkaSourceLister { - return &kafkaSourceLister{indexer: indexer} -} - -// List lists all KafkaSources in the indexer. -func (s *kafkaSourceLister) List(selector labels.Selector) (ret []*v1.KafkaSource, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1.KafkaSource)) - }) - return ret, err + return &kafkaSourceLister{listers.New[*v1.KafkaSource](indexer, v1.Resource("kafkasource"))} } // KafkaSources returns an object that can list and get KafkaSources. 
func (s *kafkaSourceLister) KafkaSources(namespace string) KafkaSourceNamespaceLister { - return kafkaSourceNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaSourceNamespaceLister{listers.NewNamespaced[*v1.KafkaSource](s.ResourceIndexer, namespace)} } // KafkaSourceNamespaceLister helps list and get KafkaSources. @@ -74,26 +66,5 @@ type KafkaSourceNamespaceLister interface { // kafkaSourceNamespaceLister implements the KafkaSourceNamespaceLister // interface. type kafkaSourceNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaSources in the indexer for a given namespace. -func (s kafkaSourceNamespaceLister) List(selector labels.Selector) (ret []*v1.KafkaSource, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1.KafkaSource)) - }) - return ret, err -} - -// Get retrieves the KafkaSource from the indexer for a given namespace and name. -func (s kafkaSourceNamespaceLister) Get(name string) (*v1.KafkaSource, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1.Resource("kafkasource"), name) - } - return obj.(*v1.KafkaSource), nil + listers.ResourceIndexer[*v1.KafkaSource] } diff --git a/control-plane/pkg/client/listers/sources/v1beta1/kafkasource.go b/control-plane/pkg/client/listers/sources/v1beta1/kafkasource.go index b1dc12c0f6..fa5ec91a8a 100644 --- a/control-plane/pkg/client/listers/sources/v1beta1/kafkasource.go +++ b/control-plane/pkg/client/listers/sources/v1beta1/kafkasource.go @@ -19,8 +19,8 @@ package v1beta1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1" ) @@ -38,25 +38,17 @@ type KafkaSourceLister interface { // 
kafkaSourceLister implements the KafkaSourceLister interface. type kafkaSourceLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1beta1.KafkaSource] } // NewKafkaSourceLister returns a new KafkaSourceLister. func NewKafkaSourceLister(indexer cache.Indexer) KafkaSourceLister { - return &kafkaSourceLister{indexer: indexer} -} - -// List lists all KafkaSources in the indexer. -func (s *kafkaSourceLister) List(selector labels.Selector) (ret []*v1beta1.KafkaSource, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaSource)) - }) - return ret, err + return &kafkaSourceLister{listers.New[*v1beta1.KafkaSource](indexer, v1beta1.Resource("kafkasource"))} } // KafkaSources returns an object that can list and get KafkaSources. func (s *kafkaSourceLister) KafkaSources(namespace string) KafkaSourceNamespaceLister { - return kafkaSourceNamespaceLister{indexer: s.indexer, namespace: namespace} + return kafkaSourceNamespaceLister{listers.NewNamespaced[*v1beta1.KafkaSource](s.ResourceIndexer, namespace)} } // KafkaSourceNamespaceLister helps list and get KafkaSources. @@ -74,26 +66,5 @@ type KafkaSourceNamespaceLister interface { // kafkaSourceNamespaceLister implements the KafkaSourceNamespaceLister // interface. type kafkaSourceNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all KafkaSources in the indexer for a given namespace. -func (s kafkaSourceNamespaceLister) List(selector labels.Selector) (ret []*v1beta1.KafkaSource, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1beta1.KafkaSource)) - }) - return ret, err -} - -// Get retrieves the KafkaSource from the indexer for a given namespace and name. 
-func (s kafkaSourceNamespaceLister) Get(name string) (*v1beta1.KafkaSource, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1beta1.Resource("kafkasource"), name) - } - return obj.(*v1beta1.KafkaSource), nil + listers.ResourceIndexer[*v1beta1.KafkaSource] } diff --git a/control-plane/pkg/contract/contract.pb.go b/control-plane/pkg/contract/contract.pb.go index 6d676899ab..a042169b68 100644 --- a/control-plane/pkg/contract/contract.pb.go +++ b/control-plane/pkg/contract/contract.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.26.0 -// protoc v3.17.3 +// protoc-gen-go v1.36.3 +// protoc v5.29.3 // source: contract.proto package contract @@ -331,18 +331,16 @@ func (Protocol) EnumDescriptor() ([]byte, []int) { // working with the project, we prefer to have this additional single line of code. // Protobuf include nightmare? No thanks! 
type Empty struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Empty) Reset() { *x = Empty{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Empty) String() string { @@ -353,7 +351,7 @@ func (*Empty) ProtoMessage() {} func (x *Empty) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -369,20 +367,17 @@ func (*Empty) Descriptor() ([]byte, []int) { } type Exact struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` unknownFields protoimpl.UnknownFields - - Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + sizeCache protoimpl.SizeCache } func (x *Exact) Reset() { *x = Exact{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Exact) String() string { @@ -393,7 +388,7 @@ func (*Exact) ProtoMessage() {} func (x *Exact) ProtoReflect() protoreflect.Message { 
mi := &file_contract_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -416,20 +411,17 @@ func (x *Exact) GetAttributes() map[string]string { } type Prefix struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` unknownFields protoimpl.UnknownFields - - Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + sizeCache protoimpl.SizeCache } func (x *Prefix) Reset() { *x = Prefix{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Prefix) String() string { @@ -440,7 +432,7 @@ func (*Prefix) ProtoMessage() {} func (x *Prefix) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -463,20 +455,17 @@ func (x *Prefix) GetAttributes() map[string]string { } type Suffix struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` unknownFields protoimpl.UnknownFields - - Attributes 
map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + sizeCache protoimpl.SizeCache } func (x *Suffix) Reset() { *x = Suffix{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Suffix) String() string { @@ -487,7 +476,7 @@ func (*Suffix) ProtoMessage() {} func (x *Suffix) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -510,20 +499,17 @@ func (x *Suffix) GetAttributes() map[string]string { } type All struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Filters []*DialectedFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` unknownFields protoimpl.UnknownFields - - Filters []*DialectedFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + sizeCache protoimpl.SizeCache } func (x *All) Reset() { *x = All{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *All) String() string { @@ -534,7 +520,7 @@ func (*All) ProtoMessage() {} func (x *All) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if 
ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -557,20 +543,17 @@ func (x *All) GetFilters() []*DialectedFilter { } type Any struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Filters []*DialectedFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` unknownFields protoimpl.UnknownFields - - Filters []*DialectedFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Any) Reset() { *x = Any{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Any) String() string { @@ -581,7 +564,7 @@ func (*Any) ProtoMessage() {} func (x *Any) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -604,20 +587,17 @@ func (x *Any) GetFilters() []*DialectedFilter { } type Not struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Filter *DialectedFilter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` unknownFields protoimpl.UnknownFields - - Filter *DialectedFilter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Not) Reset() { *x = Not{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Not) 
String() string { @@ -628,7 +608,7 @@ func (*Not) ProtoMessage() {} func (x *Not) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -651,20 +631,17 @@ func (x *Not) GetFilter() *DialectedFilter { } type CESQL struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Expression string `protobuf:"bytes,1,opt,name=expression,proto3" json:"expression,omitempty"` unknownFields protoimpl.UnknownFields - - Expression string `protobuf:"bytes,1,opt,name=expression,proto3" json:"expression,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CESQL) Reset() { *x = CESQL{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CESQL) String() string { @@ -675,7 +652,7 @@ func (*CESQL) ProtoMessage() {} func (x *CESQL) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -698,11 +675,8 @@ func (x *CESQL) GetExpression() string { } type DialectedFilter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Filter: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Filter: // // *DialectedFilter_Exact // *DialectedFilter_Prefix @@ -711,16 +685,16 @@ type DialectedFilter struct { // *DialectedFilter_Any // *DialectedFilter_Not // 
*DialectedFilter_Cesql - Filter isDialectedFilter_Filter `protobuf_oneof:"filter"` + Filter isDialectedFilter_Filter `protobuf_oneof:"filter"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DialectedFilter) Reset() { *x = DialectedFilter{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DialectedFilter) String() string { @@ -731,7 +705,7 @@ func (*DialectedFilter) ProtoMessage() {} func (x *DialectedFilter) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -746,58 +720,72 @@ func (*DialectedFilter) Descriptor() ([]byte, []int) { return file_contract_proto_rawDescGZIP(), []int{8} } -func (m *DialectedFilter) GetFilter() isDialectedFilter_Filter { - if m != nil { - return m.Filter +func (x *DialectedFilter) GetFilter() isDialectedFilter_Filter { + if x != nil { + return x.Filter } return nil } func (x *DialectedFilter) GetExact() *Exact { - if x, ok := x.GetFilter().(*DialectedFilter_Exact); ok { - return x.Exact + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Exact); ok { + return x.Exact + } } return nil } func (x *DialectedFilter) GetPrefix() *Prefix { - if x, ok := x.GetFilter().(*DialectedFilter_Prefix); ok { - return x.Prefix + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Prefix); ok { + return x.Prefix + } } return nil } func (x *DialectedFilter) GetSuffix() *Suffix { - if x, ok := x.GetFilter().(*DialectedFilter_Suffix); ok { - return x.Suffix + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Suffix); ok { + return x.Suffix + } } return nil } func (x 
*DialectedFilter) GetAll() *All { - if x, ok := x.GetFilter().(*DialectedFilter_All); ok { - return x.All + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_All); ok { + return x.All + } } return nil } func (x *DialectedFilter) GetAny() *Any { - if x, ok := x.GetFilter().(*DialectedFilter_Any); ok { - return x.Any + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Any); ok { + return x.Any + } } return nil } func (x *DialectedFilter) GetNot() *Not { - if x, ok := x.GetFilter().(*DialectedFilter_Not); ok { - return x.Not + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Not); ok { + return x.Not + } } return nil } func (x *DialectedFilter) GetCesql() *CESQL { - if x, ok := x.GetFilter().(*DialectedFilter_Cesql); ok { - return x.Cesql + if x != nil { + if x, ok := x.Filter.(*DialectedFilter_Cesql); ok { + return x.Cesql + } } return nil } @@ -849,26 +837,23 @@ func (*DialectedFilter_Not) isDialectedFilter_Filter() {} func (*DialectedFilter_Cesql) isDialectedFilter_Filter() {} type Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // attributes filters events by exact match on event context attributes. // Each key in the map is compared with the equivalent key in the event // context. An event passes the filter if all values are equal to the // specified values. // // Nested context attributes are not supported as keys. Only string values are supported. 
- Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Attributes map[string]string `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Filter) Reset() { *x = Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Filter) String() string { @@ -879,7 +864,7 @@ func (*Filter) ProtoMessage() {} func (x *Filter) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -902,24 +887,21 @@ func (x *Filter) GetAttributes() map[string]string { } type TokenMatcher struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Matcher: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Matcher: // // *TokenMatcher_Exact // *TokenMatcher_Prefix - Matcher isTokenMatcher_Matcher `protobuf_oneof:"matcher"` + Matcher isTokenMatcher_Matcher `protobuf_oneof:"matcher"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TokenMatcher) Reset() { *x = TokenMatcher{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[10] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TokenMatcher) String() string { @@ -930,7 +912,7 @@ func (*TokenMatcher) ProtoMessage() {} func (x *TokenMatcher) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -945,23 +927,27 @@ func (*TokenMatcher) Descriptor() ([]byte, []int) { return file_contract_proto_rawDescGZIP(), []int{10} } -func (m *TokenMatcher) GetMatcher() isTokenMatcher_Matcher { - if m != nil { - return m.Matcher +func (x *TokenMatcher) GetMatcher() isTokenMatcher_Matcher { + if x != nil { + return x.Matcher } return nil } func (x *TokenMatcher) GetExact() *Exact { - if x, ok := x.GetMatcher().(*TokenMatcher_Exact); ok { - return x.Exact + if x != nil { + if x, ok := x.Matcher.(*TokenMatcher_Exact); ok { + return x.Exact + } } return nil } func (x *TokenMatcher) GetPrefix() *Prefix { - if x, ok := x.GetMatcher().(*TokenMatcher_Prefix); ok { - return x.Prefix + if x != nil { + if x, ok := x.Matcher.(*TokenMatcher_Prefix); ok { + return x.Prefix + } } return nil } @@ -983,23 +969,20 @@ func (*TokenMatcher_Exact) isTokenMatcher_Matcher() {} func (*TokenMatcher_Prefix) isTokenMatcher_Matcher() {} type EventPolicy struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Token matchers of this EventPolicy TokenMatchers []*TokenMatcher `protobuf:"bytes,1,rep,name=tokenMatchers,proto3" json:"tokenMatchers,omitempty"` // Filters for this EventPolicy - Filters []*DialectedFilter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + Filters []*DialectedFilter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache 
protoimpl.SizeCache } func (x *EventPolicy) Reset() { *x = EventPolicy{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EventPolicy) String() string { @@ -1010,7 +993,7 @@ func (*EventPolicy) ProtoMessage() {} func (x *EventPolicy) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1040,10 +1023,7 @@ func (x *EventPolicy) GetFilters() []*DialectedFilter { } type EgressConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Dead letter is where the event is sent when something goes wrong DeadLetter string `protobuf:"bytes,1,opt,name=deadLetter,proto3" json:"deadLetter,omitempty"` // Dead Letter CA Cert is the CA Cert used for HTTPS communication through dead letter @@ -1062,16 +1042,16 @@ type EgressConfig struct { // backoffDelay is the delay before retrying in milliseconds. 
BackoffDelay uint64 `protobuf:"varint,4,opt,name=backoffDelay,proto3" json:"backoffDelay,omitempty"` // timeout is the single request timeout (not the overall retry timeout) - Timeout uint64 `protobuf:"varint,5,opt,name=timeout,proto3" json:"timeout,omitempty"` + Timeout uint64 `protobuf:"varint,5,opt,name=timeout,proto3" json:"timeout,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EgressConfig) Reset() { *x = EgressConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EgressConfig) String() string { @@ -1082,7 +1062,7 @@ func (*EgressConfig) ProtoMessage() {} func (x *EgressConfig) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1154,10 +1134,7 @@ func (x *EgressConfig) GetTimeout() uint64 { } type Egress struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // consumer group name ConsumerGroup string `protobuf:"bytes,1,opt,name=consumerGroup,proto3" json:"consumerGroup,omitempty"` // destination is the sink where events are sent. 
@@ -1166,7 +1143,7 @@ type Egress struct { DestinationCACerts string `protobuf:"bytes,15,opt,name=destinationCACerts,proto3" json:"destinationCACerts,omitempty"` // OIDC audience of the destination DestinationAudience string `protobuf:"bytes,17,opt,name=destinationAudience,proto3" json:"destinationAudience,omitempty"` - // Types that are assignable to ReplyStrategy: + // Types that are valid to be assigned to ReplyStrategy: // // *Egress_ReplyUrl // *Egress_ReplyToOriginalTopic @@ -1203,15 +1180,15 @@ type Egress struct { FeatureFlags *EgressFeatureFlags `protobuf:"bytes,14,opt,name=featureFlags,proto3" json:"featureFlags,omitempty"` // Name of the service account to use for OIDC authentication. OidcServiceAccountName string `protobuf:"bytes,19,opt,name=oidcServiceAccountName,proto3" json:"oidcServiceAccountName,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Egress) Reset() { *x = Egress{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Egress) String() string { @@ -1222,7 +1199,7 @@ func (*Egress) ProtoMessage() {} func (x *Egress) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1265,30 +1242,36 @@ func (x *Egress) GetDestinationAudience() string { return "" } -func (m *Egress) GetReplyStrategy() isEgress_ReplyStrategy { - if m != nil { - return m.ReplyStrategy +func (x *Egress) GetReplyStrategy() isEgress_ReplyStrategy { + if x != nil { + return x.ReplyStrategy } return nil } func (x *Egress) GetReplyUrl() string { - if x, ok := x.GetReplyStrategy().(*Egress_ReplyUrl); ok 
{ - return x.ReplyUrl + if x != nil { + if x, ok := x.ReplyStrategy.(*Egress_ReplyUrl); ok { + return x.ReplyUrl + } } return "" } func (x *Egress) GetReplyToOriginalTopic() *Empty { - if x, ok := x.GetReplyStrategy().(*Egress_ReplyToOriginalTopic); ok { - return x.ReplyToOriginalTopic + if x != nil { + if x, ok := x.ReplyStrategy.(*Egress_ReplyToOriginalTopic); ok { + return x.ReplyToOriginalTopic + } } return nil } func (x *Egress) GetDiscardReply() *Empty { - if x, ok := x.GetReplyStrategy().(*Egress_DiscardReply); ok { - return x.DiscardReply + if x != nil { + if x, ok := x.ReplyStrategy.(*Egress_DiscardReply); ok { + return x.DiscardReply + } } return nil } @@ -1403,23 +1386,20 @@ func (*Egress_ReplyToOriginalTopic) isEgress_ReplyStrategy() {} func (*Egress_DiscardReply) isEgress_ReplyStrategy() {} type EgressFeatureFlags struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Enable rateLimiter EnableRateLimiter bool `protobuf:"varint,1,opt,name=enableRateLimiter,proto3" json:"enableRateLimiter,omitempty"` // Enable newMetrics EnableOrderedExecutorMetrics bool `protobuf:"varint,2,opt,name=enableOrderedExecutorMetrics,proto3" json:"enableOrderedExecutorMetrics,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EgressFeatureFlags) Reset() { *x = EgressFeatureFlags{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EgressFeatureFlags) String() string { @@ -1430,7 +1410,7 @@ func (*EgressFeatureFlags) ProtoMessage() {} func (x *EgressFeatureFlags) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { 
+ if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1468,10 +1448,7 @@ func (x *EgressFeatureFlags) GetEnableOrderedExecutorMetrics() bool { // It is allowed to specify both path and host in ingress contract // to support both modes. type Ingress struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional content mode to use when pushing messages to Kafka ContentMode ContentMode `protobuf:"varint,1,opt,name=contentMode,proto3,enum=ContentMode" json:"contentMode,omitempty"` // path to listen for incoming events. @@ -1482,15 +1459,15 @@ type Ingress struct { Audience string `protobuf:"bytes,5,opt,name=audience,proto3" json:"audience,omitempty"` // Ready and applying EventPolicies for this ingress EventPolicies []*EventPolicy `protobuf:"bytes,6,rep,name=eventPolicies,proto3" json:"eventPolicies,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Ingress) Reset() { *x = Ingress{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Ingress) String() string { @@ -1501,7 +1478,7 @@ func (*Ingress) ProtoMessage() {} func (x *Ingress) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1553,10 +1530,7 @@ func (x *Ingress) GetEventPolicies() []*EventPolicy { // Kubernetes resource reference. 
type Reference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Object id. Uuid string `protobuf:"bytes,1,opt,name=uuid,proto3" json:"uuid,omitempty"` // Object namespace. @@ -1568,16 +1542,16 @@ type Reference struct { // Object kind. Kind string `protobuf:"bytes,5,opt,name=kind,proto3" json:"kind,omitempty"` // Object GroupVersion. - GroupVersion string `protobuf:"bytes,6,opt,name=groupVersion,proto3" json:"groupVersion,omitempty"` + GroupVersion string `protobuf:"bytes,6,opt,name=groupVersion,proto3" json:"groupVersion,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Reference) Reset() { *x = Reference{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Reference) String() string { @@ -1588,7 +1562,7 @@ func (*Reference) ProtoMessage() {} func (x *Reference) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1646,23 +1620,20 @@ func (x *Reference) GetGroupVersion() string { } type SecretReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Secret reference. Reference *Reference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"` // Multiple key-field references. 
KeyFieldReferences []*KeyFieldReference `protobuf:"bytes,2,rep,name=keyFieldReferences,proto3" json:"keyFieldReferences,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SecretReference) Reset() { *x = SecretReference{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SecretReference) String() string { @@ -1673,7 +1644,7 @@ func (*SecretReference) ProtoMessage() {} func (x *SecretReference) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1703,23 +1674,20 @@ func (x *SecretReference) GetKeyFieldReferences() []*KeyFieldReference { } type KeyFieldReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Key in the secret. SecretKey string `protobuf:"bytes,2,opt,name=secretKey,proto3" json:"secretKey,omitempty"` // Field name. 
- Field SecretField `protobuf:"varint,3,opt,name=field,proto3,enum=SecretField" json:"field,omitempty"` + Field SecretField `protobuf:"varint,3,opt,name=field,proto3,enum=SecretField" json:"field,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *KeyFieldReference) Reset() { *x = KeyFieldReference{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *KeyFieldReference) String() string { @@ -1730,7 +1698,7 @@ func (*KeyFieldReference) ProtoMessage() {} func (x *KeyFieldReference) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1760,23 +1728,20 @@ func (x *KeyFieldReference) GetField() SecretField { } type MultiSecretReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Protocol. Protocol Protocol `protobuf:"varint,1,opt,name=protocol,proto3,enum=Protocol" json:"protocol,omitempty"` // Secret references. 
- References []*SecretReference `protobuf:"bytes,2,rep,name=references,proto3" json:"references,omitempty"` + References []*SecretReference `protobuf:"bytes,2,rep,name=references,proto3" json:"references,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *MultiSecretReference) Reset() { *x = MultiSecretReference{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *MultiSecretReference) String() string { @@ -1787,7 +1752,7 @@ func (*MultiSecretReference) ProtoMessage() {} func (x *MultiSecretReference) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1818,20 +1783,17 @@ func (x *MultiSecretReference) GetReferences() []*SecretReference { // CloudEvent overrides. 
type CloudEventOverrides struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Extensions map[string]string `protobuf:"bytes,1,rep,name=extensions,proto3" json:"extensions,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` unknownFields protoimpl.UnknownFields - - Extensions map[string]string `protobuf:"bytes,1,rep,name=extensions,proto3" json:"extensions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + sizeCache protoimpl.SizeCache } func (x *CloudEventOverrides) Reset() { *x = CloudEventOverrides{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CloudEventOverrides) String() string { @@ -1842,7 +1804,7 @@ func (*CloudEventOverrides) ProtoMessage() {} func (x *CloudEventOverrides) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1865,20 +1827,17 @@ func (x *CloudEventOverrides) GetExtensions() map[string]string { } type FeatureFlags struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - EnableEventTypeAutocreate bool `protobuf:"varint,1,opt,name=enableEventTypeAutocreate,proto3" json:"enableEventTypeAutocreate,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + EnableEventTypeAutocreate bool `protobuf:"varint,1,opt,name=enableEventTypeAutocreate,proto3" json:"enableEventTypeAutocreate,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x 
*FeatureFlags) Reset() { *x = FeatureFlags{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *FeatureFlags) String() string { @@ -1889,7 +1848,7 @@ func (*FeatureFlags) ProtoMessage() {} func (x *FeatureFlags) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1912,10 +1871,7 @@ func (x *FeatureFlags) GetEnableEventTypeAutocreate() bool { } type Resource struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Id of the resource // It's the same as the Kubernetes resource uid Uid string `protobuf:"bytes,1,opt,name=uid,proto3" json:"uid,omitempty"` @@ -1933,7 +1889,7 @@ type Resource struct { EgressConfig *EgressConfig `protobuf:"bytes,5,opt,name=egressConfig,proto3" json:"egressConfig,omitempty"` // Optional egresses for this topic Egresses []*Egress `protobuf:"bytes,6,rep,name=egresses,proto3" json:"egresses,omitempty"` - // Types that are assignable to Auth: + // Types that are valid to be assigned to Auth: // // *Resource_AbsentAuth // *Resource_AuthSecret @@ -1948,16 +1904,16 @@ type Resource struct { // - tagging metrics Reference *Reference `protobuf:"bytes,11,opt,name=reference,proto3" json:"reference,omitempty"` // Feature flags for the resource - FeatureFlags *FeatureFlags `protobuf:"bytes,12,opt,name=featureFlags,proto3" json:"featureFlags,omitempty"` + FeatureFlags *FeatureFlags `protobuf:"bytes,12,opt,name=featureFlags,proto3" json:"featureFlags,omitempty"` + unknownFields protoimpl.UnknownFields + 
sizeCache protoimpl.SizeCache } func (x *Resource) Reset() { *x = Resource{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Resource) String() string { @@ -1968,7 +1924,7 @@ func (*Resource) ProtoMessage() {} func (x *Resource) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2025,30 +1981,36 @@ func (x *Resource) GetEgresses() []*Egress { return nil } -func (m *Resource) GetAuth() isResource_Auth { - if m != nil { - return m.Auth +func (x *Resource) GetAuth() isResource_Auth { + if x != nil { + return x.Auth } return nil } func (x *Resource) GetAbsentAuth() *Empty { - if x, ok := x.GetAuth().(*Resource_AbsentAuth); ok { - return x.AbsentAuth + if x != nil { + if x, ok := x.Auth.(*Resource_AbsentAuth); ok { + return x.AbsentAuth + } } return nil } func (x *Resource) GetAuthSecret() *Reference { - if x, ok := x.GetAuth().(*Resource_AuthSecret); ok { - return x.AuthSecret + if x != nil { + if x, ok := x.Auth.(*Resource_AuthSecret); ok { + return x.AuthSecret + } } return nil } func (x *Resource) GetMultiAuthSecret() *MultiSecretReference { - if x, ok := x.GetAuth().(*Resource_MultiAuthSecret); ok { - return x.MultiAuthSecret + if x != nil { + if x, ok := x.Auth.(*Resource_MultiAuthSecret); ok { + return x.MultiAuthSecret + } } return nil } @@ -2131,25 +2093,22 @@ func (*Resource_AuthSecret) isResource_Auth() {} func (*Resource_MultiAuthSecret) isResource_Auth() {} type Contract struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState 
`protogen:"open.v1"` // Count each contract update. // Make sure each data plane pod has the same contract generation number. Generation uint64 `protobuf:"varint,1,opt,name=generation,proto3" json:"generation,omitempty"` Resources []*Resource `protobuf:"bytes,2,rep,name=resources,proto3" json:"resources,omitempty"` // PEM encoded CA trust bundles for HTTP client. - TrustBundles []string `protobuf:"bytes,3,rep,name=trustBundles,proto3" json:"trustBundles,omitempty"` + TrustBundles []string `protobuf:"bytes,3,rep,name=trustBundles,proto3" json:"trustBundles,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Contract) Reset() { *x = Contract{} - if protoimpl.UnsafeEnabled { - mi := &file_contract_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_contract_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Contract) String() string { @@ -2160,7 +2119,7 @@ func (*Contract) ProtoMessage() {} func (x *Contract) ProtoReflect() protoreflect.Message { mi := &file_contract_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2499,7 +2458,7 @@ func file_contract_proto_rawDescGZIP() []byte { var file_contract_proto_enumTypes = make([]protoimpl.EnumInfo, 6) var file_contract_proto_msgTypes = make([]protoimpl.MessageInfo, 29) -var file_contract_proto_goTypes = []interface{}{ +var file_contract_proto_goTypes = []any{ (BackoffPolicy)(0), // 0: BackoffPolicy (DeliveryOrder)(0), // 1: DeliveryOrder (KeyType)(0), // 2: KeyType @@ -2595,297 +2554,7 @@ func file_contract_proto_init() { if File_contract_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_contract_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Empty); i { - 
case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Exact); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Prefix); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Suffix); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*All); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Any); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Not); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CESQL); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DialectedFilter); i { - case 0: - return &v.state - case 1: - 
return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TokenMatcher); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EventPolicy); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EgressConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Egress); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EgressFeatureFlags); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Ingress); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Reference); i { - case 0: - return &v.state - case 1: - 
return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SecretReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*KeyFieldReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MultiSecretReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CloudEventOverrides); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureFlags); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Resource); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_contract_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Contract); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_contract_proto_msgTypes[8].OneofWrappers = []interface{}{ + file_contract_proto_msgTypes[8].OneofWrappers = []any{ 
(*DialectedFilter_Exact)(nil), (*DialectedFilter_Prefix)(nil), (*DialectedFilter_Suffix)(nil), @@ -2894,16 +2563,16 @@ func file_contract_proto_init() { (*DialectedFilter_Not)(nil), (*DialectedFilter_Cesql)(nil), } - file_contract_proto_msgTypes[10].OneofWrappers = []interface{}{ + file_contract_proto_msgTypes[10].OneofWrappers = []any{ (*TokenMatcher_Exact)(nil), (*TokenMatcher_Prefix)(nil), } - file_contract_proto_msgTypes[13].OneofWrappers = []interface{}{ + file_contract_proto_msgTypes[13].OneofWrappers = []any{ (*Egress_ReplyUrl)(nil), (*Egress_ReplyToOriginalTopic)(nil), (*Egress_DiscardReply)(nil), } - file_contract_proto_msgTypes[22].OneofWrappers = []interface{}{ + file_contract_proto_msgTypes[22].OneofWrappers = []any{ (*Resource_AbsentAuth)(nil), (*Resource_AuthSecret)(nil), (*Resource_MultiAuthSecret)(nil), diff --git a/data-plane/THIRD-PARTY.txt b/data-plane/THIRD-PARTY.txt index 50107f2410..a7c0e4c4ff 100644 --- a/data-plane/THIRD-PARTY.txt +++ b/data-plane/THIRD-PARTY.txt @@ -3,7 +3,7 @@ Lists of 238 third-party dependencies. 
(Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Classic Module (ch.qos.logback:logback-classic:1.5.16 - http://logback.qos.ch/logback-classic) (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Core Module (ch.qos.logback:logback-core:1.5.16 - http://logback.qos.ch/logback-core) (Apache License 2.0) brotli4j (com.aayushatharva.brotli4j:brotli4j:1.16.0 - https://github.com/hyperxpro/Brotli4j/brotli4j) (Apache License 2.0) native-linux-x86_64 (com.aayushatharva.brotli4j:native-linux-x86_64:1.16.0 - https://github.com/hyperxpro/Brotli4j/natives/native-linux-x86_64) (Apache License 2.0) service (com.aayushatharva.brotli4j:service:1.16.0 - https://github.com/hyperxpro/Brotli4j/service) (The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.17.2 - https://github.com/FasterXML/jackson) (The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.17.2 - https://github.com/FasterXML/jackson-core) diff --git a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java index e7d44edf12..49eecee876 100644 --- a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java +++ b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java @@ -1,11 +1,23 @@ // Generated by the protocol buffer compiler. DO NOT EDIT!
+// NO CHECKED-IN PROTOBUF GENCODE // source: contract.proto +// Protobuf Java Version: 4.29.3 package dev.knative.eventing.kafka.broker.contract; public final class DataPlaneContract { private DataPlaneContract() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + DataPlaneContract.class.getName()); + } + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { @@ -38,6 +50,15 @@ public enum BackoffPolicy implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + BackoffPolicy.class.getName()); + } /** *
* Exponential backoff policy @@ -155,6 +176,15 @@ public enum DeliveryOrder implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + DeliveryOrder.class.getName()); + } /** ** @@ -8392,6 +7921,7 @@ public interface FilterOrBuilder * Each key in the map is compared with the equivalent key in the event * context. An event passes the filter if all values are equal to the * specified values. + * * Nested context attributes are not supported as keys. Only string values are supported. * * @@ -8409,6 +7939,7 @@ public interface FilterOrBuilder * Each key in the map is compared with the equivalent key in the event * context. An event passes the filter if all values are equal to the * specified values. + * * Nested context attributes are not supported as keys. Only string values are supported. * * @@ -8421,18 +7952,24 @@ public interface FilterOrBuilder * Each key in the map is compared with the equivalent key in the event * context. An event passes the filter if all values are equal to the * specified values. + * * Nested context attributes are not supported as keys. Only string values are supported. * * *UNORDERED = 0;
*/ @@ -268,6 +298,15 @@ public enum KeyType implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + KeyType.class.getName()); + } /** *String = 0;
*/ @@ -389,6 +428,15 @@ public enum ContentMode implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + ContentMode.class.getName()); + } /** *BINARY = 0;
*/ @@ -510,6 +558,15 @@ public enum SecretField implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + SecretField.class.getName()); + } /** *SASL_MECHANISM = 0;
*/ @@ -647,6 +704,15 @@ public enum Protocol implements com.google.protobuf.ProtocolMessageEnum { UNRECOGNIZED(-1), ; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Protocol.class.getName()); + } /** *PLAINTEXT = 0;
*/ @@ -764,70 +830,34 @@ public interface EmptyOrBuilder * * Protobuf type {@code Empty} */ - public static final class Empty extends com.google.protobuf.GeneratedMessageV3 + public static final class Empty extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Empty) EmptyOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Empty.class.getName()); + } // Use Empty.newBuilder() to construct. - private Empty(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Empty(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Empty() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Empty(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Empty( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Empty_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Empty_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.class, @@ -848,7 +878,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -857,7 +887,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -873,7 +903,7 @@ public boolean equals(final java.lang.Object obj) { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty other = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) obj; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -884,7 +914,7 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -924,36 +954,35 @@ public static 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -975,7 +1004,7 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -990,7 +1019,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * * Protobuf type {@code Empty} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Empty) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder { @@ -999,7 +1028,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Empty_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1008,17 +1037,10 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override @@ -1054,38 +1076,6 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty buildP return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) { @@ -1099,7 +1089,7 @@ public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty other) { if (other == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.getDefaultInstance()) return this; - this.mergeUnknownFields(other.unknownFields); + 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -1114,31 +1104,33 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Empty) } @@ -1160,7 +1152,18 @@ public Empty parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Empty(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -1204,7 +1207,11 @@ public interface ExactOrBuilder /** * map<string, string> attributes = 1;
*/ - java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue); + /* nullable */ + java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); /** *map<string, string> attributes = 1;
*/ @@ -1213,85 +1220,35 @@ public interface ExactOrBuilder /** * Protobuf type {@code Exact} */ - public static final class Exact extends com.google.protobuf.GeneratedMessageV3 + public static final class Exact extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Exact) ExactOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Exact.class.getName()); + } // Use Exact.newBuilder() to construct. - private Exact(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Exact(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Exact() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Exact(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Exact( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - attributes_ = com.google.protobuf.MapField.newMapField( - AttributesDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000001; - } - com.google.protobuf.MapEntryattributes__ = - input.readMessage( - 
AttributesDefaultEntryHolder.defaultEntry.getParserForType(), - extensionRegistry); - attributes_.getMutableMap().put(attributes__.getKey(), attributes__.getValue()); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Exact_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -1301,7 +1258,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Exact_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.class, @@ -1321,6 +1278,7 @@ private static final class AttributesDefaultEntryHolder { ""); } + @SuppressWarnings("serial") private com.google.protobuf.MapField attributes_; private com.google.protobuf.MapField internalGetAttributes() { @@ -1339,7 +1297,7 @@ public int getAttributesCount() { @java.lang.Override public boolean 
containsAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -1362,9 +1320,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -1376,7 +1337,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -1400,9 +1361,9 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + com.google.protobuf.GeneratedMessage.serializeStringMapTo( output, internalGetAttributes(), AttributesDefaultEntryHolder.defaultEntry, 1); - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -1421,7 +1382,7 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attributes__); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -1438,7 +1399,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact) obj; if (!internalGetAttributes().equals(other.internalGetAttributes())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -1453,7 +1414,7 @@ public int hashCode() { hash = (37 * hash) + 
ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + internalGetAttributes().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -1493,36 +1454,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseFrom( com.google.protobuf.CodedInputStream input) 
throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -1544,14 +1504,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Exact} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Exact) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder { @@ -1560,7 +1520,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -1570,7 +1530,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor 
internalGetMutableMapFieldReflection(int number) { switch (number) { case 1: return internalGetMutableAttributes(); @@ -1580,7 +1540,7 @@ protected com.google.protobuf.MapField internalGetMutableMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Exact_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1589,22 +1549,16 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; internalGetMutableAttributes().clear(); return this; } @@ -1632,43 +1586,19 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact build( public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact(this); - int from_bitField0_ = bitField0_; - result.attributes_ = internalGetAttributes(); - result.attributes_.makeImmutable(); + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - 
@java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.attributes_ = internalGetAttributes(); + result.attributes_.makeImmutable(); + } } @java.lang.Override @@ -1685,7 +1615,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.getDefaultInstance()) return this; internalGetMutableAttributes().mergeFrom(other.internalGetAttributes()); - this.mergeUnknownFields(other.unknownFields); + bitField0_ |= 0x00000001; + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -1700,18 +1631,41 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry attributes__ = + input.readMessage( + AttributesDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableAttributes() + .getMutableMap() + .put(attributes__.getKey(), attributes__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -1727,14 +1681,14 @@ private com.google.protobuf.MapField interna } private com.google.protobuf.MapField internalGetMutableAttributes() { - onChanged(); - ; if (attributes_ == null) { attributes_ = com.google.protobuf.MapField.newMapField(AttributesDefaultEntryHolder.defaultEntry); } if (!attributes_.isMutable()) { attributes_ = attributes_.copy(); } + bitField0_ |= 0x00000001; + onChanged(); return attributes_; } @@ -1747,7 +1701,7 @@ public int getAttributesCount() { @java.lang.Override public boolean containsAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -1770,9 +1724,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -1784,7 +1741,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -1795,6 +1752,7 @@ public java.lang.String getAttributesOrThrow(java.lang.String key) { } public Builder clearAttributes() { + bitField0_ = (bitField0_ & ~0x00000001); internalGetMutableAttributes().getMutableMap().clear(); return this; } @@ -1803,7 +1761,7 @@ public Builder clearAttributes() { */ public Builder removeAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } internalGetMutableAttributes().getMutableMap().remove(key); return this; @@ -1813,6 +1771,7 @@ public Builder removeAttributes(java.lang.String key) { */ @java.lang.Deprecated public java.util.Map getMutableAttributes() { + bitField0_ |= 0x00000001; return internalGetMutableAttributes().getMutableMap(); } /** @@ -1820,12 +1779,13 @@ public java.util.Map getMutableAttributes() */ public Builder putAttributes(java.lang.String key, java.lang.String value) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } if (value == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map value"); } 
internalGetMutableAttributes().getMutableMap().put(key, value); + bitField0_ |= 0x00000001; return this; } /** @@ -1833,19 +1793,10 @@ public Builder putAttributes(java.lang.String key, java.lang.String value) { */ public Builder putAllAttributes(java.util.Map values) { internalGetMutableAttributes().getMutableMap().putAll(values); + bitField0_ |= 0x00000001; return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Exact) } @@ -1867,7 +1818,18 @@ public Exact parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Exact(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -1911,7 +1873,11 @@ public interface PrefixOrBuilder /** * map<string, string> attributes = 1;
*/ - java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue); + /* nullable */ + java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); /** *map<string, string> attributes = 1;
*/ @@ -1920,85 +1886,35 @@ public interface PrefixOrBuilder /** * Protobuf type {@code Prefix} */ - public static final class Prefix extends com.google.protobuf.GeneratedMessageV3 + public static final class Prefix extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Prefix) PrefixOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Prefix.class.getName()); + } // Use Prefix.newBuilder() to construct. - private Prefix(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Prefix(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Prefix() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Prefix(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Prefix( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - attributes_ = com.google.protobuf.MapField.newMapField( - AttributesDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000001; - } - com.google.protobuf.MapEntryattributes__ = - input.readMessage( - 
AttributesDefaultEntryHolder.defaultEntry.getParserForType(), - extensionRegistry); - attributes_.getMutableMap().put(attributes__.getKey(), attributes__.getValue()); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Prefix_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -2008,7 +1924,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Prefix_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2029,6 +1945,7 @@ private static final class AttributesDefaultEntryHolder { ""); } + @SuppressWarnings("serial") private com.google.protobuf.MapField attributes_; private com.google.protobuf.MapField internalGetAttributes() { @@ -2047,7 +1964,7 @@ public int getAttributesCount() { @java.lang.Override public boolean containsAttributes(java.lang.String key) { if (key == null) { - throw new 
java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -2070,9 +1987,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -2084,7 +2004,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -2108,9 +2028,9 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + com.google.protobuf.GeneratedMessage.serializeStringMapTo( output, internalGetAttributes(), AttributesDefaultEntryHolder.defaultEntry, 1); - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -2129,7 +2049,7 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attributes__); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -2146,7 +2066,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix) obj; if (!internalGetAttributes().equals(other.internalGetAttributes())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -2161,7 +2081,7 @@ public int hashCode() { hash = (37 * hash) + 
ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + internalGetAttributes().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -2201,36 +2121,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefi public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseFrom( com.google.protobuf.CodedInputStream input) 
throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -2253,14 +2172,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Prefix} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Prefix) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder { @@ -2269,7 +2188,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -2279,7 +2198,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor 
internalGetMutableMapFieldReflection(int number) { switch (number) { case 1: return internalGetMutableAttributes(); @@ -2289,7 +2208,7 @@ protected com.google.protobuf.MapField internalGetMutableMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Prefix_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2298,22 +2217,16 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; internalGetMutableAttributes().clear(); return this; } @@ -2341,43 +2254,19 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix build public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix(this); - int from_bitField0_ = bitField0_; - result.attributes_ = internalGetAttributes(); - result.attributes_.makeImmutable(); + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - 
@java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.attributes_ = internalGetAttributes(); + result.attributes_.makeImmutable(); + } } @java.lang.Override @@ -2394,7 +2283,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.getDefaultInstance()) return this; internalGetMutableAttributes().mergeFrom(other.internalGetAttributes()); - this.mergeUnknownFields(other.unknownFields); + bitField0_ |= 0x00000001; + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -2409,18 +2299,41 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry attributes__ = + input.readMessage( + AttributesDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableAttributes() + .getMutableMap() + .put(attributes__.getKey(), attributes__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -2436,14 +2349,14 @@ private com.google.protobuf.MapField interna } private com.google.protobuf.MapField internalGetMutableAttributes() { - onChanged(); - ; if (attributes_ == null) { attributes_ = com.google.protobuf.MapField.newMapField(AttributesDefaultEntryHolder.defaultEntry); } if (!attributes_.isMutable()) { attributes_ = attributes_.copy(); } + bitField0_ |= 0x00000001; + onChanged(); return attributes_; } @@ -2456,7 +2369,7 @@ public int getAttributesCount() { @java.lang.Override public boolean containsAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -2479,9 +2392,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -2493,7 +2409,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -2504,6 +2420,7 @@ public java.lang.String getAttributesOrThrow(java.lang.String key) { } public Builder clearAttributes() { + bitField0_ = (bitField0_ & ~0x00000001); internalGetMutableAttributes().getMutableMap().clear(); return this; } @@ -2512,7 +2429,7 @@ public Builder clearAttributes() { */ public Builder removeAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } internalGetMutableAttributes().getMutableMap().remove(key); return this; @@ -2522,6 +2439,7 @@ public Builder removeAttributes(java.lang.String key) { */ @java.lang.Deprecated public java.util.Map getMutableAttributes() { + bitField0_ |= 0x00000001; return internalGetMutableAttributes().getMutableMap(); } /** @@ -2529,12 +2447,13 @@ public java.util.Map getMutableAttributes() */ public Builder putAttributes(java.lang.String key, java.lang.String value) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } if (value == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map value"); } 
internalGetMutableAttributes().getMutableMap().put(key, value); + bitField0_ |= 0x00000001; return this; } /** @@ -2542,19 +2461,10 @@ public Builder putAttributes(java.lang.String key, java.lang.String value) { */ public Builder putAllAttributes(java.util.Map values) { internalGetMutableAttributes().getMutableMap().putAll(values); + bitField0_ |= 0x00000001; return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Prefix) } @@ -2576,7 +2486,18 @@ public Prefix parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Prefix(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -2620,7 +2541,11 @@ public interface SuffixOrBuilder /** * map<string, string> attributes = 1;
*/ - java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue); + /* nullable */ + java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); /** *map<string, string> attributes = 1;
*/ @@ -2629,85 +2554,35 @@ public interface SuffixOrBuilder /** * Protobuf type {@code Suffix} */ - public static final class Suffix extends com.google.protobuf.GeneratedMessageV3 + public static final class Suffix extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Suffix) SuffixOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Suffix.class.getName()); + } // Use Suffix.newBuilder() to construct. - private Suffix(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Suffix(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Suffix() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Suffix(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Suffix( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - attributes_ = com.google.protobuf.MapField.newMapField( - AttributesDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000001; - } - com.google.protobuf.MapEntryattributes__ = - input.readMessage( - 
AttributesDefaultEntryHolder.defaultEntry.getParserForType(), - extensionRegistry); - attributes_.getMutableMap().put(attributes__.getKey(), attributes__.getValue()); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Suffix_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -2717,7 +2592,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Suffix_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -2738,6 +2613,7 @@ private static final class AttributesDefaultEntryHolder { ""); } + @SuppressWarnings("serial") private com.google.protobuf.MapField attributes_; private com.google.protobuf.MapField internalGetAttributes() { @@ -2756,7 +2632,7 @@ public int getAttributesCount() { @java.lang.Override public boolean containsAttributes(java.lang.String key) { if (key == null) { - throw new 
java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -2779,9 +2655,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -2793,7 +2672,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -2817,9 +2696,9 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + com.google.protobuf.GeneratedMessage.serializeStringMapTo( output, internalGetAttributes(), AttributesDefaultEntryHolder.defaultEntry, 1); - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -2838,7 +2717,7 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attributes__); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -2855,7 +2734,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix) obj; if (!internalGetAttributes().equals(other.internalGetAttributes())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -2870,7 +2749,7 @@ public int hashCode() { hash = (37 * hash) + 
ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + internalGetAttributes().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -2910,36 +2789,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffi public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseFrom( com.google.protobuf.CodedInputStream input) 
throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -2962,14 +2840,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Suffix} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Suffix) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuilder { @@ -2978,7 +2856,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetAttributes(); @@ -2988,7 +2866,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor 
internalGetMutableMapFieldReflection(int number) { switch (number) { case 1: return internalGetMutableAttributes(); @@ -2998,7 +2876,7 @@ protected com.google.protobuf.MapField internalGetMutableMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Suffix_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3007,22 +2885,16 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; internalGetMutableAttributes().clear(); return this; } @@ -3050,43 +2922,19 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix build public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix(this); - int from_bitField0_ = bitField0_; - result.attributes_ = internalGetAttributes(); - result.attributes_.makeImmutable(); + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - 
@java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.attributes_ = internalGetAttributes(); + result.attributes_.makeImmutable(); + } } @java.lang.Override @@ -3103,7 +2951,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.getDefaultInstance()) return this; internalGetMutableAttributes().mergeFrom(other.internalGetAttributes()); - this.mergeUnknownFields(other.unknownFields); + bitField0_ |= 0x00000001; + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -3118,18 +2967,41 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + if (extensionRegistry == 
null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry attributes__ = + input.readMessage( + AttributesDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableAttributes() + .getMutableMap() + .put(attributes__.getKey(), attributes__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -3145,14 +3017,14 @@ private com.google.protobuf.MapField interna } private com.google.protobuf.MapField internalGetMutableAttributes() { - onChanged(); - ; if (attributes_ == null) { attributes_ = com.google.protobuf.MapField.newMapField(AttributesDefaultEntryHolder.defaultEntry); } if (!attributes_.isMutable()) { attributes_ = attributes_.copy(); } + bitField0_ |= 0x00000001; + onChanged(); return attributes_; } @@ -3165,7 +3037,7 @@ public int getAttributesCount() { @java.lang.Override public boolean containsAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetAttributes().getMap().containsKey(key); } @@ -3188,9 +3060,12 @@ public java.util.Map getAttributesMap() { * map<string, string> attributes = 1;
*/ @java.lang.Override - public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getAttributesOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetAttributes().getMap(); @@ -3202,7 +3077,7 @@ public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getAttributesOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetAttributes().getMap(); @@ -3213,6 +3088,7 @@ public java.lang.String getAttributesOrThrow(java.lang.String key) { } public Builder clearAttributes() { + bitField0_ = (bitField0_ & ~0x00000001); internalGetMutableAttributes().getMutableMap().clear(); return this; } @@ -3221,7 +3097,7 @@ public Builder clearAttributes() { */ public Builder removeAttributes(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } internalGetMutableAttributes().getMutableMap().remove(key); return this; @@ -3231,6 +3107,7 @@ public Builder removeAttributes(java.lang.String key) { */ @java.lang.Deprecated public java.util.Map getMutableAttributes() { + bitField0_ |= 0x00000001; return internalGetMutableAttributes().getMutableMap(); } /** @@ -3238,12 +3115,13 @@ public java.util.Map getMutableAttributes() */ public Builder putAttributes(java.lang.String key, java.lang.String value) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } if (value == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map value"); } 
internalGetMutableAttributes().getMutableMap().put(key, value); + bitField0_ |= 0x00000001; return this; } /** @@ -3251,19 +3129,10 @@ public Builder putAttributes(java.lang.String key, java.lang.String value) { */ public Builder putAllAttributes(java.util.Map values) { internalGetMutableAttributes().getMutableMap().putAll(values); + bitField0_ |= 0x00000001; return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Suffix) } @@ -3285,7 +3154,18 @@ public Suffix parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Suffix(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -3335,13 +3215,23 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBu /** * Protobuf type {@code All} */ - public static final class All extends com.google.protobuf.GeneratedMessageV3 + public static final class All extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:All) AllOrBuilder { private 
static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + All.class.getName()); + } // Use All.newBuilder() to construct. - private All(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private All(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -3349,74 +3239,12 @@ private All() { filters_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new All(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private All( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - filters_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter>(); - mutable_bitField0_ |= 0x00000001; - } - filters_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter - .parser(), - extensionRegistry)); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw 
e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - filters_ = java.util.Collections.unmodifiableList(filters_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_All_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_All_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.class, @@ -3424,6 +3252,8 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int FILTERS_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") private java.util.List filters_; /** * repeated .DialectedFilter filters = 1;
@@ -3482,7 +3312,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < filters_.size(); i++) { output.writeMessage(1, filters_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -3494,7 +3324,7 @@ public int getSerializedSize() { for (int i = 0; i < filters_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, filters_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -3511,7 +3341,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All) obj; if (!getFiltersList().equals(other.getFiltersList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -3526,7 +3356,7 @@ public int hashCode() { hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFiltersList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -3566,36 +3396,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All p public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -3617,14 +3446,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } 
/** * Protobuf type {@code All} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:All) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder { @@ -3633,7 +3462,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_All_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -3642,30 +3471,23 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getFiltersFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + filters_ = null; filtersBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -3692,7 +3514,16 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All build() public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All buildPartial() { 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All(this); - int from_bitField0_ = bitField0_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All result) { if (filtersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { filters_ = java.util.Collections.unmodifiableList(filters_); @@ -3702,40 +3533,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All buildPar } else { result.filters_ = filtersBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All result) { + int from_bitField0_ = bitField0_; } @java.lang.Override @@ -3769,7 +3570,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon filtersBuilder_ = null; filters_ 
= other.filters_; bitField0_ = (bitField0_ & ~0x00000001); - filtersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + filtersBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getFiltersFieldBuilder() : null; } else { @@ -3777,7 +3578,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -3792,18 +3593,44 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .DialectedFilter.parser(), + extensionRegistry); + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.add(m); + } else { + filtersBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -3820,7 +3647,7 @@ private void 
ensureFiltersIsMutable() { } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> @@ -4060,13 +3887,13 @@ public Builder removeFilters(int index) { return getFiltersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> getFiltersFieldBuilder() { if (filtersBuilder_ == null) { - filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder>( @@ -4076,16 +3903,6 @@ public Builder removeFilters(int index) { return filtersBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:All) } @@ -4106,7 +3923,18 @@ public All parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return new All(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -4156,13 +3984,23 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBu /** * Protobuf type {@code Any} */ - public static final class Any extends com.google.protobuf.GeneratedMessageV3 + public static final class Any extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Any) AnyOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Any.class.getName()); + } // Use Any.newBuilder() to construct. 
- private Any(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Any(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -4170,74 +4008,12 @@ private Any() { filters_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Any(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Any( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - filters_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter>(); - mutable_bitField0_ |= 0x00000001; - } - filters_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter - .parser(), - extensionRegistry)); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - filters_ = java.util.Collections.unmodifiableList(filters_); - } - this.unknownFields = 
unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Any_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Any_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.class, @@ -4245,6 +4021,8 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int FILTERS_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") private java.util.List filters_; /** * repeated .DialectedFilter filters = 1;
@@ -4303,7 +4081,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < filters_.size(); i++) { output.writeMessage(1, filters_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -4315,7 +4093,7 @@ public int getSerializedSize() { for (int i = 0; i < filters_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, filters_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -4332,7 +4110,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any) obj; if (!getFiltersList().equals(other.getFiltersList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -4347,7 +4125,7 @@ public int hashCode() { hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFiltersList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -4387,36 +4165,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any p public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -4438,14 +4215,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } 
/** * Protobuf type {@code Any} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Any) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder { @@ -4454,7 +4231,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Any_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -4463,30 +4240,23 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getFiltersFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + filters_ = null; filtersBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -4513,7 +4283,16 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any build() public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any buildPartial() { 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any(this); - int from_bitField0_ = bitField0_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any result) { if (filtersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { filters_ = java.util.Collections.unmodifiableList(filters_); @@ -4523,40 +4302,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any buildPar } else { result.filters_ = filtersBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); } - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any result) { + int from_bitField0_ = bitField0_; } @java.lang.Override @@ -4590,7 +4339,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon filtersBuilder_ = null; filters_ 
= other.filters_; bitField0_ = (bitField0_ & ~0x00000001); - filtersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + filtersBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getFiltersFieldBuilder() : null; } else { @@ -4598,7 +4347,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -4613,18 +4362,44 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .DialectedFilter.parser(), + extensionRegistry); + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.add(m); + } else { + filtersBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -4641,7 +4416,7 @@ private void 
ensureFiltersIsMutable() { } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> @@ -4881,13 +4656,13 @@ public Builder removeFilters(int index) { return getFiltersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> getFiltersFieldBuilder() { if (filtersBuilder_ == null) { - filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder>( @@ -4897,16 +4672,6 @@ public Builder removeFilters(int index) { return filtersBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Any) } @@ -4927,7 +4692,18 @@ public Any parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { - return new Any(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -4969,93 +4745,41 @@ public interface NotOrBuilder /** * Protobuf type {@code Not} */ - public static final class Not extends com.google.protobuf.GeneratedMessageV3 + public static final class Not extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Not) NotOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Not.class.getName()); + } // Use Not.newBuilder() to construct. 
- private Not(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Not(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Not() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Not(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Not( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder - subBuilder = null; - if (filter_ != null) { - subBuilder = filter_.toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter - .parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(filter_); - filter_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Not_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Not_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.class, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.Builder.class); } + private int bitField0_; public static final int FILTER_FIELD_NUMBER = 1; private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter filter_; /** @@ -5064,7 +4788,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF */ @java.lang.Override public boolean hasFilter() { - return filter_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** * .DialectedFilter filter = 1;
@@ -5082,7 +4806,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFil @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder getFilterOrBuilder() { - return getFilter(); + return filter_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.getDefaultInstance() + : filter_; } private byte memoizedIsInitialized = -1; @@ -5099,10 +4825,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (filter_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getFilter()); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -5111,10 +4837,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (filter_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getFilter()); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -5134,7 +4860,7 @@ public boolean equals(final java.lang.Object obj) { if (hasFilter()) { if (!getFilter().equals(other.getFilter())) return false; } - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -5149,7 +4875,7 @@ public int hashCode() { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -5189,36 +4915,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not p public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseFrom( java.io.InputStream 
input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -5240,14 +4965,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Not} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Not) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder { @@ -5256,7 +4981,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Not_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5269,22 +4994,24 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFilterFieldBuilder(); + } } @java.lang.Override public Builder clear() { super.clear(); - if (filterBuilder_ == null) { - filter_ = 
null; - } else { - filter_ = null; + bitField0_ = 0; + filter_ = null; + if (filterBuilder_ != null) { + filterBuilder_.dispose(); filterBuilder_ = null; } return this; @@ -5313,45 +5040,21 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not build() public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not(this); - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); + if (bitField0_ != 0) { + buildPartial0(result); } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.filter_ = filterBuilder_ == null ? 
filter_ : filterBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; } @java.lang.Override @@ -5370,7 +5073,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other.hasFilter()) { mergeFilter(other.getFilter()); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -5385,23 +5088,42 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage(getFilterFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter filter_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> @@ -5411,7 +5133,7 @@ public Builder mergeFrom( * @return Whether the filter field is set. */ public boolean hasFilter() { - return filterBuilder_ != null || filter_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** * .DialectedFilter filter = 1;
@@ -5437,11 +5159,11 @@ public Builder setFilter( throw new NullPointerException(); } filter_ = value; - onChanged(); } else { filterBuilder_.setMessage(value); } - + bitField0_ |= 0x00000001; + onChanged(); return this; } /** @@ -5452,11 +5174,11 @@ public Builder setFilter( builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); - onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000001; + onChanged(); return this; } /** @@ -5465,34 +5187,35 @@ public Builder setFilter( public Builder mergeFilter( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter value) { if (filterBuilder_ == null) { - if (filter_ != null) { - filter_ = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.newBuilder( - filter_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000001) != 0) + && filter_ != null + && filter_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter + .getDefaultInstance()) { + getFilterBuilder().mergeFrom(value); } else { filter_ = value; } - onChanged(); } else { filterBuilder_.mergeFrom(value); } - + if (filter_ != null) { + bitField0_ |= 0x00000001; + onChanged(); + } return this; } /** *.DialectedFilter filter = 1;
*/ public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = null; - onChanged(); - } else { - filter_ = null; + bitField0_ = (bitField0_ & ~0x00000001); + filter_ = null; + if (filterBuilder_ != null) { + filterBuilder_.dispose(); filterBuilder_ = null; } - + onChanged(); return this; } /** @@ -5500,7 +5223,7 @@ public Builder clearFilter() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder getFilterBuilder() { - + bitField0_ |= 0x00000001; onChanged(); return getFilterFieldBuilder().getBuilder(); } @@ -5521,13 +5244,13 @@ public Builder clearFilter() { /** *.DialectedFilter filter = 1;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder>( @@ -5537,16 +5260,6 @@ public Builder clearFilter() { return filterBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Not) } @@ -5567,7 +5280,18 @@ public Not parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Not(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -5605,70 +5329,28 @@ public interface CESQLOrBuilder /** * Protobuf type {@code CESQL} */ - public static final class CESQL extends com.google.protobuf.GeneratedMessageV3 + public static final class CESQL extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:CESQL) CESQLOrBuilder { private static final long serialVersionUID = 0L; - // Use CESQL.newBuilder() to construct. - private CESQL(com.google.protobuf.GeneratedMessageV3.Builder> builder) { - super(builder); - } - - private CESQL() { - expression_ = ""; - } - - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new CESQL(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private CESQL( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - expression_ = s; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } 
finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + CESQL.class.getName()); + } + // Use CESQL.newBuilder() to construct. + private CESQL(com.google.protobuf.GeneratedMessage.Builder> builder) { + super(builder); + } + + private CESQL() { + expression_ = ""; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @@ -5676,7 +5358,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_CESQL_fieldAccessorTable .ensureFieldAccessorsInitialized( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.class, @@ -5684,7 +5366,9 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int EXPRESSION_FIELD_NUMBER = 1; - private volatile java.lang.Object expression_; + + @SuppressWarnings("serial") + private volatile java.lang.Object expression_ = ""; /** *string expression = 1;
* @return The expression. @@ -5731,10 +5415,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getExpressionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, expression_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(expression_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, expression_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -5743,10 +5427,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getExpressionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, expression_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(expression_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, expression_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -5763,7 +5447,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) obj; if (!getExpression().equals(other.getExpression())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -5776,7 +5460,7 @@ public int hashCode() { hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getExpression().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -5816,36 +5500,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseFrom( java.io.InputStream 
input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -5867,14 +5550,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CESQL} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:CESQL) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuilder { @@ -5883,7 +5566,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_CESQL_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -5892,24 +5575,17 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; expression_ = ""; - return this; } @@ -5936,41 +5612,18 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL build( public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL(this); - result.expression_ = expression_; + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.expression_ = expression_; + } } @java.lang.Override @@ -5988,9 +5641,10 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon return this; if 
(!other.getExpression().isEmpty()) { expression_ = other.expression_; + bitField0_ |= 0x00000001; onChanged(); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -6005,21 +5659,40 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + expression_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private java.lang.Object expression_ = ""; /** * string expression = 1;
@@ -6060,8 +5733,8 @@ public Builder setExpression(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - expression_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -6070,8 +5743,8 @@ public Builder setExpression(java.lang.String value) { * @return This builder for chaining. */ public Builder clearExpression() { - expression_ = getDefaultInstance().getExpression(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -6085,22 +5758,12 @@ public Builder setExpressionBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - expression_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:CESQL) } @@ -6122,7 +5785,18 @@ public CESQL parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CESQL(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -6251,206 +5925,40 @@ public 
interface DialectedFilterOrBuilder */ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuilder getCesqlOrBuilder(); - public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.FilterCase getFilterCase(); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.FilterCase getFilterCase(); } /** * Protobuf type {@code DialectedFilter} */ - public static final class DialectedFilter extends com.google.protobuf.GeneratedMessageV3 + public static final class DialectedFilter extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:DialectedFilter) DialectedFilterOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + DialectedFilter.class.getName()); + } // Use DialectedFilter.newBuilder() to construct. 
- private DialectedFilter(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private DialectedFilter(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private DialectedFilter() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new DialectedFilter(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private DialectedFilter( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder subBuilder = - null; - if (filterCase_ == 1) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 1; - break; - } - case 18: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder subBuilder = - null; - if (filterCase_ == 2) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 2; - break; - } - case 26: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.Builder subBuilder = - null; - if (filterCase_ == 3) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 3; - break; - } - case 34: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.Builder subBuilder = null; - if (filterCase_ == 4) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 4; - break; - } - case 42: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.Builder subBuilder = null; - if (filterCase_ == 5) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any) filter_); - 
filter_ = subBuilder.buildPartial(); - } - filterCase_ = 5; - break; - } - case 50: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.Builder subBuilder = null; - if (filterCase_ == 6) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 6; - break; - } - case 58: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.Builder subBuilder = - null; - if (filterCase_ == 7) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) - filter_) - .toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) filter_); - filter_ = subBuilder.buildPartial(); - } - filterCase_ = 7; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_DialectedFilter_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_DialectedFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -6459,6 +5967,8 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } private int filterCase_ = 0; + + @SuppressWarnings("serial") private java.lang.Object filter_; public enum FilterCase @@ -6771,7 +6281,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (filterCase_ == 7) { output.writeMessage(7, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) filter_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -6808,7 +6318,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 7, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL) filter_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -6850,7 +6360,7 @@ public boolean equals(final java.lang.Object obj) { case 0: default: } - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -6893,7 +6403,7 @@ public int hashCode() { case 0: default: } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -6933,36 +6443,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Diale public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return 
com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); 
+ return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -6985,14 +6494,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DialectedFilter} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:DialectedFilter) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder { @@ -7002,7 +6511,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_DialectedFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -7012,22 +6521,37 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override 
public Builder clear() { super.clear(); + bitField0_ = 0; + if (exactBuilder_ != null) { + exactBuilder_.clear(); + } + if (prefixBuilder_ != null) { + prefixBuilder_.clear(); + } + if (suffixBuilder_ != null) { + suffixBuilder_.clear(); + } + if (allBuilder_ != null) { + allBuilder_.clear(); + } + if (anyBuilder_ != null) { + anyBuilder_.clear(); + } + if (notBuilder_ != null) { + notBuilder_.clear(); + } + if (cesqlBuilder_ != null) { + cesqlBuilder_.clear(); + } filterCase_ = 0; filter_ = null; return this; @@ -7059,90 +6583,44 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFil public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter(this); - if (filterCase_ == 1) { - if (exactBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = exactBuilder_.build(); - } - } - if (filterCase_ == 2) { - if (prefixBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = prefixBuilder_.build(); - } - } - if (filterCase_ == 3) { - if (suffixBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = suffixBuilder_.build(); - } + if (bitField0_ != 0) { + buildPartial0(result); } - if (filterCase_ == 4) { - if (allBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = allBuilder_.build(); - } - } - if (filterCase_ == 5) { - if (anyBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = anyBuilder_.build(); - } - } - if (filterCase_ == 6) { - if (notBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = notBuilder_.build(); - } - } - if (filterCase_ == 7) { - if (cesqlBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = cesqlBuilder_.build(); - } - } - result.filterCase_ = filterCase_; + 
buildPartialOneofs(result); onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter result) { + int from_bitField0_ = bitField0_; } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartialOneofs( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter result) { + result.filterCase_ = filterCase_; + result.filter_ = this.filter_; + if (filterCase_ == 1 && exactBuilder_ != null) { + result.filter_ = exactBuilder_.build(); + } + if (filterCase_ == 2 && prefixBuilder_ != null) { + result.filter_ = prefixBuilder_.build(); + } + if (filterCase_ == 3 && suffixBuilder_ != null) { + result.filter_ = suffixBuilder_.build(); + } + if (filterCase_ == 4 && allBuilder_ != null) { + result.filter_ = allBuilder_.build(); + } + if (filterCase_ == 5 && anyBuilder_ != null) { + result.filter_ = anyBuilder_.build(); + } + if (filterCase_ == 6 && notBuilder_ != null) { + result.filter_ = notBuilder_.build(); + } + if (filterCase_ == 7 && cesqlBuilder_ != null) { + result.filter_ = 
cesqlBuilder_.build(); + } } @java.lang.Override @@ -7194,7 +6672,7 @@ public Builder mergeFrom( break; } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -7209,18 +6687,65 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage(getExactFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 1; + break; + } // case 10 + case 18: { + input.readMessage(getPrefixFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 2; + break; + } // case 18 + case 26: { + input.readMessage(getSuffixFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 3; + break; + } // case 26 + case 34: { + input.readMessage(getAllFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 4; + break; + } // case 34 + case 42: { + input.readMessage(getAnyFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 5; + break; + } // case 42 + case 50: { + input.readMessage(getNotFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 6; + break; + } // case 50 + case 58: { + input.readMessage(getCesqlFieldBuilder().getBuilder(), extensionRegistry); + filterCase_ = 7; + break; + } // case 58 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = 
(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -7238,7 +6763,9 @@ public Builder clearFilter() { return this; } - private com.google.protobuf.SingleFieldBuilderV3< + private int bitField0_; + + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder> @@ -7319,8 +6846,9 @@ public Builder mergeExact(dev.knative.eventing.kafka.broker.contract.DataPlaneCo } else { if (filterCase_ == 1) { exactBuilder_.mergeFrom(value); + } else { + exactBuilder_.setMessage(value); } - exactBuilder_.setMessage(value); } filterCase_ = 1; return this; @@ -7367,7 +6895,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuild /** * .Exact exact = 1;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder> @@ -7377,7 +6905,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuild filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.getDefaultInstance(); } - exactBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + exactBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder>( @@ -7388,11 +6916,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuild } filterCase_ = 1; onChanged(); - ; return exactBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder> @@ -7473,8 +7000,9 @@ public Builder mergePrefix(dev.knative.eventing.kafka.broker.contract.DataPlaneC } else { if (filterCase_ == 2) { prefixBuilder_.mergeFrom(value); + } else { + prefixBuilder_.setMessage(value); } - prefixBuilder_.setMessage(value); } filterCase_ = 2; return this; @@ -7521,7 +7049,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil /** *.Prefix prefix = 2;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder> @@ -7532,7 +7060,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix .getDefaultInstance(); } - prefixBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + prefixBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder>( @@ -7543,11 +7071,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil } filterCase_ = 2; onChanged(); - ; return prefixBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuilder> @@ -7628,8 +7155,9 @@ public Builder mergeSuffix(dev.knative.eventing.kafka.broker.contract.DataPlaneC } else { if (filterCase_ == 3) { suffixBuilder_.mergeFrom(value); + } else { + suffixBuilder_.setMessage(value); } - suffixBuilder_.setMessage(value); } filterCase_ = 3; return this; @@ -7676,7 +7204,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuil /** *.Suffix suffix = 3;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuilder> @@ -7687,7 +7215,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuil dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix .getDefaultInstance(); } - suffixBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + suffixBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Suffix.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuilder>( @@ -7698,11 +7226,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SuffixOrBuil } filterCase_ = 3; onChanged(); - ; return suffixBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder> @@ -7783,8 +7310,9 @@ public Builder mergeAll(dev.knative.eventing.kafka.broker.contract.DataPlaneCont } else { if (filterCase_ == 4) { allBuilder_.mergeFrom(value); + } else { + allBuilder_.setMessage(value); } - allBuilder_.setMessage(value); } filterCase_ = 4; return this; @@ -7831,7 +7359,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder /** *.All all = 4;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder> @@ -7840,7 +7368,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder if (!(filterCase_ == 4)) { filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.getDefaultInstance(); } - allBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + allBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.All.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder>( @@ -7851,11 +7379,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AllOrBuilder } filterCase_ = 4; onChanged(); - ; return allBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder> @@ -7936,8 +7463,9 @@ public Builder mergeAny(dev.knative.eventing.kafka.broker.contract.DataPlaneCont } else { if (filterCase_ == 5) { anyBuilder_.mergeFrom(value); + } else { + anyBuilder_.setMessage(value); } - anyBuilder_.setMessage(value); } filterCase_ = 5; return this; @@ -7984,7 +7512,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder /** *.Any any = 5;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder> @@ -7993,7 +7521,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder if (!(filterCase_ == 5)) { filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.getDefaultInstance(); } - anyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + anyBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Any.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder>( @@ -8004,11 +7532,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.AnyOrBuilder } filterCase_ = 5; onChanged(); - ; return anyBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder> @@ -8089,8 +7616,9 @@ public Builder mergeNot(dev.knative.eventing.kafka.broker.contract.DataPlaneCont } else { if (filterCase_ == 6) { notBuilder_.mergeFrom(value); + } else { + notBuilder_.setMessage(value); } - notBuilder_.setMessage(value); } filterCase_ = 6; return this; @@ -8137,7 +7665,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder /** *.Not not = 6;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder> @@ -8146,7 +7674,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder if (!(filterCase_ == 6)) { filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.getDefaultInstance(); } - notBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + notBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Not.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder>( @@ -8157,11 +7685,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.NotOrBuilder } filterCase_ = 6; onChanged(); - ; return notBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuilder> @@ -8243,8 +7770,9 @@ public Builder mergeCesql(dev.knative.eventing.kafka.broker.contract.DataPlaneCo } else { if (filterCase_ == 7) { cesqlBuilder_.mergeFrom(value); + } else { + cesqlBuilder_.setMessage(value); } - cesqlBuilder_.setMessage(value); } filterCase_ = 7; return this; @@ -8291,7 +7819,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuild /** *.CESQL cesql = 7;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuilder> @@ -8301,7 +7829,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuild filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.getDefaultInstance(); } - cesqlBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + cesqlBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQL.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuilder>( @@ -8312,20 +7840,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CESQLOrBuild } filterCase_ = 7; onChanged(); - ; return cesqlBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:DialectedFilter) } @@ -8349,7 +7866,18 @@ public DialectedFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new DialectedFilter(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch 
(com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -8380,6 +7908,7 @@ public interface FilterOrBuilder * Each key in the map is compared with the equivalent key in the event * context. An event passes the filter if all values are equal to the * specified values. + * * Nested context attributes are not supported as keys. Only string values are supported. *
map<string, string> attributes = 1;
*/
- java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue);
+ /* nullable */
+ java.lang.String getAttributesOrDefault(
+ java.lang.String key,
+ /* nullable */
+ java.lang.String defaultValue);
/**
* * attributes filters events by exact match on event context attributes. * Each key in the map is compared with the equivalent key in the event * context. An event passes the filter if all values are equal to the * specified values. + * * Nested context attributes are not supported as keys. Only string values are supported. ** @@ -8443,85 +7980,35 @@ public interface FilterOrBuilder /** * Protobuf type {@code Filter} */ - public static final class Filter extends com.google.protobuf.GeneratedMessageV3 + public static final class Filter extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Filter) FilterOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Filter.class.getName()); + } // Use Filter.newBuilder() to construct. 
- private Filter(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Filter(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Filter() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Filter(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Filter( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - attributes_ = com.google.protobuf.MapField.newMapField( - AttributesDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000001; - } - com.google.protobuf.MapEntry
map<string, string> attributes = 1;
*/
@java.lang.Override
- public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) {
+ public /* nullable */ java.lang.String getAttributesOrDefault(
+ java.lang.String key,
+ /* nullable */
+ java.lang.String defaultValue) {
if (key == null) {
- throw new java.lang.NullPointerException();
+ throw new NullPointerException("map key");
}
java.util.Mapmap<string, string> attributes = 1;
*/
@java.lang.Override
- public java.lang.String getAttributesOrDefault(java.lang.String key, java.lang.String defaultValue) {
+ public /* nullable */ java.lang.String getAttributesOrDefault(
+ java.lang.String key,
+ /* nullable */
+ java.lang.String defaultValue) {
if (key == null) {
- throw new java.lang.NullPointerException();
+ throw new NullPointerException("map key");
}
java.util.Map.Exact exact = 1;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder>
@@ -9976,7 +9405,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuild
matcher_ =
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.getDefaultInstance();
}
- exactBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ exactBuilder_ = new com.google.protobuf.SingleFieldBuilder<
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Exact.Builder,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuilder>(
@@ -9987,11 +9416,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ExactOrBuild
}
matcherCase_ = 1;
onChanged();
- ;
return exactBuilder_;
}
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder>
@@ -10072,8 +9500,9 @@ public Builder mergePrefix(dev.knative.eventing.kafka.broker.contract.DataPlaneC
} else {
if (matcherCase_ == 2) {
prefixBuilder_.mergeFrom(value);
+ } else {
+ prefixBuilder_.setMessage(value);
}
- prefixBuilder_.setMessage(value);
}
matcherCase_ = 2;
return this;
@@ -10120,7 +9549,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil
/**
* .Prefix prefix = 2;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder>
@@ -10131,7 +9560,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix
.getDefaultInstance();
}
- prefixBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ prefixBuilder_ = new com.google.protobuf.SingleFieldBuilder<
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Prefix.Builder,
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuilder>(
@@ -10142,20 +9571,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.PrefixOrBuil
}
matcherCase_ = 2;
onChanged();
- ;
return prefixBuilder_;
}
- @java.lang.Override
- public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:TokenMatcher)
}
@@ -10177,7 +9595,18 @@ public TokenMatcher parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return new TokenMatcher(input, extensionRegistry);
+ Builder builder = newBuilder();
+ try {
+ builder.mergeFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(builder.buildPartial());
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e)
+ .setUnfinishedMessage(builder.buildPartial());
+ }
+ return builder.buildPartial();
}
};
@@ -10291,13 +9720,23 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBu
/**
* Protobuf type {@code EventPolicy}
*/
- public static final class EventPolicy extends com.google.protobuf.GeneratedMessageV3
+ public static final class EventPolicy extends com.google.protobuf.GeneratedMessage
implements
// @@protoc_insertion_point(message_implements:EventPolicy)
EventPolicyOrBuilder {
private static final long serialVersionUID = 0L;
+
+ static {
+ com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
+ com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC,
+ /* major= */ 4,
+ /* minor= */ 29,
+ /* patch= */ 3,
+ /* suffix= */ "",
+ EventPolicy.class.getName());
+ }
// Use EventPolicy.newBuilder() to construct.
- private EventPolicy(com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ private EventPolicy(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
}
@@ -10306,88 +9745,12 @@ private EventPolicy() {
filters_ = java.util.Collections.emptyList();
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
- return new EventPolicy();
- }
-
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
- return this.unknownFields;
- }
-
- private EventPolicy(
- com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- case 10: {
- if (!((mutable_bitField0_ & 0x00000001) != 0)) {
- tokenMatchers_ = new java.util.ArrayList<
- dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher>();
- mutable_bitField0_ |= 0x00000001;
- }
- tokenMatchers_.add(input.readMessage(
- dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher.parser(),
- extensionRegistry));
- break;
- }
- case 18: {
- if (!((mutable_bitField0_ & 0x00000002) != 0)) {
- filters_ = new java.util.ArrayList<
- dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter>();
- mutable_bitField0_ |= 0x00000002;
- }
- filters_.add(input.readMessage(
- dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter
- .parser(),
- extensionRegistry));
- break;
- }
- default: {
- if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
- } finally {
- if (((mutable_bitField0_ & 0x00000001) != 0)) {
- tokenMatchers_ = java.util.Collections.unmodifiableList(tokenMatchers_);
- }
- if (((mutable_bitField0_ & 0x00000002) != 0)) {
- filters_ = java.util.Collections.unmodifiableList(filters_);
- }
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
-
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_EventPolicy_descriptor;
}
@java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
return dev.knative.eventing.kafka.broker.contract.DataPlaneContract
.internal_static_EventPolicy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -10396,6 +9759,8 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF
}
public static final int TOKENMATCHERS_FIELD_NUMBER = 1;
+
+ @SuppressWarnings("serial")
private java.util.List@@ -10540,7 +9907,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < filters_.size(); i++) { output.writeMessage(2, filters_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -10555,7 +9922,7 @@ public int getSerializedSize() { for (int i = 0; i < filters_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, filters_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -10573,7 +9940,7 @@ public boolean equals(final java.lang.Object obj) { if (!getTokenMatchersList().equals(other.getTokenMatchersList())) return false; if (!getFiltersList().equals(other.getFiltersList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -10592,7 +9959,7 @@ public int hashCode() { hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFiltersList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -10632,36 +9999,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Event public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -10684,14 +10050,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected 
Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EventPolicy} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:EventPolicy) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilder { @@ -10701,7 +10067,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EventPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -10710,37 +10076,30 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getTokenMatchersFieldBuilder(); - getFiltersFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; if (tokenMatchersBuilder_ == null) { tokenMatchers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + tokenMatchers_ = null; tokenMatchersBuilder_.clear(); } + 
bitField0_ = (bitField0_ & ~0x00000001); if (filtersBuilder_ == null) { filters_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); } else { + filters_ = null; filtersBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -10769,7 +10128,16 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy(this); - int from_bitField0_ = bitField0_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy result) { if (tokenMatchersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tokenMatchers_ = java.util.Collections.unmodifiableList(tokenMatchers_); @@ -10788,40 +10156,11 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy } else { result.filters_ = filtersBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return 
super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy result) { + int from_bitField0_ = bitField0_; } @java.lang.Override @@ -10856,7 +10195,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon tokenMatchersBuilder_ = null; tokenMatchers_ = other.tokenMatchers_; bitField0_ = (bitField0_ & ~0x00000001); - tokenMatchersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + tokenMatchersBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getTokenMatchersFieldBuilder() : null; } else { @@ -10882,7 +10221,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon filtersBuilder_ = null; filters_ = other.filters_; bitField0_ = (bitField0_ & ~0x00000002); - filtersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + filtersBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getFiltersFieldBuilder() : null; } else { @@ -10890,7 +10229,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -10905,18 +10244,58 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .TokenMatcher.parser(), + extensionRegistry); + if (tokenMatchersBuilder_ == null) { + ensureTokenMatchersIsMutable(); + tokenMatchers_.add(m); + } else { + tokenMatchersBuilder_.addMessage(m); + } + break; + } // case 10 + case 18: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .DialectedFilter.parser(), + extensionRegistry); + if (filtersBuilder_ == null) { + ensureFiltersIsMutable(); + filters_.add(m); + } else { + filtersBuilder_.addMessage(m); + } + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy) - 
e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -10933,7 +10312,7 @@ private void ensureTokenMatchersIsMutable() { } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcherOrBuilder> @@ -11240,13 +10619,13 @@ public Builder removeTokenMatchers(int index) { return getTokenMatchersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcherOrBuilder> getTokenMatchersFieldBuilder() { if (tokenMatchersBuilder_ == null) { - tokenMatchersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + tokenMatchersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcher.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.TokenMatcherOrBuilder>( @@ -11267,7 +10646,7 @@ private void ensureFiltersIsMutable() { } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> @@ -11579,13 +10958,13 @@ public 
Builder removeFilters(int index) { return getFiltersFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> getFiltersFieldBuilder() { if (filtersBuilder_ == null) { - filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder>( @@ -11595,16 +10974,6 @@ public Builder removeFilters(int index) { return filtersBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:EventPolicy) } @@ -11626,7 +10995,18 @@ public EventPolicy parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new EventPolicy(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -11730,6 +11110,7 @@ public interface EgressConfigOrBuilder * * retry is the minimum number of retries the sender should attempt when * sending an event before moving it to the dead letter sink. + * * Setting retry to 0 means don't retry. ** @@ -11780,13 +11161,23 @@ public interface EgressConfigOrBuilder /** * Protobuf type {@code EgressConfig} */ - public static final class EgressConfig extends com.google.protobuf.GeneratedMessageV3 + public static final class EgressConfig extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:EgressConfig) EgressConfigOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + EgressConfig.class.getName()); + } // Use EgressConfig.newBuilder() to construct. 
- private EgressConfig(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private EgressConfig(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -11798,100 +11189,12 @@ private EgressConfig() { backoffPolicy_ = 0; } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new EgressConfig(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private EgressConfig( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - deadLetter_ = s; - break; - } - case 16: { - retry_ = input.readUInt32(); - break; - } - case 24: { - int rawValue = input.readEnum(); - - backoffPolicy_ = rawValue; - break; - } - case 32: { - backoffDelay_ = input.readUInt64(); - break; - } - case 40: { - timeout_ = input.readUInt64(); - break; - } - case 50: { - java.lang.String s = input.readStringRequireUtf8(); - - deadLetterCACerts_ = s; - break; - } - case 58: { - java.lang.String s = input.readStringRequireUtf8(); - - deadLetterAudience_ = s; - break; - } - case 66: { - java.lang.String s = input.readStringRequireUtf8(); - - format_ = s; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - 
} catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_EgressConfig_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EgressConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -11900,7 +11203,9 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int DEADLETTER_FIELD_NUMBER = 1; - private volatile java.lang.Object deadLetter_; + + @SuppressWarnings("serial") + private volatile java.lang.Object deadLetter_ = ""; /** ** Dead letter is where the event is sent when something goes wrong @@ -11942,7 +11247,9 @@ public com.google.protobuf.ByteString getDeadLetterBytes() { } public static final int DEADLETTERCACERTS_FIELD_NUMBER = 6; - private volatile java.lang.Object deadLetterCACerts_; + + @SuppressWarnings("serial") + private volatile java.lang.Object deadLetterCACerts_ = ""; /** ** Dead Letter CA Cert is the CA Cert used for HTTPS communication through dead letter @@ -11984,7 +11291,9 @@ public com.google.protobuf.ByteString getDeadLetterCACertsBytes() { } public static final int DEADLETTERAUDIENCE_FIELD_NUMBER = 7; - private volatile java.lang.Object deadLetterAudience_; + + @SuppressWarnings("serial") + private volatile java.lang.Object deadLetterAudience_ = ""; /** ** Dead Letter Audience is the OIDC audience of the dead letter @@ -12026,7 +11335,9 @@ public com.google.protobuf.ByteString 
getDeadLetterAudienceBytes() { } public static final int FORMAT_FIELD_NUMBER = 8; - private volatile java.lang.Object format_; + + @SuppressWarnings("serial") + private volatile java.lang.Object format_ = ""; /** ** format is the format used to deliver the event. Can be one of "json" or "binary" @@ -12068,11 +11379,12 @@ public com.google.protobuf.ByteString getFormatBytes() { } public static final int RETRY_FIELD_NUMBER = 2; - private int retry_; + private int retry_ = 0; /** ** retry is the minimum number of retries the sender should attempt when * sending an event before moving it to the dead letter sink. + * * Setting retry to 0 means don't retry. ** @@ -12085,7 +11397,7 @@ public int getRetry() { } public static final int BACKOFFPOLICY_FIELD_NUMBER = 3; - private int backoffPolicy_; + private int backoffPolicy_ = 0; /** ** backoffPolicy is the retry backoff policy (linear, exponential). @@ -12108,16 +11420,16 @@ public int getBackoffPolicyValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy getBackoffPolicy() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.valueOf(backoffPolicy_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.forNumber( + backoffPolicy_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.UNRECOGNIZED : result; } public static final int BACKOFFDELAY_FIELD_NUMBER = 4; - private long backoffDelay_; + private long backoffDelay_ = 0L; /** ** backoffDelay is the delay before retrying in milliseconds. 
@@ -12132,7 +11444,7 @@ public long getBackoffDelay() { } public static final int TIMEOUT_FIELD_NUMBER = 5; - private long timeout_; + private long timeout_ = 0L; /** ** timeout is the single request timeout (not the overall retry timeout) @@ -12160,8 +11472,8 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getDeadLetterBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, deadLetter_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetter_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, deadLetter_); } if (retry_ != 0) { output.writeUInt32(2, retry_); @@ -12177,16 +11489,16 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (timeout_ != 0L) { output.writeUInt64(5, timeout_); } - if (!getDeadLetterCACertsBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, deadLetterCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetterCACerts_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, deadLetterCACerts_); } - if (!getDeadLetterAudienceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 7, deadLetterAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetterAudience_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 7, deadLetterAudience_); } - if (!getFormatBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 8, format_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(format_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 8, format_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -12195,8 +11507,8 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getDeadLetterBytes().isEmpty()) { - size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, deadLetter_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetter_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, deadLetter_); } if (retry_ != 0) { size += com.google.protobuf.CodedOutputStream.computeUInt32Size(2, retry_); @@ -12212,16 +11524,16 @@ public int getSerializedSize() { if (timeout_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeUInt64Size(5, timeout_); } - if (!getDeadLetterCACertsBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, deadLetterCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetterCACerts_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, deadLetterCACerts_); } - if (!getDeadLetterAudienceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, deadLetterAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(deadLetterAudience_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(7, deadLetterAudience_); } - if (!getFormatBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, format_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(format_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(8, format_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -12245,7 +11557,7 @@ public boolean equals(final java.lang.Object obj) { if (backoffPolicy_ != other.backoffPolicy_) return false; if (getBackoffDelay() != other.getBackoffDelay()) return false; if (getTimeout() != other.getTimeout()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -12272,7 +11584,7 @@ public int hashCode() { hash = (53 * hash) + 
com.google.protobuf.Internal.hashLong(getBackoffDelay()); hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getTimeout()); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -12312,36 +11624,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egres public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -12364,14 +11675,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EgressConfig} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:EgressConfig) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder { @@ -12381,7 +11692,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EgressConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -12391,38 
+11702,24 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; deadLetter_ = ""; - deadLetterCACerts_ = ""; - deadLetterAudience_ = ""; - format_ = ""; - retry_ = 0; - backoffPolicy_ = 0; - backoffDelay_ = 0L; - timeout_ = 0L; - return this; } @@ -12451,48 +11748,40 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig(this); - result.deadLetter_ = deadLetter_; - result.deadLetterCACerts_ = deadLetterCACerts_; - result.deadLetterAudience_ = deadLetterAudience_; - result.format_ = format_; - result.retry_ = retry_; - result.backoffPolicy_ = backoffPolicy_; - result.backoffDelay_ = backoffDelay_; - result.timeout_ = timeout_; + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor 
field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.deadLetter_ = deadLetter_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.deadLetterCACerts_ = deadLetterCACerts_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.deadLetterAudience_ = deadLetterAudience_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.format_ = format_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.retry_ = retry_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.backoffPolicy_ = backoffPolicy_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.backoffDelay_ = backoffDelay_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.timeout_ = timeout_; + } } @java.lang.Override @@ -12511,18 +11800,22 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon .getDefaultInstance()) return this; if (!other.getDeadLetter().isEmpty()) { deadLetter_ = other.deadLetter_; + bitField0_ |= 0x00000001; onChanged(); } if (!other.getDeadLetterCACerts().isEmpty()) { deadLetterCACerts_ = other.deadLetterCACerts_; + bitField0_ |= 0x00000002; onChanged(); } if (!other.getDeadLetterAudience().isEmpty()) { deadLetterAudience_ = other.deadLetterAudience_; + bitField0_ |= 
0x00000004; onChanged(); } if (!other.getFormat().isEmpty()) { format_ = other.format_; + bitField0_ |= 0x00000008; onChanged(); } if (other.getRetry() != 0) { @@ -12537,7 +11830,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other.getTimeout() != 0L) { setTimeout(other.getTimeout()); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -12552,21 +11845,75 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + deadLetter_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + retry_ = input.readUInt32(); + bitField0_ |= 0x00000010; + break; + } // case 16 + case 24: { + backoffPolicy_ = input.readEnum(); + bitField0_ |= 0x00000020; + break; + } // case 24 + case 32: { + backoffDelay_ = input.readUInt64(); + bitField0_ |= 0x00000040; + break; + } // case 32 + case 40: { + timeout_ = input.readUInt64(); + bitField0_ |= 0x00000080; + break; + } // case 40 + case 50: { + deadLetterCACerts_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 50 + case 58: { + deadLetterAudience_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 58 + case 66: { + format_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 66 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag 
+ } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private java.lang.Object deadLetter_ = ""; /** * @@ -12619,8 +11966,8 @@ public Builder setDeadLetter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - deadLetter_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -12633,8 +11980,8 @@ public Builder setDeadLetter(java.lang.String value) { * @return This builder for chaining. */ public Builder clearDeadLetter() { - deadLetter_ = getDefaultInstance().getDeadLetter(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -12652,8 +11999,8 @@ public Builder setDeadLetterBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - deadLetter_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -12710,8 +12057,8 @@ public Builder setDeadLetterCACerts(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - deadLetterCACerts_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -12724,8 +12071,8 @@ public Builder setDeadLetterCACerts(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearDeadLetterCACerts() { - deadLetterCACerts_ = getDefaultInstance().getDeadLetterCACerts(); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } @@ -12743,8 +12090,8 @@ public Builder setDeadLetterCACertsBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - deadLetterCACerts_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -12801,8 +12148,8 @@ public Builder setDeadLetterAudience(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - deadLetterAudience_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -12815,8 +12162,8 @@ public Builder setDeadLetterAudience(java.lang.String value) { * @return This builder for chaining. */ public Builder clearDeadLetterAudience() { - deadLetterAudience_ = getDefaultInstance().getDeadLetterAudience(); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @@ -12834,8 +12181,8 @@ public Builder setDeadLetterAudienceBytes(com.google.protobuf.ByteString value) throw new NullPointerException(); } checkByteStringIsUtf8(value); - deadLetterAudience_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -12892,8 +12239,8 @@ public Builder setFormat(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - format_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -12906,8 +12253,8 @@ public Builder setFormat(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearFormat() { - format_ = getDefaultInstance().getFormat(); + bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } @@ -12925,8 +12272,8 @@ public Builder setFormatBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - format_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -12936,6 +12283,7 @@ public Builder setFormatBytes(com.google.protobuf.ByteString value) { ** retry is the minimum number of retries the sender should attempt when * sending an event before moving it to the dead letter sink. + * * Setting retry to 0 means don't retry. ** @@ -12950,6 +12298,7 @@ public int getRetry() { ** retry is the minimum number of retries the sender should attempt when * sending an event before moving it to the dead letter sink. + * * Setting retry to 0 means don't retry. ** @@ -12960,6 +12309,7 @@ public int getRetry() { public Builder setRetry(int value) { retry_ = value; + bitField0_ |= 0x00000010; onChanged(); return this; } @@ -12967,6 +12317,7 @@ public Builder setRetry(int value) { ** retry is the minimum number of retries the sender should attempt when * sending an event before moving it to the dead letter sink. + * * Setting retry to 0 means don't retry. ** @@ -12974,7 +12325,7 @@ public Builder setRetry(int value) { * @return This builder for chaining. */ public Builder clearRetry() { - + bitField0_ = (bitField0_ & ~0x00000010); retry_ = 0; onChanged(); return this; @@ -13003,8 +12354,8 @@ public int getBackoffPolicyValue() { * @return This builder for chaining. 
*/ public Builder setBackoffPolicyValue(int value) { - backoffPolicy_ = value; + bitField0_ |= 0x00000020; onChanged(); return this; } @@ -13018,9 +12369,8 @@ public Builder setBackoffPolicyValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy getBackoffPolicy() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.valueOf( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.forNumber( backoffPolicy_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy.UNRECOGNIZED @@ -13040,7 +12390,7 @@ public Builder setBackoffPolicy( if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00000020; backoffPolicy_ = value.getNumber(); onChanged(); return this; @@ -13054,7 +12404,7 @@ public Builder setBackoffPolicy( * @return This builder for chaining. */ public Builder clearBackoffPolicy() { - + bitField0_ = (bitField0_ & ~0x00000020); backoffPolicy_ = 0; onChanged(); return this; @@ -13085,6 +12435,7 @@ public long getBackoffDelay() { public Builder setBackoffDelay(long value) { backoffDelay_ = value; + bitField0_ |= 0x00000040; onChanged(); return this; } @@ -13097,7 +12448,7 @@ public Builder setBackoffDelay(long value) { * @return This builder for chaining. */ public Builder clearBackoffDelay() { - + bitField0_ = (bitField0_ & ~0x00000040); backoffDelay_ = 0L; onChanged(); return this; @@ -13128,6 +12479,7 @@ public long getTimeout() { public Builder setTimeout(long value) { timeout_ = value; + bitField0_ |= 0x00000080; onChanged(); return this; } @@ -13140,22 +12492,12 @@ public Builder setTimeout(long value) { * @return This builder for chaining. 
*/ public Builder clearTimeout() { - + bitField0_ = (bitField0_ & ~0x00000080); timeout_ = 0L; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:EgressConfig) } @@ -13177,7 +12519,18 @@ public EgressConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new EgressConfig(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -13518,6 +12871,7 @@ public interface EgressOrBuilder /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -13530,6 +12884,7 @@ public interface EgressOrBuilder /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -13542,6 +12897,7 @@ public interface EgressOrBuilder /** ** Resource reference. 
+ * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -13652,19 +13008,28 @@ public interface EgressOrBuilder */ com.google.protobuf.ByteString getOidcServiceAccountNameBytes(); - public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.ReplyStrategyCase - getReplyStrategyCase(); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.ReplyStrategyCase getReplyStrategyCase(); } /** * Protobuf type {@code Egress} */ - public static final class Egress extends com.google.protobuf.GeneratedMessageV3 + public static final class Egress extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Egress) EgressOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Egress.class.getName()); + } // Use Egress.newBuilder() to construct. 
- private Egress(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Egress(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -13682,249 +13047,12 @@ private Egress() { oidcServiceAccountName_ = ""; } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Egress(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Egress( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - consumerGroup_ = s; - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - destination_ = s; - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - replyStrategyCase_ = 3; - replyStrategy_ = s; - break; - } - case 34: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder subBuilder = - null; - if (replyStrategyCase_ == 4) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - replyStrategy_) - .toBuilder(); - } - replyStrategy_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - replyStrategy_); - replyStrategy_ = 
subBuilder.buildPartial(); - } - replyStrategyCase_ = 4; - break; - } - case 42: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder subBuilder = - null; - if (filter_ != null) { - subBuilder = filter_.toBuilder(); - } - filter_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(filter_); - filter_ = subBuilder.buildPartial(); - } - - break; - } - case 50: { - java.lang.String s = input.readStringRequireUtf8(); - - uid_ = s; - break; - } - case 58: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder - subBuilder = null; - if (egressConfig_ != null) { - subBuilder = egressConfig_.toBuilder(); - } - egressConfig_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(egressConfig_); - egressConfig_ = subBuilder.buildPartial(); - } - - break; - } - case 64: { - int rawValue = input.readEnum(); - - deliveryOrder_ = rawValue; - break; - } - case 74: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder subBuilder = - null; - if (replyStrategyCase_ == 9) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - replyStrategy_) - .toBuilder(); - } - replyStrategy_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - replyStrategy_); - replyStrategy_ = subBuilder.buildPartial(); - } - replyStrategyCase_ = 9; - break; - } - case 80: { - int rawValue = input.readEnum(); - - keyType_ = rawValue; - break; - } - case 90: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder subBuilder = - null; - if 
(reference_ != null) { - subBuilder = reference_.toBuilder(); - } - reference_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(reference_); - reference_ = subBuilder.buildPartial(); - } - - break; - } - case 98: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - dialectedFilter_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter>(); - mutable_bitField0_ |= 0x00000001; - } - dialectedFilter_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter - .parser(), - extensionRegistry)); - break; - } - case 104: { - vReplicas_ = input.readInt32(); - break; - } - case 114: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.Builder - subBuilder = null; - if (featureFlags_ != null) { - subBuilder = featureFlags_.toBuilder(); - } - featureFlags_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags - .parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(featureFlags_); - featureFlags_ = subBuilder.buildPartial(); - } - - break; - } - case 122: { - java.lang.String s = input.readStringRequireUtf8(); - - destinationCACerts_ = s; - break; - } - case 130: { - java.lang.String s = input.readStringRequireUtf8(); - - replyUrlCACerts_ = s; - break; - } - case 138: { - java.lang.String s = input.readStringRequireUtf8(); - - destinationAudience_ = s; - break; - } - case 146: { - java.lang.String s = input.readStringRequireUtf8(); - - replyUrlAudience_ = s; - break; - } - case 154: { - java.lang.String s = input.readStringRequireUtf8(); - - oidcServiceAccountName_ = s; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - dialectedFilter_ = java.util.Collections.unmodifiableList(dialectedFilter_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Egress_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Egress_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -13932,7 +13060,10 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Builder.class); } + private int bitField0_; private int replyStrategyCase_ = 0; + + @SuppressWarnings("serial") private java.lang.Object replyStrategy_; public enum ReplyStrategyCase @@ -13982,7 +13113,9 @@ public ReplyStrategyCase getReplyStrategyCase() { } public static final int CONSUMERGROUP_FIELD_NUMBER = 1; - private volatile java.lang.Object consumerGroup_; + + @SuppressWarnings("serial") + private volatile java.lang.Object consumerGroup_ = ""; /** ** consumer group name @@ -14024,7 +13157,9 @@ public com.google.protobuf.ByteString getConsumerGroupBytes() { } public static final int DESTINATION_FIELD_NUMBER = 2; - private volatile java.lang.Object destination_; + + @SuppressWarnings("serial") + private volatile java.lang.Object destination_ = ""; /** ** destination is the sink where events are 
sent. @@ -14066,7 +13201,9 @@ public com.google.protobuf.ByteString getDestinationBytes() { } public static final int DESTINATIONCACERTS_FIELD_NUMBER = 15; - private volatile java.lang.Object destinationCACerts_; + + @SuppressWarnings("serial") + private volatile java.lang.Object destinationCACerts_ = ""; /** ** destination CA Cert is the CA Cert used for HTTPS communication through destination @@ -14108,7 +13245,9 @@ public com.google.protobuf.ByteString getDestinationCACertsBytes() { } public static final int DESTINATIONAUDIENCE_FIELD_NUMBER = 17; - private volatile java.lang.Object destinationAudience_; + + @SuppressWarnings("serial") + private volatile java.lang.Object destinationAudience_ = ""; /** ** OIDC audience of the destination @@ -14297,7 +13436,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuild } public static final int REPLYURLCACERTS_FIELD_NUMBER = 16; - private volatile java.lang.Object replyUrlCACerts_; + + @SuppressWarnings("serial") + private volatile java.lang.Object replyUrlCACerts_ = ""; /** ** replyUrl CA Cert is the CA Cert used for HTTPS communication through replyUrl @@ -14339,7 +13480,9 @@ public com.google.protobuf.ByteString getReplyUrlCACertsBytes() { } public static final int REPLYURLAUDIENCE_FIELD_NUMBER = 18; - private volatile java.lang.Object replyUrlAudience_; + + @SuppressWarnings("serial") + private volatile java.lang.Object replyUrlAudience_ = ""; /** ** OIDC audience of the replyUrl @@ -14392,7 +13535,7 @@ public com.google.protobuf.ByteString getReplyUrlAudienceBytes() { */ @java.lang.Override public boolean hasFilter() { - return filter_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** *@@ -14417,11 +13560,15 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter getFi */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FilterOrBuilder getFilterOrBuilder() { - return getFilter(); + return filter_ == null + ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.getDefaultInstance() + : filter_; } public static final int UID_FIELD_NUMBER = 6; - private volatile java.lang.Object uid_; + + @SuppressWarnings("serial") + private volatile java.lang.Object uid_ = ""; /** ** Id of the egress @@ -14477,7 +13624,7 @@ public com.google.protobuf.ByteString getUidBytes() { */ @java.lang.Override public boolean hasEgressConfig() { - return egressConfig_ != null; + return ((bitField0_ & 0x00000002) != 0); } /** *@@ -14505,11 +13652,13 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder getEgressConfigOrBuilder() { - return getEgressConfig(); + return egressConfig_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.getDefaultInstance() + : egressConfig_; } public static final int DELIVERYORDER_FIELD_NUMBER = 8; - private int deliveryOrder_; + private int deliveryOrder_ = 0; /** ** Delivery guarantee to use @@ -14534,16 +13683,16 @@ public int getDeliveryOrderValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder getDeliveryOrder() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.valueOf(deliveryOrder_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.forNumber( + deliveryOrder_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.UNRECOGNIZED : result; } public static final int KEYTYPE_FIELD_NUMBER = 10; - private int keyType_; + private int keyType_ = 0; /** ** Kafka record key type. 
@@ -14566,9 +13715,8 @@ public int getKeyTypeValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType getKeyType() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.valueOf(keyType_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.forNumber(keyType_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.UNRECOGNIZED : result; @@ -14579,6 +13727,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType getK /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -14589,11 +13738,12 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType getK */ @java.lang.Override public boolean hasReference() { - return reference_ != null; + return ((bitField0_ & 0x00000004) != 0); } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -14611,6 +13761,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -14620,10 +13771,14 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getReferenceOrBuilder() { - return getReference(); + return reference_ == null + ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance() + : reference_; } public static final int DIALECTEDFILTER_FIELD_NUMBER = 12; + + @SuppressWarnings("serial") private java.util.ListdialectedFilter_; /** @@ -14688,7 +13843,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFil } public static final int VREPLICAS_FIELD_NUMBER = 13; - private int vReplicas_; + private int vReplicas_ = 0; /** * * Number of virtual replicas. @@ -14714,7 +13869,7 @@ public int getVReplicas() { */ @java.lang.Override public boolean hasFeatureFlags() { - return featureFlags_ != null; + return ((bitField0_ & 0x00000008) != 0); } /** *@@ -14741,11 +13896,16 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatur @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder getFeatureFlagsOrBuilder() { - return getFeatureFlags(); + return featureFlags_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags + .getDefaultInstance() + : featureFlags_; } public static final int OIDCSERVICEACCOUNTNAME_FIELD_NUMBER = 19; - private volatile java.lang.Object oidcServiceAccountName_; + + @SuppressWarnings("serial") + private volatile java.lang.Object oidcServiceAccountName_ = ""; /** ** Name of the service account to use for OIDC authentication. 
@@ -14800,26 +13960,26 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getConsumerGroupBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, consumerGroup_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(consumerGroup_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, consumerGroup_); } - if (!getDestinationBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, destination_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destination_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, destination_); } if (replyStrategyCase_ == 3) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, replyStrategy_); + com.google.protobuf.GeneratedMessage.writeString(output, 3, replyStrategy_); } if (replyStrategyCase_ == 4) { output.writeMessage( 4, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) replyStrategy_); } - if (filter_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(5, getFilter()); } - if (!getUidBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, uid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uid_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, uid_); } - if (egressConfig_ != null) { + if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(7, getEgressConfig()); } if (deliveryOrder_ @@ -14834,7 +13994,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (keyType_ != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.String.getNumber()) { output.writeEnum(10, keyType_); } - if (reference_ != null) { + if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(11, getReference()); } for (int i = 0; i < dialectedFilter_.size(); i++) { @@ -14843,25 +14003,25 @@ 
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (vReplicas_ != 0) { output.writeInt32(13, vReplicas_); } - if (featureFlags_ != null) { + if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(14, getFeatureFlags()); } - if (!getDestinationCACertsBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 15, destinationCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destinationCACerts_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 15, destinationCACerts_); } - if (!getReplyUrlCACertsBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 16, replyUrlCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(replyUrlCACerts_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 16, replyUrlCACerts_); } - if (!getDestinationAudienceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 17, destinationAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destinationAudience_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 17, destinationAudience_); } - if (!getReplyUrlAudienceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 18, replyUrlAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(replyUrlAudience_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 18, replyUrlAudience_); } - if (!getOidcServiceAccountNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 19, oidcServiceAccountName_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(oidcServiceAccountName_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 19, oidcServiceAccountName_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -14870,26 +14030,26 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if 
(!getConsumerGroupBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, consumerGroup_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(consumerGroup_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, consumerGroup_); } - if (!getDestinationBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, destination_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destination_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, destination_); } if (replyStrategyCase_ == 3) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, replyStrategy_); + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, replyStrategy_); } if (replyStrategyCase_ == 4) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 4, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) replyStrategy_); } - if (filter_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getFilter()); } - if (!getUidBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, uid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uid_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, uid_); } - if (egressConfig_ != null) { + if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getEgressConfig()); } if (deliveryOrder_ @@ -14904,7 +14064,7 @@ public int getSerializedSize() { if (keyType_ != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.String.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(10, keyType_); } - if (reference_ != null) { + if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getReference()); } for (int i = 0; i < 
dialectedFilter_.size(); i++) { @@ -14913,25 +14073,25 @@ public int getSerializedSize() { if (vReplicas_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(13, vReplicas_); } - if (featureFlags_ != null) { + if (((bitField0_ & 0x00000008) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(14, getFeatureFlags()); } - if (!getDestinationCACertsBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(15, destinationCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destinationCACerts_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(15, destinationCACerts_); } - if (!getReplyUrlCACertsBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(16, replyUrlCACerts_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(replyUrlCACerts_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(16, replyUrlCACerts_); } - if (!getDestinationAudienceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(17, destinationAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(destinationAudience_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(17, destinationAudience_); } - if (!getReplyUrlAudienceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(18, replyUrlAudience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(replyUrlAudience_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(18, replyUrlAudience_); } - if (!getOidcServiceAccountNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(19, oidcServiceAccountName_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(oidcServiceAccountName_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(19, oidcServiceAccountName_); } - size += unknownFields.getSerializedSize(); + size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -14989,7 +14149,7 @@ public boolean equals(final java.lang.Object obj) { case 0: default: } - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -15058,7 +14218,7 @@ public int hashCode() { case 0: default: } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -15098,36 +14258,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egres public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -15150,14 +14309,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Egress} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Egress) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder { @@ -15166,7 +14325,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Egress_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -15179,72 +14338,69 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFilterFieldBuilder(); + getEgressConfigFieldBuilder(); + getReferenceFieldBuilder(); getDialectedFilterFieldBuilder(); + getFeatureFlagsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; consumerGroup_ = ""; - destination_ = ""; - destinationCACerts_ = ""; - destinationAudience_ = ""; - + if (replyToOriginalTopicBuilder_ != null) { + replyToOriginalTopicBuilder_.clear(); + } + if (discardReplyBuilder_ != null) { + discardReplyBuilder_.clear(); + } replyUrlCACerts_ = ""; - replyUrlAudience_ = ""; - - if (filterBuilder_ == null) { - filter_ = null; - } else { - filter_ = null; + filter_ = null; + if (filterBuilder_ != null) { + filterBuilder_.dispose(); filterBuilder_ = null; } uid_ = ""; - - if (egressConfigBuilder_ == null) { - egressConfig_ = null; - } else { - egressConfig_ = null; + egressConfig_ = null; + if (egressConfigBuilder_ != null) { + egressConfigBuilder_.dispose(); egressConfigBuilder_ = null; } deliveryOrder_ = 0; - keyType_ = 0; - - if (referenceBuilder_ == null) { - reference_ = null; - } else { - reference_ = null; + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } if (dialectedFilterBuilder_ == null) { dialectedFilter_ = 
java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + dialectedFilter_ = null; dialectedFilterBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00008000); vReplicas_ = 0; - - if (featureFlagsBuilder_ == null) { - featureFlags_ = null; - } else { - featureFlags_ = null; + featureFlags_ = null; + if (featureFlagsBuilder_ != null) { + featureFlagsBuilder_.dispose(); featureFlagsBuilder_ = null; } oidcServiceAccountName_ = ""; - replyStrategyCase_ = 0; replyStrategy_ = null; return this; @@ -15273,99 +14429,93 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress build public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress(this); - int from_bitField0_ = bitField0_; - result.consumerGroup_ = consumerGroup_; - result.destination_ = destination_; - result.destinationCACerts_ = destinationCACerts_; - result.destinationAudience_ = destinationAudience_; - if (replyStrategyCase_ == 3) { - result.replyStrategy_ = replyStrategy_; - } - if (replyStrategyCase_ == 4) { - if (replyToOriginalTopicBuilder_ == null) { - result.replyStrategy_ = replyStrategy_; - } else { - result.replyStrategy_ = replyToOriginalTopicBuilder_.build(); - } - } - if (replyStrategyCase_ == 9) { - if (discardReplyBuilder_ == null) { - result.replyStrategy_ = replyStrategy_; - } else { - result.replyStrategy_ = discardReplyBuilder_.build(); - } - } - result.replyUrlCACerts_ = replyUrlCACerts_; - result.replyUrlAudience_ = replyUrlAudience_; - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - result.uid_ = uid_; - if (egressConfigBuilder_ == null) { - result.egressConfig_ = egressConfig_; - } else { - result.egressConfig_ = egressConfigBuilder_.build(); - } - result.deliveryOrder_ = deliveryOrder_; - 
result.keyType_ = keyType_; - if (referenceBuilder_ == null) { - result.reference_ = reference_; - } else { - result.reference_ = referenceBuilder_.build(); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress result) { if (dialectedFilterBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { + if (((bitField0_ & 0x00008000) != 0)) { dialectedFilter_ = java.util.Collections.unmodifiableList(dialectedFilter_); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00008000); } result.dialectedFilter_ = dialectedFilter_; } else { result.dialectedFilter_ = dialectedFilterBuilder_.build(); } - result.vReplicas_ = vReplicas_; - if (featureFlagsBuilder_ == null) { - result.featureFlags_ = featureFlags_; - } else { - result.featureFlags_ = featureFlagsBuilder_.build(); - } - result.oidcServiceAccountName_ = oidcServiceAccountName_; - result.replyStrategyCase_ = replyStrategyCase_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); } - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); + private void 
buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.consumerGroup_ = consumerGroup_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.destination_ = destination_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.destinationCACerts_ = destinationCACerts_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.destinationAudience_ = destinationAudience_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.replyUrlCACerts_ = replyUrlCACerts_; + } + if (((from_bitField0_ & 0x00000100) != 0)) { + result.replyUrlAudience_ = replyUrlAudience_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000200) != 0)) { + result.filter_ = filterBuilder_ == null ? filter_ : filterBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000400) != 0)) { + result.uid_ = uid_; + } + if (((from_bitField0_ & 0x00000800) != 0)) { + result.egressConfig_ = egressConfigBuilder_ == null ? egressConfig_ : egressConfigBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00001000) != 0)) { + result.deliveryOrder_ = deliveryOrder_; + } + if (((from_bitField0_ & 0x00002000) != 0)) { + result.keyType_ = keyType_; + } + if (((from_bitField0_ & 0x00004000) != 0)) { + result.reference_ = referenceBuilder_ == null ? reference_ : referenceBuilder_.build(); + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00010000) != 0)) { + result.vReplicas_ = vReplicas_; + } + if (((from_bitField0_ & 0x00020000) != 0)) { + result.featureFlags_ = featureFlagsBuilder_ == null ? 
featureFlags_ : featureFlagsBuilder_.build(); + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00040000) != 0)) { + result.oidcServiceAccountName_ = oidcServiceAccountName_; + } + result.bitField0_ |= to_bitField0_; } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartialOneofs( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress result) { + result.replyStrategyCase_ = replyStrategyCase_; + result.replyStrategy_ = this.replyStrategy_; + if (replyStrategyCase_ == 4 && replyToOriginalTopicBuilder_ != null) { + result.replyStrategy_ = replyToOriginalTopicBuilder_.build(); + } + if (replyStrategyCase_ == 9 && discardReplyBuilder_ != null) { + result.replyStrategy_ = discardReplyBuilder_.build(); + } } @java.lang.Override @@ -15383,26 +14533,32 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon return this; if (!other.getConsumerGroup().isEmpty()) { consumerGroup_ = other.consumerGroup_; + bitField0_ |= 0x00000001; onChanged(); } if (!other.getDestination().isEmpty()) { destination_ = other.destination_; + bitField0_ |= 0x00000002; onChanged(); } if (!other.getDestinationCACerts().isEmpty()) { destinationCACerts_ = other.destinationCACerts_; + bitField0_ |= 0x00000004; onChanged(); } if (!other.getDestinationAudience().isEmpty()) { destinationAudience_ = other.destinationAudience_; + bitField0_ |= 0x00000008; onChanged(); } if (!other.getReplyUrlCACerts().isEmpty()) { replyUrlCACerts_ = other.replyUrlCACerts_; + bitField0_ |= 0x00000080; onChanged(); } if (!other.getReplyUrlAudience().isEmpty()) { replyUrlAudience_ = other.replyUrlAudience_; + bitField0_ |= 0x00000100; onChanged(); } if (other.hasFilter()) { @@ -15410,6 +14566,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } if (!other.getUid().isEmpty()) { 
uid_ = other.uid_; + bitField0_ |= 0x00000400; onChanged(); } if (other.hasEgressConfig()) { @@ -15428,7 +14585,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (!other.dialectedFilter_.isEmpty()) { if (dialectedFilter_.isEmpty()) { dialectedFilter_ = other.dialectedFilter_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00008000); } else { ensureDialectedFilterIsMutable(); dialectedFilter_.addAll(other.dialectedFilter_); @@ -15441,8 +14598,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon dialectedFilterBuilder_.dispose(); dialectedFilterBuilder_ = null; dialectedFilter_ = other.dialectedFilter_; - bitField0_ = (bitField0_ & ~0x00000001); - dialectedFilterBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00008000); + dialectedFilterBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getDialectedFilterFieldBuilder() : null; } else { @@ -15458,6 +14615,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } if (!other.getOidcServiceAccountName().isEmpty()) { oidcServiceAccountName_ = other.oidcServiceAccountName_; + bitField0_ |= 0x00040000; onChanged(); } switch (other.getReplyStrategyCase()) { @@ -15479,7 +14637,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon break; } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -15494,18 +14652,136 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + consumerGroup_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + destination_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + replyStrategyCase_ = 3; + replyStrategy_ = s; + break; + } // case 26 + case 34: { + input.readMessage( + getReplyToOriginalTopicFieldBuilder().getBuilder(), extensionRegistry); + replyStrategyCase_ = 4; + break; + } // case 34 + case 42: { + input.readMessage(getFilterFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000200; + break; + } // case 42 + case 50: { + uid_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000400; + break; + } // case 50 + case 58: { + input.readMessage(getEgressConfigFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000800; + break; + } // case 58 + case 64: { + deliveryOrder_ = input.readEnum(); + bitField0_ |= 0x00001000; + break; + } // case 64 + case 74: { + input.readMessage(getDiscardReplyFieldBuilder().getBuilder(), extensionRegistry); + replyStrategyCase_ = 9; + break; + } // case 74 + case 80: { + keyType_ = input.readEnum(); + bitField0_ |= 0x00002000; + break; + } // case 80 + case 90: { + input.readMessage(getReferenceFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00004000; + break; + } // case 90 + case 98: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .DialectedFilter.parser(), + extensionRegistry); + if (dialectedFilterBuilder_ == null) { + ensureDialectedFilterIsMutable(); + dialectedFilter_.add(m); + } else { + dialectedFilterBuilder_.addMessage(m); + } + break; + } // case 98 + case 104: { + vReplicas_ = 
input.readInt32(); + bitField0_ |= 0x00010000; + break; + } // case 104 + case 114: { + input.readMessage(getFeatureFlagsFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00020000; + break; + } // case 114 + case 122: { + destinationCACerts_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 122 + case 130: { + replyUrlCACerts_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000080; + break; + } // case 130 + case 138: { + destinationAudience_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 138 + case 146: { + replyUrlAudience_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000100; + break; + } // case 146 + case 154: { + oidcServiceAccountName_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00040000; + break; + } // case 154 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -15577,8 +14853,8 @@ public Builder setConsumerGroup(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - consumerGroup_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -15591,8 +14867,8 @@ public Builder setConsumerGroup(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearConsumerGroup() { - consumerGroup_ = getDefaultInstance().getConsumerGroup(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -15610,8 +14886,8 @@ public Builder setConsumerGroupBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - consumerGroup_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -15668,8 +14944,8 @@ public Builder setDestination(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - destination_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -15682,8 +14958,8 @@ public Builder setDestination(java.lang.String value) { * @return This builder for chaining. */ public Builder clearDestination() { - destination_ = getDefaultInstance().getDestination(); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } @@ -15701,8 +14977,8 @@ public Builder setDestinationBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - destination_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -15759,8 +15035,8 @@ public Builder setDestinationCACerts(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - destinationCACerts_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -15773,8 +15049,8 @@ public Builder setDestinationCACerts(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearDestinationCACerts() { - destinationCACerts_ = getDefaultInstance().getDestinationCACerts(); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @@ -15792,8 +15068,8 @@ public Builder setDestinationCACertsBytes(com.google.protobuf.ByteString value) throw new NullPointerException(); } checkByteStringIsUtf8(value); - destinationCACerts_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -15850,8 +15126,8 @@ public Builder setDestinationAudience(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - destinationAudience_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -15864,8 +15140,8 @@ public Builder setDestinationAudience(java.lang.String value) { * @return This builder for chaining. */ public Builder clearDestinationAudience() { - destinationAudience_ = getDefaultInstance().getDestinationAudience(); + bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } @@ -15883,8 +15159,8 @@ public Builder setDestinationAudienceBytes(com.google.protobuf.ByteString value) throw new NullPointerException(); } checkByteStringIsUtf8(value); - destinationAudience_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -16005,7 +15281,7 @@ public Builder setReplyUrlBytes(com.google.protobuf.ByteString value) { return this; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -16109,8 +15385,9 @@ public Builder mergeReplyToOriginalTopic( } else { if (replyStrategyCase_ == 4) { replyToOriginalTopicBuilder_.mergeFrom(value); + } else { + replyToOriginalTopicBuilder_.setMessage(value); } - replyToOriginalTopicBuilder_.setMessage(value); } replyStrategyCase_ = 4; return this; @@ 
-16175,7 +15452,7 @@ public Builder clearReplyToOriginalTopic() { * * .Empty replyToOriginalTopic = 4;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -16185,7 +15462,7 @@ public Builder clearReplyToOriginalTopic() { replyStrategy_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.getDefaultInstance(); } - replyToOriginalTopicBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + replyToOriginalTopicBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder>( @@ -16196,11 +15473,10 @@ public Builder clearReplyToOriginalTopic() { } replyStrategyCase_ = 4; onChanged(); - ; return replyToOriginalTopicBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -16302,8 +15578,9 @@ public Builder mergeDiscardReply(dev.knative.eventing.kafka.broker.contract.Data } else { if (replyStrategyCase_ == 9) { discardReplyBuilder_.mergeFrom(value); + } else { + discardReplyBuilder_.setMessage(value); } - discardReplyBuilder_.setMessage(value); } replyStrategyCase_ = 9; return this; @@ -16367,7 +15644,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde * *.Empty discardReply = 9;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -16377,7 +15654,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde replyStrategy_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.getDefaultInstance(); } - discardReplyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + discardReplyBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder>( @@ -16388,7 +15665,6 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde } replyStrategyCase_ = 9; onChanged(); - ; return discardReplyBuilder_; } @@ -16444,8 +15720,8 @@ public Builder setReplyUrlCACerts(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - replyUrlCACerts_ = value; + bitField0_ |= 0x00000080; onChanged(); return this; } @@ -16458,8 +15734,8 @@ public Builder setReplyUrlCACerts(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearReplyUrlCACerts() { - replyUrlCACerts_ = getDefaultInstance().getReplyUrlCACerts(); + bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } @@ -16477,8 +15753,8 @@ public Builder setReplyUrlCACertsBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - replyUrlCACerts_ = value; + bitField0_ |= 0x00000080; onChanged(); return this; } @@ -16535,8 +15811,8 @@ public Builder setReplyUrlAudience(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - replyUrlAudience_ = value; + bitField0_ |= 0x00000100; onChanged(); return this; } @@ -16549,8 +15825,8 @@ public Builder setReplyUrlAudience(java.lang.String value) { * @return This builder for chaining. */ public Builder clearReplyUrlAudience() { - replyUrlAudience_ = getDefaultInstance().getReplyUrlAudience(); + bitField0_ = (bitField0_ & ~0x00000100); onChanged(); return this; } @@ -16568,14 +15844,14 @@ public Builder setReplyUrlAudienceBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - replyUrlAudience_ = value; + bitField0_ |= 0x00000100; onChanged(); return this; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter filter_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FilterOrBuilder> @@ -16589,7 +15865,7 @@ public Builder setReplyUrlAudienceBytes(com.google.protobuf.ByteString value) { * @return Whether the filter field is set. 
*/ public boolean hasFilter() { - return filterBuilder_ != null || filter_ != null; + return ((bitField0_ & 0x00000200) != 0); } /** *@@ -16621,11 +15897,11 @@ public Builder setFilter(dev.knative.eventing.kafka.broker.contract.DataPlaneCon throw new NullPointerException(); } filter_ = value; - onChanged(); } else { filterBuilder_.setMessage(value); } - + bitField0_ |= 0x00000200; + onChanged(); return this; } /** @@ -16639,11 +15915,11 @@ public Builder setFilter( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); - onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000200; + onChanged(); return this; } /** @@ -16655,19 +15931,22 @@ public Builder setFilter( */ public Builder mergeFilter(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter value) { if (filterBuilder_ == null) { - if (filter_ != null) { - filter_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.newBuilder( - filter_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000200) != 0) + && filter_ != null + && filter_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter + .getDefaultInstance()) { + getFilterBuilder().mergeFrom(value); } else { filter_ = value; } - onChanged(); } else { filterBuilder_.mergeFrom(value); } - + if (filter_ != null) { + bitField0_ |= 0x00000200; + onChanged(); + } return this; } /** @@ -16678,14 +15957,13 @@ public Builder mergeFilter(dev.knative.eventing.kafka.broker.contract.DataPlaneC *.Filter filter = 5;
*/ public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = null; - onChanged(); - } else { - filter_ = null; + bitField0_ = (bitField0_ & ~0x00000200); + filter_ = null; + if (filterBuilder_ != null) { + filterBuilder_.dispose(); filterBuilder_ = null; } - + onChanged(); return this; } /** @@ -16696,7 +15974,7 @@ public Builder clearFilter() { *.Filter filter = 5;
*/ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder getFilterBuilder() { - + bitField0_ |= 0x00000200; onChanged(); return getFilterFieldBuilder().getBuilder(); } @@ -16723,13 +16001,13 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FilterOrBuil * *.Filter filter = 5;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FilterOrBuilder>( @@ -16794,8 +16072,8 @@ public Builder setUid(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - uid_ = value; + bitField0_ |= 0x00000400; onChanged(); return this; } @@ -16809,8 +16087,8 @@ public Builder setUid(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearUid() { - uid_ = getDefaultInstance().getUid(); + bitField0_ = (bitField0_ & ~0x00000400); onChanged(); return this; } @@ -16829,14 +16107,14 @@ public Builder setUidBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - uid_ = value; + bitField0_ |= 0x00000400; onChanged(); return this; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig egressConfig_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder> @@ -16851,7 +16129,7 @@ public Builder setUidBytes(com.google.protobuf.ByteString value) { * @return Whether the egressConfig field is set. */ public boolean hasEgressConfig() { - return egressConfigBuilder_ != null || egressConfig_ != null; + return ((bitField0_ & 0x00000800) != 0); } /** *@@ -16887,11 +16165,11 @@ public Builder setEgressConfig( throw new NullPointerException(); } egressConfig_ = value; - onChanged(); } else { egressConfigBuilder_.setMessage(value); } - + bitField0_ |= 0x00000800; + onChanged(); return this; } /** @@ -16906,11 +16184,11 @@ public Builder setEgressConfig( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder builderForValue) { if (egressConfigBuilder_ == null) { egressConfig_ = builderForValue.build(); - onChanged(); } else { egressConfigBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000800; + onChanged(); return this; } /** @@ -16924,20 +16202,22 @@ public Builder setEgressConfig( public Builder mergeEgressConfig( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig value) { if (egressConfigBuilder_ == null) { - if (egressConfig_ != null) { - egressConfig_ = - 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.newBuilder( - egressConfig_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000800) != 0) + && egressConfig_ != null + && egressConfig_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig + .getDefaultInstance()) { + getEgressConfigBuilder().mergeFrom(value); } else { egressConfig_ = value; } - onChanged(); } else { egressConfigBuilder_.mergeFrom(value); } - + if (egressConfig_ != null) { + bitField0_ |= 0x00000800; + onChanged(); + } return this; } /** @@ -16949,14 +16229,13 @@ public Builder mergeEgressConfig( *.EgressConfig egressConfig = 7;
*/ public Builder clearEgressConfig() { - if (egressConfigBuilder_ == null) { - egressConfig_ = null; - onChanged(); - } else { - egressConfig_ = null; + bitField0_ = (bitField0_ & ~0x00000800); + egressConfig_ = null; + if (egressConfigBuilder_ != null) { + egressConfigBuilder_.dispose(); egressConfigBuilder_ = null; } - + onChanged(); return this; } /** @@ -16969,7 +16248,7 @@ public Builder clearEgressConfig() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder getEgressConfigBuilder() { - + bitField0_ |= 0x00000800; onChanged(); return getEgressConfigFieldBuilder().getBuilder(); } @@ -17000,13 +16279,13 @@ public Builder clearEgressConfig() { * *.EgressConfig egressConfig = 7;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder> getEgressConfigFieldBuilder() { if (egressConfigBuilder_ == null) { - egressConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + egressConfigBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder>( @@ -17041,8 +16320,8 @@ public int getDeliveryOrderValue() { * @return This builder for chaining. */ public Builder setDeliveryOrderValue(int value) { - deliveryOrder_ = value; + bitField0_ |= 0x00001000; onChanged(); return this; } @@ -17057,9 +16336,8 @@ public Builder setDeliveryOrderValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder getDeliveryOrder() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.valueOf( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.forNumber( deliveryOrder_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DeliveryOrder.UNRECOGNIZED @@ -17080,7 +16358,7 @@ public Builder setDeliveryOrder( if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00001000; deliveryOrder_ = value.getNumber(); onChanged(); return this; @@ -17095,7 +16373,7 @@ public Builder setDeliveryOrder( * @return This builder for chaining. 
*/ public Builder clearDeliveryOrder() { - + bitField0_ = (bitField0_ & ~0x00001000); deliveryOrder_ = 0; onChanged(); return this; @@ -17124,8 +16402,8 @@ public int getKeyTypeValue() { * @return This builder for chaining. */ public Builder setKeyTypeValue(int value) { - keyType_ = value; + bitField0_ |= 0x00002000; onChanged(); return this; } @@ -17139,9 +16417,8 @@ public Builder setKeyTypeValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType getKeyType() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.valueOf(keyType_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.forNumber(keyType_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyType.UNRECOGNIZED : result; @@ -17159,7 +16436,7 @@ public Builder setKeyType(dev.knative.eventing.kafka.broker.contract.DataPlaneCo if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00002000; keyType_ = value.getNumber(); onChanged(); return this; @@ -17173,14 +16450,14 @@ public Builder setKeyType(dev.knative.eventing.kafka.broker.contract.DataPlaneCo * @return This builder for chaining. */ public Builder clearKeyType() { - + bitField0_ = (bitField0_ & ~0x00002000); keyType_ = 0; onChanged(); return this; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference reference_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> @@ -17188,6 +16465,7 @@ public Builder clearKeyType() { /** ** Resource reference. 
+ * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17197,11 +16475,12 @@ public Builder clearKeyType() { * @return Whether the reference field is set. */ public boolean hasReference() { - return referenceBuilder_ != null || reference_ != null; + return ((bitField0_ & 0x00004000) != 0); } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17223,6 +16502,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17236,16 +16516,17 @@ public Builder setReference(dev.knative.eventing.kafka.broker.contract.DataPlane throw new NullPointerException(); } reference_ = value; - onChanged(); } else { referenceBuilder_.setMessage(value); } - + bitField0_ |= 0x00004000; + onChanged(); return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17257,16 +16538,17 @@ public Builder setReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder builderForValue) { if (referenceBuilder_ == null) { reference_ = builderForValue.build(); - onChanged(); } else { referenceBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00004000; + onChanged(); return this; } /** ** Resource reference. 
+ * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17277,24 +16559,28 @@ public Builder setReference( public Builder mergeReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference value) { if (referenceBuilder_ == null) { - if (reference_ != null) { - reference_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder( - reference_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00004000) != 0) + && reference_ != null + && reference_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference + .getDefaultInstance()) { + getReferenceBuilder().mergeFrom(value); } else { reference_ = value; } - onChanged(); } else { referenceBuilder_.mergeFrom(value); } - + if (reference_ != null) { + bitField0_ |= 0x00004000; + onChanged(); + } return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17303,19 +16589,19 @@ public Builder mergeReference( *.Reference reference = 11;
*/ public Builder clearReference() { - if (referenceBuilder_ == null) { - reference_ = null; - onChanged(); - } else { - reference_ = null; + bitField0_ = (bitField0_ & ~0x00004000); + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } - + onChanged(); return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17325,13 +16611,14 @@ public Builder clearReference() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder getReferenceBuilder() { - + bitField0_ |= 0x00004000; onChanged(); return getReferenceFieldBuilder().getBuilder(); } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17353,6 +16640,7 @@ public Builder clearReference() { /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - tagging metrics @@ -17360,13 +16648,13 @@ public Builder clearReference() { * *.Reference reference = 11;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> getReferenceFieldBuilder() { if (referenceBuilder_ == null) { - referenceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + referenceBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder>( @@ -17380,15 +16668,15 @@ public Builder clearReference() { dialectedFilter_ = java.util.Collections.emptyList(); private void ensureDialectedFilterIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!((bitField0_ & 0x00008000) != 0)) { dialectedFilter_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter>( dialectedFilter_); - bitField0_ |= 0x00000001; + bitField0_ |= 0x00008000; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> @@ -17592,7 +16880,7 @@ public Builder addAllDialectedFilter( public Builder clearDialectedFilter() { if (dialectedFilterBuilder_ == null) { dialectedFilter_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00008000); onChanged(); } else { dialectedFilterBuilder_.clear(); @@ -17701,17 +16989,17 @@ public Builder removeDialectedFilter(int index) { return 
getDialectedFilterFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder> getDialectedFilterFieldBuilder() { if (dialectedFilterBuilder_ == null) { - dialectedFilterBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + dialectedFilterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilter.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.DialectedFilterOrBuilder>( - dialectedFilter_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + dialectedFilter_, ((bitField0_ & 0x00008000) != 0), getParentForChildren(), isClean()); dialectedFilter_ = null; } return dialectedFilterBuilder_; @@ -17742,6 +17030,7 @@ public int getVReplicas() { public Builder setVReplicas(int value) { vReplicas_ = value; + bitField0_ |= 0x00010000; onChanged(); return this; } @@ -17754,14 +17043,14 @@ public Builder setVReplicas(int value) { * @return This builder for chaining. 
*/ public Builder clearVReplicas() { - + bitField0_ = (bitField0_ & ~0x00010000); vReplicas_ = 0; onChanged(); return this; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags featureFlags_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder> @@ -17775,7 +17064,7 @@ public Builder clearVReplicas() { * @return Whether the featureFlags field is set. */ public boolean hasFeatureFlags() { - return featureFlagsBuilder_ != null || featureFlags_ != null; + return ((bitField0_ & 0x00020000) != 0); } /** *@@ -17809,11 +17098,11 @@ public Builder setFeatureFlags( throw new NullPointerException(); } featureFlags_ = value; - onChanged(); } else { featureFlagsBuilder_.setMessage(value); } - + bitField0_ |= 0x00020000; + onChanged(); return this; } /** @@ -17828,11 +17117,11 @@ public Builder setFeatureFlags( builderForValue) { if (featureFlagsBuilder_ == null) { featureFlags_ = builderForValue.build(); - onChanged(); } else { featureFlagsBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00020000; + onChanged(); return this; } /** @@ -17845,20 +17134,22 @@ public Builder setFeatureFlags( public Builder mergeFeatureFlags( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags value) { if (featureFlagsBuilder_ == null) { - if (featureFlags_ != null) { - featureFlags_ = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags - .newBuilder(featureFlags_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00020000) != 0) + && featureFlags_ != null + && featureFlags_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags + 
.getDefaultInstance()) { + getFeatureFlagsBuilder().mergeFrom(value); } else { featureFlags_ = value; } - onChanged(); } else { featureFlagsBuilder_.mergeFrom(value); } - + if (featureFlags_ != null) { + bitField0_ |= 0x00020000; + onChanged(); + } return this; } /** @@ -17869,14 +17160,13 @@ public Builder mergeFeatureFlags( *.EgressFeatureFlags featureFlags = 14;
*/ public Builder clearFeatureFlags() { - if (featureFlagsBuilder_ == null) { - featureFlags_ = null; - onChanged(); - } else { - featureFlags_ = null; + bitField0_ = (bitField0_ & ~0x00020000); + featureFlags_ = null; + if (featureFlagsBuilder_ != null) { + featureFlagsBuilder_.dispose(); featureFlagsBuilder_ = null; } - + onChanged(); return this; } /** @@ -17888,7 +17178,7 @@ public Builder clearFeatureFlags() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.Builder getFeatureFlagsBuilder() { - + bitField0_ |= 0x00020000; onChanged(); return getFeatureFlagsFieldBuilder().getBuilder(); } @@ -17917,13 +17207,13 @@ public Builder clearFeatureFlags() { * *.EgressFeatureFlags featureFlags = 14;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder> getFeatureFlagsFieldBuilder() { if (featureFlagsBuilder_ == null) { - featureFlagsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + featureFlagsBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder>( @@ -17985,8 +17275,8 @@ public Builder setOidcServiceAccountName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - oidcServiceAccountName_ = value; + bitField0_ |= 0x00040000; onChanged(); return this; } @@ -17999,8 +17289,8 @@ public Builder setOidcServiceAccountName(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearOidcServiceAccountName() { - oidcServiceAccountName_ = getDefaultInstance().getOidcServiceAccountName(); + bitField0_ = (bitField0_ & ~0x00040000); onChanged(); return this; } @@ -18018,22 +17308,12 @@ public Builder setOidcServiceAccountNameBytes(com.google.protobuf.ByteString val throw new NullPointerException(); } checkByteStringIsUtf8(value); - oidcServiceAccountName_ = value; + bitField0_ |= 0x00040000; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Egress) } @@ -18055,7 +17335,18 @@ public Egress parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Egress(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -18102,79 +17393,35 @@ public interface EgressFeatureFlagsOrBuilder /** * Protobuf type {@code EgressFeatureFlags} */ - public static final class EgressFeatureFlags extends com.google.protobuf.GeneratedMessageV3 + public static final class EgressFeatureFlags extends 
com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:EgressFeatureFlags) EgressFeatureFlagsOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + EgressFeatureFlags.class.getName()); + } // Use EgressFeatureFlags.newBuilder() to construct. - private EgressFeatureFlags(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private EgressFeatureFlags(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private EgressFeatureFlags() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new EgressFeatureFlags(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private EgressFeatureFlags( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - enableRateLimiter_ = input.readBool(); - break; - } - case 16: { - enableOrderedExecutorMetrics_ = input.readBool(); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EgressFeatureFlags_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EgressFeatureFlags_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18184,7 +17431,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int ENABLERATELIMITER_FIELD_NUMBER = 1; - private boolean enableRateLimiter_; + private boolean enableRateLimiter_ = false; /** ** Enable rateLimiter @@ -18199,7 +17446,7 @@ public boolean getEnableRateLimiter() { } public static final int ENABLEORDEREDEXECUTORMETRICS_FIELD_NUMBER = 2; - private boolean enableOrderedExecutorMetrics_; + private boolean enableOrderedExecutorMetrics_ = false; /** ** Enable newMetrics @@ -18233,7 +17480,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (enableOrderedExecutorMetrics_ != false) { output.writeBool(2, enableOrderedExecutorMetrics_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -18248,7 +17495,7 @@ public int getSerializedSize() { if (enableOrderedExecutorMetrics_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, enableOrderedExecutorMetrics_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -18266,7 +17513,7 @@ public boolean equals(final 
java.lang.Object obj) { if (getEnableRateLimiter() != other.getEnableRateLimiter()) return false; if (getEnableOrderedExecutorMetrics() != other.getEnableOrderedExecutorMetrics()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -18281,7 +17528,7 @@ public int hashCode() { hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableRateLimiter()); hash = (37 * hash) + ENABLEORDEREDEXECUTORMETRICS_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableOrderedExecutorMetrics()); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -18321,37 +17568,36 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egres public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return 
com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -18374,14 +17620,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EgressFeatureFlags} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // 
@@protoc_insertion_point(builder_implements:EgressFeatureFlags) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder { @@ -18391,7 +17637,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_EgressFeatureFlags_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18402,26 +17648,18 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF // Construct using // dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; enableRateLimiter_ = false; - enableOrderedExecutorMetrics_ = false; - return this; } @@ -18451,42 +17689,22 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatur public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags(this); - result.enableRateLimiter_ = enableRateLimiter_; - result.enableOrderedExecutorMetrics_ = enableOrderedExecutorMetrics_; + if (bitField0_ != 0) { + 
buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.enableRateLimiter_ = enableRateLimiter_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.enableOrderedExecutorMetrics_ = enableOrderedExecutorMetrics_; + } } @java.lang.Override @@ -18511,7 +17729,7 @@ public Builder mergeFrom( if (other.getEnableOrderedExecutorMetrics() != false) { setEnableOrderedExecutorMetrics(other.getEnableOrderedExecutorMetrics()); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -18526,21 +17744,45 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags parsedMessage = 
null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + enableRateLimiter_ = input.readBool(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 16: { + enableOrderedExecutorMetrics_ = input.readBool(); + bitField0_ |= 0x00000002; + break; + } // case 16 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlags) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private boolean enableRateLimiter_; /** * @@ -18566,6 +17808,7 @@ public boolean getEnableRateLimiter() { public Builder setEnableRateLimiter(boolean value) { enableRateLimiter_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -18578,7 +17821,7 @@ public Builder setEnableRateLimiter(boolean value) { * @return This builder for chaining. */ public Builder clearEnableRateLimiter() { - + bitField0_ = (bitField0_ & ~0x00000001); enableRateLimiter_ = false; onChanged(); return this; @@ -18609,6 +17852,7 @@ public boolean getEnableOrderedExecutorMetrics() { public Builder setEnableOrderedExecutorMetrics(boolean value) { enableOrderedExecutorMetrics_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -18621,22 +17865,12 @@ public Builder setEnableOrderedExecutorMetrics(boolean value) { * @return This builder for chaining. 
*/ public Builder clearEnableOrderedExecutorMetrics() { - + bitField0_ = (bitField0_ & ~0x00000002); enableOrderedExecutorMetrics_ = false; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:EgressFeatureFlags) } @@ -18660,7 +17894,18 @@ public EgressFeatureFlags parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new EgressFeatureFlags(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -18808,6 +18053,7 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilde ** Ingress is the definition for HTTP ingress that is receiving the events * into the Knative Kafka component. + * * path and host fields are used for identifying the targets. They are exclusive. * When a request comes with "/some-path", hostname will not be checked. * When a request comes with "/", only hostname matching will be done. 
@@ -18817,13 +18063,23 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilde * * Protobuf type {@code Ingress} */ - public static final class Ingress extends com.google.protobuf.GeneratedMessageV3 + public static final class Ingress extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Ingress) IngressOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Ingress.class.getName()); + } // Use Ingress.newBuilder() to construct. - private Ingress(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Ingress(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -18835,97 +18091,12 @@ private Ingress() { eventPolicies_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Ingress(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Ingress( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - int rawValue = input.readEnum(); - - contentMode_ = rawValue; - break; - } - case 18: { - java.lang.String s = 
input.readStringRequireUtf8(); - - path_ = s; - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - - host_ = s; - break; - } - case 42: { - java.lang.String s = input.readStringRequireUtf8(); - - audience_ = s; - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - eventPolicies_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy>(); - mutable_bitField0_ |= 0x00000001; - } - eventPolicies_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy.parser(), - extensionRegistry)); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - eventPolicies_ = java.util.Collections.unmodifiableList(eventPolicies_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Ingress_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Ingress_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -18934,7 +18105,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int CONTENTMODE_FIELD_NUMBER = 1; - private int contentMode_; + private int 
contentMode_ = 0; /** ** Optional content mode to use when pushing messages to Kafka @@ -18957,16 +18128,17 @@ public int getContentModeValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode getContentMode() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.valueOf(contentMode_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.forNumber(contentMode_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.UNRECOGNIZED : result; } public static final int PATH_FIELD_NUMBER = 2; - private volatile java.lang.Object path_; + + @SuppressWarnings("serial") + private volatile java.lang.Object path_ = ""; /** ** path to listen for incoming events. @@ -19008,7 +18180,9 @@ public com.google.protobuf.ByteString getPathBytes() { } public static final int HOST_FIELD_NUMBER = 3; - private volatile java.lang.Object host_; + + @SuppressWarnings("serial") + private volatile java.lang.Object host_ = ""; /** ** host header to match @@ -19050,7 +18224,9 @@ public com.google.protobuf.ByteString getHostBytes() { } public static final int AUDIENCE_FIELD_NUMBER = 5; - private volatile java.lang.Object audience_; + + @SuppressWarnings("serial") + private volatile java.lang.Object audience_ = ""; /** ** OIDC audience of this ingress @@ -19092,6 +18268,8 @@ public com.google.protobuf.ByteString getAudienceBytes() { } public static final int EVENTPOLICIES_FIELD_NUMBER = 6; + + @SuppressWarnings("serial") private java.util.ListeventPolicies_; /** * @@ -19171,19 +18349,19 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.BINARY.getNumber()) { output.writeEnum(1, contentMode_); } - if (!getPathBytes().isEmpty()) { - 
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, path_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, path_); } - if (!getHostBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, host_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(host_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, host_); } - if (!getAudienceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, audience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(audience_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, audience_); } for (int i = 0; i < eventPolicies_.size(); i++) { output.writeMessage(6, eventPolicies_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -19196,19 +18374,19 @@ public int getSerializedSize() { != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.BINARY.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, contentMode_); } - if (!getPathBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, path_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, path_); } - if (!getHostBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, host_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(host_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, host_); } - if (!getAudienceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, audience_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(audience_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, audience_); } for (int i = 0; i < eventPolicies_.size(); i++) { size 
+= com.google.protobuf.CodedOutputStream.computeMessageSize(6, eventPolicies_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -19229,7 +18407,7 @@ public boolean equals(final java.lang.Object obj) { if (!getHost().equals(other.getHost())) return false; if (!getAudience().equals(other.getAudience())) return false; if (!getEventPoliciesList().equals(other.getEventPoliciesList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -19252,7 +18430,7 @@ public int hashCode() { hash = (37 * hash) + EVENTPOLICIES_FIELD_NUMBER; hash = (53 * hash) + getEventPoliciesList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -19292,36 +18470,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingre public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -19344,7 +18521,7 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -19352,6 +18529,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build ** Ingress is the definition for HTTP ingress that is receiving the events * into the Knative Kafka component. 
+ * * path and host fields are used for identifying the targets. They are exclusive. * When a request comes with "/some-path", hostname will not be checked. * When a request comes with "/", only hostname matching will be done. @@ -19361,7 +18539,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * * Protobuf type {@code Ingress} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Ingress) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBuilder { @@ -19370,7 +18548,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Ingress_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -19379,38 +18557,27 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getEventPoliciesFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; contentMode_ = 0; - path_ = ""; - host_ = ""; - audience_ = ""; - if 
(eventPoliciesBuilder_ == null) { eventPolicies_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + eventPolicies_ = null; eventPoliciesBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -19437,54 +18604,41 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress buil public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress(this); - int from_bitField0_ = bitField0_; - result.contentMode_ = contentMode_; - result.path_ = path_; - result.host_ = host_; - result.audience_ = audience_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress result) { if (eventPoliciesBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { + if (((bitField0_ & 0x00000010) != 0)) { eventPolicies_ = java.util.Collections.unmodifiableList(eventPolicies_); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000010); } result.eventPolicies_ = eventPolicies_; } else { result.eventPolicies_ = eventPoliciesBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); } - 
@java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.contentMode_ = contentMode_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.path_ = path_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.host_ = host_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.audience_ = audience_; + } } @java.lang.Override @@ -19505,21 +18659,24 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } if (!other.getPath().isEmpty()) { path_ = other.path_; + bitField0_ |= 0x00000002; onChanged(); } if (!other.getHost().isEmpty()) { host_ = other.host_; + bitField0_ |= 0x00000004; onChanged(); } if (!other.getAudience().isEmpty()) { audience_ = other.audience_; + bitField0_ |= 0x00000008; onChanged(); } if (eventPoliciesBuilder_ == null) { if (!other.eventPolicies_.isEmpty()) { if (eventPolicies_.isEmpty()) { eventPolicies_ = other.eventPolicies_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000010); } else { ensureEventPoliciesIsMutable(); eventPolicies_.addAll(other.eventPolicies_); @@ -19532,8 +18689,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon eventPoliciesBuilder_.dispose(); eventPoliciesBuilder_ = null; eventPolicies_ = other.eventPolicies_; - bitField0_ = (bitField0_ & ~0x00000001); - eventPoliciesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00000010); 
+ eventPoliciesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getEventPoliciesFieldBuilder() : null; } else { @@ -19541,7 +18698,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -19556,18 +18713,64 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + contentMode_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + path_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + host_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 42: { + audience_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 42 + case 50: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy + .parser(), + extensionRegistry); + if (eventPoliciesBuilder_ == null) { + ensureEventPoliciesIsMutable(); + eventPolicies_.add(m); + } else { + eventPoliciesBuilder_.addMessage(m); + } + break; + } // case 50 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -19596,8 +18799,8 @@ public int getContentModeValue() { * @return This builder for chaining. */ public Builder setContentModeValue(int value) { - contentMode_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -19611,9 +18814,9 @@ public Builder setContentModeValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode getContentMode() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.valueOf(contentMode_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.forNumber( + contentMode_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContentMode.UNRECOGNIZED : result; @@ -19632,7 +18835,7 @@ public Builder setContentMode( if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00000001; contentMode_ = value.getNumber(); onChanged(); return this; @@ -19646,7 +18849,7 @@ public Builder setContentMode( * @return This builder for chaining. */ public Builder clearContentMode() { - + bitField0_ = (bitField0_ & ~0x00000001); contentMode_ = 0; onChanged(); return this; @@ -19704,8 +18907,8 @@ public Builder setPath(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - path_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -19718,8 +18921,8 @@ public Builder setPath(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearPath() { - path_ = getDefaultInstance().getPath(); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } @@ -19737,8 +18940,8 @@ public Builder setPathBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - path_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -19795,8 +18998,8 @@ public Builder setHost(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - host_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -19809,8 +19012,8 @@ public Builder setHost(java.lang.String value) { * @return This builder for chaining. */ public Builder clearHost() { - host_ = getDefaultInstance().getHost(); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @@ -19828,8 +19031,8 @@ public Builder setHostBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - host_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -19886,8 +19089,8 @@ public Builder setAudience(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - audience_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -19900,8 +19103,8 @@ public Builder setAudience(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearAudience() { - audience_ = getDefaultInstance().getAudience(); + bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } @@ -19919,8 +19122,8 @@ public Builder setAudienceBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - audience_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -19929,14 +19132,14 @@ public Builder setAudienceBytes(com.google.protobuf.ByteString value) { eventPolicies_ = java.util.Collections.emptyList(); private void ensureEventPoliciesIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!((bitField0_ & 0x00000010) != 0)) { eventPolicies_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy>(eventPolicies_); - bitField0_ |= 0x00000001; + bitField0_ |= 0x00000010; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilder> @@ -20135,7 +19338,7 @@ public Builder addAllEventPolicies( public Builder clearEventPolicies() { if (eventPoliciesBuilder_ == null) { eventPolicies_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { eventPoliciesBuilder_.clear(); @@ -20242,32 +19445,22 @@ public Builder removeEventPolicies(int index) { return getEventPoliciesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy.Builder, 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilder> getEventPoliciesFieldBuilder() { if (eventPoliciesBuilder_ == null) { - eventPoliciesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + eventPoliciesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicy.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EventPolicyOrBuilder>( - eventPolicies_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + eventPolicies_, ((bitField0_ & 0x00000010) != 0), getParentForChildren(), isClean()); eventPolicies_ = null; } return eventPoliciesBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Ingress) } @@ -20289,7 +19482,18 @@ public Ingress parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Ingress(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return 
builder.buildPartial(); } }; @@ -20434,13 +19638,23 @@ public interface ReferenceOrBuilder * * Protobuf type {@code Reference} */ - public static final class Reference extends com.google.protobuf.GeneratedMessageV3 + public static final class Reference extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Reference) ReferenceOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Reference.class.getName()); + } // Use Reference.newBuilder() to construct. - private Reference(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Reference(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -20453,94 +19667,12 @@ private Reference() { groupVersion_ = ""; } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Reference(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Reference( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - uuid_ = s; - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - namespace_ = s; - break; - } - case 26: 
{ - java.lang.String s = input.readStringRequireUtf8(); - - name_ = s; - break; - } - case 34: { - java.lang.String s = input.readStringRequireUtf8(); - - version_ = s; - break; - } - case 42: { - java.lang.String s = input.readStringRequireUtf8(); - - kind_ = s; - break; - } - case 50: { - java.lang.String s = input.readStringRequireUtf8(); - - groupVersion_ = s; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Reference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -20549,7 +19681,9 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int UUID_FIELD_NUMBER = 1; - private volatile java.lang.Object uuid_; + + @SuppressWarnings("serial") + private volatile java.lang.Object uuid_ = ""; /** * * Object id. @@ -20591,7 +19725,9 @@ public com.google.protobuf.ByteString getUuidBytes() { } public static final int NAMESPACE_FIELD_NUMBER = 2; - private volatile java.lang.Object namespace_; + + @SuppressWarnings("serial") + private volatile java.lang.Object namespace_ = ""; /** ** Object namespace. 
@@ -20633,7 +19769,9 @@ public com.google.protobuf.ByteString getNamespaceBytes() { } public static final int NAME_FIELD_NUMBER = 3; - private volatile java.lang.Object name_; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; /** ** Object name. @@ -20675,7 +19813,9 @@ public com.google.protobuf.ByteString getNameBytes() { } public static final int VERSION_FIELD_NUMBER = 4; - private volatile java.lang.Object version_; + + @SuppressWarnings("serial") + private volatile java.lang.Object version_ = ""; /** ** Object ResourceVersion. @@ -20717,7 +19857,9 @@ public com.google.protobuf.ByteString getVersionBytes() { } public static final int KIND_FIELD_NUMBER = 5; - private volatile java.lang.Object kind_; + + @SuppressWarnings("serial") + private volatile java.lang.Object kind_ = ""; /** ** Object kind. @@ -20759,7 +19901,9 @@ public com.google.protobuf.ByteString getKindBytes() { } public static final int GROUPVERSION_FIELD_NUMBER = 6; - private volatile java.lang.Object groupVersion_; + + @SuppressWarnings("serial") + private volatile java.lang.Object groupVersion_ = ""; /** ** Object GroupVersion. 
@@ -20814,25 +19958,25 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getUuidBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uuid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uuid_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, uuid_); } - if (!getNamespaceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(namespace_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, namespace_); } - if (!getNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, name_); } - if (!getVersionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(version_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, version_); } - if (!getKindBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, kind_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(kind_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, kind_); } - if (!getGroupVersionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, groupVersion_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(groupVersion_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, groupVersion_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -20841,25 +19985,25 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getUuidBytes().isEmpty()) { - size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uuid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uuid_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, uuid_); } - if (!getNamespaceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(namespace_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, namespace_); } - if (!getNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, name_); } - if (!getVersionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(version_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, version_); } - if (!getKindBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, kind_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(kind_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, kind_); } - if (!getGroupVersionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, groupVersion_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(groupVersion_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, groupVersion_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -20881,7 +20025,7 @@ public boolean equals(final java.lang.Object obj) { if (!getVersion().equals(other.getVersion())) return false; if (!getKind().equals(other.getKind())) return false; if (!getGroupVersion().equals(other.getGroupVersion())) return false; - if 
(!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -20904,7 +20048,7 @@ public int hashCode() { hash = (53 * hash) + getKind().hashCode(); hash = (37 * hash) + GROUPVERSION_FIELD_NUMBER; hash = (53 * hash) + getGroupVersion().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -20944,36 +20088,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Refer public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, 
extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -20996,7 +20139,7 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -21007,7 +20150,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * * Protobuf type {@code Reference} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Reference) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder { @@ -21017,7 +20160,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Reference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21026,34 +20169,22 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; uuid_ = ""; - namespace_ = ""; - name_ = ""; - version_ = ""; - kind_ = ""; - groupVersion_ = ""; - return this; } @@ -21081,46 +20212,33 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference bu public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference(this); - result.uuid_ = uuid_; - result.namespace_ = namespace_; - result.name_ = name_; - result.version_ = version_; - result.kind_ = kind_; - result.groupVersion_ = groupVersion_; + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.uuid_ = uuid_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.namespace_ = namespace_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.version_ = version_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.kind_ = kind_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.groupVersion_ = groupVersion_; + } } @java.lang.Override @@ -21139,29 +20257,35 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon return this; if (!other.getUuid().isEmpty()) { uuid_ = other.uuid_; + bitField0_ |= 0x00000001; onChanged(); } if (!other.getNamespace().isEmpty()) { namespace_ = other.namespace_; + bitField0_ |= 0x00000002; onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; + bitField0_ |= 0x00000004; onChanged(); } if (!other.getVersion().isEmpty()) { version_ = other.version_; + bitField0_ |= 0x00000008; onChanged(); } if (!other.getKind().isEmpty()) { kind_ = other.kind_; + bitField0_ |= 0x00000010; onChanged(); } if (!other.getGroupVersion().isEmpty()) { groupVersion_ = 
other.groupVersion_; + bitField0_ |= 0x00000020; onChanged(); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -21176,21 +20300,65 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + uuid_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + namespace_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + version_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + kind_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + groupVersion_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000020; + break; + } // case 50 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private java.lang.Object uuid_ = ""; /** 
* @@ -21243,8 +20411,8 @@ public Builder setUuid(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - uuid_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -21257,8 +20425,8 @@ public Builder setUuid(java.lang.String value) { * @return This builder for chaining. */ public Builder clearUuid() { - uuid_ = getDefaultInstance().getUuid(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -21276,8 +20444,8 @@ public Builder setUuidBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - uuid_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -21334,8 +20502,8 @@ public Builder setNamespace(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - namespace_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -21348,8 +20516,8 @@ public Builder setNamespace(java.lang.String value) { * @return This builder for chaining. */ public Builder clearNamespace() { - namespace_ = getDefaultInstance().getNamespace(); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } @@ -21367,8 +20535,8 @@ public Builder setNamespaceBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - namespace_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -21425,8 +20593,8 @@ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - name_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -21439,8 +20607,8 @@ public Builder setName(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearName() { - name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @@ -21458,8 +20626,8 @@ public Builder setNameBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - name_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -21516,8 +20684,8 @@ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - version_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -21530,8 +20698,8 @@ public Builder setVersion(java.lang.String value) { * @return This builder for chaining. */ public Builder clearVersion() { - version_ = getDefaultInstance().getVersion(); + bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } @@ -21549,8 +20717,8 @@ public Builder setVersionBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - version_ = value; + bitField0_ |= 0x00000008; onChanged(); return this; } @@ -21607,8 +20775,8 @@ public Builder setKind(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - kind_ = value; + bitField0_ |= 0x00000010; onChanged(); return this; } @@ -21621,8 +20789,8 @@ public Builder setKind(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearKind() { - kind_ = getDefaultInstance().getKind(); + bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } @@ -21640,8 +20808,8 @@ public Builder setKindBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - kind_ = value; + bitField0_ |= 0x00000010; onChanged(); return this; } @@ -21698,8 +20866,8 @@ public Builder setGroupVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - groupVersion_ = value; + bitField0_ |= 0x00000020; onChanged(); return this; } @@ -21712,8 +20880,8 @@ public Builder setGroupVersion(java.lang.String value) { * @return This builder for chaining. */ public Builder clearGroupVersion() { - groupVersion_ = getDefaultInstance().getGroupVersion(); + bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } @@ -21731,22 +20899,12 @@ public Builder setGroupVersionBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - groupVersion_ = value; + bitField0_ |= 0x00000020; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Reference) } @@ -21768,7 +20926,18 @@ public Reference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Reference(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -21868,13 +21037,23 @@ public interface SecretReferenceOrBuilder /** * Protobuf type {@code SecretReference} */ - public static final class SecretReference extends com.google.protobuf.GeneratedMessageV3 + public static final class SecretReference extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:SecretReference) SecretReferenceOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + SecretReference.class.getName()); + } // Use SecretReference.newBuilder() to construct. 
- private SecretReference(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private SecretReference(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -21882,92 +21061,13 @@ private SecretReference() { keyFieldReferences_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new SecretReference(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private SecretReference( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder subBuilder = - null; - if (reference_ != null) { - subBuilder = reference_.toBuilder(); - } - reference_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(reference_); - reference_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - keyFieldReferences_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract - .KeyFieldReference>(); - mutable_bitField0_ |= 0x00000001; - } - keyFieldReferences_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference - .parser(), - 
extensionRegistry)); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - keyFieldReferences_ = java.util.Collections.unmodifiableList(keyFieldReferences_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_SecretReference_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_SecretReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -21975,6 +21075,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference.Builder.class); } + private int bitField0_; public static final int REFERENCE_FIELD_NUMBER = 1; private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference reference_; /** @@ -21987,7 +21088,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF */ @java.lang.Override public boolean hasReference() { - return reference_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** *@@ -22012,10 +21113,14 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge */ @java.lang.Override public 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getReferenceOrBuilder() { - return getReference(); + return reference_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance() + : reference_; } public static final int KEYFIELDREFERENCES_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") private java.util.ListkeyFieldReferences_; /** @@ -22094,13 +21199,13 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (reference_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReference()); } for (int i = 0; i < keyFieldReferences_.size(); i++) { output.writeMessage(2, keyFieldReferences_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -22109,13 +21214,13 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (reference_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getReference()); } for (int i = 0; i < keyFieldReferences_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, keyFieldReferences_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -22136,7 +21241,7 @@ public boolean equals(final java.lang.Object obj) { if (!getReference().equals(other.getReference())) return false; } if (!getKeyFieldReferencesList().equals(other.getKeyFieldReferencesList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -22155,7 +21260,7 @@ public int hashCode() { hash = (37 * hash) + KEYFIELDREFERENCES_FIELD_NUMBER; hash = (53 * hash) + getKeyFieldReferencesList().hashCode(); } - 
hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -22195,36 +21300,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Secre public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -22247,14 +21351,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code SecretReference} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:SecretReference) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReferenceOrBuilder { @@ -22264,7 +21368,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_SecretReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -22278,13 +21382,14 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
+ private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getReferenceFieldBuilder(); getKeyFieldReferencesFieldBuilder(); } } @@ -22292,18 +21397,19 @@ private void maybeForceBuilderInitialization() { @java.lang.Override public Builder clear() { super.clear(); - if (referenceBuilder_ == null) { - reference_ = null; - } else { - reference_ = null; + bitField0_ = 0; + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } if (keyFieldReferencesBuilder_ == null) { keyFieldReferences_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + keyFieldReferences_ = null; keyFieldReferencesBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -22333,55 +21439,36 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretRefere public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference(this); - int from_bitField0_ = bitField0_; - if (referenceBuilder_ == null) { - result.reference_ = reference_; - } else { - result.reference_ = referenceBuilder_.build(); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference result) { if (keyFieldReferencesBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { + if (((bitField0_ & 0x00000002) != 0)) { keyFieldReferences_ = 
java.util.Collections.unmodifiableList(keyFieldReferences_); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } result.keyFieldReferences_ = keyFieldReferences_; } else { result.keyFieldReferences_ = keyFieldReferencesBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); } - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.reference_ = referenceBuilder_ == null ? 
reference_ : referenceBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; } @java.lang.Override @@ -22407,7 +21494,7 @@ public Builder mergeFrom( if (!other.keyFieldReferences_.isEmpty()) { if (keyFieldReferences_.isEmpty()) { keyFieldReferences_ = other.keyFieldReferences_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } else { ensureKeyFieldReferencesIsMutable(); keyFieldReferences_.addAll(other.keyFieldReferences_); @@ -22420,8 +21507,8 @@ public Builder mergeFrom( keyFieldReferencesBuilder_.dispose(); keyFieldReferencesBuilder_ = null; keyFieldReferences_ = other.keyFieldReferences_; - bitField0_ = (bitField0_ & ~0x00000001); - keyFieldReferencesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00000002); + keyFieldReferencesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getKeyFieldReferencesFieldBuilder() : null; } else { @@ -22429,7 +21516,7 @@ public Builder mergeFrom( } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -22444,25 +21531,56 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage(getReferenceFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference m = + 
input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .KeyFieldReference.parser(), + extensionRegistry); + if (keyFieldReferencesBuilder_ == null) { + ensureKeyFieldReferencesIsMutable(); + keyFieldReferences_.add(m); + } else { + keyFieldReferencesBuilder_.addMessage(m); + } + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } private int bitField0_; private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference reference_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> @@ -22476,7 +21594,7 @@ public Builder mergeFrom( * @return Whether the reference field is set. 
*/ public boolean hasReference() { - return referenceBuilder_ != null || reference_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** * @@ -22509,11 +21627,11 @@ public Builder setReference(dev.knative.eventing.kafka.broker.contract.DataPlane throw new NullPointerException(); } reference_ = value; - onChanged(); } else { referenceBuilder_.setMessage(value); } - + bitField0_ |= 0x00000001; + onChanged(); return this; } /** @@ -22527,11 +21645,11 @@ public Builder setReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder builderForValue) { if (referenceBuilder_ == null) { reference_ = builderForValue.build(); - onChanged(); } else { referenceBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000001; + onChanged(); return this; } /** @@ -22544,19 +21662,22 @@ public Builder setReference( public Builder mergeReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference value) { if (referenceBuilder_ == null) { - if (reference_ != null) { - reference_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder( - reference_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000001) != 0) + && reference_ != null + && reference_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference + .getDefaultInstance()) { + getReferenceBuilder().mergeFrom(value); } else { reference_ = value; } - onChanged(); } else { referenceBuilder_.mergeFrom(value); } - + if (reference_ != null) { + bitField0_ |= 0x00000001; + onChanged(); + } return this; } /** @@ -22567,14 +21688,13 @@ public Builder mergeReference( *.Reference reference = 1;
*/ public Builder clearReference() { - if (referenceBuilder_ == null) { - reference_ = null; - onChanged(); - } else { - reference_ = null; + bitField0_ = (bitField0_ & ~0x00000001); + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } - + onChanged(); return this; } /** @@ -22586,7 +21706,7 @@ public Builder clearReference() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder getReferenceBuilder() { - + bitField0_ |= 0x00000001; onChanged(); return getReferenceFieldBuilder().getBuilder(); } @@ -22615,13 +21735,13 @@ public Builder clearReference() { * *.Reference reference = 1;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> getReferenceFieldBuilder() { if (referenceBuilder_ == null) { - referenceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + referenceBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder>( @@ -22635,15 +21755,15 @@ public Builder clearReference() { keyFieldReferences_ = java.util.Collections.emptyList(); private void ensureKeyFieldReferencesIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!((bitField0_ & 0x00000002) != 0)) { keyFieldReferences_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference>( keyFieldReferences_); - bitField0_ |= 0x00000001; + bitField0_ |= 0x00000002; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReferenceOrBuilder> @@ -22847,7 +21967,7 @@ public Builder addAllKeyFieldReferences( public Builder clearKeyFieldReferences() { if (keyFieldReferencesBuilder_ == null) { keyFieldReferences_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { keyFieldReferencesBuilder_.clear(); @@ -22956,32 +22076,22 @@ public Builder 
removeKeyFieldReferences(int index) { return getKeyFieldReferencesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReferenceOrBuilder> getKeyFieldReferencesFieldBuilder() { if (keyFieldReferencesBuilder_ == null) { - keyFieldReferencesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + keyFieldReferencesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReferenceOrBuilder>( - keyFieldReferences_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + keyFieldReferences_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); keyFieldReferences_ = null; } return keyFieldReferencesBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:SecretReference) } @@ -23005,7 +22115,18 @@ public SecretReference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new SecretReference(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, 
extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -23071,13 +22192,23 @@ public interface KeyFieldReferenceOrBuilder /** * Protobuf type {@code KeyFieldReference} */ - public static final class KeyFieldReference extends com.google.protobuf.GeneratedMessageV3 + public static final class KeyFieldReference extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:KeyFieldReference) KeyFieldReferenceOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + KeyFieldReference.class.getName()); + } // Use KeyFieldReference.newBuilder() to construct. 
- private KeyFieldReference(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private KeyFieldReference(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -23086,71 +22217,13 @@ private KeyFieldReference() { field_ = 0; } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new KeyFieldReference(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private KeyFieldReference( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - secretKey_ = s; - break; - } - case 24: { - int rawValue = input.readEnum(); - - field_ = rawValue; - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_KeyFieldReference_descriptor; } @java.lang.Override - protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_KeyFieldReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23160,7 +22233,9 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int SECRETKEY_FIELD_NUMBER = 2; - private volatile java.lang.Object secretKey_; + + @SuppressWarnings("serial") + private volatile java.lang.Object secretKey_ = ""; /** ** Key in the secret. @@ -23202,7 +22277,7 @@ public com.google.protobuf.ByteString getSecretKeyBytes() { } public static final int FIELD_FIELD_NUMBER = 3; - private int field_; + private int field_ = 0; /** ** Field name. @@ -23225,9 +22300,8 @@ public int getFieldValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField getField() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.valueOf(field_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.forNumber(field_); return result == null ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.UNRECOGNIZED : result; @@ -23247,15 +22321,15 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getSecretKeyBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, secretKey_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(secretKey_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, secretKey_); } if (field_ != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.SASL_MECHANISM .getNumber()) { output.writeEnum(3, field_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -23264,15 +22338,15 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getSecretKeyBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, secretKey_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(secretKey_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, secretKey_); } if (field_ != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.SASL_MECHANISM .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, field_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -23290,7 +22364,7 @@ public boolean equals(final java.lang.Object obj) { if (!getSecretKey().equals(other.getSecretKey())) return false; if (field_ != other.field_) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -23305,7 +22379,7 @@ public int hashCode() { hash = (53 * hash) + getSecretKey().hashCode(); hash = (37 * hash) + FIELD_FIELD_NUMBER; hash = (53 * hash) + field_; - hash = 
(29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -23345,36 +22419,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFi public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -23397,14 +22470,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code KeyFieldReference} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:KeyFieldReference) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReferenceOrBuilder { @@ -23414,7 +22487,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_KeyFieldReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23425,26 +22498,18 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF // Construct using // 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; secretKey_ = ""; - field_ = 0; - return this; } @@ -23474,42 +22539,22 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldRefe public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference(this); - result.secretKey_ = secretKey_; - result.field_ = field_; + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.secretKey_ = secretKey_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.field_ = field_; + } } @java.lang.Override @@ -23530,12 +22575,13 @@ public Builder mergeFrom( .getDefaultInstance()) return this; if (!other.getSecretKey().isEmpty()) { secretKey_ = other.secretKey_; + bitField0_ |= 0x00000001; onChanged(); } if (other.field_ != 0) { setFieldValue(other.getFieldValue()); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -23550,21 +22596,45 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 18: { + secretKey_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 18 + case 24: { + field_ = input.readEnum(); + bitField0_ |= 0x00000002; + break; + } // case 24 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.KeyFieldReference) - 
e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private java.lang.Object secretKey_ = ""; /** * @@ -23617,8 +22687,8 @@ public Builder setSecretKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - secretKey_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -23631,8 +22701,8 @@ public Builder setSecretKey(java.lang.String value) { * @return This builder for chaining. */ public Builder clearSecretKey() { - secretKey_ = getDefaultInstance().getSecretKey(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -23650,8 +22720,8 @@ public Builder setSecretKeyBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - secretKey_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -23679,8 +22749,8 @@ public int getFieldValue() { * @return This builder for chaining. */ public Builder setFieldValue(int value) { - field_ = value; + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -23694,9 +22764,8 @@ public Builder setFieldValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField getField() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.valueOf(field_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.forNumber(field_); return result == null ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretField.UNRECOGNIZED : result; @@ -23714,7 +22783,7 @@ public Builder setField(dev.knative.eventing.kafka.broker.contract.DataPlaneCont if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00000002; field_ = value.getNumber(); onChanged(); return this; @@ -23728,22 +22797,12 @@ public Builder setField(dev.knative.eventing.kafka.broker.contract.DataPlaneCont * @return This builder for chaining. */ public Builder clearField() { - + bitField0_ = (bitField0_ & ~0x00000002); field_ = 0; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:KeyFieldReference) } @@ -23767,7 +22826,18 @@ public KeyFieldReference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new KeyFieldReference(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -23858,13 +22928,23 @@ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReferenceOrBu /** * Protobuf 
type {@code MultiSecretReference} */ - public static final class MultiSecretReference extends com.google.protobuf.GeneratedMessageV3 + public static final class MultiSecretReference extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:MultiSecretReference) MultiSecretReferenceOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + MultiSecretReference.class.getName()); + } // Use MultiSecretReference.newBuilder() to construct. - private MultiSecretReference(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private MultiSecretReference(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } @@ -23873,81 +22953,13 @@ private MultiSecretReference() { references_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new MultiSecretReference(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private MultiSecretReference( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - int rawValue = input.readEnum(); - - protocol_ = rawValue; - break; - } - case 18: { - if 
(!((mutable_bitField0_ & 0x00000001) != 0)) { - references_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference>(); - mutable_bitField0_ |= 0x00000001; - } - references_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference - .parser(), - extensionRegistry)); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - references_ = java.util.Collections.unmodifiableList(references_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_MultiSecretReference_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_MultiSecretReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -23957,7 +22969,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int PROTOCOL_FIELD_NUMBER = 1; - private int protocol_; + private int protocol_ = 0; /** ** Protocol. 
@@ -23980,15 +22992,16 @@ public int getProtocolValue() { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol getProtocol() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.valueOf(protocol_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.forNumber(protocol_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.UNRECOGNIZED : result; } public static final int REFERENCES_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") private java.util.Listreferences_; /** @@ -24072,7 +23085,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < references_.size(); i++) { output.writeMessage(2, references_.get(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -24088,7 +23101,7 @@ public int getSerializedSize() { for (int i = 0; i < references_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, references_.get(i)); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -24106,7 +23119,7 @@ public boolean equals(final java.lang.Object obj) { if (protocol_ != other.protocol_) return false; if (!getReferencesList().equals(other.getReferencesList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -24123,7 +23136,7 @@ public int hashCode() { hash = (37 * hash) + REFERENCES_FIELD_NUMBER; hash = (53 * hash) + getReferencesList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -24163,37 +23176,36 
@@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Multi public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return 
com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -24216,14 +23228,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code MultiSecretReference} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:MultiSecretReference) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReferenceOrBuilder { @@ -24233,7 +23245,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_MultiSecretReference_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -24244,32 +23256,24 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF // Construct using // dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference.newBuilder() 
- private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getReferencesFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; protocol_ = 0; - if (referencesBuilder_ == null) { references_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + references_ = null; referencesBuilder_.clear(); } + bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -24300,51 +23304,33 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretR public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference(this); - int from_bitField0_ = bitField0_; - result.protocol_ = protocol_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference result) { if (referencesBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { + if (((bitField0_ & 0x00000002) != 0)) { references_ = java.util.Collections.unmodifiableList(references_); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } result.references_ = references_; } else { result.references_ = referencesBuilder_.build(); } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return 
super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.protocol_ = protocol_; + } } @java.lang.Override @@ -24371,7 +23357,7 @@ public Builder mergeFrom( if (!other.references_.isEmpty()) { if (references_.isEmpty()) { references_ = other.references_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } else { ensureReferencesIsMutable(); references_.addAll(other.references_); @@ -24384,8 +23370,8 @@ public Builder mergeFrom( referencesBuilder_.dispose(); referencesBuilder_ = null; references_ = other.references_; - bitField0_ = (bitField0_ & ~0x00000001); - referencesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00000002); + referencesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getReferencesFieldBuilder() : null; } else { @@ -24393,7 +23379,7 @@ public Builder mergeFrom( } } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -24408,18 +23394,49 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + protocol_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract + .SecretReference.parser(), + extensionRegistry); + if (referencesBuilder_ == null) { + ensureReferencesIsMutable(); + references_.add(m); + } else { + referencesBuilder_.addMessage(m); + } + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -24448,8 +23465,8 @@ public int getProtocolValue() { * @return This builder for chaining. 
*/ public Builder setProtocolValue(int value) { - protocol_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -24463,9 +23480,8 @@ public Builder setProtocolValue(int value) { */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol getProtocol() { - @SuppressWarnings("deprecation") dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol result = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.valueOf(protocol_); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.forNumber(protocol_); return result == null ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Protocol.UNRECOGNIZED : result; @@ -24483,7 +23499,7 @@ public Builder setProtocol(dev.knative.eventing.kafka.broker.contract.DataPlaneC if (value == null) { throw new NullPointerException(); } - + bitField0_ |= 0x00000001; protocol_ = value.getNumber(); onChanged(); return this; @@ -24497,7 +23513,7 @@ public Builder setProtocol(dev.knative.eventing.kafka.broker.contract.DataPlaneC * @return This builder for chaining. 
*/ public Builder clearProtocol() { - + bitField0_ = (bitField0_ & ~0x00000001); protocol_ = 0; onChanged(); return this; @@ -24507,14 +23523,14 @@ public Builder clearProtocol() { references_ = java.util.Collections.emptyList(); private void ensureReferencesIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!((bitField0_ & 0x00000002) != 0)) { references_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference>(references_); - bitField0_ |= 0x00000001; + bitField0_ |= 0x00000002; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReferenceOrBuilder> @@ -24718,7 +23734,7 @@ public Builder addAllReferences( public Builder clearReferences() { if (referencesBuilder_ == null) { references_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { referencesBuilder_.clear(); @@ -24827,32 +23843,22 @@ public Builder removeReferences(int index) { return getReferencesFieldBuilder().getBuilderList(); } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReferenceOrBuilder> getReferencesFieldBuilder() { if (referencesBuilder_ == null) { - referencesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + referencesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference, 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.SecretReferenceOrBuilder>( - references_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + references_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); references_ = null; } return referencesBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:MultiSecretReference) } @@ -24876,7 +23882,18 @@ public MultiSecretReference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiSecretReference(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -24921,7 +23938,11 @@ public interface CloudEventOverridesOrBuilder /** * map<string, string> extensions = 1;
*/ - java.lang.String getExtensionsOrDefault(java.lang.String key, java.lang.String defaultValue); + /* nullable */ + java.lang.String getExtensionsOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); /** *map<string, string> extensions = 1;
*/ @@ -24934,78 +23955,28 @@ public interface CloudEventOverridesOrBuilder * * Protobuf type {@code CloudEventOverrides} */ - public static final class CloudEventOverrides extends com.google.protobuf.GeneratedMessageV3 + public static final class CloudEventOverrides extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:CloudEventOverrides) CloudEventOverridesOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + CloudEventOverrides.class.getName()); + } // Use CloudEventOverrides.newBuilder() to construct. - private CloudEventOverrides(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private CloudEventOverrides(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private CloudEventOverrides() {} - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new CloudEventOverrides(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private CloudEventOverrides( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - extensions_ = 
com.google.protobuf.MapField.newMapField( - ExtensionsDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000001; - } - com.google.protobuf.MapEntryextensions__ = - input.readMessage( - ExtensionsDefaultEntryHolder.defaultEntry.getParserForType(), - extensionRegistry); - extensions_.getMutableMap().put(extensions__.getKey(), extensions__.getValue()); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_CloudEventOverrides_descriptor; @@ -25013,7 +23984,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @SuppressWarnings({"rawtypes"}) @java.lang.Override - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return internalGetExtensions(); @@ -25023,7 +23994,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_CloudEventOverrides_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25045,6 +24016,7 @@ private static final class ExtensionsDefaultEntryHolder { ""); } 
+ @SuppressWarnings("serial") private com.google.protobuf.MapField extensions_; private com.google.protobuf.MapField internalGetExtensions() { @@ -25063,7 +24035,7 @@ public int getExtensionsCount() { @java.lang.Override public boolean containsExtensions(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetExtensions().getMap().containsKey(key); } @@ -25086,9 +24058,12 @@ public java.util.Map getExtensionsMap() { * map<string, string> extensions = 1;
*/ @java.lang.Override - public java.lang.String getExtensionsOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getExtensionsOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetExtensions().getMap(); @@ -25100,7 +24075,7 @@ public java.lang.String getExtensionsOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getExtensionsOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetExtensions().getMap(); @@ -25124,9 +24099,9 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + com.google.protobuf.GeneratedMessage.serializeStringMapTo( output, internalGetExtensions(), ExtensionsDefaultEntryHolder.defaultEntry, 1); - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -25145,7 +24120,7 @@ public int getSerializedSize() { .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, extensions__); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -25162,7 +24137,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides) obj; if (!internalGetExtensions().equals(other.internalGetExtensions())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -25177,7 +24152,7 @@ public int hashCode() { 
hash = (37 * hash) + EXTENSIONS_FIELD_NUMBER; hash = (53 * hash) + internalGetExtensions().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -25217,37 +24192,36 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Cloud public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -25270,7 +24244,7 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -25281,7 +24255,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build * * Protobuf type {@code CloudEventOverrides} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:CloudEventOverrides) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverridesOrBuilder { @@ -25291,7 +24265,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(int number) { switch (number) { case 1: return 
internalGetExtensions(); @@ -25301,7 +24275,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { } @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(int number) { switch (number) { case 1: return internalGetMutableExtensions(); @@ -25311,7 +24285,7 @@ protected com.google.protobuf.MapField internalGetMutableMapField(int number) { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_CloudEventOverrides_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25322,22 +24296,16 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF // Construct using // dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; internalGetMutableExtensions().clear(); return this; } @@ -25369,43 +24337,20 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOv public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides result = new 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides(this); - int from_bitField0_ = bitField0_; - result.extensions_ = internalGetExtensions(); - result.extensions_.makeImmutable(); + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.extensions_ = internalGetExtensions(); + result.extensions_.makeImmutable(); + } } @java.lang.Override @@ -25425,7 +24370,8 @@ public Builder mergeFrom( == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides .getDefaultInstance()) return this; internalGetMutableExtensions().mergeFrom(other.internalGetExtensions()); - this.mergeUnknownFields(other.unknownFields); + bitField0_ |= 0x00000001; + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -25440,18 +24386,41 @@ public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry extensions__ = + input.readMessage( + ExtensionsDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableExtensions() + .getMutableMap() + .put(extensions__.getKey(), extensions__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -25467,14 +24436,14 @@ private com.google.protobuf.MapField interna } private com.google.protobuf.MapField internalGetMutableExtensions() { - onChanged(); - ; if (extensions_ == null) { extensions_ = com.google.protobuf.MapField.newMapField(ExtensionsDefaultEntryHolder.defaultEntry); } if (!extensions_.isMutable()) { extensions_ = extensions_.copy(); } + bitField0_ |= 0x00000001; + onChanged(); return extensions_; } @@ -25487,7 +24456,7 @@ public int getExtensionsCount() { @java.lang.Override public boolean containsExtensions(java.lang.String key) { if (key == null) { - throw new 
java.lang.NullPointerException(); + throw new NullPointerException("map key"); } return internalGetExtensions().getMap().containsKey(key); } @@ -25510,9 +24479,12 @@ public java.util.Map getExtensionsMap() { * map<string, string> extensions = 1;
*/ @java.lang.Override - public java.lang.String getExtensionsOrDefault(java.lang.String key, java.lang.String defaultValue) { + public /* nullable */ java.lang.String getExtensionsOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Mapmap = internalGetExtensions().getMap(); @@ -25524,7 +24496,7 @@ public java.lang.String getExtensionsOrDefault(java.lang.String key, java.lang.S @java.lang.Override public java.lang.String getExtensionsOrThrow(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } java.util.Map map = internalGetExtensions().getMap(); @@ -25535,6 +24507,7 @@ public java.lang.String getExtensionsOrThrow(java.lang.String key) { } public Builder clearExtensions() { + bitField0_ = (bitField0_ & ~0x00000001); internalGetMutableExtensions().getMutableMap().clear(); return this; } @@ -25543,7 +24516,7 @@ public Builder clearExtensions() { */ public Builder removeExtensions(java.lang.String key) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } internalGetMutableExtensions().getMutableMap().remove(key); return this; @@ -25553,6 +24526,7 @@ public Builder removeExtensions(java.lang.String key) { */ @java.lang.Deprecated public java.util.Map getMutableExtensions() { + bitField0_ |= 0x00000001; return internalGetMutableExtensions().getMutableMap(); } /** @@ -25560,12 +24534,13 @@ public java.util.Map getMutableExtensions() */ public Builder putExtensions(java.lang.String key, java.lang.String value) { if (key == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map key"); } if (value == null) { - throw new java.lang.NullPointerException(); + throw new NullPointerException("map value"); } 
internalGetMutableExtensions().getMutableMap().put(key, value); + bitField0_ |= 0x00000001; return this; } /** @@ -25573,19 +24548,10 @@ public Builder putExtensions(java.lang.String key, java.lang.String value) { */ public Builder putAllExtensions(java.util.Map values) { internalGetMutableExtensions().getMutableMap().putAll(values); + bitField0_ |= 0x00000001; return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:CloudEventOverrides) } @@ -25609,7 +24575,18 @@ public CloudEventOverrides parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CloudEventOverrides(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -25643,74 +24620,34 @@ public interface FeatureFlagsOrBuilder /** * Protobuf type {@code FeatureFlags} */ - public static final class FeatureFlags extends com.google.protobuf.GeneratedMessageV3 + public static final class FeatureFlags extends com.google.protobuf.GeneratedMessage implements // 
@@protoc_insertion_point(message_implements:FeatureFlags) - FeatureFlagsOrBuilder { - private static final long serialVersionUID = 0L; - // Use FeatureFlags.newBuilder() to construct. - private FeatureFlags(com.google.protobuf.GeneratedMessageV3.Builder> builder) { - super(builder); - } - - private FeatureFlags() {} - - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new FeatureFlags(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private FeatureFlags( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - enableEventTypeAutocreate_ = input.readBool(); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + FeatureFlagsOrBuilder { + private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + FeatureFlags.class.getName()); 
} + // Use FeatureFlags.newBuilder() to construct. + private FeatureFlags(com.google.protobuf.GeneratedMessage.Builder> builder) { + super(builder); + } + + private FeatureFlags() {} public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_FeatureFlags_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_FeatureFlags_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25719,7 +24656,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int ENABLEEVENTTYPEAUTOCREATE_FIELD_NUMBER = 1; - private boolean enableEventTypeAutocreate_; + private boolean enableEventTypeAutocreate_ = false; /** * bool enableEventTypeAutocreate = 1;
* @return The enableEventTypeAutocreate. @@ -25746,7 +24683,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (enableEventTypeAutocreate_ != false) { output.writeBool(1, enableEventTypeAutocreate_); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -25758,7 +24695,7 @@ public int getSerializedSize() { if (enableEventTypeAutocreate_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, enableEventTypeAutocreate_); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -25775,7 +24712,7 @@ public boolean equals(final java.lang.Object obj) { (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags) obj; if (getEnableEventTypeAutocreate() != other.getEnableEventTypeAutocreate()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -25788,7 +24725,7 @@ public int hashCode() { hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ENABLEEVENTTYPEAUTOCREATE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableEventTypeAutocreate()); - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -25828,36 +24765,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Featu public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -25880,14 +24816,14 @@ public Builder toBuilder() { } 
@java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code FeatureFlags} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:FeatureFlags) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder { @@ -25897,7 +24833,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_FeatureFlags_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -25907,24 +24843,17 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; enableEventTypeAutocreate_ = false; - return this; } @@ -25953,41 +24882,19 @@ public 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags(this); - result.enableEventTypeAutocreate_ = enableEventTypeAutocreate_; + if (bitField0_ != 0) { + buildPartial0(result); + } onBuilt(); return result; } - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.enableEventTypeAutocreate_ = enableEventTypeAutocreate_; + } } @java.lang.Override @@ -26007,7 +24914,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other.getEnableEventTypeAutocreate() != false) { setEnableEventTypeAutocreate(other.getEnableEventTypeAutocreate()); } - this.mergeUnknownFields(other.unknownFields); 
+ this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -26022,21 +24929,40 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + enableEventTypeAutocreate_ = input.readBool(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } + private int bitField0_; + private boolean enableEventTypeAutocreate_; /** * bool enableEventTypeAutocreate = 1;
@@ -26054,6 +24980,7 @@ public boolean getEnableEventTypeAutocreate() { public Builder setEnableEventTypeAutocreate(boolean value) { enableEventTypeAutocreate_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -26062,22 +24989,12 @@ public Builder setEnableEventTypeAutocreate(boolean value) { * @return This builder for chaining. */ public Builder clearEnableEventTypeAutocreate() { - + bitField0_ = (bitField0_ & ~0x00000001); enableEventTypeAutocreate_ = false; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:FeatureFlags) } @@ -26099,7 +25016,18 @@ public FeatureFlags parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new FeatureFlags(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -26148,7 +25076,7 @@ public interface ResourceOrBuilder ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this 
topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26159,7 +25087,7 @@ public interface ResourceOrBuilder ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26170,7 +25098,7 @@ public interface ResourceOrBuilder ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26182,7 +25110,7 @@ public interface ResourceOrBuilder ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26338,34 +25266,37 @@ public interface ResourceOrBuilder /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -26375,34 +25306,37 @@ public interface ResourceOrBuilder /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -26412,34 +25346,37 @@ public interface ResourceOrBuilder /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -26493,6 +25430,7 @@ public interface ResourceOrBuilder /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -26506,6 +25444,7 @@ public interface ResourceOrBuilder /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -26519,6 +25458,7 @@ public interface ResourceOrBuilder /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -26556,262 +25496,44 @@ public interface ResourceOrBuilder */ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder getFeatureFlagsOrBuilder(); - public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.AuthCase getAuthCase(); + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.AuthCase getAuthCase(); } /** * Protobuf type {@code Resource} */ - public static final class Resource extends com.google.protobuf.GeneratedMessageV3 + public static final class Resource extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Resource) ResourceOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Resource.class.getName()); + } // Use Resource.newBuilder() to construct. 
- private Resource(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Resource(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Resource() { uid_ = ""; - topics_ = com.google.protobuf.LazyStringArrayList.EMPTY; + topics_ = com.google.protobuf.LazyStringArrayList.emptyList(); bootstrapServers_ = ""; egresses_ = java.util.Collections.emptyList(); } - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Resource(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Resource( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - uid_ = s; - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - topics_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - topics_.add(s); - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - - bootstrapServers_ = s; - break; - } - case 34: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder subBuilder = - null; - if (ingress_ != null) { - subBuilder = ingress_.toBuilder(); - } - ingress_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.parser(), - 
extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(ingress_); - ingress_ = subBuilder.buildPartial(); - } - - break; - } - case 42: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder - subBuilder = null; - if (egressConfig_ != null) { - subBuilder = egressConfig_.toBuilder(); - } - egressConfig_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(egressConfig_); - egressConfig_ = subBuilder.buildPartial(); - } - - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000002) != 0)) { - egresses_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress>(); - mutable_bitField0_ |= 0x00000002; - } - egresses_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.parser(), - extensionRegistry)); - break; - } - case 58: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder subBuilder = - null; - if (authCase_ == 7) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) - auth_) - .toBuilder(); - } - auth_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty) auth_); - auth_ = subBuilder.buildPartial(); - } - authCase_ = 7; - break; - } - case 66: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder subBuilder = - null; - if (authCase_ == 8) { - subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) - auth_) - .toBuilder(); - } - auth_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.parser(), - extensionRegistry); - if (subBuilder != null) { - 
subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_); - auth_ = subBuilder.buildPartial(); - } - authCase_ = 8; - break; - } - case 74: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference.Builder - subBuilder = null; - if (authCase_ == 9) { - subBuilder = - ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract - .MultiSecretReference) - auth_) - .toBuilder(); - } - auth_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference - .parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom( - (dev.knative.eventing.kafka.broker.contract.DataPlaneContract - .MultiSecretReference) - auth_); - auth_ = subBuilder.buildPartial(); - } - authCase_ = 9; - break; - } - case 82: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.Builder - subBuilder = null; - if (cloudEventOverrides_ != null) { - subBuilder = cloudEventOverrides_.toBuilder(); - } - cloudEventOverrides_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides - .parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(cloudEventOverrides_); - cloudEventOverrides_ = subBuilder.buildPartial(); - } - - break; - } - case 90: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder subBuilder = - null; - if (reference_ != null) { - subBuilder = reference_.toBuilder(); - } - reference_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(reference_); - reference_ = subBuilder.buildPartial(); - } - - break; - } - case 98: { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder - subBuilder = null; - if (featureFlags_ != null) { - subBuilder = featureFlags_.toBuilder(); - } - 
featureFlags_ = input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.parser(), - extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(featureFlags_); - featureFlags_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - topics_ = topics_.getUnmodifiableView(); - } - if (((mutable_bitField0_ & 0x00000002) != 0)) { - egresses_ = java.util.Collections.unmodifiableList(egresses_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Resource_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Resource_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -26819,7 +25541,10 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Builder.class); } + private int bitField0_; private int authCase_ = 0; + + @SuppressWarnings("serial") private java.lang.Object auth_; public enum AuthCase @@ -26869,7 +25594,9 @@ public AuthCase getAuthCase() { } public static final int UID_FIELD_NUMBER = 1; - private volatile java.lang.Object uid_; + + 
@SuppressWarnings("serial") + private volatile java.lang.Object uid_ = ""; /** ** Id of the resource @@ -26913,12 +25640,14 @@ public com.google.protobuf.ByteString getUidBytes() { } public static final int TOPICS_FIELD_NUMBER = 2; - private com.google.protobuf.LazyStringList topics_; + + @SuppressWarnings("serial") + private com.google.protobuf.LazyStringArrayList topics_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26931,7 +25660,7 @@ public com.google.protobuf.ProtocolStringList getTopicsList() { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26944,7 +25673,7 @@ public int getTopicsCount() { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26958,7 +25687,7 @@ public java.lang.String getTopics(int index) { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -26970,7 +25699,9 @@ public com.google.protobuf.ByteString getTopicsBytes(int index) { } public static final int BOOTSTRAPSERVERS_FIELD_NUMBER = 3; - private volatile java.lang.Object bootstrapServers_; + + @SuppressWarnings("serial") + private volatile java.lang.Object bootstrapServers_ = ""; /** ** A comma separated list of host/port pairs to use for establishing the initial connection to the Kafka cluster. @@ -27025,7 +25756,7 @@ public com.google.protobuf.ByteString getBootstrapServersBytes() { */ @java.lang.Override public boolean hasIngress() { - return ingress_ != null; + return ((bitField0_ & 0x00000001) != 0); } /** *@@ -27050,7 +25781,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress getI */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBuilder getIngressOrBuilder() { - return getIngress(); + return ingress_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.getDefaultInstance() + : ingress_; } public static final int EGRESSCONFIG_FIELD_NUMBER = 5; @@ -27065,7 +25798,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBui */ @java.lang.Override public boolean hasEgressConfig() { - return egressConfig_ != null; + return ((bitField0_ & 0x00000002) != 0); } /** *@@ -27091,10 +25824,14 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder getEgressConfigOrBuilder() { - return getEgressConfig(); + return egressConfig_ == null + ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.getDefaultInstance() + : egressConfig_; } public static final int EGRESSES_FIELD_NUMBER = 6; + + @SuppressWarnings("serial") private java.util.Listegresses_; /** * @@ -27201,34 +25938,37 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuild /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -27241,34 +25981,37 @@ public boolean hasAuthSecret() { /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -27284,34 +26027,37 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -27379,7 +26125,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretR */ @java.lang.Override public boolean hasCloudEventOverrides() { - return cloudEventOverrides_ != null; + return ((bitField0_ & 0x00000004) != 0); } /** *.CloudEventOverrides cloudEventOverrides = 10;
@@ -27399,7 +26145,10 @@ public boolean hasCloudEventOverrides() { @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverridesOrBuilder getCloudEventOverridesOrBuilder() { - return getCloudEventOverrides(); + return cloudEventOverrides_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides + .getDefaultInstance() + : cloudEventOverrides_; } public static final int REFERENCE_FIELD_NUMBER = 11; @@ -27407,6 +26156,7 @@ public boolean hasCloudEventOverrides() { /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -27418,11 +26168,12 @@ public boolean hasCloudEventOverrides() { */ @java.lang.Override public boolean hasReference() { - return reference_ != null; + return ((bitField0_ & 0x00000008) != 0); } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -27441,6 +26192,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -27451,7 +26203,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge */ @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getReferenceOrBuilder() { - return getReference(); + return reference_ == null + ? 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance() + : reference_; } public static final int FEATUREFLAGS_FIELD_NUMBER = 12; @@ -27466,7 +26220,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrB */ @java.lang.Override public boolean hasFeatureFlags() { - return featureFlags_ != null; + return ((bitField0_ & 0x00000010) != 0); } /** *@@ -27492,7 +26246,9 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags @java.lang.Override public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder getFeatureFlagsOrBuilder() { - return getFeatureFlags(); + return featureFlags_ == null + ? dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.getDefaultInstance() + : featureFlags_; } private byte memoizedIsInitialized = -1; @@ -27509,19 +26265,19 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getUidBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uid_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, uid_); } for (int i = 0; i < topics_.size(); i++) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, topics_.getRaw(i)); + com.google.protobuf.GeneratedMessage.writeString(output, 2, topics_.getRaw(i)); } - if (!getBootstrapServersBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, bootstrapServers_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(bootstrapServers_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, bootstrapServers_); } - if (ingress_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(4, getIngress()); } - if (egressConfig_ != null) { + if (((bitField0_ & 0x00000002) != 0)) { 
output.writeMessage(5, getEgressConfig()); } for (int i = 0; i < egresses_.size(); i++) { @@ -27537,16 +26293,16 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io output.writeMessage( 9, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference) auth_); } - if (cloudEventOverrides_ != null) { + if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(10, getCloudEventOverrides()); } - if (reference_ != null) { + if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(11, getReference()); } - if (featureFlags_ != null) { + if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(12, getFeatureFlags()); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -27555,8 +26311,8 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getUidBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uid_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(uid_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, uid_); } { int dataSize = 0; @@ -27566,13 +26322,13 @@ public int getSerializedSize() { size += dataSize; size += 1 * getTopicsList().size(); } - if (!getBootstrapServersBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, bootstrapServers_); + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(bootstrapServers_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, bootstrapServers_); } - if (ingress_ != null) { + if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getIngress()); } - if (egressConfig_ != null) { + if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getEgressConfig()); } for (int i = 0; i < egresses_.size(); i++) { @@ -27590,16 +26346,16 @@ public int getSerializedSize() { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize( 9, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference) auth_); } - if (cloudEventOverrides_ != null) { + if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(10, getCloudEventOverrides()); } - if (reference_ != null) { + if (((bitField0_ & 0x00000008) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getReference()); } - if (featureFlags_ != null) { + if (((bitField0_ & 0x00000010) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, getFeatureFlags()); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -27653,7 +26409,7 @@ public boolean equals(final java.lang.Object obj) { case 0: default: } - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -27712,7 +26468,7 @@ public int hashCode() { case 0: default: } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -27752,36 +26508,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resou public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return 
com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -27804,14 +26559,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new 
Builder(parent); return builder; } /** * Protobuf type {@code Resource} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Resource) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ResourceOrBuilder { @@ -27820,7 +26575,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Resource_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -27833,60 +26588,68 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getIngressFieldBuilder(); + getEgressConfigFieldBuilder(); getEgressesFieldBuilder(); + getCloudEventOverridesFieldBuilder(); + getReferenceFieldBuilder(); + getFeatureFlagsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; uid_ = ""; - - topics_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); + topics_ = com.google.protobuf.LazyStringArrayList.emptyList(); bootstrapServers_ = ""; - - if (ingressBuilder_ == null) { - ingress_ = null; - } else { - ingress_ = null; + ingress_ = null; + if (ingressBuilder_ != null) { + 
ingressBuilder_.dispose(); ingressBuilder_ = null; } - if (egressConfigBuilder_ == null) { - egressConfig_ = null; - } else { - egressConfig_ = null; + egressConfig_ = null; + if (egressConfigBuilder_ != null) { + egressConfigBuilder_.dispose(); egressConfigBuilder_ = null; } if (egressesBuilder_ == null) { egresses_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); } else { + egresses_ = null; egressesBuilder_.clear(); } - if (cloudEventOverridesBuilder_ == null) { - cloudEventOverrides_ = null; - } else { - cloudEventOverrides_ = null; + bitField0_ = (bitField0_ & ~0x00000020); + if (absentAuthBuilder_ != null) { + absentAuthBuilder_.clear(); + } + if (authSecretBuilder_ != null) { + authSecretBuilder_.clear(); + } + if (multiAuthSecretBuilder_ != null) { + multiAuthSecretBuilder_.clear(); + } + cloudEventOverrides_ = null; + if (cloudEventOverridesBuilder_ != null) { + cloudEventOverridesBuilder_.dispose(); cloudEventOverridesBuilder_ = null; } - if (referenceBuilder_ == null) { - reference_ = null; - } else { - reference_ = null; + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } - if (featureFlagsBuilder_ == null) { - featureFlags_ = null; - } else { - featureFlags_ = null; + featureFlags_ = null; + if (featureFlagsBuilder_ != null) { + featureFlagsBuilder_.dispose(); featureFlagsBuilder_ = null; } authCase_ = 0; @@ -27917,104 +26680,79 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource bui public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource(this); - int from_bitField0_ = bitField0_; - result.uid_ = uid_; - if (((bitField0_ & 0x00000001) != 0)) { - topics_ = topics_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000001); - } - 
result.topics_ = topics_; - result.bootstrapServers_ = bootstrapServers_; - if (ingressBuilder_ == null) { - result.ingress_ = ingress_; - } else { - result.ingress_ = ingressBuilder_.build(); - } - if (egressConfigBuilder_ == null) { - result.egressConfig_ = egressConfig_; - } else { - result.egressConfig_ = egressConfigBuilder_.build(); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource result) { if (egressesBuilder_ == null) { - if (((bitField0_ & 0x00000002) != 0)) { + if (((bitField0_ & 0x00000020) != 0)) { egresses_ = java.util.Collections.unmodifiableList(egresses_); - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000020); } result.egresses_ = egresses_; } else { result.egresses_ = egressesBuilder_.build(); } - if (authCase_ == 7) { - if (absentAuthBuilder_ == null) { - result.auth_ = auth_; - } else { - result.auth_ = absentAuthBuilder_.build(); - } + } + + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.uid_ = uid_; } - if (authCase_ == 8) { - if (authSecretBuilder_ == null) { - result.auth_ = auth_; - } else { - result.auth_ = authSecretBuilder_.build(); - } + if (((from_bitField0_ & 0x00000002) != 0)) { + topics_.makeImmutable(); + result.topics_ = topics_; } - if (authCase_ == 9) { - if (multiAuthSecretBuilder_ == null) { - result.auth_ = auth_; - } else { - result.auth_ = multiAuthSecretBuilder_.build(); - } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.bootstrapServers_ = bootstrapServers_; } - if (cloudEventOverridesBuilder_ == null) { - result.cloudEventOverrides_ = cloudEventOverrides_; - } else { - result.cloudEventOverrides_ = 
cloudEventOverridesBuilder_.build(); + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000008) != 0)) { + result.ingress_ = ingressBuilder_ == null ? ingress_ : ingressBuilder_.build(); + to_bitField0_ |= 0x00000001; } - if (referenceBuilder_ == null) { - result.reference_ = reference_; - } else { - result.reference_ = referenceBuilder_.build(); + if (((from_bitField0_ & 0x00000010) != 0)) { + result.egressConfig_ = egressConfigBuilder_ == null ? egressConfig_ : egressConfigBuilder_.build(); + to_bitField0_ |= 0x00000002; } - if (featureFlagsBuilder_ == null) { - result.featureFlags_ = featureFlags_; - } else { - result.featureFlags_ = featureFlagsBuilder_.build(); + if (((from_bitField0_ & 0x00000200) != 0)) { + result.cloudEventOverrides_ = cloudEventOverridesBuilder_ == null + ? cloudEventOverrides_ + : cloudEventOverridesBuilder_.build(); + to_bitField0_ |= 0x00000004; } - result.authCase_ = authCase_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); + if (((from_bitField0_ & 0x00000400) != 0)) { + result.reference_ = referenceBuilder_ == null ? reference_ : referenceBuilder_.build(); + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00000800) != 0)) { + result.featureFlags_ = featureFlagsBuilder_ == null ? 
featureFlags_ : featureFlagsBuilder_.build(); + to_bitField0_ |= 0x00000010; + } + result.bitField0_ |= to_bitField0_; } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartialOneofs( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource result) { + result.authCase_ = authCase_; + result.auth_ = this.auth_; + if (authCase_ == 7 && absentAuthBuilder_ != null) { + result.auth_ = absentAuthBuilder_.build(); + } + if (authCase_ == 8 && authSecretBuilder_ != null) { + result.auth_ = authSecretBuilder_.build(); + } + if (authCase_ == 9 && multiAuthSecretBuilder_ != null) { + result.auth_ = multiAuthSecretBuilder_.build(); + } } @java.lang.Override @@ -28032,12 +26770,13 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon return this; if (!other.getUid().isEmpty()) { uid_ = other.uid_; + bitField0_ |= 0x00000001; onChanged(); } if (!other.topics_.isEmpty()) { if (topics_.isEmpty()) { topics_ = other.topics_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ |= 0x00000002; } else { ensureTopicsIsMutable(); topics_.addAll(other.topics_); @@ -28046,6 +26785,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } if (!other.getBootstrapServers().isEmpty()) { bootstrapServers_ = other.bootstrapServers_; + bitField0_ |= 0x00000004; onChanged(); } if (other.hasIngress()) { @@ -28058,7 +26798,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (!other.egresses_.isEmpty()) { if (egresses_.isEmpty()) { egresses_ = other.egresses_; - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000020); } else { ensureEgressesIsMutable(); egresses_.addAll(other.egresses_); @@ -28071,8 +26811,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon 
egressesBuilder_.dispose(); egressesBuilder_ = null; egresses_ = other.egresses_; - bitField0_ = (bitField0_ & ~0x00000002); - egressesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00000020); + egressesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getEgressesFieldBuilder() : null; } else { @@ -28106,7 +26846,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon break; } } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -28121,18 +26861,102 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + uid_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + ensureTopicsIsMutable(); + topics_.add(s); + break; + } // case 18 + case 26: { + bootstrapServers_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + input.readMessage(getIngressFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + input.readMessage(getEgressConfigFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress m = + input.readMessage( + 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress + .parser(), + extensionRegistry); + if (egressesBuilder_ == null) { + ensureEgressesIsMutable(); + egresses_.add(m); + } else { + egressesBuilder_.addMessage(m); + } + break; + } // case 50 + case 58: { + input.readMessage(getAbsentAuthFieldBuilder().getBuilder(), extensionRegistry); + authCase_ = 7; + break; + } // case 58 + case 66: { + input.readMessage(getAuthSecretFieldBuilder().getBuilder(), extensionRegistry); + authCase_ = 8; + break; + } // case 66 + case 74: { + input.readMessage( + getMultiAuthSecretFieldBuilder().getBuilder(), extensionRegistry); + authCase_ = 9; + break; + } // case 74 + case 82: { + input.readMessage( + getCloudEventOverridesFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000200; + break; + } // case 82 + case 90: { + input.readMessage(getReferenceFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000400; + break; + } // case 90 + case 98: { + input.readMessage(getFeatureFlagsFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000800; + break; + } // case 98 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -28207,8 +27031,8 @@ public Builder setUid(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - uid_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -28222,8 +27046,8 @@ public Builder setUid(java.lang.String value) { * @return This builder for chaining. 
*/ public Builder clearUid() { - uid_ = getDefaultInstance().getUid(); + bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @@ -28242,38 +27066,40 @@ public Builder setUidBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - uid_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } - private com.google.protobuf.LazyStringList topics_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private com.google.protobuf.LazyStringArrayList topics_ = + com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureTopicsIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!topics_.isModifiable()) { topics_ = new com.google.protobuf.LazyStringArrayList(topics_); - bitField0_ |= 0x00000001; } + bitField0_ |= 0x00000002; } /** * * Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
* @return A list containing the topics. */ public com.google.protobuf.ProtocolStringList getTopicsList() { - return topics_.getUnmodifiableView(); + topics_.makeImmutable(); + return topics_; } /** ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28286,7 +27112,7 @@ public int getTopicsCount() { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28300,7 +27126,7 @@ public java.lang.String getTopics(int index) { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28314,7 +27140,7 @@ public com.google.protobuf.ByteString getTopicsBytes(int index) { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28328,6 +27154,7 @@ public Builder setTopics(int index, java.lang.String value) { } ensureTopicsIsMutable(); topics_.set(index, value); + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -28335,7 +27162,7 @@ public Builder setTopics(int index, java.lang.String value) { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28348,6 +27175,7 @@ public Builder addTopics(java.lang.String value) { } ensureTopicsIsMutable(); topics_.add(value); + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -28355,7 +27183,7 @@ public Builder addTopics(java.lang.String value) { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28365,6 +27193,7 @@ public Builder addTopics(java.lang.String value) { public Builder addAllTopics(java.lang.Iterablevalues) { ensureTopicsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, topics_); + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -28372,15 +27201,16 @@ public Builder addAllTopics(java.lang.Iterable values) { * * Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
* @return This builder for chaining. */ public Builder clearTopics() { - topics_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); + topics_ = com.google.protobuf.LazyStringArrayList.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + ; onChanged(); return this; } @@ -28388,7 +27218,7 @@ public Builder clearTopics() { ** Topics name * Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + * if the resource does just dispatch from Kafka, then this topic list can contain multiple elements ** *repeated string topics = 2;
@@ -28402,6 +27232,7 @@ public Builder addTopicsBytes(com.google.protobuf.ByteString value) { checkByteStringIsUtf8(value); ensureTopicsIsMutable(); topics_.add(value); + bitField0_ |= 0x00000002; onChanged(); return this; } @@ -28461,8 +27292,8 @@ public Builder setBootstrapServers(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - bootstrapServers_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -28476,8 +27307,8 @@ public Builder setBootstrapServers(java.lang.String value) { * @return This builder for chaining. */ public Builder clearBootstrapServers() { - bootstrapServers_ = getDefaultInstance().getBootstrapServers(); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @@ -28496,14 +27327,14 @@ public Builder setBootstrapServersBytes(com.google.protobuf.ByteString value) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - bootstrapServers_ = value; + bitField0_ |= 0x00000004; onChanged(); return this; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress ingress_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBuilder> @@ -28517,7 +27348,7 @@ public Builder setBootstrapServersBytes(com.google.protobuf.ByteString value) { * @return Whether the ingress field is set. 
*/ public boolean hasIngress() { - return ingressBuilder_ != null || ingress_ != null; + return ((bitField0_ & 0x00000008) != 0); } /** *@@ -28549,11 +27380,11 @@ public Builder setIngress(dev.knative.eventing.kafka.broker.contract.DataPlaneCo throw new NullPointerException(); } ingress_ = value; - onChanged(); } else { ingressBuilder_.setMessage(value); } - + bitField0_ |= 0x00000008; + onChanged(); return this; } /** @@ -28567,11 +27398,11 @@ public Builder setIngress( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder builderForValue) { if (ingressBuilder_ == null) { ingress_ = builderForValue.build(); - onChanged(); } else { ingressBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000008; + onChanged(); return this; } /** @@ -28583,19 +27414,22 @@ public Builder setIngress( */ public Builder mergeIngress(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress value) { if (ingressBuilder_ == null) { - if (ingress_ != null) { - ingress_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.newBuilder( - ingress_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000008) != 0) + && ingress_ != null + && ingress_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress + .getDefaultInstance()) { + getIngressBuilder().mergeFrom(value); } else { ingress_ = value; } - onChanged(); } else { ingressBuilder_.mergeFrom(value); } - + if (ingress_ != null) { + bitField0_ |= 0x00000008; + onChanged(); + } return this; } /** @@ -28606,14 +27440,13 @@ public Builder mergeIngress(dev.knative.eventing.kafka.broker.contract.DataPlane *.Ingress ingress = 4;
*/ public Builder clearIngress() { - if (ingressBuilder_ == null) { - ingress_ = null; - onChanged(); - } else { - ingress_ = null; + bitField0_ = (bitField0_ & ~0x00000008); + ingress_ = null; + if (ingressBuilder_ != null) { + ingressBuilder_.dispose(); ingressBuilder_ = null; } - + onChanged(); return this; } /** @@ -28624,7 +27457,7 @@ public Builder clearIngress() { *.Ingress ingress = 4;
*/ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder getIngressBuilder() { - + bitField0_ |= 0x00000008; onChanged(); return getIngressFieldBuilder().getBuilder(); } @@ -28651,13 +27484,13 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBui * *.Ingress ingress = 4;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBuilder> getIngressFieldBuilder() { if (ingressBuilder_ == null) { - ingressBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + ingressBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBuilder>( @@ -28668,7 +27501,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBui } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig egressConfig_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder> @@ -28682,7 +27515,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.IngressOrBui * @return Whether the egressConfig field is set. 
*/ public boolean hasEgressConfig() { - return egressConfigBuilder_ != null || egressConfig_ != null; + return ((bitField0_ & 0x00000010) != 0); } /** *@@ -28716,11 +27549,11 @@ public Builder setEgressConfig( throw new NullPointerException(); } egressConfig_ = value; - onChanged(); } else { egressConfigBuilder_.setMessage(value); } - + bitField0_ |= 0x00000010; + onChanged(); return this; } /** @@ -28734,11 +27567,11 @@ public Builder setEgressConfig( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder builderForValue) { if (egressConfigBuilder_ == null) { egressConfig_ = builderForValue.build(); - onChanged(); } else { egressConfigBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000010; + onChanged(); return this; } /** @@ -28751,20 +27584,22 @@ public Builder setEgressConfig( public Builder mergeEgressConfig( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig value) { if (egressConfigBuilder_ == null) { - if (egressConfig_ != null) { - egressConfig_ = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.newBuilder( - egressConfig_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000010) != 0) + && egressConfig_ != null + && egressConfig_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig + .getDefaultInstance()) { + getEgressConfigBuilder().mergeFrom(value); } else { egressConfig_ = value; } - onChanged(); } else { egressConfigBuilder_.mergeFrom(value); } - + if (egressConfig_ != null) { + bitField0_ |= 0x00000010; + onChanged(); + } return this; } /** @@ -28775,14 +27610,13 @@ public Builder mergeEgressConfig( *.EgressConfig egressConfig = 5;
*/ public Builder clearEgressConfig() { - if (egressConfigBuilder_ == null) { - egressConfig_ = null; - onChanged(); - } else { - egressConfig_ = null; + bitField0_ = (bitField0_ & ~0x00000010); + egressConfig_ = null; + if (egressConfigBuilder_ != null) { + egressConfigBuilder_.dispose(); egressConfigBuilder_ = null; } - + onChanged(); return this; } /** @@ -28794,7 +27628,7 @@ public Builder clearEgressConfig() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder getEgressConfigBuilder() { - + bitField0_ |= 0x00000010; onChanged(); return getEgressConfigFieldBuilder().getBuilder(); } @@ -28823,13 +27657,13 @@ public Builder clearEgressConfig() { * *.EgressConfig egressConfig = 5;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder> getEgressConfigFieldBuilder() { if (egressConfigBuilder_ == null) { - egressConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + egressConfigBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfigOrBuilder>( @@ -28843,14 +27677,14 @@ public Builder clearEgressConfig() { java.util.Collections.emptyList(); private void ensureEgressesIsMutable() { - if (!((bitField0_ & 0x00000002) != 0)) { + if (!((bitField0_ & 0x00000020) != 0)) { egresses_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress>(egresses_); - bitField0_ |= 0x00000002; + bitField0_ |= 0x00000020; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder> @@ -29046,7 +27880,7 @@ public Builder addAllEgresses( public Builder clearEgresses() { if (egressesBuilder_ == null) { egresses_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { egressesBuilder_.clear(); @@ -29152,23 +27986,23 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Build return getEgressesFieldBuilder().getBuilderList(); } - 
private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder> getEgressesFieldBuilder() { if (egressesBuilder_ == null) { - egressesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + egressesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder>( - egresses_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); + egresses_, ((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean()); egresses_ = null; } return egressesBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -29269,8 +28103,9 @@ public Builder mergeAbsentAuth(dev.knative.eventing.kafka.broker.contract.DataPl } else { if (authCase_ == 7) { absentAuthBuilder_.mergeFrom(value); + } else { + absentAuthBuilder_.setMessage(value); } - absentAuthBuilder_.setMessage(value); } authCase_ = 7; return this; @@ -29334,7 +28169,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde * *.Empty absentAuth = 7;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder> @@ -29343,7 +28178,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde if (!(authCase_ == 7)) { auth_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.getDefaultInstance(); } - absentAuthBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + absentAuthBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EmptyOrBuilder>( @@ -29354,11 +28189,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde } authCase_ = 7; onChanged(); - ; return absentAuthBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> @@ -29366,34 +28200,37 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Empty.Builde /** ** Secret reference. 
+ * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29406,34 +28243,37 @@ public boolean hasAuthSecret() { /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29456,34 +28296,37 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29504,34 +28347,37 @@ public Builder setAuthSecret(dev.knative.eventing.kafka.broker.contract.DataPlan /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29550,34 +28396,37 @@ public Builder setAuthSecret( /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29600,8 +28449,9 @@ public Builder mergeAuthSecret( } else { if (authCase_ == 8) { authSecretBuilder_.mergeFrom(value); + } else { + authSecretBuilder_.setMessage(value); } - authSecretBuilder_.setMessage(value); } authCase_ = 8; return this; @@ -29609,34 +28459,37 @@ public Builder mergeAuthSecret( /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29660,34 +28513,37 @@ public Builder clearAuthSecret() { /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29699,34 +28555,37 @@ public Builder clearAuthSecret() { /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
@@ -29746,39 +28605,42 @@ public Builder clearAuthSecret() { /** ** Secret reference. + * * Secret format: - * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - * ca.crt: <CA PEM certificate> - * user.crt: <User PEM certificate> - * user.key: <User PEM key> - * user: <SASL username> - * password: <SASL password> + * + * protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + * sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + * ca.crt: <CA PEM certificate> + * user.crt: <User PEM certificate> + * user.key: <User PEM key> + * user: <SASL username> + * password: <SASL password> + * * Validation: - * - protocol=PLAINTEXT - * - protocol=SSL - * - required: - * - ca.crt - * - user.crt - * - user.key - * - protocol=SASL_PLAINTEXT - * - required: - * - sasl.mechanism - * - user - * - password - * - protocol=SASL_SSL - * - required: - * - sasl.mechanism - * - ca.crt - * - user.crt - * - user.key - * - user - * - password + * - protocol=PLAINTEXT + * - protocol=SSL + * - required: + * - ca.crt + * - user.crt + * - user.key + * - protocol=SASL_PLAINTEXT + * - required: + * - sasl.mechanism + * - user + * - password + * - protocol=SASL_SSL + * - required: + * - sasl.mechanism + * - ca.crt + * - user.crt + * - user.key + * - user + * - password ** *.Reference authSecret = 8;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> @@ -29789,7 +28651,7 @@ public Builder clearAuthSecret() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference .getDefaultInstance(); } - authSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + authSecretBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder>( @@ -29800,11 +28662,10 @@ public Builder clearAuthSecret() { } authCase_ = 8; onChanged(); - ; return authSecretBuilder_; } - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReferenceOrBuilder> @@ -29916,8 +28777,9 @@ public Builder mergeMultiAuthSecret( } else { if (authCase_ == 9) { multiAuthSecretBuilder_.mergeFrom(value); + } else { + multiAuthSecretBuilder_.setMessage(value); } - multiAuthSecretBuilder_.setMessage(value); } authCase_ = 9; return this; @@ -29984,7 +28846,7 @@ public Builder clearMultiAuthSecret() { * *.MultiSecretReference multiAuthSecret = 9;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReferenceOrBuilder> @@ -29994,7 +28856,7 @@ public Builder clearMultiAuthSecret() { auth_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference .getDefaultInstance(); } - multiAuthSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + multiAuthSecretBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.MultiSecretReferenceOrBuilder>( @@ -30005,13 +28867,12 @@ public Builder clearMultiAuthSecret() { } authCase_ = 9; onChanged(); - ; return multiAuthSecretBuilder_; } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides cloudEventOverrides_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverridesOrBuilder> @@ -30021,7 +28882,7 @@ public Builder clearMultiAuthSecret() { * @return Whether the cloudEventOverrides field is set. */ public boolean hasCloudEventOverrides() { - return cloudEventOverridesBuilder_ != null || cloudEventOverrides_ != null; + return ((bitField0_ & 0x00000200) != 0); } /** *.CloudEventOverrides cloudEventOverrides = 10;
@@ -30048,11 +28909,11 @@ public Builder setCloudEventOverrides( throw new NullPointerException(); } cloudEventOverrides_ = value; - onChanged(); } else { cloudEventOverridesBuilder_.setMessage(value); } - + bitField0_ |= 0x00000200; + onChanged(); return this; } /** @@ -30063,11 +28924,11 @@ public Builder setCloudEventOverrides( builderForValue) { if (cloudEventOverridesBuilder_ == null) { cloudEventOverrides_ = builderForValue.build(); - onChanged(); } else { cloudEventOverridesBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000200; + onChanged(); return this; } /** @@ -30076,34 +28937,35 @@ public Builder setCloudEventOverrides( public Builder mergeCloudEventOverrides( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides value) { if (cloudEventOverridesBuilder_ == null) { - if (cloudEventOverrides_ != null) { - cloudEventOverrides_ = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides - .newBuilder(cloudEventOverrides_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000200) != 0) + && cloudEventOverrides_ != null + && cloudEventOverrides_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides + .getDefaultInstance()) { + getCloudEventOverridesBuilder().mergeFrom(value); } else { cloudEventOverrides_ = value; } - onChanged(); } else { cloudEventOverridesBuilder_.mergeFrom(value); } - + if (cloudEventOverrides_ != null) { + bitField0_ |= 0x00000200; + onChanged(); + } return this; } /** *.CloudEventOverrides cloudEventOverrides = 10;
*/ public Builder clearCloudEventOverrides() { - if (cloudEventOverridesBuilder_ == null) { - cloudEventOverrides_ = null; - onChanged(); - } else { - cloudEventOverrides_ = null; + bitField0_ = (bitField0_ & ~0x00000200); + cloudEventOverrides_ = null; + if (cloudEventOverridesBuilder_ != null) { + cloudEventOverridesBuilder_.dispose(); cloudEventOverridesBuilder_ = null; } - + onChanged(); return this; } /** @@ -30111,7 +28973,7 @@ public Builder clearCloudEventOverrides() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.Builder getCloudEventOverridesBuilder() { - + bitField0_ |= 0x00000200; onChanged(); return getCloudEventOverridesFieldBuilder().getBuilder(); } @@ -30132,13 +28994,13 @@ public Builder clearCloudEventOverrides() { /** *.CloudEventOverrides cloudEventOverrides = 10;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverridesOrBuilder> getCloudEventOverridesFieldBuilder() { if (cloudEventOverridesBuilder_ == null) { - cloudEventOverridesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + cloudEventOverridesBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverrides.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.CloudEventOverridesOrBuilder>( @@ -30149,7 +29011,7 @@ public Builder clearCloudEventOverrides() { } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference reference_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> @@ -30157,6 +29019,7 @@ public Builder clearCloudEventOverrides() { /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30167,11 +29030,12 @@ public Builder clearCloudEventOverrides() { * @return Whether the reference field is set. */ public boolean hasReference() { - return referenceBuilder_ != null || reference_ != null; + return ((bitField0_ & 0x00000400) != 0); } /** ** Resource reference. 
+ * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30194,6 +29058,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference ge /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30208,16 +29073,17 @@ public Builder setReference(dev.knative.eventing.kafka.broker.contract.DataPlane throw new NullPointerException(); } reference_ = value; - onChanged(); } else { referenceBuilder_.setMessage(value); } - + bitField0_ |= 0x00000400; + onChanged(); return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30230,16 +29096,17 @@ public Builder setReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder builderForValue) { if (referenceBuilder_ == null) { reference_ = builderForValue.build(); - onChanged(); } else { referenceBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000400; + onChanged(); return this; } /** ** Resource reference. 
+ * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30251,24 +29118,28 @@ public Builder setReference( public Builder mergeReference( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference value) { if (referenceBuilder_ == null) { - if (reference_ != null) { - reference_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder( - reference_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000400) != 0) + && reference_ != null + && reference_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference + .getDefaultInstance()) { + getReferenceBuilder().mergeFrom(value); } else { reference_ = value; } - onChanged(); } else { referenceBuilder_.mergeFrom(value); } - + if (reference_ != null) { + bitField0_ |= 0x00000400; + onChanged(); + } return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30278,19 +29149,19 @@ public Builder mergeReference( *.Reference reference = 11;
*/ public Builder clearReference() { - if (referenceBuilder_ == null) { - reference_ = null; - onChanged(); - } else { - reference_ = null; + bitField0_ = (bitField0_ & ~0x00000400); + reference_ = null; + if (referenceBuilder_ != null) { + referenceBuilder_.dispose(); referenceBuilder_ = null; } - + onChanged(); return this; } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30301,13 +29172,14 @@ public Builder clearReference() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder getReferenceBuilder() { - + bitField0_ |= 0x00000400; onChanged(); return getReferenceFieldBuilder().getBuilder(); } /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30330,6 +29202,7 @@ public Builder clearReference() { /** ** Resource reference. + * * This reference is used to reference the associated resource for data plane * activities such as: * - setting the `source` attribute of a KafkaSource event (when it's not a CloudEvent) @@ -30338,13 +29211,13 @@ public Builder clearReference() { * *.Reference reference = 11;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> getReferenceFieldBuilder() { if (referenceBuilder_ == null) { - referenceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + referenceBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder>( @@ -30355,7 +29228,7 @@ public Builder clearReference() { } private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags featureFlags_; - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder> @@ -30369,7 +29242,7 @@ public Builder clearReference() { * @return Whether the featureFlags field is set. 
*/ public boolean hasFeatureFlags() { - return featureFlagsBuilder_ != null || featureFlags_ != null; + return ((bitField0_ & 0x00000800) != 0); } /** *@@ -30403,11 +29276,11 @@ public Builder setFeatureFlags( throw new NullPointerException(); } featureFlags_ = value; - onChanged(); } else { featureFlagsBuilder_.setMessage(value); } - + bitField0_ |= 0x00000800; + onChanged(); return this; } /** @@ -30421,11 +29294,11 @@ public Builder setFeatureFlags( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder builderForValue) { if (featureFlagsBuilder_ == null) { featureFlags_ = builderForValue.build(); - onChanged(); } else { featureFlagsBuilder_.setMessage(builderForValue.build()); } - + bitField0_ |= 0x00000800; + onChanged(); return this; } /** @@ -30438,20 +29311,22 @@ public Builder setFeatureFlags( public Builder mergeFeatureFlags( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags value) { if (featureFlagsBuilder_ == null) { - if (featureFlags_ != null) { - featureFlags_ = - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.newBuilder( - featureFlags_) - .mergeFrom(value) - .buildPartial(); + if (((bitField0_ & 0x00000800) != 0) + && featureFlags_ != null + && featureFlags_ + != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags + .getDefaultInstance()) { + getFeatureFlagsBuilder().mergeFrom(value); } else { featureFlags_ = value; } - onChanged(); } else { featureFlagsBuilder_.mergeFrom(value); } - + if (featureFlags_ != null) { + bitField0_ |= 0x00000800; + onChanged(); + } return this; } /** @@ -30462,14 +29337,13 @@ public Builder mergeFeatureFlags( *.FeatureFlags featureFlags = 12;
*/ public Builder clearFeatureFlags() { - if (featureFlagsBuilder_ == null) { - featureFlags_ = null; - onChanged(); - } else { - featureFlags_ = null; + bitField0_ = (bitField0_ & ~0x00000800); + featureFlags_ = null; + if (featureFlagsBuilder_ != null) { + featureFlagsBuilder_.dispose(); featureFlagsBuilder_ = null; } - + onChanged(); return this; } /** @@ -30481,7 +29355,7 @@ public Builder clearFeatureFlags() { */ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder getFeatureFlagsBuilder() { - + bitField0_ |= 0x00000800; onChanged(); return getFeatureFlagsFieldBuilder().getBuilder(); } @@ -30510,13 +29384,13 @@ public Builder clearFeatureFlags() { * *.FeatureFlags featureFlags = 12;
*/ - private com.google.protobuf.SingleFieldBuilderV3< + private com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder> getFeatureFlagsFieldBuilder() { if (featureFlagsBuilder_ == null) { - featureFlagsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + featureFlagsBuilder_ = new com.google.protobuf.SingleFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlags.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.FeatureFlagsOrBuilder>( @@ -30526,16 +29400,6 @@ public Builder clearFeatureFlags() { return featureFlagsBuilder_; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Resource) } @@ -30557,7 +29421,18 @@ public Resource parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Resource(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -30656,96 +29531,29 @@ public interface ContractOrBuilder /** * Protobuf type {@code Contract} */ - public static final class Contract extends com.google.protobuf.GeneratedMessageV3 + public static final class Contract extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Contract) ContractOrBuilder { private static final long serialVersionUID = 0L; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 29, + /* patch= */ 3, + /* suffix= */ "", + Contract.class.getName()); + } // Use Contract.newBuilder() to construct. - private Contract(com.google.protobuf.GeneratedMessageV3.Builder> builder) { + private Contract(com.google.protobuf.GeneratedMessage.Builder> builder) { super(builder); } private Contract() { resources_ = java.util.Collections.emptyList(); - trustBundles_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - - @java.lang.Override - @SuppressWarnings({"unused"}) - protected java.lang.Object newInstance(UnusedPrivateParameter unused) { - return new Contract(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return this.unknownFields; - } - - private Contract( - com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; 
- case 8: { - generation_ = input.readUInt64(); - break; - } - case 18: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - resources_ = new java.util.ArrayList< - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource>(); - mutable_bitField0_ |= 0x00000001; - } - resources_.add(input.readMessage( - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.parser(), - extensionRegistry)); - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000002) != 0)) { - trustBundles_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000002; - } - trustBundles_.add(s); - break; - } - default: { - if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - resources_ = java.util.Collections.unmodifiableList(resources_); - } - if (((mutable_bitField0_ & 0x00000002) != 0)) { - trustBundles_ = trustBundles_.getUnmodifiableView(); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + trustBundles_ = com.google.protobuf.LazyStringArrayList.emptyList(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @@ -30753,7 +29561,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Contract_fieldAccessorTable 
.ensureFieldAccessorsInitialized( @@ -30762,7 +29570,7 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } public static final int GENERATION_FIELD_NUMBER = 1; - private long generation_; + private long generation_ = 0L; /** ** Count each contract update. @@ -30778,6 +29586,8 @@ public long getGeneration() { } public static final int RESOURCES_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") private java.util.Listresources_; /** * repeated .Resource resources = 2;
@@ -30819,7 +29629,10 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ResourceOrBu } public static final int TRUSTBUNDLES_FIELD_NUMBER = 3; - private com.google.protobuf.LazyStringList trustBundles_; + + @SuppressWarnings("serial") + private com.google.protobuf.LazyStringArrayList trustBundles_ = + com.google.protobuf.LazyStringArrayList.emptyList(); /** ** PEM encoded CA trust bundles for HTTP client. @@ -30888,9 +29701,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io output.writeMessage(2, resources_.get(i)); } for (int i = 0; i < trustBundles_.size(); i++) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, trustBundles_.getRaw(i)); + com.google.protobuf.GeneratedMessage.writeString(output, 3, trustBundles_.getRaw(i)); } - unknownFields.writeTo(output); + getUnknownFields().writeTo(output); } @java.lang.Override @@ -30913,7 +29726,7 @@ public int getSerializedSize() { size += dataSize; size += 1 * getTrustBundlesList().size(); } - size += unknownFields.getSerializedSize(); + size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @@ -30932,7 +29745,7 @@ public boolean equals(final java.lang.Object obj) { if (getGeneration() != other.getGeneration()) return false; if (!getResourcesList().equals(other.getResourcesList())) return false; if (!getTrustBundlesList().equals(other.getTrustBundlesList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @@ -30953,7 +29766,7 @@ public int hashCode() { hash = (37 * hash) + TRUSTBUNDLES_FIELD_NUMBER; hash = (53 * hash) + getTrustBundlesList().hashCode(); } - hash = (29 * hash) + unknownFields.hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } @@ -30993,36 +29806,35 @@ public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contr public 
static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( - PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input); } public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override @@ -31045,14 +29857,14 @@ public Builder toBuilder() { } @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Contract} */ - public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder+ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:Contract) dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ContractOrBuilder { @@ -31061,7 +29873,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return dev.knative.eventing.kafka.broker.contract.DataPlaneContract .internal_static_Contract_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -31070,34 +29882,25 @@ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetF } // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } + private Builder() {} - private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) 
{ super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { - getResourcesFieldBuilder(); - } } @java.lang.Override public Builder clear() { super.clear(); + bitField0_ = 0; generation_ = 0L; - if (resourcesBuilder_ == null) { resources_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); } else { + resources_ = null; resourcesBuilder_.clear(); } - trustBundles_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); + trustBundles_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @@ -31124,56 +29927,36 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract bui public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract buildPartial() { dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract(this); - int from_bitField0_ = bitField0_; - result.generation_ = generation_; + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract result) { if (resourcesBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { + if (((bitField0_ & 0x00000002) != 0)) { resources_ = java.util.Collections.unmodifiableList(resources_); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } result.resources_ = resources_; } else { result.resources_ = resourcesBuilder_.build(); } - if (((bitField0_ & 0x00000002) != 0)) { - trustBundles_ = trustBundles_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.trustBundles_ = trustBundles_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder 
clone() { - return super.clone(); - } - - @java.lang.Override - public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.setField(field, value); - } - - @java.lang.Override - public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); } - @java.lang.Override - public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { - return super.addRepeatedField(field, value); + private void buildPartial0(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.generation_ = generation_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + trustBundles_.makeImmutable(); + result.trustBundles_ = trustBundles_; + } } @java.lang.Override @@ -31196,7 +29979,7 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (!other.resources_.isEmpty()) { if (resources_.isEmpty()) { resources_ = other.resources_; - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); } else { ensureResourcesIsMutable(); resources_.addAll(other.resources_); @@ -31209,8 +29992,8 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon resourcesBuilder_.dispose(); resourcesBuilder_ = null; resources_ = other.resources_; - bitField0_ = (bitField0_ & ~0x00000001); - resourcesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + bitField0_ = (bitField0_ & ~0x00000002); 
+ resourcesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getResourcesFieldBuilder() : null; } else { @@ -31221,14 +30004,14 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (!other.trustBundles_.isEmpty()) { if (trustBundles_.isEmpty()) { trustBundles_ = other.trustBundles_; - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ |= 0x00000004; } else { ensureTrustBundlesIsMutable(); trustBundles_.addAll(other.trustBundles_); } onChanged(); } - this.mergeUnknownFields(other.unknownFields); + this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @@ -31243,18 +30026,55 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract parsedMessage = null; + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + generation_ = input.readUInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource m = + input.readMessage( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource + .parser(), + extensionRegistry); + if (resourcesBuilder_ == null) { + ensureResourcesIsMutable(); + resources_.add(m); + } else { + resourcesBuilder_.addMessage(m); + } + break; + } // case 18 + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + ensureTrustBundlesIsMutable(); + trustBundles_.add(s); + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // 
while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract) - e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } + onChanged(); + } // finally return this; } @@ -31287,6 +30107,7 @@ public long getGeneration() { public Builder setGeneration(long value) { generation_ = value; + bitField0_ |= 0x00000001; onChanged(); return this; } @@ -31300,7 +30121,7 @@ public Builder setGeneration(long value) { * @return This builder for chaining. */ public Builder clearGeneration() { - + bitField0_ = (bitField0_ & ~0x00000001); generation_ = 0L; onChanged(); return this; @@ -31310,14 +30131,14 @@ public Builder clearGeneration() { java.util.Collections.emptyList(); private void ensureResourcesIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { + if (!((bitField0_ & 0x00000002) != 0)) { resources_ = new java.util.ArrayList< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource>(resources_); - bitField0_ |= 0x00000001; + bitField0_ |= 0x00000002; } } - private com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ResourceOrBuilder> @@ -31469,7 +30290,7 @@ public Builder addAllResources( public Builder clearResources() { if (resourcesBuilder_ == null) { resources_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { resourcesBuilder_.clear(); @@ -31547,29 +30368,30 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Bui return getResourcesFieldBuilder().getBuilderList(); } - private 
com.google.protobuf.RepeatedFieldBuilderV3< + private com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ResourceOrBuilder> getResourcesFieldBuilder() { if (resourcesBuilder_ == null) { - resourcesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + resourcesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ResourceOrBuilder>( - resources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + resources_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); resources_ = null; } return resourcesBuilder_; } - private com.google.protobuf.LazyStringList trustBundles_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private com.google.protobuf.LazyStringArrayList trustBundles_ = + com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureTrustBundlesIsMutable() { - if (!((bitField0_ & 0x00000002) != 0)) { + if (!trustBundles_.isModifiable()) { trustBundles_ = new com.google.protobuf.LazyStringArrayList(trustBundles_); - bitField0_ |= 0x00000002; } + bitField0_ |= 0x00000004; } /** * @@ -31580,7 +30402,8 @@ private void ensureTrustBundlesIsMutable() { * @return A list containing the trustBundles. 
*/ public com.google.protobuf.ProtocolStringList getTrustBundlesList() { - return trustBundles_.getUnmodifiableView(); + trustBundles_.makeImmutable(); + return trustBundles_; } /** *@@ -31633,6 +30456,7 @@ public Builder setTrustBundles(int index, java.lang.String value) { } ensureTrustBundlesIsMutable(); trustBundles_.set(index, value); + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -31651,6 +30475,7 @@ public Builder addTrustBundles(java.lang.String value) { } ensureTrustBundlesIsMutable(); trustBundles_.add(value); + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -31666,6 +30491,7 @@ public Builder addTrustBundles(java.lang.String value) { public Builder addAllTrustBundles(java.lang.Iterablevalues) { ensureTrustBundlesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, trustBundles_); + bitField0_ |= 0x00000004; onChanged(); return this; } @@ -31678,8 +30504,9 @@ public Builder addAllTrustBundles(java.lang.Iterable values) { * @return This builder for chaining. 
*/ public Builder clearTrustBundles() { - trustBundles_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); + trustBundles_ = com.google.protobuf.LazyStringArrayList.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + ; onChanged(); return this; } @@ -31699,20 +30526,11 @@ public Builder addTrustBundlesBytes(com.google.protobuf.ByteString value) { checkByteStringIsUtf8(value); ensureTrustBundlesIsMutable(); trustBundles_.add(value); + bitField0_ |= 0x00000004; onChanged(); return this; } - @java.lang.Override - public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - // @@protoc_insertion_point(builder_scope:Contract) } @@ -31734,7 +30552,18 @@ public Contract parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Contract(input, extensionRegistry); + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); } }; @@ -31754,92 +30583,89 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract get } private static final com.google.protobuf.Descriptors.Descriptor internal_static_Empty_descriptor; - private 
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Empty_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Exact_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Exact_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Exact_AttributesEntry_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Exact_AttributesEntry_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Prefix_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Prefix_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Prefix_AttributesEntry_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Prefix_AttributesEntry_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Suffix_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Suffix_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Suffix_AttributesEntry_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Suffix_AttributesEntry_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_All_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_All_fieldAccessorTable; + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_All_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Any_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_Any_fieldAccessorTable; + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Any_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Not_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_Not_fieldAccessorTable; + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Not_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_CESQL_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CESQL_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_DialectedFilter_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DialectedFilter_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Filter_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Filter_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Filter_AttributesEntry_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Filter_AttributesEntry_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_TokenMatcher_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TokenMatcher_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_EventPolicy_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EventPolicy_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_EgressConfig_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EgressConfig_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Egress_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Egress_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_EgressFeatureFlags_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_EgressFeatureFlags_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Ingress_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Ingress_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Reference_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Reference_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_SecretReference_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SecretReference_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_KeyFieldReference_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_KeyFieldReference_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_MultiSecretReference_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MultiSecretReference_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_CloudEventOverrides_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CloudEventOverrides_fieldAccessorTable; private static final 
com.google.protobuf.Descriptors.Descriptor internal_static_CloudEventOverrides_ExtensionsEntry_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CloudEventOverrides_ExtensionsEntry_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_FeatureFlags_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_FeatureFlags_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Resource_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Resource_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Contract_descriptor; - private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + private static final com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Contract_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -31941,98 +30767,98 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] {}); internal_static_Empty_descriptor = getDescriptor().getMessageTypes().get(0); - internal_static_Empty_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Empty_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Empty_descriptor, new java.lang.String[] {}); 
internal_static_Exact_descriptor = getDescriptor().getMessageTypes().get(1); - internal_static_Exact_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Exact_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Exact_descriptor, new java.lang.String[] { "Attributes", }); internal_static_Exact_AttributesEntry_descriptor = internal_static_Exact_descriptor.getNestedTypes().get(0); internal_static_Exact_AttributesEntry_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Exact_AttributesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_Prefix_descriptor = getDescriptor().getMessageTypes().get(2); - internal_static_Prefix_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Prefix_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Prefix_descriptor, new java.lang.String[] { "Attributes", }); internal_static_Prefix_AttributesEntry_descriptor = internal_static_Prefix_descriptor.getNestedTypes().get(0); internal_static_Prefix_AttributesEntry_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Prefix_AttributesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_Suffix_descriptor = getDescriptor().getMessageTypes().get(3); - internal_static_Suffix_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Suffix_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Suffix_descriptor, new java.lang.String[] { "Attributes", }); internal_static_Suffix_AttributesEntry_descriptor = 
internal_static_Suffix_descriptor.getNestedTypes().get(0); internal_static_Suffix_AttributesEntry_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Suffix_AttributesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_All_descriptor = getDescriptor().getMessageTypes().get(4); - internal_static_All_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_All_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_All_descriptor, new java.lang.String[] { "Filters", }); internal_static_Any_descriptor = getDescriptor().getMessageTypes().get(5); - internal_static_Any_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Any_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Any_descriptor, new java.lang.String[] { "Filters", }); internal_static_Not_descriptor = getDescriptor().getMessageTypes().get(6); - internal_static_Not_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Not_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Not_descriptor, new java.lang.String[] { "Filter", }); internal_static_CESQL_descriptor = getDescriptor().getMessageTypes().get(7); - internal_static_CESQL_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_CESQL_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CESQL_descriptor, new java.lang.String[] { "Expression", }); internal_static_DialectedFilter_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_DialectedFilter_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DialectedFilter_descriptor, new java.lang.String[] { "Exact", "Prefix", "Suffix", "All", "Any", "Not", "Cesql", "Filter", }); internal_static_Filter_descriptor = getDescriptor().getMessageTypes().get(9); - internal_static_Filter_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Filter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Filter_descriptor, new java.lang.String[] { "Attributes", }); internal_static_Filter_AttributesEntry_descriptor = internal_static_Filter_descriptor.getNestedTypes().get(0); internal_static_Filter_AttributesEntry_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Filter_AttributesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_TokenMatcher_descriptor = getDescriptor().getMessageTypes().get(10); - internal_static_TokenMatcher_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_TokenMatcher_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_TokenMatcher_descriptor, new java.lang.String[] { "Exact", "Prefix", "Matcher", }); internal_static_EventPolicy_descriptor = getDescriptor().getMessageTypes().get(11); - internal_static_EventPolicy_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_EventPolicy_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EventPolicy_descriptor, new java.lang.String[] { "TokenMatchers", "Filters", }); internal_static_EgressConfig_descriptor = getDescriptor().getMessageTypes().get(12); - internal_static_EgressConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_EgressConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EgressConfig_descriptor, new java.lang.String[] { "DeadLetter", "DeadLetterCACerts", @@ -32044,7 +30870,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "Timeout", }); internal_static_Egress_descriptor = getDescriptor().getMessageTypes().get(13); - internal_static_Egress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Egress_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Egress_descriptor, new java.lang.String[] { "ConsumerGroup", "Destination", @@ -32070,63 +30896,63 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { internal_static_EgressFeatureFlags_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_EgressFeatureFlags_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EgressFeatureFlags_descriptor, new java.lang.String[] { "EnableRateLimiter", "EnableOrderedExecutorMetrics", }); internal_static_Ingress_descriptor = getDescriptor().getMessageTypes().get(15); - internal_static_Ingress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Ingress_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Ingress_descriptor, new java.lang.String[] { "ContentMode", "Path", "Host", "Audience", "EventPolicies", }); internal_static_Reference_descriptor = getDescriptor().getMessageTypes().get(16); - internal_static_Reference_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Reference_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Reference_descriptor, new 
java.lang.String[] { "Uuid", "Namespace", "Name", "Version", "Kind", "GroupVersion", }); internal_static_SecretReference_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_SecretReference_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SecretReference_descriptor, new java.lang.String[] { "Reference", "KeyFieldReferences", }); internal_static_KeyFieldReference_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_KeyFieldReference_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_KeyFieldReference_descriptor, new java.lang.String[] { "SecretKey", "Field", }); internal_static_MultiSecretReference_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_MultiSecretReference_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiSecretReference_descriptor, new java.lang.String[] { "Protocol", "References", }); internal_static_CloudEventOverrides_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_CloudEventOverrides_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloudEventOverrides_descriptor, new java.lang.String[] { "Extensions", }); internal_static_CloudEventOverrides_ExtensionsEntry_descriptor = internal_static_CloudEventOverrides_descriptor.getNestedTypes().get(0); internal_static_CloudEventOverrides_ExtensionsEntry_fieldAccessorTable = - new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloudEventOverrides_ExtensionsEntry_descriptor, new 
java.lang.String[] { "Key", "Value", }); internal_static_FeatureFlags_descriptor = getDescriptor().getMessageTypes().get(21); - internal_static_FeatureFlags_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_FeatureFlags_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FeatureFlags_descriptor, new java.lang.String[] { "EnableEventTypeAutocreate", }); internal_static_Resource_descriptor = getDescriptor().getMessageTypes().get(22); - internal_static_Resource_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Resource_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Resource_descriptor, new java.lang.String[] { "Uid", "Topics", @@ -32143,10 +30969,11 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "Auth", }); internal_static_Contract_descriptor = getDescriptor().getMessageTypes().get(23); - internal_static_Contract_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Contract_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Contract_descriptor, new java.lang.String[] { "Generation", "Resources", "TrustBundles", }); + descriptor.resolveAllFeaturesImmutable(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/third_party/pkg/client/clientset/versioned/fake/clientset_generated.go b/third_party/pkg/client/clientset/versioned/fake/clientset_generated.go index 942dbdcc80..029576eafa 100644 --- a/third_party/pkg/client/clientset/versioned/fake/clientset_generated.go +++ b/third_party/pkg/client/clientset/versioned/fake/clientset_generated.go @@ -31,8 +31,12 @@ import ( // NewSimpleClientset returns a clientset that will respond with the provided objects. 
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is, -// without applying any validations and/or defaults. It shouldn't be considered a replacement +// without applying any field management, validations and/or defaults. It shouldn't be considered a replacement // for a real clientset and is mostly useful in simple unit tests. +// +// DEPRECATED: NewClientset replaces this with support for field management, which significantly improves +// server side apply testing. NewClientset is only available when apply configurations are generated (e.g. +// via --with-applyconfig). func NewSimpleClientset(objects ...runtime.Object) *Clientset { o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder()) for _, obj := range objects { diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/clustertriggerauthentication.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/clustertriggerauthentication.go index 371171125d..6c84f1997e 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/clustertriggerauthentication.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/clustertriggerauthentication.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" scheme "knative.dev/eventing-kafka-broker/third_party/pkg/client/clientset/versioned/scheme" ) @@ -51,118 +50,18 @@ type ClusterTriggerAuthenticationInterface interface { // clusterTriggerAuthentications implements ClusterTriggerAuthenticationInterface type clusterTriggerAuthentications struct { - client rest.Interface + *gentype.ClientWithList[*v1alpha1.ClusterTriggerAuthentication, *v1alpha1.ClusterTriggerAuthenticationList] } // 
newClusterTriggerAuthentications returns a ClusterTriggerAuthentications func newClusterTriggerAuthentications(c *KedaV1alpha1Client) *clusterTriggerAuthentications { return &clusterTriggerAuthentications{ - client: c.RESTClient(), + gentype.NewClientWithList[*v1alpha1.ClusterTriggerAuthentication, *v1alpha1.ClusterTriggerAuthenticationList]( + "clustertriggerauthentications", + c.RESTClient(), + scheme.ParameterCodec, + "", + func() *v1alpha1.ClusterTriggerAuthentication { return &v1alpha1.ClusterTriggerAuthentication{} }, + func() *v1alpha1.ClusterTriggerAuthenticationList { return &v1alpha1.ClusterTriggerAuthenticationList{} }), } } - -// Get takes name of the clusterTriggerAuthentication, and returns the corresponding clusterTriggerAuthentication object, and an error if there is any. -func (c *clusterTriggerAuthentications) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { - result = &v1alpha1.ClusterTriggerAuthentication{} - err = c.client.Get(). - Resource("clustertriggerauthentications"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of ClusterTriggerAuthentications that match those selectors. -func (c *clusterTriggerAuthentications) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ClusterTriggerAuthenticationList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.ClusterTriggerAuthenticationList{} - err = c.client.Get(). - Resource("clustertriggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested clusterTriggerAuthentications. 
-func (c *clusterTriggerAuthentications) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Resource("clustertriggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a clusterTriggerAuthentication and creates it. Returns the server's representation of the clusterTriggerAuthentication, and an error, if there is any. -func (c *clusterTriggerAuthentications) Create(ctx context.Context, clusterTriggerAuthentication *v1alpha1.ClusterTriggerAuthentication, opts v1.CreateOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { - result = &v1alpha1.ClusterTriggerAuthentication{} - err = c.client.Post(). - Resource("clustertriggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(clusterTriggerAuthentication). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a clusterTriggerAuthentication and updates it. Returns the server's representation of the clusterTriggerAuthentication, and an error, if there is any. -func (c *clusterTriggerAuthentications) Update(ctx context.Context, clusterTriggerAuthentication *v1alpha1.ClusterTriggerAuthentication, opts v1.UpdateOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { - result = &v1alpha1.ClusterTriggerAuthentication{} - err = c.client.Put(). - Resource("clustertriggerauthentications"). - Name(clusterTriggerAuthentication.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(clusterTriggerAuthentication). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the clusterTriggerAuthentication and deletes it. Returns an error if one occurs. 
-func (c *clusterTriggerAuthentications) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Resource("clustertriggerauthentications"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *clusterTriggerAuthentications) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Resource("clustertriggerauthentications"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched clusterTriggerAuthentication. -func (c *clusterTriggerAuthentications) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ClusterTriggerAuthentication, err error) { - result = &v1alpha1.ClusterTriggerAuthentication{} - err = c.client.Patch(pt). - Resource("clustertriggerauthentications"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_clustertriggerauthentication.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_clustertriggerauthentication.go index ab41a64362..47f27441bd 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_clustertriggerauthentication.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_clustertriggerauthentication.go @@ -40,20 +40,22 @@ var clustertriggerauthenticationsKind = v1alpha1.SchemeGroupVersion.WithKind("Cl // Get takes name of the clusterTriggerAuthentication, and returns the corresponding clusterTriggerAuthentication object, and an error if there is any. func (c *FakeClusterTriggerAuthentications) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { + emptyResult := &v1alpha1.ClusterTriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewRootGetAction(clustertriggerauthenticationsResource, name), &v1alpha1.ClusterTriggerAuthentication{}) + Invokes(testing.NewRootGetActionWithOptions(clustertriggerauthenticationsResource, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ClusterTriggerAuthentication), err } // List takes label and field selectors, and returns the list of ClusterTriggerAuthentications that match those selectors. func (c *FakeClusterTriggerAuthentications) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ClusterTriggerAuthenticationList, err error) { + emptyResult := &v1alpha1.ClusterTriggerAuthenticationList{} obj, err := c.Fake. 
- Invokes(testing.NewRootListAction(clustertriggerauthenticationsResource, clustertriggerauthenticationsKind, opts), &v1alpha1.ClusterTriggerAuthenticationList{}) + Invokes(testing.NewRootListActionWithOptions(clustertriggerauthenticationsResource, clustertriggerauthenticationsKind, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -72,25 +74,27 @@ func (c *FakeClusterTriggerAuthentications) List(ctx context.Context, opts v1.Li // Watch returns a watch.Interface that watches the requested clusterTriggerAuthentications. func (c *FakeClusterTriggerAuthentications) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewRootWatchAction(clustertriggerauthenticationsResource, opts)) + InvokesWatch(testing.NewRootWatchActionWithOptions(clustertriggerauthenticationsResource, opts)) } // Create takes the representation of a clusterTriggerAuthentication and creates it. Returns the server's representation of the clusterTriggerAuthentication, and an error, if there is any. func (c *FakeClusterTriggerAuthentications) Create(ctx context.Context, clusterTriggerAuthentication *v1alpha1.ClusterTriggerAuthentication, opts v1.CreateOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { + emptyResult := &v1alpha1.ClusterTriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewRootCreateAction(clustertriggerauthenticationsResource, clusterTriggerAuthentication), &v1alpha1.ClusterTriggerAuthentication{}) + Invokes(testing.NewRootCreateActionWithOptions(clustertriggerauthenticationsResource, clusterTriggerAuthentication, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ClusterTriggerAuthentication), err } // Update takes the representation of a clusterTriggerAuthentication and updates it. 
Returns the server's representation of the clusterTriggerAuthentication, and an error, if there is any. func (c *FakeClusterTriggerAuthentications) Update(ctx context.Context, clusterTriggerAuthentication *v1alpha1.ClusterTriggerAuthentication, opts v1.UpdateOptions) (result *v1alpha1.ClusterTriggerAuthentication, err error) { + emptyResult := &v1alpha1.ClusterTriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewRootUpdateAction(clustertriggerauthenticationsResource, clusterTriggerAuthentication), &v1alpha1.ClusterTriggerAuthentication{}) + Invokes(testing.NewRootUpdateActionWithOptions(clustertriggerauthenticationsResource, clusterTriggerAuthentication, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ClusterTriggerAuthentication), err } @@ -104,7 +108,7 @@ func (c *FakeClusterTriggerAuthentications) Delete(ctx context.Context, name str // DeleteCollection deletes a collection of objects. func (c *FakeClusterTriggerAuthentications) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewRootDeleteCollectionAction(clustertriggerauthenticationsResource, listOpts) + action := testing.NewRootDeleteCollectionActionWithOptions(clustertriggerauthenticationsResource, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.ClusterTriggerAuthenticationList{}) return err @@ -112,10 +116,11 @@ func (c *FakeClusterTriggerAuthentications) DeleteCollection(ctx context.Context // Patch applies the patch and returns the patched clusterTriggerAuthentication. func (c *FakeClusterTriggerAuthentications) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ClusterTriggerAuthentication, err error) { + emptyResult := &v1alpha1.ClusterTriggerAuthentication{} obj, err := c.Fake. 
- Invokes(testing.NewRootPatchSubresourceAction(clustertriggerauthenticationsResource, name, pt, data, subresources...), &v1alpha1.ClusterTriggerAuthentication{}) + Invokes(testing.NewRootPatchSubresourceActionWithOptions(clustertriggerauthenticationsResource, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ClusterTriggerAuthentication), err } diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledjob.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledjob.go index 25f5589315..b1283abeb8 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledjob.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledjob.go @@ -41,22 +41,24 @@ var scaledjobsKind = v1alpha1.SchemeGroupVersion.WithKind("ScaledJob") // Get takes name of the scaledJob, and returns the corresponding scaledJob object, and an error if there is any. func (c *FakeScaledJobs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ScaledJob, err error) { + emptyResult := &v1alpha1.ScaledJob{} obj, err := c.Fake. - Invokes(testing.NewGetAction(scaledjobsResource, c.ns, name), &v1alpha1.ScaledJob{}) + Invokes(testing.NewGetActionWithOptions(scaledjobsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledJob), err } // List takes label and field selectors, and returns the list of ScaledJobs that match those selectors. func (c *FakeScaledJobs) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ScaledJobList, err error) { + emptyResult := &v1alpha1.ScaledJobList{} obj, err := c.Fake. 
- Invokes(testing.NewListAction(scaledjobsResource, scaledjobsKind, c.ns, opts), &v1alpha1.ScaledJobList{}) + Invokes(testing.NewListActionWithOptions(scaledjobsResource, scaledjobsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeScaledJobs) List(ctx context.Context, opts v1.ListOptions) (result // Watch returns a watch.Interface that watches the requested scaledJobs. func (c *FakeScaledJobs) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(scaledjobsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(scaledjobsResource, c.ns, opts)) } // Create takes the representation of a scaledJob and creates it. Returns the server's representation of the scaledJob, and an error, if there is any. func (c *FakeScaledJobs) Create(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.CreateOptions) (result *v1alpha1.ScaledJob, err error) { + emptyResult := &v1alpha1.ScaledJob{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(scaledjobsResource, c.ns, scaledJob), &v1alpha1.ScaledJob{}) + Invokes(testing.NewCreateActionWithOptions(scaledjobsResource, c.ns, scaledJob, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledJob), err } // Update takes the representation of a scaledJob and updates it. Returns the server's representation of the scaledJob, and an error, if there is any. func (c *FakeScaledJobs) Update(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (result *v1alpha1.ScaledJob, err error) { + emptyResult := &v1alpha1.ScaledJob{} obj, err := c.Fake. 
- Invokes(testing.NewUpdateAction(scaledjobsResource, c.ns, scaledJob), &v1alpha1.ScaledJob{}) + Invokes(testing.NewUpdateActionWithOptions(scaledjobsResource, c.ns, scaledJob, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledJob), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeScaledJobs) UpdateStatus(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (*v1alpha1.ScaledJob, error) { +func (c *FakeScaledJobs) UpdateStatus(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (result *v1alpha1.ScaledJob, err error) { + emptyResult := &v1alpha1.ScaledJob{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(scaledjobsResource, "status", c.ns, scaledJob), &v1alpha1.ScaledJob{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(scaledjobsResource, "status", c.ns, scaledJob, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledJob), err } @@ -123,7 +128,7 @@ func (c *FakeScaledJobs) Delete(ctx context.Context, name string, opts v1.Delete // DeleteCollection deletes a collection of objects. func (c *FakeScaledJobs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(scaledjobsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(scaledjobsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.ScaledJobList{}) return err @@ -131,11 +136,12 @@ func (c *FakeScaledJobs) DeleteCollection(ctx context.Context, opts v1.DeleteOpt // Patch applies the patch and returns the patched scaledJob. 
func (c *FakeScaledJobs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ScaledJob, err error) { + emptyResult := &v1alpha1.ScaledJob{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(scaledjobsResource, c.ns, name, pt, data, subresources...), &v1alpha1.ScaledJob{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(scaledjobsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledJob), err } diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledobject.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledobject.go index 5f5ca37edc..ef48efa838 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledobject.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_scaledobject.go @@ -41,22 +41,24 @@ var scaledobjectsKind = v1alpha1.SchemeGroupVersion.WithKind("ScaledObject") // Get takes name of the scaledObject, and returns the corresponding scaledObject object, and an error if there is any. func (c *FakeScaledObjects) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ScaledObject, err error) { + emptyResult := &v1alpha1.ScaledObject{} obj, err := c.Fake. - Invokes(testing.NewGetAction(scaledobjectsResource, c.ns, name), &v1alpha1.ScaledObject{}) + Invokes(testing.NewGetActionWithOptions(scaledobjectsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledObject), err } // List takes label and field selectors, and returns the list of ScaledObjects that match those selectors. 
func (c *FakeScaledObjects) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ScaledObjectList, err error) { + emptyResult := &v1alpha1.ScaledObjectList{} obj, err := c.Fake. - Invokes(testing.NewListAction(scaledobjectsResource, scaledobjectsKind, c.ns, opts), &v1alpha1.ScaledObjectList{}) + Invokes(testing.NewListActionWithOptions(scaledobjectsResource, scaledobjectsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,40 +77,43 @@ func (c *FakeScaledObjects) List(ctx context.Context, opts v1.ListOptions) (resu // Watch returns a watch.Interface that watches the requested scaledObjects. func (c *FakeScaledObjects) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(scaledobjectsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(scaledobjectsResource, c.ns, opts)) } // Create takes the representation of a scaledObject and creates it. Returns the server's representation of the scaledObject, and an error, if there is any. func (c *FakeScaledObjects) Create(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.CreateOptions) (result *v1alpha1.ScaledObject, err error) { + emptyResult := &v1alpha1.ScaledObject{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(scaledobjectsResource, c.ns, scaledObject), &v1alpha1.ScaledObject{}) + Invokes(testing.NewCreateActionWithOptions(scaledobjectsResource, c.ns, scaledObject, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledObject), err } // Update takes the representation of a scaledObject and updates it. Returns the server's representation of the scaledObject, and an error, if there is any. 
func (c *FakeScaledObjects) Update(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (result *v1alpha1.ScaledObject, err error) { + emptyResult := &v1alpha1.ScaledObject{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(scaledobjectsResource, c.ns, scaledObject), &v1alpha1.ScaledObject{}) + Invokes(testing.NewUpdateActionWithOptions(scaledobjectsResource, c.ns, scaledObject, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledObject), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeScaledObjects) UpdateStatus(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (*v1alpha1.ScaledObject, error) { +func (c *FakeScaledObjects) UpdateStatus(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (result *v1alpha1.ScaledObject, err error) { + emptyResult := &v1alpha1.ScaledObject{} obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(scaledobjectsResource, "status", c.ns, scaledObject), &v1alpha1.ScaledObject{}) + Invokes(testing.NewUpdateSubresourceActionWithOptions(scaledobjectsResource, "status", c.ns, scaledObject, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledObject), err } @@ -123,7 +128,7 @@ func (c *FakeScaledObjects) Delete(ctx context.Context, name string, opts v1.Del // DeleteCollection deletes a collection of objects. 
func (c *FakeScaledObjects) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(scaledobjectsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(scaledobjectsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.ScaledObjectList{}) return err @@ -131,11 +136,12 @@ func (c *FakeScaledObjects) DeleteCollection(ctx context.Context, opts v1.Delete // Patch applies the patch and returns the patched scaledObject. func (c *FakeScaledObjects) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ScaledObject, err error) { + emptyResult := &v1alpha1.ScaledObject{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(scaledobjectsResource, c.ns, name, pt, data, subresources...), &v1alpha1.ScaledObject{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(scaledobjectsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.ScaledObject), err } diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_triggerauthentication.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_triggerauthentication.go index 79d0b41b25..49647e69c0 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_triggerauthentication.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/fake/fake_triggerauthentication.go @@ -41,22 +41,24 @@ var triggerauthenticationsKind = v1alpha1.SchemeGroupVersion.WithKind("TriggerAu // Get takes name of the triggerAuthentication, and returns the corresponding triggerAuthentication object, and an error if there is any. 
func (c *FakeTriggerAuthentications) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.TriggerAuthentication, err error) { + emptyResult := &v1alpha1.TriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewGetAction(triggerauthenticationsResource, c.ns, name), &v1alpha1.TriggerAuthentication{}) + Invokes(testing.NewGetActionWithOptions(triggerauthenticationsResource, c.ns, name, options), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.TriggerAuthentication), err } // List takes label and field selectors, and returns the list of TriggerAuthentications that match those selectors. func (c *FakeTriggerAuthentications) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.TriggerAuthenticationList, err error) { + emptyResult := &v1alpha1.TriggerAuthenticationList{} obj, err := c.Fake. - Invokes(testing.NewListAction(triggerauthenticationsResource, triggerauthenticationsKind, c.ns, opts), &v1alpha1.TriggerAuthenticationList{}) + Invokes(testing.NewListActionWithOptions(triggerauthenticationsResource, triggerauthenticationsKind, c.ns, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } label, _, _ := testing.ExtractFromListOptions(opts) @@ -75,28 +77,30 @@ func (c *FakeTriggerAuthentications) List(ctx context.Context, opts v1.ListOptio // Watch returns a watch.Interface that watches the requested triggerAuthentications. func (c *FakeTriggerAuthentications) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. - InvokesWatch(testing.NewWatchAction(triggerauthenticationsResource, c.ns, opts)) + InvokesWatch(testing.NewWatchActionWithOptions(triggerauthenticationsResource, c.ns, opts)) } // Create takes the representation of a triggerAuthentication and creates it. Returns the server's representation of the triggerAuthentication, and an error, if there is any. 
func (c *FakeTriggerAuthentications) Create(ctx context.Context, triggerAuthentication *v1alpha1.TriggerAuthentication, opts v1.CreateOptions) (result *v1alpha1.TriggerAuthentication, err error) { + emptyResult := &v1alpha1.TriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewCreateAction(triggerauthenticationsResource, c.ns, triggerAuthentication), &v1alpha1.TriggerAuthentication{}) + Invokes(testing.NewCreateActionWithOptions(triggerauthenticationsResource, c.ns, triggerAuthentication, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.TriggerAuthentication), err } // Update takes the representation of a triggerAuthentication and updates it. Returns the server's representation of the triggerAuthentication, and an error, if there is any. func (c *FakeTriggerAuthentications) Update(ctx context.Context, triggerAuthentication *v1alpha1.TriggerAuthentication, opts v1.UpdateOptions) (result *v1alpha1.TriggerAuthentication, err error) { + emptyResult := &v1alpha1.TriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewUpdateAction(triggerauthenticationsResource, c.ns, triggerAuthentication), &v1alpha1.TriggerAuthentication{}) + Invokes(testing.NewUpdateActionWithOptions(triggerauthenticationsResource, c.ns, triggerAuthentication, opts), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.TriggerAuthentication), err } @@ -111,7 +115,7 @@ func (c *FakeTriggerAuthentications) Delete(ctx context.Context, name string, op // DeleteCollection deletes a collection of objects. 
func (c *FakeTriggerAuthentications) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(triggerauthenticationsResource, c.ns, listOpts) + action := testing.NewDeleteCollectionActionWithOptions(triggerauthenticationsResource, c.ns, opts, listOpts) _, err := c.Fake.Invokes(action, &v1alpha1.TriggerAuthenticationList{}) return err @@ -119,11 +123,12 @@ func (c *FakeTriggerAuthentications) DeleteCollection(ctx context.Context, opts // Patch applies the patch and returns the patched triggerAuthentication. func (c *FakeTriggerAuthentications) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.TriggerAuthentication, err error) { + emptyResult := &v1alpha1.TriggerAuthentication{} obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(triggerauthenticationsResource, c.ns, name, pt, data, subresources...), &v1alpha1.TriggerAuthentication{}) + Invokes(testing.NewPatchSubresourceActionWithOptions(triggerauthenticationsResource, c.ns, name, pt, data, opts, subresources...), emptyResult) if obj == nil { - return nil, err + return emptyResult, err } return obj.(*v1alpha1.TriggerAuthentication), err } diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledjob.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledjob.go index 4a8b9cca7d..2138e87cfc 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledjob.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledjob.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" scheme 
"knative.dev/eventing-kafka-broker/third_party/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type ScaledJobsGetter interface { type ScaledJobInterface interface { Create(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.CreateOptions) (*v1alpha1.ScaledJob, error) Update(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (*v1alpha1.ScaledJob, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). UpdateStatus(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (*v1alpha1.ScaledJob, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type ScaledJobInterface interface { // scaledJobs implements ScaledJobInterface type scaledJobs struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.ScaledJob, *v1alpha1.ScaledJobList] } // newScaledJobs returns a ScaledJobs func newScaledJobs(c *KedaV1alpha1Client, namespace string) *scaledJobs { return &scaledJobs{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.ScaledJob, *v1alpha1.ScaledJobList]( + "scaledjobs", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.ScaledJob { return &v1alpha1.ScaledJob{} }, + func() *v1alpha1.ScaledJobList { return &v1alpha1.ScaledJobList{} }), } } - -// Get takes name of the scaledJob, and returns the corresponding scaledJob object, and an error if there is any. -func (c *scaledJobs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ScaledJob, err error) { - result = &v1alpha1.ScaledJob{} - err = c.client.Get(). - Namespace(c.ns). - Resource("scaledjobs"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). 
- Into(result) - return -} - -// List takes label and field selectors, and returns the list of ScaledJobs that match those selectors. -func (c *scaledJobs) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ScaledJobList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.ScaledJobList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("scaledjobs"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested scaledJobs. -func (c *scaledJobs) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("scaledjobs"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a scaledJob and creates it. Returns the server's representation of the scaledJob, and an error, if there is any. -func (c *scaledJobs) Create(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.CreateOptions) (result *v1alpha1.ScaledJob, err error) { - result = &v1alpha1.ScaledJob{} - err = c.client.Post(). - Namespace(c.ns). - Resource("scaledjobs"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledJob). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a scaledJob and updates it. Returns the server's representation of the scaledJob, and an error, if there is any. -func (c *scaledJobs) Update(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (result *v1alpha1.ScaledJob, err error) { - result = &v1alpha1.ScaledJob{} - err = c.client.Put(). - Namespace(c.ns). - Resource("scaledjobs"). 
- Name(scaledJob.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledJob). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *scaledJobs) UpdateStatus(ctx context.Context, scaledJob *v1alpha1.ScaledJob, opts v1.UpdateOptions) (result *v1alpha1.ScaledJob, err error) { - result = &v1alpha1.ScaledJob{} - err = c.client.Put(). - Namespace(c.ns). - Resource("scaledjobs"). - Name(scaledJob.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledJob). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the scaledJob and deletes it. Returns an error if one occurs. -func (c *scaledJobs) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("scaledjobs"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *scaledJobs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("scaledjobs"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched scaledJob. -func (c *scaledJobs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ScaledJob, err error) { - result = &v1alpha1.ScaledJob{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("scaledjobs"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledobject.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledobject.go index 2716bcc2dc..50313c5f47 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledobject.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/scaledobject.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" scheme "knative.dev/eventing-kafka-broker/third_party/pkg/client/clientset/versioned/scheme" ) @@ -40,6 +39,7 @@ type ScaledObjectsGetter interface { type ScaledObjectInterface interface { Create(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.CreateOptions) (*v1alpha1.ScaledObject, error) Update(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (*v1alpha1.ScaledObject, error) + // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
UpdateStatus(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (*v1alpha1.ScaledObject, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error @@ -52,144 +52,18 @@ type ScaledObjectInterface interface { // scaledObjects implements ScaledObjectInterface type scaledObjects struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.ScaledObject, *v1alpha1.ScaledObjectList] } // newScaledObjects returns a ScaledObjects func newScaledObjects(c *KedaV1alpha1Client, namespace string) *scaledObjects { return &scaledObjects{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.ScaledObject, *v1alpha1.ScaledObjectList]( + "scaledobjects", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.ScaledObject { return &v1alpha1.ScaledObject{} }, + func() *v1alpha1.ScaledObjectList { return &v1alpha1.ScaledObjectList{} }), } } - -// Get takes name of the scaledObject, and returns the corresponding scaledObject object, and an error if there is any. -func (c *scaledObjects) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.ScaledObject, err error) { - result = &v1alpha1.ScaledObject{} - err = c.client.Get(). - Namespace(c.ns). - Resource("scaledobjects"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of ScaledObjects that match those selectors. -func (c *scaledObjects) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.ScaledObjectList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.ScaledObjectList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("scaledobjects"). 
- VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested scaledObjects. -func (c *scaledObjects) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("scaledobjects"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a scaledObject and creates it. Returns the server's representation of the scaledObject, and an error, if there is any. -func (c *scaledObjects) Create(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.CreateOptions) (result *v1alpha1.ScaledObject, err error) { - result = &v1alpha1.ScaledObject{} - err = c.client.Post(). - Namespace(c.ns). - Resource("scaledobjects"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledObject). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a scaledObject and updates it. Returns the server's representation of the scaledObject, and an error, if there is any. -func (c *scaledObjects) Update(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (result *v1alpha1.ScaledObject, err error) { - result = &v1alpha1.ScaledObject{} - err = c.client.Put(). - Namespace(c.ns). - Resource("scaledobjects"). - Name(scaledObject.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledObject). - Do(ctx). - Into(result) - return -} - -// UpdateStatus was generated because the type contains a Status member. -// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *scaledObjects) UpdateStatus(ctx context.Context, scaledObject *v1alpha1.ScaledObject, opts v1.UpdateOptions) (result *v1alpha1.ScaledObject, err error) { - result = &v1alpha1.ScaledObject{} - err = c.client.Put(). - Namespace(c.ns). - Resource("scaledobjects"). - Name(scaledObject.Name). - SubResource("status"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(scaledObject). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the scaledObject and deletes it. Returns an error if one occurs. -func (c *scaledObjects) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("scaledobjects"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *scaledObjects) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("scaledobjects"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched scaledObject. -func (c *scaledObjects) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.ScaledObject, err error) { - result = &v1alpha1.ScaledObject{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("scaledobjects"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). 
- Into(result) - return -} diff --git a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/triggerauthentication.go b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/triggerauthentication.go index 92f6616001..54b2bbf945 100644 --- a/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/triggerauthentication.go +++ b/third_party/pkg/client/clientset/versioned/typed/keda/v1alpha1/triggerauthentication.go @@ -20,12 +20,11 @@ package v1alpha1 import ( "context" - "time" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" + gentype "k8s.io/client-go/gentype" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" scheme "knative.dev/eventing-kafka-broker/third_party/pkg/client/clientset/versioned/scheme" ) @@ -51,128 +50,18 @@ type TriggerAuthenticationInterface interface { // triggerAuthentications implements TriggerAuthenticationInterface type triggerAuthentications struct { - client rest.Interface - ns string + *gentype.ClientWithList[*v1alpha1.TriggerAuthentication, *v1alpha1.TriggerAuthenticationList] } // newTriggerAuthentications returns a TriggerAuthentications func newTriggerAuthentications(c *KedaV1alpha1Client, namespace string) *triggerAuthentications { return &triggerAuthentications{ - client: c.RESTClient(), - ns: namespace, + gentype.NewClientWithList[*v1alpha1.TriggerAuthentication, *v1alpha1.TriggerAuthenticationList]( + "triggerauthentications", + c.RESTClient(), + scheme.ParameterCodec, + namespace, + func() *v1alpha1.TriggerAuthentication { return &v1alpha1.TriggerAuthentication{} }, + func() *v1alpha1.TriggerAuthenticationList { return &v1alpha1.TriggerAuthenticationList{} }), } } - -// Get takes name of the triggerAuthentication, and returns the corresponding triggerAuthentication object, and an error if there is any. 
-func (c *triggerAuthentications) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.TriggerAuthentication, err error) { - result = &v1alpha1.TriggerAuthentication{} - err = c.client.Get(). - Namespace(c.ns). - Resource("triggerauthentications"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of TriggerAuthentications that match those selectors. -func (c *triggerAuthentications) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.TriggerAuthenticationList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.TriggerAuthenticationList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("triggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested triggerAuthentications. -func (c *triggerAuthentications) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("triggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a triggerAuthentication and creates it. Returns the server's representation of the triggerAuthentication, and an error, if there is any. -func (c *triggerAuthentications) Create(ctx context.Context, triggerAuthentication *v1alpha1.TriggerAuthentication, opts v1.CreateOptions) (result *v1alpha1.TriggerAuthentication, err error) { - result = &v1alpha1.TriggerAuthentication{} - err = c.client.Post(). - Namespace(c.ns). 
- Resource("triggerauthentications"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(triggerAuthentication). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a triggerAuthentication and updates it. Returns the server's representation of the triggerAuthentication, and an error, if there is any. -func (c *triggerAuthentications) Update(ctx context.Context, triggerAuthentication *v1alpha1.TriggerAuthentication, opts v1.UpdateOptions) (result *v1alpha1.TriggerAuthentication, err error) { - result = &v1alpha1.TriggerAuthentication{} - err = c.client.Put(). - Namespace(c.ns). - Resource("triggerauthentications"). - Name(triggerAuthentication.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(triggerAuthentication). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the triggerAuthentication and deletes it. Returns an error if one occurs. -func (c *triggerAuthentications) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("triggerauthentications"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *triggerAuthentications) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("triggerauthentications"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched triggerAuthentication. 
-func (c *triggerAuthentications) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.TriggerAuthentication, err error) { - result = &v1alpha1.TriggerAuthentication{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("triggerauthentications"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). - Into(result) - return -} diff --git a/third_party/pkg/client/informers/externalversions/factory.go b/third_party/pkg/client/informers/externalversions/factory.go index a44266b00d..1c3783b1aa 100644 --- a/third_party/pkg/client/informers/externalversions/factory.go +++ b/third_party/pkg/client/informers/externalversions/factory.go @@ -228,6 +228,7 @@ type SharedInformerFactory interface { // Start initializes all requested informers. They are handled in goroutines // which run until the stop channel gets closed. + // Warning: Start does not block. When run in a go-routine, it will race with a later WaitForCacheSync. Start(stopCh <-chan struct{}) // Shutdown marks a factory as shutting down. At that point no new diff --git a/third_party/pkg/client/listers/keda/v1alpha1/clustertriggerauthentication.go b/third_party/pkg/client/listers/keda/v1alpha1/clustertriggerauthentication.go index 255c3b92ba..5f9b265528 100644 --- a/third_party/pkg/client/listers/keda/v1alpha1/clustertriggerauthentication.go +++ b/third_party/pkg/client/listers/keda/v1alpha1/clustertriggerauthentication.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" ) @@ -39,30 +39,10 @@ type ClusterTriggerAuthenticationLister interface { // clusterTriggerAuthenticationLister implements the ClusterTriggerAuthenticationLister interface. 
type clusterTriggerAuthenticationLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.ClusterTriggerAuthentication] } // NewClusterTriggerAuthenticationLister returns a new ClusterTriggerAuthenticationLister. func NewClusterTriggerAuthenticationLister(indexer cache.Indexer) ClusterTriggerAuthenticationLister { - return &clusterTriggerAuthenticationLister{indexer: indexer} -} - -// List lists all ClusterTriggerAuthentications in the indexer. -func (s *clusterTriggerAuthenticationLister) List(selector labels.Selector) (ret []*v1alpha1.ClusterTriggerAuthentication, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ClusterTriggerAuthentication)) - }) - return ret, err -} - -// Get retrieves the ClusterTriggerAuthentication from the index for a given name. -func (s *clusterTriggerAuthenticationLister) Get(name string) (*v1alpha1.ClusterTriggerAuthentication, error) { - obj, exists, err := s.indexer.GetByKey(name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("clustertriggerauthentication"), name) - } - return obj.(*v1alpha1.ClusterTriggerAuthentication), nil + return &clusterTriggerAuthenticationLister{listers.New[*v1alpha1.ClusterTriggerAuthentication](indexer, v1alpha1.Resource("clustertriggerauthentication"))} } diff --git a/third_party/pkg/client/listers/keda/v1alpha1/scaledjob.go b/third_party/pkg/client/listers/keda/v1alpha1/scaledjob.go index 4446f12d0f..d6afa9af7d 100644 --- a/third_party/pkg/client/listers/keda/v1alpha1/scaledjob.go +++ b/third_party/pkg/client/listers/keda/v1alpha1/scaledjob.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" ) @@ -38,25 +38,17 @@ type ScaledJobLister interface { // scaledJobLister 
implements the ScaledJobLister interface. type scaledJobLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.ScaledJob] } // NewScaledJobLister returns a new ScaledJobLister. func NewScaledJobLister(indexer cache.Indexer) ScaledJobLister { - return &scaledJobLister{indexer: indexer} -} - -// List lists all ScaledJobs in the indexer. -func (s *scaledJobLister) List(selector labels.Selector) (ret []*v1alpha1.ScaledJob, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ScaledJob)) - }) - return ret, err + return &scaledJobLister{listers.New[*v1alpha1.ScaledJob](indexer, v1alpha1.Resource("scaledjob"))} } // ScaledJobs returns an object that can list and get ScaledJobs. func (s *scaledJobLister) ScaledJobs(namespace string) ScaledJobNamespaceLister { - return scaledJobNamespaceLister{indexer: s.indexer, namespace: namespace} + return scaledJobNamespaceLister{listers.NewNamespaced[*v1alpha1.ScaledJob](s.ResourceIndexer, namespace)} } // ScaledJobNamespaceLister helps list and get ScaledJobs. @@ -74,26 +66,5 @@ type ScaledJobNamespaceLister interface { // scaledJobNamespaceLister implements the ScaledJobNamespaceLister // interface. type scaledJobNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all ScaledJobs in the indexer for a given namespace. -func (s scaledJobNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.ScaledJob, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ScaledJob)) - }) - return ret, err -} - -// Get retrieves the ScaledJob from the indexer for a given namespace and name. 
-func (s scaledJobNamespaceLister) Get(name string) (*v1alpha1.ScaledJob, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("scaledjob"), name) - } - return obj.(*v1alpha1.ScaledJob), nil + listers.ResourceIndexer[*v1alpha1.ScaledJob] } diff --git a/third_party/pkg/client/listers/keda/v1alpha1/scaledobject.go b/third_party/pkg/client/listers/keda/v1alpha1/scaledobject.go index ee73d6fd6a..4e8b4f0dc6 100644 --- a/third_party/pkg/client/listers/keda/v1alpha1/scaledobject.go +++ b/third_party/pkg/client/listers/keda/v1alpha1/scaledobject.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" ) @@ -38,25 +38,17 @@ type ScaledObjectLister interface { // scaledObjectLister implements the ScaledObjectLister interface. type scaledObjectLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.ScaledObject] } // NewScaledObjectLister returns a new ScaledObjectLister. func NewScaledObjectLister(indexer cache.Indexer) ScaledObjectLister { - return &scaledObjectLister{indexer: indexer} -} - -// List lists all ScaledObjects in the indexer. -func (s *scaledObjectLister) List(selector labels.Selector) (ret []*v1alpha1.ScaledObject, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ScaledObject)) - }) - return ret, err + return &scaledObjectLister{listers.New[*v1alpha1.ScaledObject](indexer, v1alpha1.Resource("scaledobject"))} } // ScaledObjects returns an object that can list and get ScaledObjects. 
func (s *scaledObjectLister) ScaledObjects(namespace string) ScaledObjectNamespaceLister { - return scaledObjectNamespaceLister{indexer: s.indexer, namespace: namespace} + return scaledObjectNamespaceLister{listers.NewNamespaced[*v1alpha1.ScaledObject](s.ResourceIndexer, namespace)} } // ScaledObjectNamespaceLister helps list and get ScaledObjects. @@ -74,26 +66,5 @@ type ScaledObjectNamespaceLister interface { // scaledObjectNamespaceLister implements the ScaledObjectNamespaceLister // interface. type scaledObjectNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all ScaledObjects in the indexer for a given namespace. -func (s scaledObjectNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.ScaledObject, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.ScaledObject)) - }) - return ret, err -} - -// Get retrieves the ScaledObject from the indexer for a given namespace and name. 
-func (s scaledObjectNamespaceLister) Get(name string) (*v1alpha1.ScaledObject, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("scaledobject"), name) - } - return obj.(*v1alpha1.ScaledObject), nil + listers.ResourceIndexer[*v1alpha1.ScaledObject] } diff --git a/third_party/pkg/client/listers/keda/v1alpha1/triggerauthentication.go b/third_party/pkg/client/listers/keda/v1alpha1/triggerauthentication.go index 210a63030a..340cd7c106 100644 --- a/third_party/pkg/client/listers/keda/v1alpha1/triggerauthentication.go +++ b/third_party/pkg/client/listers/keda/v1alpha1/triggerauthentication.go @@ -19,8 +19,8 @@ package v1alpha1 import ( - "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/listers" "k8s.io/client-go/tools/cache" v1alpha1 "knative.dev/eventing-kafka-broker/third_party/pkg/apis/keda/v1alpha1" ) @@ -38,25 +38,17 @@ type TriggerAuthenticationLister interface { // triggerAuthenticationLister implements the TriggerAuthenticationLister interface. type triggerAuthenticationLister struct { - indexer cache.Indexer + listers.ResourceIndexer[*v1alpha1.TriggerAuthentication] } // NewTriggerAuthenticationLister returns a new TriggerAuthenticationLister. func NewTriggerAuthenticationLister(indexer cache.Indexer) TriggerAuthenticationLister { - return &triggerAuthenticationLister{indexer: indexer} -} - -// List lists all TriggerAuthentications in the indexer. 
-func (s *triggerAuthenticationLister) List(selector labels.Selector) (ret []*v1alpha1.TriggerAuthentication, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.TriggerAuthentication)) - }) - return ret, err + return &triggerAuthenticationLister{listers.New[*v1alpha1.TriggerAuthentication](indexer, v1alpha1.Resource("triggerauthentication"))} } // TriggerAuthentications returns an object that can list and get TriggerAuthentications. func (s *triggerAuthenticationLister) TriggerAuthentications(namespace string) TriggerAuthenticationNamespaceLister { - return triggerAuthenticationNamespaceLister{indexer: s.indexer, namespace: namespace} + return triggerAuthenticationNamespaceLister{listers.NewNamespaced[*v1alpha1.TriggerAuthentication](s.ResourceIndexer, namespace)} } // TriggerAuthenticationNamespaceLister helps list and get TriggerAuthentications. @@ -74,26 +66,5 @@ type TriggerAuthenticationNamespaceLister interface { // triggerAuthenticationNamespaceLister implements the TriggerAuthenticationNamespaceLister // interface. type triggerAuthenticationNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all TriggerAuthentications in the indexer for a given namespace. -func (s triggerAuthenticationNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.TriggerAuthentication, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.TriggerAuthentication)) - }) - return ret, err -} - -// Get retrieves the TriggerAuthentication from the indexer for a given namespace and name. 
-func (s triggerAuthenticationNamespaceLister) Get(name string) (*v1alpha1.TriggerAuthentication, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("triggerauthentication"), name) - } - return obj.(*v1alpha1.TriggerAuthentication), nil + listers.ResourceIndexer[*v1alpha1.TriggerAuthentication] }