diff --git a/hack/protoc.sh b/hack/protoc.sh index 069ff1b..a46fcee 100755 --- a/hack/protoc.sh +++ b/hack/protoc.sh @@ -6,7 +6,7 @@ LANGUAGE=go proto_modules="common/v1 common/v2 cdnsystem/v1 dfdaemon/v1 dfdaemon/v2 errordetails/v1 manager/v1 manager/v2 scheduler/v1 scheduler/v2 -security/v1 trainer/v1" +security/v1 trainer/v1 inference/v1" echo "generate protos..." diff --git a/pkg/apis/inference/v1/grpc_service.pb.go b/pkg/apis/inference/v1/grpc_service.pb.go new file mode 100644 index 0000000..71501ce --- /dev/null +++ b/pkg/apis/inference/v1/grpc_service.pb.go @@ -0,0 +1,6276 @@ +// Copyright 2020-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of NVIDIA CORPORATION nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +// PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.6 +// source: pkg/apis/inference/v1/grpc_service.proto + +package inference + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// @@ +// @@.. cpp:var:: message ServerLiveRequest +// @@ +// @@ Request message for ServerLive. 
+// @@ +type ServerLiveRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ServerLiveRequest) Reset() { + *x = ServerLiveRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerLiveRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerLiveRequest) ProtoMessage() {} + +func (x *ServerLiveRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerLiveRequest.ProtoReflect.Descriptor instead. +func (*ServerLiveRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{0} +} + +// @@ +// @@.. cpp:var:: message ServerLiveResponse +// @@ +// @@ Response message for ServerLive. +// @@ +type ServerLiveResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: bool live + // @@ + // @@ True if the inference server is live, false it not live. 
+ // @@ + Live bool `protobuf:"varint,1,opt,name=live,proto3" json:"live,omitempty"` +} + +func (x *ServerLiveResponse) Reset() { + *x = ServerLiveResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerLiveResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerLiveResponse) ProtoMessage() {} + +func (x *ServerLiveResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerLiveResponse.ProtoReflect.Descriptor instead. +func (*ServerLiveResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{1} +} + +func (x *ServerLiveResponse) GetLive() bool { + if x != nil { + return x.Live + } + return false +} + +// @@ +// @@.. cpp:var:: message ServerReadyRequest +// @@ +// @@ Request message for ServerReady. 
+// @@ +type ServerReadyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ServerReadyRequest) Reset() { + *x = ServerReadyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerReadyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerReadyRequest) ProtoMessage() {} + +func (x *ServerReadyRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerReadyRequest.ProtoReflect.Descriptor instead. +func (*ServerReadyRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{2} +} + +// @@ +// @@.. cpp:var:: message ServerReadyResponse +// @@ +// @@ Response message for ServerReady. +// @@ +type ServerReadyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: bool ready + // @@ + // @@ True if the inference server is ready, false it not ready. 
+ // @@ + Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"` +} + +func (x *ServerReadyResponse) Reset() { + *x = ServerReadyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerReadyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerReadyResponse) ProtoMessage() {} + +func (x *ServerReadyResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerReadyResponse.ProtoReflect.Descriptor instead. +func (*ServerReadyResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{3} +} + +func (x *ServerReadyResponse) GetReady() bool { + if x != nil { + return x.Ready + } + return false +} + +// @@ +// @@.. cpp:var:: message ModelReadyRequest +// @@ +// @@ Request message for ModelReady. +// @@ +type ModelReadyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model to check for readiness. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model to check for readiness. If not given the + // @@ server will choose a version based on the model and internal policy. 
+ // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *ModelReadyRequest) Reset() { + *x = ModelReadyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelReadyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelReadyRequest) ProtoMessage() {} + +func (x *ModelReadyRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelReadyRequest.ProtoReflect.Descriptor instead. +func (*ModelReadyRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{4} +} + +func (x *ModelReadyRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelReadyRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +// @@ +// @@.. cpp:var:: message ModelReadyResponse +// @@ +// @@ Response message for ModelReady. +// @@ +type ModelReadyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: bool ready + // @@ + // @@ True if the model is ready, false it not ready. 
+ // @@ + Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"` +} + +func (x *ModelReadyResponse) Reset() { + *x = ModelReadyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelReadyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelReadyResponse) ProtoMessage() {} + +func (x *ModelReadyResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelReadyResponse.ProtoReflect.Descriptor instead. +func (*ModelReadyResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{5} +} + +func (x *ModelReadyResponse) GetReady() bool { + if x != nil { + return x.Ready + } + return false +} + +// @@ +// @@.. cpp:var:: message ServerMetadataRequest +// @@ +// @@ Request message for ServerMetadata. 
+// @@ +type ServerMetadataRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ServerMetadataRequest) Reset() { + *x = ServerMetadataRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerMetadataRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerMetadataRequest) ProtoMessage() {} + +func (x *ServerMetadataRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerMetadataRequest.ProtoReflect.Descriptor instead. +func (*ServerMetadataRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{6} +} + +// @@ +// @@.. cpp:var:: message ServerMetadataResponse +// @@ +// @@ Response message for ServerMetadata. +// @@ +type ServerMetadataResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The server name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ + // @@ .. cpp:var:: string version + // @@ + // @@ The server version. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // @@ + // @@ .. cpp:var:: string extensions (repeated) + // @@ + // @@ The extensions supported by the server. 
+ // @@ + Extensions []string `protobuf:"bytes,3,rep,name=extensions,proto3" json:"extensions,omitempty"` +} + +func (x *ServerMetadataResponse) Reset() { + *x = ServerMetadataResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServerMetadataResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServerMetadataResponse) ProtoMessage() {} + +func (x *ServerMetadataResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServerMetadataResponse.ProtoReflect.Descriptor instead. +func (*ServerMetadataResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{7} +} + +func (x *ServerMetadataResponse) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ServerMetadataResponse) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *ServerMetadataResponse) GetExtensions() []string { + if x != nil { + return x.Extensions + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelMetadataRequest +// @@ +// @@ Request message for ModelMetadata. +// @@ +type ModelMetadataRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model to check for readiness. 
If not + // @@ given the server will choose a version based on the + // @@ model and internal policy. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *ModelMetadataRequest) Reset() { + *x = ModelMetadataRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelMetadataRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelMetadataRequest) ProtoMessage() {} + +func (x *ModelMetadataRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelMetadataRequest.ProtoReflect.Descriptor instead. +func (*ModelMetadataRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{8} +} + +func (x *ModelMetadataRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelMetadataRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +// @@ +// @@.. cpp:var:: message ModelMetadataResponse +// @@ +// @@ Response message for ModelMetadata. +// @@ +type ModelMetadataResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The model name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ + // @@ .. cpp:var:: string versions (repeated) + // @@ + // @@ The versions of the model. 
+ // @@ + Versions []string `protobuf:"bytes,2,rep,name=versions,proto3" json:"versions,omitempty"` + // @@ + // @@ .. cpp:var:: string platform + // @@ + // @@ The model's platform. + // @@ + Platform string `protobuf:"bytes,3,opt,name=platform,proto3" json:"platform,omitempty"` + // @@ + // @@ .. cpp:var:: TensorMetadata inputs (repeated) + // @@ + // @@ The model's inputs. + // @@ + Inputs []*ModelMetadataResponse_TensorMetadata `protobuf:"bytes,4,rep,name=inputs,proto3" json:"inputs,omitempty"` + // @@ + // @@ .. cpp:var:: TensorMetadata outputs (repeated) + // @@ + // @@ The model's outputs. + // @@ + Outputs []*ModelMetadataResponse_TensorMetadata `protobuf:"bytes,5,rep,name=outputs,proto3" json:"outputs,omitempty"` +} + +func (x *ModelMetadataResponse) Reset() { + *x = ModelMetadataResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelMetadataResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelMetadataResponse) ProtoMessage() {} + +func (x *ModelMetadataResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelMetadataResponse.ProtoReflect.Descriptor instead. 
+func (*ModelMetadataResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{9} +} + +func (x *ModelMetadataResponse) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelMetadataResponse) GetVersions() []string { + if x != nil { + return x.Versions + } + return nil +} + +func (x *ModelMetadataResponse) GetPlatform() string { + if x != nil { + return x.Platform + } + return "" +} + +func (x *ModelMetadataResponse) GetInputs() []*ModelMetadataResponse_TensorMetadata { + if x != nil { + return x.Inputs + } + return nil +} + +func (x *ModelMetadataResponse) GetOutputs() []*ModelMetadataResponse_TensorMetadata { + if x != nil { + return x.Outputs + } + return nil +} + +// @@ +// @@.. cpp:var:: message InferParameter +// @@ +// @@ An inference parameter value. +// @@ +type InferParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. 
cpp:var:: oneof parameter_choice + // @@ + // @@ The parameter value can be a string, an int64, + // @@ an uint64, a double, or a boolean + // @@ + // @@ Note: double and uint64 are currently + // @@ placeholders for future use and + // @@ are not supported for custom parameters + // @@ + // + // Types that are assignable to ParameterChoice: + // + // *InferParameter_BoolParam + // *InferParameter_Int64Param + // *InferParameter_StringParam + // *InferParameter_DoubleParam + // *InferParameter_Uint64Param + ParameterChoice isInferParameter_ParameterChoice `protobuf_oneof:"parameter_choice"` +} + +func (x *InferParameter) Reset() { + *x = InferParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InferParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InferParameter) ProtoMessage() {} + +func (x *InferParameter) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InferParameter.ProtoReflect.Descriptor instead. 
+func (*InferParameter) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{10} +} + +func (m *InferParameter) GetParameterChoice() isInferParameter_ParameterChoice { + if m != nil { + return m.ParameterChoice + } + return nil +} + +func (x *InferParameter) GetBoolParam() bool { + if x, ok := x.GetParameterChoice().(*InferParameter_BoolParam); ok { + return x.BoolParam + } + return false +} + +func (x *InferParameter) GetInt64Param() int64 { + if x, ok := x.GetParameterChoice().(*InferParameter_Int64Param); ok { + return x.Int64Param + } + return 0 +} + +func (x *InferParameter) GetStringParam() string { + if x, ok := x.GetParameterChoice().(*InferParameter_StringParam); ok { + return x.StringParam + } + return "" +} + +func (x *InferParameter) GetDoubleParam() float64 { + if x, ok := x.GetParameterChoice().(*InferParameter_DoubleParam); ok { + return x.DoubleParam + } + return 0 +} + +func (x *InferParameter) GetUint64Param() uint64 { + if x, ok := x.GetParameterChoice().(*InferParameter_Uint64Param); ok { + return x.Uint64Param + } + return 0 +} + +type isInferParameter_ParameterChoice interface { + isInferParameter_ParameterChoice() +} + +type InferParameter_BoolParam struct { + // @@ .. cpp:var:: bool bool_param + // @@ + // @@ A boolean parameter value. + // @@ + BoolParam bool `protobuf:"varint,1,opt,name=bool_param,json=boolParam,proto3,oneof"` +} + +type InferParameter_Int64Param struct { + // @@ .. cpp:var:: int64 int64_param + // @@ + // @@ An int64 parameter value. + // @@ + Int64Param int64 `protobuf:"varint,2,opt,name=int64_param,json=int64Param,proto3,oneof"` +} + +type InferParameter_StringParam struct { + // @@ .. cpp:var:: string string_param + // @@ + // @@ A string parameter value. + // @@ + StringParam string `protobuf:"bytes,3,opt,name=string_param,json=stringParam,proto3,oneof"` +} + +type InferParameter_DoubleParam struct { + // @@ .. 
cpp:var:: double double_param + // @@ + // @@ A double parameter value. + // @@ + // @@ Not supported for custom parameters + // @@ + DoubleParam float64 `protobuf:"fixed64,4,opt,name=double_param,json=doubleParam,proto3,oneof"` +} + +type InferParameter_Uint64Param struct { + // @@ .. cpp:var:: uint64 uint64_param + // @@ + // @@ A uint64 parameter value. + // @@ + // @@ Not supported for custom parameters + // @@ + Uint64Param uint64 `protobuf:"varint,5,opt,name=uint64_param,json=uint64Param,proto3,oneof"` +} + +func (*InferParameter_BoolParam) isInferParameter_ParameterChoice() {} + +func (*InferParameter_Int64Param) isInferParameter_ParameterChoice() {} + +func (*InferParameter_StringParam) isInferParameter_ParameterChoice() {} + +func (*InferParameter_DoubleParam) isInferParameter_ParameterChoice() {} + +func (*InferParameter_Uint64Param) isInferParameter_ParameterChoice() {} + +// @@ +// @@.. cpp:var:: message InferTensorContents +// @@ +// @@ The data contained in a tensor represented by the repeated type +// @@ that matches the tensor's data type. Protobuf oneof is not used +// @@ because oneofs cannot contain repeated fields. +// @@ +type InferTensorContents struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: bool bool_contents (repeated) + // @@ + // @@ Representation for BOOL data type. The size must match what is + // @@ expected by the tensor's shape. The contents must be the flattened, + // @@ one-dimensional, row-major order of the tensor elements. + // @@ + BoolContents []bool `protobuf:"varint,1,rep,packed,name=bool_contents,json=boolContents,proto3" json:"bool_contents,omitempty"` + // @@ + // @@ .. cpp:var:: int32 int_contents (repeated) + // @@ + // @@ Representation for INT8, INT16, and INT32 data types. The size + // @@ must match what is expected by the tensor's shape. 
The contents + // @@ must be the flattened, one-dimensional, row-major order of the + // @@ tensor elements. + // @@ + IntContents []int32 `protobuf:"varint,2,rep,packed,name=int_contents,json=intContents,proto3" json:"int_contents,omitempty"` + // @@ + // @@ .. cpp:var:: int64 int64_contents (repeated) + // @@ + // @@ Representation for INT64 data types. The size must match what + // @@ is expected by the tensor's shape. The contents must be the + // @@ flattened, one-dimensional, row-major order of the tensor elements. + // @@ + Int64Contents []int64 `protobuf:"varint,3,rep,packed,name=int64_contents,json=int64Contents,proto3" json:"int64_contents,omitempty"` + // @@ + // @@ .. cpp:var:: uint32 uint_contents (repeated) + // @@ + // @@ Representation for UINT8, UINT16, and UINT32 data types. The size + // @@ must match what is expected by the tensor's shape. The contents + // @@ must be the flattened, one-dimensional, row-major order of the + // @@ tensor elements. + // @@ + UintContents []uint32 `protobuf:"varint,4,rep,packed,name=uint_contents,json=uintContents,proto3" json:"uint_contents,omitempty"` + // @@ + // @@ .. cpp:var:: uint64 uint64_contents (repeated) + // @@ + // @@ Representation for UINT64 data types. The size must match what + // @@ is expected by the tensor's shape. The contents must be the + // @@ flattened, one-dimensional, row-major order of the tensor elements. + // @@ + Uint64Contents []uint64 `protobuf:"varint,5,rep,packed,name=uint64_contents,json=uint64Contents,proto3" json:"uint64_contents,omitempty"` + // @@ + // @@ .. cpp:var:: float fp32_contents (repeated) + // @@ + // @@ Representation for FP32 data type. The size must match what is + // @@ expected by the tensor's shape. The contents must be the flattened, + // @@ one-dimensional, row-major order of the tensor elements. + // @@ + Fp32Contents []float32 `protobuf:"fixed32,6,rep,packed,name=fp32_contents,json=fp32Contents,proto3" json:"fp32_contents,omitempty"` + // @@ + // @@ .. 
cpp:var:: double fp64_contents (repeated) + // @@ + // @@ Representation for FP64 data type. The size must match what is + // @@ expected by the tensor's shape. The contents must be the flattened, + // @@ one-dimensional, row-major order of the tensor elements. + // @@ + Fp64Contents []float64 `protobuf:"fixed64,7,rep,packed,name=fp64_contents,json=fp64Contents,proto3" json:"fp64_contents,omitempty"` + // @@ + // @@ .. cpp:var:: bytes bytes_contents (repeated) + // @@ + // @@ Representation for BYTES data type. The size must match what is + // @@ expected by the tensor's shape. The contents must be the flattened, + // @@ one-dimensional, row-major order of the tensor elements. + // @@ + BytesContents [][]byte `protobuf:"bytes,8,rep,name=bytes_contents,json=bytesContents,proto3" json:"bytes_contents,omitempty"` +} + +func (x *InferTensorContents) Reset() { + *x = InferTensorContents{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InferTensorContents) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InferTensorContents) ProtoMessage() {} + +func (x *InferTensorContents) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InferTensorContents.ProtoReflect.Descriptor instead. 
+func (*InferTensorContents) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{11} +} + +func (x *InferTensorContents) GetBoolContents() []bool { + if x != nil { + return x.BoolContents + } + return nil +} + +func (x *InferTensorContents) GetIntContents() []int32 { + if x != nil { + return x.IntContents + } + return nil +} + +func (x *InferTensorContents) GetInt64Contents() []int64 { + if x != nil { + return x.Int64Contents + } + return nil +} + +func (x *InferTensorContents) GetUintContents() []uint32 { + if x != nil { + return x.UintContents + } + return nil +} + +func (x *InferTensorContents) GetUint64Contents() []uint64 { + if x != nil { + return x.Uint64Contents + } + return nil +} + +func (x *InferTensorContents) GetFp32Contents() []float32 { + if x != nil { + return x.Fp32Contents + } + return nil +} + +func (x *InferTensorContents) GetFp64Contents() []float64 { + if x != nil { + return x.Fp64Contents + } + return nil +} + +func (x *InferTensorContents) GetBytesContents() [][]byte { + if x != nil { + return x.BytesContents + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelInferRequest +// @@ +// @@ Request message for ModelInfer. +// @@ +type ModelInferRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string model_name + // @@ + // @@ The name of the model to use for inferencing. + // @@ + ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // @@ .. cpp:var:: string model_version + // @@ + // @@ The version of the model to use for inference. If not + // @@ given the latest/most-recent version of the model is used. + // @@ + ModelVersion string `protobuf:"bytes,2,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"` + // @@ .. cpp:var:: string id + // @@ + // @@ Optional identifier for the request. 
If specified will be + // @@ returned in the response. + // @@ + Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional inference parameters. + // @@ + Parameters map[string]*InferParameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ + // @@ .. cpp:var:: InferInputTensor inputs (repeated) + // @@ + // @@ The input tensors for the inference. + // @@ + Inputs []*ModelInferRequest_InferInputTensor `protobuf:"bytes,5,rep,name=inputs,proto3" json:"inputs,omitempty"` + // @@ + // @@ .. cpp:var:: InferRequestedOutputTensor outputs (repeated) + // @@ + // @@ The requested output tensors for the inference. Optional, if not + // @@ specified all outputs specified in the model config will be + // @@ returned. + // @@ + Outputs []*ModelInferRequest_InferRequestedOutputTensor `protobuf:"bytes,6,rep,name=outputs,proto3" json:"outputs,omitempty"` + // @@ + // @@ .. cpp:var:: bytes raw_input_contents + // @@ + // @@ The data contained in an input tensor can be represented in + // @@ "raw" bytes form or in the repeated type that matches the + // @@ tensor's data type. Using the "raw" bytes form will + // @@ typically allow higher performance due to the way protobuf + // @@ allocation and reuse interacts with GRPC. For example, see + // @@ https://github.com/grpc/grpc/issues/23231. + // @@ + // @@ To use the raw representation 'raw_input_contents' must be + // @@ initialized with data for each tensor in the same order as + // @@ 'inputs'. For each tensor, the size of this content must + // @@ match what is expected by the tensor's shape and data + // @@ type. The raw data must be the flattened, one-dimensional, + // @@ row-major order of the tensor elements without any stride + // @@ or padding between the elements. 
Note that the FP16 and BF16 data + // @@ types must be represented as raw content as there is no + // @@ specific data type for a 16-bit float type. + // @@ + // @@ If this field is specified then InferInputTensor::contents + // @@ must not be specified for any input tensor. + // @@ + RawInputContents [][]byte `protobuf:"bytes,7,rep,name=raw_input_contents,json=rawInputContents,proto3" json:"raw_input_contents,omitempty"` +} + +func (x *ModelInferRequest) Reset() { + *x = ModelInferRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInferRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInferRequest) ProtoMessage() {} + +func (x *ModelInferRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInferRequest.ProtoReflect.Descriptor instead. 
+func (*ModelInferRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{12} +} + +func (x *ModelInferRequest) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +func (x *ModelInferRequest) GetModelVersion() string { + if x != nil { + return x.ModelVersion + } + return "" +} + +func (x *ModelInferRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *ModelInferRequest) GetParameters() map[string]*InferParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *ModelInferRequest) GetInputs() []*ModelInferRequest_InferInputTensor { + if x != nil { + return x.Inputs + } + return nil +} + +func (x *ModelInferRequest) GetOutputs() []*ModelInferRequest_InferRequestedOutputTensor { + if x != nil { + return x.Outputs + } + return nil +} + +func (x *ModelInferRequest) GetRawInputContents() [][]byte { + if x != nil { + return x.RawInputContents + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelInferResponse +// @@ +// @@ Response message for ModelInfer. +// @@ +type ModelInferResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string model_name + // @@ + // @@ The name of the model used for inference. + // @@ + ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // @@ .. cpp:var:: string model_version + // @@ + // @@ The version of the model used for inference. + // @@ + ModelVersion string `protobuf:"bytes,2,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"` + // @@ .. cpp:var:: string id + // @@ + // @@ The id of the inference request if one was specified. + // @@ + Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional inference response parameters. 
+ // @@ + Parameters map[string]*InferParameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ + // @@ .. cpp:var:: InferOutputTensor outputs (repeated) + // @@ + // @@ The output tensors holding inference results. + // @@ + Outputs []*ModelInferResponse_InferOutputTensor `protobuf:"bytes,5,rep,name=outputs,proto3" json:"outputs,omitempty"` + // @@ + // @@ .. cpp:var:: bytes raw_output_contents + // @@ + // @@ The data contained in an output tensor can be represented in + // @@ "raw" bytes form or in the repeated type that matches the + // @@ tensor's data type. Using the "raw" bytes form will + // @@ typically allow higher performance due to the way protobuf + // @@ allocation and reuse interacts with GRPC. For example, see + // @@ https://github.com/grpc/grpc/issues/23231. + // @@ + // @@ To use the raw representation 'raw_output_contents' must be + // @@ initialized with data for each tensor in the same order as + // @@ 'outputs'. For each tensor, the size of this content must + // @@ match what is expected by the tensor's shape and data + // @@ type. The raw data must be the flattened, one-dimensional, + // @@ row-major order of the tensor elements without any stride + // @@ or padding between the elements. Note that the FP16 and BF16 data + // @@ types must be represented as raw content as there is no + // @@ specific data type for a 16-bit float type. + // @@ + // @@ If this field is specified then InferOutputTensor::contents + // @@ must not be specified for any output tensor. 
+ // @@ + RawOutputContents [][]byte `protobuf:"bytes,6,rep,name=raw_output_contents,json=rawOutputContents,proto3" json:"raw_output_contents,omitempty"` +} + +func (x *ModelInferResponse) Reset() { + *x = ModelInferResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInferResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInferResponse) ProtoMessage() {} + +func (x *ModelInferResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInferResponse.ProtoReflect.Descriptor instead. +func (*ModelInferResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{13} +} + +func (x *ModelInferResponse) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +func (x *ModelInferResponse) GetModelVersion() string { + if x != nil { + return x.ModelVersion + } + return "" +} + +func (x *ModelInferResponse) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *ModelInferResponse) GetParameters() map[string]*InferParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *ModelInferResponse) GetOutputs() []*ModelInferResponse_InferOutputTensor { + if x != nil { + return x.Outputs + } + return nil +} + +func (x *ModelInferResponse) GetRawOutputContents() [][]byte { + if x != nil { + return x.RawOutputContents + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelStreamInferResponse +// @@ +// @@ Response message for ModelStreamInfer. 
+// @@ +type ModelStreamInferResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string error_message + // @@ + // @@ The message describing the error. The empty message + // @@ indicates the inference was successful without errors. + // @@ + ErrorMessage string `protobuf:"bytes,1,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + // @@ + // @@ .. cpp:var:: ModelInferResponse infer_response + // @@ + // @@ Holds the results of the request. + // @@ + InferResponse *ModelInferResponse `protobuf:"bytes,2,opt,name=infer_response,json=inferResponse,proto3" json:"infer_response,omitempty"` +} + +func (x *ModelStreamInferResponse) Reset() { + *x = ModelStreamInferResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelStreamInferResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelStreamInferResponse) ProtoMessage() {} + +func (x *ModelStreamInferResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelStreamInferResponse.ProtoReflect.Descriptor instead. 
+func (*ModelStreamInferResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{14} +} + +func (x *ModelStreamInferResponse) GetErrorMessage() string { + if x != nil { + return x.ErrorMessage + } + return "" +} + +func (x *ModelStreamInferResponse) GetInferResponse() *ModelInferResponse { + if x != nil { + return x.InferResponse + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelConfigRequest +// @@ +// @@ Request message for ModelConfig. +// @@ +type ModelConfigRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model. If not given the model version + // @@ is selected automatically based on the version policy. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *ModelConfigRequest) Reset() { + *x = ModelConfigRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelConfigRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelConfigRequest) ProtoMessage() {} + +func (x *ModelConfigRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelConfigRequest.ProtoReflect.Descriptor instead. 
+func (*ModelConfigRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{15} +} + +func (x *ModelConfigRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelConfigRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +// @@ +// @@.. cpp:var:: message ModelConfigResponse +// @@ +// @@ Response message for ModelConfig. +// @@ +type ModelConfigResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: ModelConfig config + // @@ + // @@ The model configuration. + // @@ + Config *ModelConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` +} + +func (x *ModelConfigResponse) Reset() { + *x = ModelConfigResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelConfigResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelConfigResponse) ProtoMessage() {} + +func (x *ModelConfigResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelConfigResponse.ProtoReflect.Descriptor instead. +func (*ModelConfigResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{16} +} + +func (x *ModelConfigResponse) GetConfig() *ModelConfig { + if x != nil { + return x.Config + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelStatisticsRequest +// @@ +// @@ Request message for ModelStatistics. 
+// @@ +type ModelStatisticsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. If not given returns statistics for + // @@ all models. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model. If not given returns statistics for + // @@ all model versions. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *ModelStatisticsRequest) Reset() { + *x = ModelStatisticsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelStatisticsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelStatisticsRequest) ProtoMessage() {} + +func (x *ModelStatisticsRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelStatisticsRequest.ProtoReflect.Descriptor instead. +func (*ModelStatisticsRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{17} +} + +func (x *ModelStatisticsRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelStatisticsRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +// @@ +// @@.. cpp:var:: message StatisticDuration +// @@ +// @@ Statistic recording a cumulative duration metric. 
+// @@ +type StatisticDuration struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: uint64 count + // @@ + // @@ Cumulative number of times this metric occurred. + // @@ + Count uint64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + // @@ .. cpp:var:: uint64 total_time_ns + // @@ + // @@ Total collected duration of this metric in nanoseconds. + // @@ + Ns uint64 `protobuf:"varint,2,opt,name=ns,proto3" json:"ns,omitempty"` +} + +func (x *StatisticDuration) Reset() { + *x = StatisticDuration{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StatisticDuration) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StatisticDuration) ProtoMessage() {} + +func (x *StatisticDuration) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StatisticDuration.ProtoReflect.Descriptor instead. +func (*StatisticDuration) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{18} +} + +func (x *StatisticDuration) GetCount() uint64 { + if x != nil { + return x.Count + } + return 0 +} + +func (x *StatisticDuration) GetNs() uint64 { + if x != nil { + return x.Ns + } + return 0 +} + +// @@ +// @@.. cpp:var:: message InferStatistics +// @@ +// @@ Inference statistics. +// @@ +type InferStatistics struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. 
cpp:var:: StatisticDuration success + // @@ + // @@ Cumulative count and duration for successful inference + // @@ request. The "success" count and cumulative duration includes + // @@ cache hits. + // @@ + Success *StatisticDuration `protobuf:"bytes,1,opt,name=success,proto3" json:"success,omitempty"` + // @@ .. cpp:var:: StatisticDuration fail + // @@ + // @@ Cumulative count and duration for failed inference + // @@ request. + // @@ + Fail *StatisticDuration `protobuf:"bytes,2,opt,name=fail,proto3" json:"fail,omitempty"` + // @@ .. cpp:var:: StatisticDuration queue + // @@ + // @@ The count and cumulative duration that inference requests wait in + // @@ scheduling or other queues. The "queue" count and cumulative + // @@ duration includes cache hits. + // @@ + Queue *StatisticDuration `protobuf:"bytes,3,opt,name=queue,proto3" json:"queue,omitempty"` + // @@ .. cpp:var:: StatisticDuration compute_input + // @@ + // @@ The count and cumulative duration to prepare input tensor data as + // @@ required by the model framework / backend. For example, this duration + // @@ should include the time to copy input tensor data to the GPU. + // @@ The "compute_input" count and cumulative duration do not account for + // @@ requests that were a cache hit. See the "cache_hit" field for more + // @@ info. + // @@ + ComputeInput *StatisticDuration `protobuf:"bytes,4,opt,name=compute_input,json=computeInput,proto3" json:"compute_input,omitempty"` + // @@ .. cpp:var:: StatisticDuration compute_infer + // @@ + // @@ The count and cumulative duration to execute the model. + // @@ The "compute_infer" count and cumulative duration do not account for + // @@ requests that were a cache hit. See the "cache_hit" field for more + // @@ info. + // @@ + ComputeInfer *StatisticDuration `protobuf:"bytes,5,opt,name=compute_infer,json=computeInfer,proto3" json:"compute_infer,omitempty"` + // @@ .. 
cpp:var:: StatisticDuration compute_output + // @@ + // @@ The count and cumulative duration to extract output tensor data + // @@ produced by the model framework / backend. For example, this duration + // @@ should include the time to copy output tensor data from the GPU. + // @@ The "compute_output" count and cumulative duration do not account for + // @@ requests that were a cache hit. See the "cache_hit" field for more + // @@ info. + // @@ + ComputeOutput *StatisticDuration `protobuf:"bytes,6,opt,name=compute_output,json=computeOutput,proto3" json:"compute_output,omitempty"` + // @@ .. cpp:var:: StatisticDuration cache_hit + // @@ + // @@ The count of response cache hits and cumulative duration to lookup + // @@ and extract output tensor data from the Response Cache on a cache + // @@ hit. For example, this duration should include the time to copy + // @@ output tensor data from the Response Cache to the response object. + // @@ On cache hits, triton does not need to go to the model/backend + // @@ for the output tensor data, so the "compute_input", "compute_infer", + // @@ and "compute_output" fields are not updated. Assuming the response + // @@ cache is enabled for a given model, a cache hit occurs for a + // @@ request to that model when the request metadata (model name, + // @@ model version, model inputs) hashes to an existing entry in the + // @@ cache. On a cache miss, the request hash and response output tensor + // @@ data is added to the cache. See response cache docs for more info: + // @@ + // https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md + // @@ + CacheHit *StatisticDuration `protobuf:"bytes,7,opt,name=cache_hit,json=cacheHit,proto3" json:"cache_hit,omitempty"` + // @@ .. cpp:var:: StatisticDuration cache_miss + // @@ + // @@ The count of response cache misses and cumulative duration to lookup + // @@ and insert output tensor data from the computed response to the + // cache. 
+ // @@ For example, this duration should include the time to copy + // @@ output tensor data from the response object to the Response Cache. + // @@ Assuming the response cache is enabled for a given model, a cache + // @@ miss occurs for a request to that model when the request metadata + // @@ does NOT hash to an existing entry in the cache. See the response + // @@ cache docs for more info: + // @@ + // https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md + // @@ + CacheMiss *StatisticDuration `protobuf:"bytes,8,opt,name=cache_miss,json=cacheMiss,proto3" json:"cache_miss,omitempty"` +} + +func (x *InferStatistics) Reset() { + *x = InferStatistics{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InferStatistics) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InferStatistics) ProtoMessage() {} + +func (x *InferStatistics) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InferStatistics.ProtoReflect.Descriptor instead. 
+func (*InferStatistics) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{19} +} + +func (x *InferStatistics) GetSuccess() *StatisticDuration { + if x != nil { + return x.Success + } + return nil +} + +func (x *InferStatistics) GetFail() *StatisticDuration { + if x != nil { + return x.Fail + } + return nil +} + +func (x *InferStatistics) GetQueue() *StatisticDuration { + if x != nil { + return x.Queue + } + return nil +} + +func (x *InferStatistics) GetComputeInput() *StatisticDuration { + if x != nil { + return x.ComputeInput + } + return nil +} + +func (x *InferStatistics) GetComputeInfer() *StatisticDuration { + if x != nil { + return x.ComputeInfer + } + return nil +} + +func (x *InferStatistics) GetComputeOutput() *StatisticDuration { + if x != nil { + return x.ComputeOutput + } + return nil +} + +func (x *InferStatistics) GetCacheHit() *StatisticDuration { + if x != nil { + return x.CacheHit + } + return nil +} + +func (x *InferStatistics) GetCacheMiss() *StatisticDuration { + if x != nil { + return x.CacheMiss + } + return nil +} + +// @@ +// @@.. cpp:var:: message InferBatchStatistics +// @@ +// @@ Inference batch statistics. +// @@ +type InferBatchStatistics struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: uint64 batch_size + // @@ + // @@ The size of the batch. + // @@ + BatchSize uint64 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + // @@ .. cpp:var:: StatisticDuration compute_input + // @@ + // @@ The count and cumulative duration to prepare input tensor data as + // @@ required by the model framework / backend with the given batch size. + // @@ For example, this duration should include the time to copy input + // @@ tensor data to the GPU. 
+ // @@ + ComputeInput *StatisticDuration `protobuf:"bytes,2,opt,name=compute_input,json=computeInput,proto3" json:"compute_input,omitempty"` + // @@ .. cpp:var:: StatisticDuration compute_infer + // @@ + // @@ The count and cumulative duration to execute the model with the given + // @@ batch size. + // @@ + ComputeInfer *StatisticDuration `protobuf:"bytes,3,opt,name=compute_infer,json=computeInfer,proto3" json:"compute_infer,omitempty"` + // @@ .. cpp:var:: StatisticDuration compute_output + // @@ + // @@ The count and cumulative duration to extract output tensor data + // @@ produced by the model framework / backend with the given batch size. + // @@ For example, this duration should include the time to copy output + // @@ tensor data from the GPU. + // @@ + ComputeOutput *StatisticDuration `protobuf:"bytes,4,opt,name=compute_output,json=computeOutput,proto3" json:"compute_output,omitempty"` +} + +func (x *InferBatchStatistics) Reset() { + *x = InferBatchStatistics{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InferBatchStatistics) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InferBatchStatistics) ProtoMessage() {} + +func (x *InferBatchStatistics) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InferBatchStatistics.ProtoReflect.Descriptor instead. 
+func (*InferBatchStatistics) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{20} +} + +func (x *InferBatchStatistics) GetBatchSize() uint64 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *InferBatchStatistics) GetComputeInput() *StatisticDuration { + if x != nil { + return x.ComputeInput + } + return nil +} + +func (x *InferBatchStatistics) GetComputeInfer() *StatisticDuration { + if x != nil { + return x.ComputeInfer + } + return nil +} + +func (x *InferBatchStatistics) GetComputeOutput() *StatisticDuration { + if x != nil { + return x.ComputeOutput + } + return nil +} + +// @@ +// @@.. cpp:var:: message MemoryUsage +// @@ +// @@ Memory usage. +// @@ +type MemoryUsage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string type + // @@ + // @@ The type of memory, the value can be "CPU", "CPU_PINNED", "GPU". + // @@ + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // @@ .. cpp:var:: int64 id + // @@ + // @@ The id of the memory, typically used with "type" to identify + // @@ a device that hosts the memory. + // @@ + Id int64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // @@ .. cpp:var:: uint64 byte_size + // @@ + // @@ The byte size of the memory. 
+ // @@ + ByteSize uint64 `protobuf:"varint,3,opt,name=byte_size,json=byteSize,proto3" json:"byte_size,omitempty"` +} + +func (x *MemoryUsage) Reset() { + *x = MemoryUsage{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MemoryUsage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MemoryUsage) ProtoMessage() {} + +func (x *MemoryUsage) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MemoryUsage.ProtoReflect.Descriptor instead. +func (*MemoryUsage) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{21} +} + +func (x *MemoryUsage) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *MemoryUsage) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *MemoryUsage) GetByteSize() uint64 { + if x != nil { + return x.ByteSize + } + return 0 +} + +// @@ +// @@.. cpp:var:: message ModelStatistics +// @@ +// @@ Statistics for a specific model and version. +// @@ +type ModelStatistics struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. If not given returns statistics for all + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // @@ .. 
cpp:var:: uint64 last_inference + // @@ + // @@ The timestamp of the last inference request made for this model, + // @@ as milliseconds since the epoch. + // @@ + LastInference uint64 `protobuf:"varint,3,opt,name=last_inference,json=lastInference,proto3" json:"last_inference,omitempty"` + // @@ .. cpp:var:: uint64 last_inference + // @@ + // @@ The cumulative count of successful inference requests made for this + // @@ model. Each inference in a batched request is counted as an + // @@ individual inference. For example, if a client sends a single + // @@ inference request with batch size 64, "inference_count" will be + // @@ incremented by 64. Similarly, if a clients sends 64 individual + // @@ requests each with batch size 1, "inference_count" will be + // @@ incremented by 64. The "inference_count" value DOES NOT include + // @@ cache hits. + // @@ + InferenceCount uint64 `protobuf:"varint,4,opt,name=inference_count,json=inferenceCount,proto3" json:"inference_count,omitempty"` + // @@ .. cpp:var:: uint64 last_inference + // @@ + // @@ The cumulative count of the number of successful inference executions + // @@ performed for the model. When dynamic batching is enabled, a single + // @@ model execution can perform inferencing for more than one inference + // @@ request. For example, if a clients sends 64 individual requests each + // @@ with batch size 1 and the dynamic batcher batches them into a single + // @@ large batch for model execution then "execution_count" will be + // @@ incremented by 1. If, on the other hand, the dynamic batcher is not + // @@ enabled for that each of the 64 individual requests is executed + // @@ independently, then "execution_count" will be incremented by 64. + // @@ The "execution_count" value DOES NOT include cache hits. + // @@ + ExecutionCount uint64 `protobuf:"varint,5,opt,name=execution_count,json=executionCount,proto3" json:"execution_count,omitempty"` + // @@ .. 
cpp:var:: InferStatistics inference_stats + // @@ + // @@ The aggregate statistics for the model/version. + // @@ + InferenceStats *InferStatistics `protobuf:"bytes,6,opt,name=inference_stats,json=inferenceStats,proto3" json:"inference_stats,omitempty"` + // @@ .. cpp:var:: InferBatchStatistics batch_stats (repeated) + // @@ + // @@ The aggregate statistics for each different batch size that is + // @@ executed in the model. The batch statistics indicate how many actual + // @@ model executions were performed and show differences due to different + // @@ batch size (for example, larger batches typically take longer to + // @@ compute). + // @@ + BatchStats []*InferBatchStatistics `protobuf:"bytes,7,rep,name=batch_stats,json=batchStats,proto3" json:"batch_stats,omitempty"` + // @@ .. cpp:var:: MemoryUsage memory_usage (repeated) + // @@ + // @@ The memory usage detected during model loading, which may be used to + // @@ estimate the memory to be released once the model is unloaded. Note + // @@ that the estimation is inferenced by the profiling tools and + // @@ framework's memory schema, therefore it is advised to perform + // @@ experiments to understand the scenario that the reported memory usage + // @@ can be relied on. As a starting point, the GPU memory usage for + // @@ models in ONNX Runtime backend and TensorRT backend is usually + // @@ aligned. 
+ // @@ + MemoryUsage []*MemoryUsage `protobuf:"bytes,8,rep,name=memory_usage,json=memoryUsage,proto3" json:"memory_usage,omitempty"` +} + +func (x *ModelStatistics) Reset() { + *x = ModelStatistics{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelStatistics) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelStatistics) ProtoMessage() {} + +func (x *ModelStatistics) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelStatistics.ProtoReflect.Descriptor instead. +func (*ModelStatistics) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{22} +} + +func (x *ModelStatistics) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelStatistics) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *ModelStatistics) GetLastInference() uint64 { + if x != nil { + return x.LastInference + } + return 0 +} + +func (x *ModelStatistics) GetInferenceCount() uint64 { + if x != nil { + return x.InferenceCount + } + return 0 +} + +func (x *ModelStatistics) GetExecutionCount() uint64 { + if x != nil { + return x.ExecutionCount + } + return 0 +} + +func (x *ModelStatistics) GetInferenceStats() *InferStatistics { + if x != nil { + return x.InferenceStats + } + return nil +} + +func (x *ModelStatistics) GetBatchStats() []*InferBatchStatistics { + if x != nil { + return x.BatchStats + } + return nil +} + +func (x *ModelStatistics) GetMemoryUsage() []*MemoryUsage { + if x != nil { + return x.MemoryUsage + } 
+ return nil +} + +// @@ +// @@.. cpp:var:: message ModelStatisticsResponse +// @@ +// @@ Response message for ModelStatistics. +// @@ +type ModelStatisticsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: ModelStatistics model_stats (repeated) + // @@ + // @@ Statistics for each requested model. + // @@ + ModelStats []*ModelStatistics `protobuf:"bytes,1,rep,name=model_stats,json=modelStats,proto3" json:"model_stats,omitempty"` +} + +func (x *ModelStatisticsResponse) Reset() { + *x = ModelStatisticsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelStatisticsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelStatisticsResponse) ProtoMessage() {} + +func (x *ModelStatisticsResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelStatisticsResponse.ProtoReflect.Descriptor instead. +func (*ModelStatisticsResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{23} +} + +func (x *ModelStatisticsResponse) GetModelStats() []*ModelStatistics { + if x != nil { + return x.ModelStats + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelRepositoryParameter +// @@ +// @@ An model repository parameter value. +// @@ +type ModelRepositoryParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. 
cpp:var:: oneof parameter_choice + // @@ + // @@ The parameter value can be a string, an int64 or + // @@ a boolean + // @@ + // + // Types that are assignable to ParameterChoice: + // + // *ModelRepositoryParameter_BoolParam + // *ModelRepositoryParameter_Int64Param + // *ModelRepositoryParameter_StringParam + // *ModelRepositoryParameter_BytesParam + ParameterChoice isModelRepositoryParameter_ParameterChoice `protobuf_oneof:"parameter_choice"` +} + +func (x *ModelRepositoryParameter) Reset() { + *x = ModelRepositoryParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelRepositoryParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelRepositoryParameter) ProtoMessage() {} + +func (x *ModelRepositoryParameter) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelRepositoryParameter.ProtoReflect.Descriptor instead. 
+func (*ModelRepositoryParameter) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{24} +} + +func (m *ModelRepositoryParameter) GetParameterChoice() isModelRepositoryParameter_ParameterChoice { + if m != nil { + return m.ParameterChoice + } + return nil +} + +func (x *ModelRepositoryParameter) GetBoolParam() bool { + if x, ok := x.GetParameterChoice().(*ModelRepositoryParameter_BoolParam); ok { + return x.BoolParam + } + return false +} + +func (x *ModelRepositoryParameter) GetInt64Param() int64 { + if x, ok := x.GetParameterChoice().(*ModelRepositoryParameter_Int64Param); ok { + return x.Int64Param + } + return 0 +} + +func (x *ModelRepositoryParameter) GetStringParam() string { + if x, ok := x.GetParameterChoice().(*ModelRepositoryParameter_StringParam); ok { + return x.StringParam + } + return "" +} + +func (x *ModelRepositoryParameter) GetBytesParam() []byte { + if x, ok := x.GetParameterChoice().(*ModelRepositoryParameter_BytesParam); ok { + return x.BytesParam + } + return nil +} + +type isModelRepositoryParameter_ParameterChoice interface { + isModelRepositoryParameter_ParameterChoice() +} + +type ModelRepositoryParameter_BoolParam struct { + // @@ .. cpp:var:: bool bool_param + // @@ + // @@ A boolean parameter value. + // @@ + BoolParam bool `protobuf:"varint,1,opt,name=bool_param,json=boolParam,proto3,oneof"` +} + +type ModelRepositoryParameter_Int64Param struct { + // @@ .. cpp:var:: int64 int64_param + // @@ + // @@ An int64 parameter value. + // @@ + Int64Param int64 `protobuf:"varint,2,opt,name=int64_param,json=int64Param,proto3,oneof"` +} + +type ModelRepositoryParameter_StringParam struct { + // @@ .. cpp:var:: string string_param + // @@ + // @@ A string parameter value. + // @@ + StringParam string `protobuf:"bytes,3,opt,name=string_param,json=stringParam,proto3,oneof"` +} + +type ModelRepositoryParameter_BytesParam struct { + // @@ .. 
cpp:var:: bytes bytes_param + // @@ + // @@ A bytes parameter value. + // @@ + BytesParam []byte `protobuf:"bytes,4,opt,name=bytes_param,json=bytesParam,proto3,oneof"` +} + +func (*ModelRepositoryParameter_BoolParam) isModelRepositoryParameter_ParameterChoice() {} + +func (*ModelRepositoryParameter_Int64Param) isModelRepositoryParameter_ParameterChoice() {} + +func (*ModelRepositoryParameter_StringParam) isModelRepositoryParameter_ParameterChoice() {} + +func (*ModelRepositoryParameter_BytesParam) isModelRepositoryParameter_ParameterChoice() {} + +// @@ +// @@.. cpp:var:: message RepositoryIndexRequest +// @@ +// @@ Request message for RepositoryIndex. +// @@ +type RepositoryIndexRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string repository_name + // @@ + // @@ The name of the repository. If empty the index is returned + // @@ for all repositories. + // @@ + RepositoryName string `protobuf:"bytes,1,opt,name=repository_name,json=repositoryName,proto3" json:"repository_name,omitempty"` + // @@ .. cpp:var:: bool ready + // @@ + // @@ If true returned only models currently ready for inferencing. 
+ // @@ + Ready bool `protobuf:"varint,2,opt,name=ready,proto3" json:"ready,omitempty"` +} + +func (x *RepositoryIndexRequest) Reset() { + *x = RepositoryIndexRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryIndexRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryIndexRequest) ProtoMessage() {} + +func (x *RepositoryIndexRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryIndexRequest.ProtoReflect.Descriptor instead. +func (*RepositoryIndexRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{25} +} + +func (x *RepositoryIndexRequest) GetRepositoryName() string { + if x != nil { + return x.RepositoryName + } + return "" +} + +func (x *RepositoryIndexRequest) GetReady() bool { + if x != nil { + return x.Ready + } + return false +} + +// @@ +// @@.. cpp:var:: message RepositoryIndexResponse +// @@ +// @@ Response message for RepositoryIndex. +// @@ +type RepositoryIndexResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: ModelIndex models (repeated) + // @@ + // @@ An index entry for each model. 
+ // @@ + Models []*RepositoryIndexResponse_ModelIndex `protobuf:"bytes,1,rep,name=models,proto3" json:"models,omitempty"` +} + +func (x *RepositoryIndexResponse) Reset() { + *x = RepositoryIndexResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryIndexResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryIndexResponse) ProtoMessage() {} + +func (x *RepositoryIndexResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryIndexResponse.ProtoReflect.Descriptor instead. +func (*RepositoryIndexResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{26} +} + +func (x *RepositoryIndexResponse) GetModels() []*RepositoryIndexResponse_ModelIndex { + if x != nil { + return x.Models + } + return nil +} + +// @@ +// @@.. cpp:var:: message RepositoryModelLoadRequest +// @@ +// @@ Request message for RepositoryModelLoad. +// @@ +type RepositoryModelLoadRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string repository_name + // @@ + // @@ The name of the repository to load from. If empty the model + // @@ is loaded from any repository. + // @@ + RepositoryName string `protobuf:"bytes,1,opt,name=repository_name,json=repositoryName,proto3" json:"repository_name,omitempty"` + // @@ .. cpp:var:: string repository_name + // @@ + // @@ The name of the model to load, or reload. 
+ // @@ + ModelName string `protobuf:"bytes,2,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional model repository request parameters. + // @@ + Parameters map[string]*ModelRepositoryParameter `protobuf:"bytes,3,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *RepositoryModelLoadRequest) Reset() { + *x = RepositoryModelLoadRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryModelLoadRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryModelLoadRequest) ProtoMessage() {} + +func (x *RepositoryModelLoadRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryModelLoadRequest.ProtoReflect.Descriptor instead. +func (*RepositoryModelLoadRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{27} +} + +func (x *RepositoryModelLoadRequest) GetRepositoryName() string { + if x != nil { + return x.RepositoryName + } + return "" +} + +func (x *RepositoryModelLoadRequest) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +func (x *RepositoryModelLoadRequest) GetParameters() map[string]*ModelRepositoryParameter { + if x != nil { + return x.Parameters + } + return nil +} + +// @@ +// @@.. cpp:var:: message RepositoryModelLoadResponse +// @@ +// @@ Response message for RepositoryModelLoad. 
+// @@ +type RepositoryModelLoadResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RepositoryModelLoadResponse) Reset() { + *x = RepositoryModelLoadResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryModelLoadResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryModelLoadResponse) ProtoMessage() {} + +func (x *RepositoryModelLoadResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryModelLoadResponse.ProtoReflect.Descriptor instead. +func (*RepositoryModelLoadResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{28} +} + +// @@ +// @@.. cpp:var:: message RepositoryModelUnloadRequest +// @@ +// @@ Request message for RepositoryModelUnload. +// @@ +type RepositoryModelUnloadRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string repository_name + // @@ + // @@ The name of the repository from which the model was originally + // @@ loaded. If empty the repository is not considered. + // @@ + RepositoryName string `protobuf:"bytes,1,opt,name=repository_name,json=repositoryName,proto3" json:"repository_name,omitempty"` + // @@ .. cpp:var:: string repository_name + // @@ + // @@ The name of the model to unload. 
+ // @@ + ModelName string `protobuf:"bytes,2,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional model repository request parameters. + // @@ + Parameters map[string]*ModelRepositoryParameter `protobuf:"bytes,3,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *RepositoryModelUnloadRequest) Reset() { + *x = RepositoryModelUnloadRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryModelUnloadRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryModelUnloadRequest) ProtoMessage() {} + +func (x *RepositoryModelUnloadRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryModelUnloadRequest.ProtoReflect.Descriptor instead. +func (*RepositoryModelUnloadRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{29} +} + +func (x *RepositoryModelUnloadRequest) GetRepositoryName() string { + if x != nil { + return x.RepositoryName + } + return "" +} + +func (x *RepositoryModelUnloadRequest) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +func (x *RepositoryModelUnloadRequest) GetParameters() map[string]*ModelRepositoryParameter { + if x != nil { + return x.Parameters + } + return nil +} + +// @@ +// @@.. 
cpp:var:: message RepositoryModelUnloadResponse +// @@ +// @@ Response message for RepositoryModelUnload. +// @@ +type RepositoryModelUnloadResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RepositoryModelUnloadResponse) Reset() { + *x = RepositoryModelUnloadResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryModelUnloadResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryModelUnloadResponse) ProtoMessage() {} + +func (x *RepositoryModelUnloadResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryModelUnloadResponse.ProtoReflect.Descriptor instead. +func (*RepositoryModelUnloadResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{30} +} + +// @@ +// @@.. cpp:var:: message SystemSharedMemoryStatusRequest +// @@ +// @@ Request message for SystemSharedMemoryStatus. +// @@ +type SystemSharedMemoryStatusRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the region to get status for. If empty the + // @@ status is returned for all registered regions. 
+ // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *SystemSharedMemoryStatusRequest) Reset() { + *x = SystemSharedMemoryStatusRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryStatusRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryStatusRequest) ProtoMessage() {} + +func (x *SystemSharedMemoryStatusRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryStatusRequest.ProtoReflect.Descriptor instead. +func (*SystemSharedMemoryStatusRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{31} +} + +func (x *SystemSharedMemoryStatusRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// @@ +// @@.. cpp:var:: message SystemSharedMemoryStatusResponse +// @@ +// @@ Response message for SystemSharedMemoryStatus. +// @@ +type SystemSharedMemoryStatusResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: map regions + // @@ + // @@ Status for each of the registered regions, indexed by + // @@ region name. 
+ // @@ + Regions map[string]*SystemSharedMemoryStatusResponse_RegionStatus `protobuf:"bytes,1,rep,name=regions,proto3" json:"regions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *SystemSharedMemoryStatusResponse) Reset() { + *x = SystemSharedMemoryStatusResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryStatusResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryStatusResponse) ProtoMessage() {} + +func (x *SystemSharedMemoryStatusResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryStatusResponse.ProtoReflect.Descriptor instead. +func (*SystemSharedMemoryStatusResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{32} +} + +func (x *SystemSharedMemoryStatusResponse) GetRegions() map[string]*SystemSharedMemoryStatusResponse_RegionStatus { + if x != nil { + return x.Regions + } + return nil +} + +// @@ +// @@.. cpp:var:: message SystemSharedMemoryRegisterRequest +// @@ +// @@ Request message for SystemSharedMemoryRegister. +// @@ +type SystemSharedMemoryRegisterRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the region to register. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. 
cpp:var:: string shared_memory_key + // @@ + // @@ The key of the underlying memory object that contains the + // @@ shared memory region. + // @@ + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` + // @@ .. cpp:var:: uint64 offset + // @@ + // @@ Offset, in bytes, within the underlying memory object to + // @@ the start of the shared memory region. + // @@ + Offset uint64 `protobuf:"varint,3,opt,name=offset,proto3" json:"offset,omitempty"` + // @@ .. cpp:var:: uint64 byte_size + // @@ + // @@ Size of the shared memory region, in bytes. + // @@ + ByteSize uint64 `protobuf:"varint,4,opt,name=byte_size,json=byteSize,proto3" json:"byte_size,omitempty"` +} + +func (x *SystemSharedMemoryRegisterRequest) Reset() { + *x = SystemSharedMemoryRegisterRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryRegisterRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryRegisterRequest) ProtoMessage() {} + +func (x *SystemSharedMemoryRegisterRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[33] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryRegisterRequest.ProtoReflect.Descriptor instead. 
+func (*SystemSharedMemoryRegisterRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{33} +} + +func (x *SystemSharedMemoryRegisterRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *SystemSharedMemoryRegisterRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *SystemSharedMemoryRegisterRequest) GetOffset() uint64 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *SystemSharedMemoryRegisterRequest) GetByteSize() uint64 { + if x != nil { + return x.ByteSize + } + return 0 +} + +// @@ +// @@.. cpp:var:: message SystemSharedMemoryRegisterResponse +// @@ +// @@ Response message for SystemSharedMemoryRegister. +// @@ +type SystemSharedMemoryRegisterResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SystemSharedMemoryRegisterResponse) Reset() { + *x = SystemSharedMemoryRegisterResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryRegisterResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryRegisterResponse) ProtoMessage() {} + +func (x *SystemSharedMemoryRegisterResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryRegisterResponse.ProtoReflect.Descriptor instead. 
+func (*SystemSharedMemoryRegisterResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{34} +} + +// @@ +// @@.. cpp:var:: message SystemSharedMemoryUnregisterRequest +// @@ +// @@ Request message for SystemSharedMemoryUnregister. +// @@ +type SystemSharedMemoryUnregisterRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the system region to unregister. If empty + // @@ all system shared-memory regions are unregistered. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *SystemSharedMemoryUnregisterRequest) Reset() { + *x = SystemSharedMemoryUnregisterRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryUnregisterRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryUnregisterRequest) ProtoMessage() {} + +func (x *SystemSharedMemoryUnregisterRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryUnregisterRequest.ProtoReflect.Descriptor instead. +func (*SystemSharedMemoryUnregisterRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{35} +} + +func (x *SystemSharedMemoryUnregisterRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// @@ +// @@.. 
cpp:var:: message SystemSharedMemoryUnregisterResponse +// @@ +// @@ Response message for SystemSharedMemoryUnregister. +// @@ +type SystemSharedMemoryUnregisterResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SystemSharedMemoryUnregisterResponse) Reset() { + *x = SystemSharedMemoryUnregisterResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryUnregisterResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryUnregisterResponse) ProtoMessage() {} + +func (x *SystemSharedMemoryUnregisterResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[36] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryUnregisterResponse.ProtoReflect.Descriptor instead. +func (*SystemSharedMemoryUnregisterResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{36} +} + +// @@ +// @@.. cpp:var:: message CudaSharedMemoryStatusRequest +// @@ +// @@ Request message for CudaSharedMemoryStatus. +// @@ +type CudaSharedMemoryStatusRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the region to get status for. If empty the + // @@ status is returned for all registered regions. 
+ // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *CudaSharedMemoryStatusRequest) Reset() { + *x = CudaSharedMemoryStatusRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryStatusRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryStatusRequest) ProtoMessage() {} + +func (x *CudaSharedMemoryStatusRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryStatusRequest.ProtoReflect.Descriptor instead. +func (*CudaSharedMemoryStatusRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{37} +} + +func (x *CudaSharedMemoryStatusRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// @@ +// @@.. cpp:var:: message CudaSharedMemoryStatusResponse +// @@ +// @@ Response message for CudaSharedMemoryStatus. +// @@ +type CudaSharedMemoryStatusResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: map regions + // @@ + // @@ Status for each of the registered regions, indexed by + // @@ region name. 
+ // @@ + Regions map[string]*CudaSharedMemoryStatusResponse_RegionStatus `protobuf:"bytes,1,rep,name=regions,proto3" json:"regions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *CudaSharedMemoryStatusResponse) Reset() { + *x = CudaSharedMemoryStatusResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryStatusResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryStatusResponse) ProtoMessage() {} + +func (x *CudaSharedMemoryStatusResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryStatusResponse.ProtoReflect.Descriptor instead. +func (*CudaSharedMemoryStatusResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{38} +} + +func (x *CudaSharedMemoryStatusResponse) GetRegions() map[string]*CudaSharedMemoryStatusResponse_RegionStatus { + if x != nil { + return x.Regions + } + return nil +} + +// @@ +// @@.. cpp:var:: message CudaSharedMemoryRegisterRequest +// @@ +// @@ Request message for CudaSharedMemoryRegister. +// @@ +type CudaSharedMemoryRegisterRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the region to register. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. 
cpp:var:: bytes raw_handle + // @@ + // @@ The raw serialized cudaIPC handle. + // @@ + RawHandle []byte `protobuf:"bytes,2,opt,name=raw_handle,json=rawHandle,proto3" json:"raw_handle,omitempty"` + // @@ .. cpp:var:: int64 device_id + // @@ + // @@ The GPU device ID on which the cudaIPC handle was created. + // @@ + DeviceId int64 `protobuf:"varint,3,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` + // @@ .. cpp:var:: uint64 byte_size + // @@ + // @@ Size of the shared memory block, in bytes. + // @@ + ByteSize uint64 `protobuf:"varint,4,opt,name=byte_size,json=byteSize,proto3" json:"byte_size,omitempty"` +} + +func (x *CudaSharedMemoryRegisterRequest) Reset() { + *x = CudaSharedMemoryRegisterRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryRegisterRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryRegisterRequest) ProtoMessage() {} + +func (x *CudaSharedMemoryRegisterRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[39] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryRegisterRequest.ProtoReflect.Descriptor instead. 
+func (*CudaSharedMemoryRegisterRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{39} +} + +func (x *CudaSharedMemoryRegisterRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *CudaSharedMemoryRegisterRequest) GetRawHandle() []byte { + if x != nil { + return x.RawHandle + } + return nil +} + +func (x *CudaSharedMemoryRegisterRequest) GetDeviceId() int64 { + if x != nil { + return x.DeviceId + } + return 0 +} + +func (x *CudaSharedMemoryRegisterRequest) GetByteSize() uint64 { + if x != nil { + return x.ByteSize + } + return 0 +} + +// @@ +// @@.. cpp:var:: message CudaSharedMemoryRegisterResponse +// @@ +// @@ Response message for CudaSharedMemoryRegister. +// @@ +type CudaSharedMemoryRegisterResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *CudaSharedMemoryRegisterResponse) Reset() { + *x = CudaSharedMemoryRegisterResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryRegisterResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryRegisterResponse) ProtoMessage() {} + +func (x *CudaSharedMemoryRegisterResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[40] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryRegisterResponse.ProtoReflect.Descriptor instead. 
+func (*CudaSharedMemoryRegisterResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{40} +} + +// @@ +// @@.. cpp:var:: message CudaSharedMemoryUnregisterRequest +// @@ +// @@ Request message for CudaSharedMemoryUnregister. +// @@ +type CudaSharedMemoryUnregisterRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the cuda region to unregister. If empty + // @@ all cuda shared-memory regions are unregistered. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *CudaSharedMemoryUnregisterRequest) Reset() { + *x = CudaSharedMemoryUnregisterRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryUnregisterRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryUnregisterRequest) ProtoMessage() {} + +func (x *CudaSharedMemoryUnregisterRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[41] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryUnregisterRequest.ProtoReflect.Descriptor instead. +func (*CudaSharedMemoryUnregisterRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{41} +} + +func (x *CudaSharedMemoryUnregisterRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// @@ +// @@.. 
cpp:var:: message CudaSharedMemoryUnregisterResponse +// @@ +// @@ Response message for CudaSharedMemoryUnregister. +// @@ +type CudaSharedMemoryUnregisterResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *CudaSharedMemoryUnregisterResponse) Reset() { + *x = CudaSharedMemoryUnregisterResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryUnregisterResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryUnregisterResponse) ProtoMessage() {} + +func (x *CudaSharedMemoryUnregisterResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[42] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryUnregisterResponse.ProtoReflect.Descriptor instead. +func (*CudaSharedMemoryUnregisterResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{42} +} + +// @@ +// @@.. cpp:var:: message TraceSettingRequest +// @@ +// @@ Request message for TraceSetting. +// @@ +type TraceSettingRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: map settings + // @@ + // @@ The new setting values to be updated, + // @@ settings that are not specified will remain unchanged. + // @@ + Settings map[string]*TraceSettingRequest_SettingValue `protobuf:"bytes,1,rep,name=settings,proto3" json:"settings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ + // @@ .. 
cpp:var:: string model_name + // @@ + // @@ The name of the model to apply the new trace settings. + // @@ If not given, the new settings will be applied globally. + // @@ + ModelName string `protobuf:"bytes,2,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` +} + +func (x *TraceSettingRequest) Reset() { + *x = TraceSettingRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TraceSettingRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TraceSettingRequest) ProtoMessage() {} + +func (x *TraceSettingRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[43] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TraceSettingRequest.ProtoReflect.Descriptor instead. +func (*TraceSettingRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{43} +} + +func (x *TraceSettingRequest) GetSettings() map[string]*TraceSettingRequest_SettingValue { + if x != nil { + return x.Settings + } + return nil +} + +func (x *TraceSettingRequest) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +// @@ +// @@.. cpp:var:: message TraceSettingResponse +// @@ +// @@ Response message for TraceSetting. +// @@ +type TraceSettingResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: map settings + // @@ + // @@ The current trace settings, including any changes specified + // @@ by TraceSettingRequest. 
+ // @@ + Settings map[string]*TraceSettingResponse_SettingValue `protobuf:"bytes,1,rep,name=settings,proto3" json:"settings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *TraceSettingResponse) Reset() { + *x = TraceSettingResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[44] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TraceSettingResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TraceSettingResponse) ProtoMessage() {} + +func (x *TraceSettingResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[44] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TraceSettingResponse.ProtoReflect.Descriptor instead. +func (*TraceSettingResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{44} +} + +func (x *TraceSettingResponse) GetSettings() map[string]*TraceSettingResponse_SettingValue { + if x != nil { + return x.Settings + } + return nil +} + +// @@ +// @@.. cpp:var:: message LogSettingsRequest +// @@ +// @@ Request message for LogSettings. +// @@ +type LogSettingsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: map settings + // @@ + // @@ The current log settings. 
+ // @@ + Settings map[string]*LogSettingsRequest_SettingValue `protobuf:"bytes,1,rep,name=settings,proto3" json:"settings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *LogSettingsRequest) Reset() { + *x = LogSettingsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[45] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LogSettingsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LogSettingsRequest) ProtoMessage() {} + +func (x *LogSettingsRequest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[45] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LogSettingsRequest.ProtoReflect.Descriptor instead. +func (*LogSettingsRequest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{45} +} + +func (x *LogSettingsRequest) GetSettings() map[string]*LogSettingsRequest_SettingValue { + if x != nil { + return x.Settings + } + return nil +} + +// @@ +// @@.. cpp:var:: message LogSettingsResponse +// @@ +// @@ Response message for LogSettings. +// @@ +type LogSettingsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: map settings + // @@ + // @@ The current log settings. 
+ // @@ + Settings map[string]*LogSettingsResponse_SettingValue `protobuf:"bytes,1,rep,name=settings,proto3" json:"settings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *LogSettingsResponse) Reset() { + *x = LogSettingsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LogSettingsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LogSettingsResponse) ProtoMessage() {} + +func (x *LogSettingsResponse) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[46] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LogSettingsResponse.ProtoReflect.Descriptor instead. +func (*LogSettingsResponse) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{46} +} + +func (x *LogSettingsResponse) GetSettings() map[string]*LogSettingsResponse_SettingValue { + if x != nil { + return x.Settings + } + return nil +} + +// @@ +// @@ .. cpp:var:: message TensorMetadata +// @@ +// @@ Metadata for a tensor. +// @@ +type ModelMetadataResponse_TensorMetadata struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The tensor name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ + // @@ .. cpp:var:: string datatype + // @@ + // @@ The tensor data type. + // @@ + Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"` + // @@ + // @@ .. 
cpp:var:: int64 shape (repeated) + // @@ + // @@ The tensor shape. A variable-size dimension is represented + // @@ by a -1 value. + // @@ + Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"` +} + +func (x *ModelMetadataResponse_TensorMetadata) Reset() { + *x = ModelMetadataResponse_TensorMetadata{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[47] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelMetadataResponse_TensorMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelMetadataResponse_TensorMetadata) ProtoMessage() {} + +func (x *ModelMetadataResponse_TensorMetadata) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[47] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelMetadataResponse_TensorMetadata.ProtoReflect.Descriptor instead. +func (*ModelMetadataResponse_TensorMetadata) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{9, 0} +} + +func (x *ModelMetadataResponse_TensorMetadata) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelMetadataResponse_TensorMetadata) GetDatatype() string { + if x != nil { + return x.Datatype + } + return "" +} + +func (x *ModelMetadataResponse_TensorMetadata) GetShape() []int64 { + if x != nil { + return x.Shape + } + return nil +} + +// @@ +// @@ .. cpp:var:: message InferInputTensor +// @@ +// @@ An input tensor for an inference request. +// @@ +type ModelInferRequest_InferInputTensor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. 
cpp:var:: string name + // @@ + // @@ The tensor name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ + // @@ .. cpp:var:: string datatype + // @@ + // @@ The tensor data type. + // @@ + Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"` + // @@ + // @@ .. cpp:var:: int64 shape (repeated) + // @@ + // @@ The tensor shape. + // @@ + Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional inference input tensor parameters. + // @@ + Parameters map[string]*InferParameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: InferTensorContents contents + // @@ + // @@ The tensor contents using a data-type format. This field + // @@ must not be specified if tensor contents are being specified + // @@ in ModelInferRequest.raw_input_contents. 
+ // @@ + Contents *InferTensorContents `protobuf:"bytes,5,opt,name=contents,proto3" json:"contents,omitempty"` +} + +func (x *ModelInferRequest_InferInputTensor) Reset() { + *x = ModelInferRequest_InferInputTensor{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[48] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInferRequest_InferInputTensor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInferRequest_InferInputTensor) ProtoMessage() {} + +func (x *ModelInferRequest_InferInputTensor) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[48] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInferRequest_InferInputTensor.ProtoReflect.Descriptor instead. +func (*ModelInferRequest_InferInputTensor) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{12, 0} +} + +func (x *ModelInferRequest_InferInputTensor) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelInferRequest_InferInputTensor) GetDatatype() string { + if x != nil { + return x.Datatype + } + return "" +} + +func (x *ModelInferRequest_InferInputTensor) GetShape() []int64 { + if x != nil { + return x.Shape + } + return nil +} + +func (x *ModelInferRequest_InferInputTensor) GetParameters() map[string]*InferParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *ModelInferRequest_InferInputTensor) GetContents() *InferTensorContents { + if x != nil { + return x.Contents + } + return nil +} + +// @@ +// @@ .. cpp:var:: message InferRequestedOutputTensor +// @@ +// @@ An output tensor requested for an inference request. 
+// @@ +type ModelInferRequest_InferRequestedOutputTensor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The tensor name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional requested output tensor parameters. + // @@ + Parameters map[string]*InferParameter `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ModelInferRequest_InferRequestedOutputTensor) Reset() { + *x = ModelInferRequest_InferRequestedOutputTensor{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[49] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInferRequest_InferRequestedOutputTensor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInferRequest_InferRequestedOutputTensor) ProtoMessage() {} + +func (x *ModelInferRequest_InferRequestedOutputTensor) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[49] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInferRequest_InferRequestedOutputTensor.ProtoReflect.Descriptor instead. 
+func (*ModelInferRequest_InferRequestedOutputTensor) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{12, 1} +} + +func (x *ModelInferRequest_InferRequestedOutputTensor) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelInferRequest_InferRequestedOutputTensor) GetParameters() map[string]*InferParameter { + if x != nil { + return x.Parameters + } + return nil +} + +// @@ +// @@ .. cpp:var:: message InferOutputTensor +// @@ +// @@ An output tensor returned for an inference request. +// @@ +type ModelInferResponse_InferOutputTensor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The tensor name. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ + // @@ .. cpp:var:: string datatype + // @@ + // @@ The tensor data type. + // @@ + Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"` + // @@ + // @@ .. cpp:var:: int64 shape (repeated) + // @@ + // @@ The tensor shape. + // @@ + Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional output tensor parameters. + // @@ + Parameters map[string]*InferParameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: InferTensorContents contents + // @@ + // @@ The tensor contents using a data-type format. This field + // @@ must not be specified if tensor contents are being specified + // @@ in ModelInferResponse.raw_output_contents. 
+ // @@ + Contents *InferTensorContents `protobuf:"bytes,5,opt,name=contents,proto3" json:"contents,omitempty"` +} + +func (x *ModelInferResponse_InferOutputTensor) Reset() { + *x = ModelInferResponse_InferOutputTensor{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[53] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInferResponse_InferOutputTensor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInferResponse_InferOutputTensor) ProtoMessage() {} + +func (x *ModelInferResponse_InferOutputTensor) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[53] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInferResponse_InferOutputTensor.ProtoReflect.Descriptor instead. +func (*ModelInferResponse_InferOutputTensor) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{13, 0} +} + +func (x *ModelInferResponse_InferOutputTensor) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelInferResponse_InferOutputTensor) GetDatatype() string { + if x != nil { + return x.Datatype + } + return "" +} + +func (x *ModelInferResponse_InferOutputTensor) GetShape() []int64 { + if x != nil { + return x.Shape + } + return nil +} + +func (x *ModelInferResponse_InferOutputTensor) GetParameters() map[string]*InferParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *ModelInferResponse_InferOutputTensor) GetContents() *InferTensorContents { + if x != nil { + return x.Contents + } + return nil +} + +// @@ +// @@ .. cpp:var:: message ModelIndex +// @@ +// @@ Index entry for a model. 
+// @@ +type RepositoryIndexResponse_ModelIndex struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string version + // @@ + // @@ The version of the model. + // @@ + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // @@ + // @@ .. cpp:var:: string state + // @@ + // @@ The state of the model. + // @@ + State string `protobuf:"bytes,3,opt,name=state,proto3" json:"state,omitempty"` + // @@ + // @@ .. cpp:var:: string reason + // @@ + // @@ The reason, if any, that the model is in the given state. + // @@ + Reason string `protobuf:"bytes,4,opt,name=reason,proto3" json:"reason,omitempty"` +} + +func (x *RepositoryIndexResponse_ModelIndex) Reset() { + *x = RepositoryIndexResponse_ModelIndex{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[56] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RepositoryIndexResponse_ModelIndex) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RepositoryIndexResponse_ModelIndex) ProtoMessage() {} + +func (x *RepositoryIndexResponse_ModelIndex) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[56] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RepositoryIndexResponse_ModelIndex.ProtoReflect.Descriptor instead. 
+func (*RepositoryIndexResponse_ModelIndex) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{26, 0} +} + +func (x *RepositoryIndexResponse_ModelIndex) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *RepositoryIndexResponse_ModelIndex) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *RepositoryIndexResponse_ModelIndex) GetState() string { + if x != nil { + return x.State + } + return "" +} + +func (x *RepositoryIndexResponse_ModelIndex) GetReason() string { + if x != nil { + return x.Reason + } + return "" +} + +// @@ +// @@ .. cpp:var:: message RegionStatus +// @@ +// @@ Status for a shared memory region. +// @@ +type SystemSharedMemoryStatusResponse_RegionStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name for the shared memory region. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string shared_memory_key + // @@ + // @@ The key of the underlying memory object that contains the + // @@ shared memory region. + // @@ + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` + // @@ .. cpp:var:: uint64 offset + // @@ + // @@ Offset, in bytes, within the underlying memory object to + // @@ the start of the shared memory region. + // @@ + Offset uint64 `protobuf:"varint,3,opt,name=offset,proto3" json:"offset,omitempty"` + // @@ .. cpp:var:: uint64 byte_size + // @@ + // @@ Size of the shared memory region, in bytes. 
+ // @@ + ByteSize uint64 `protobuf:"varint,4,opt,name=byte_size,json=byteSize,proto3" json:"byte_size,omitempty"` +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) Reset() { + *x = SystemSharedMemoryStatusResponse_RegionStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[59] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SystemSharedMemoryStatusResponse_RegionStatus) ProtoMessage() {} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[59] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SystemSharedMemoryStatusResponse_RegionStatus.ProtoReflect.Descriptor instead. +func (*SystemSharedMemoryStatusResponse_RegionStatus) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{32, 0} +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) GetOffset() uint64 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *SystemSharedMemoryStatusResponse_RegionStatus) GetByteSize() uint64 { + if x != nil { + return x.ByteSize + } + return 0 +} + +// @@ +// @@ .. cpp:var:: message RegionStatus +// @@ +// @@ Status for a shared memory region. 
+// @@ +type CudaSharedMemoryStatusResponse_RegionStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string name + // @@ + // @@ The name for the shared memory region. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: uin64 device_id + // @@ + // @@ The GPU device ID where the cudaIPC handle was created. + // @@ + DeviceId uint64 `protobuf:"varint,2,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` + // @@ .. cpp:var:: uint64 byte_size + // @@ + // @@ Size of the shared memory region, in bytes. + // @@ + ByteSize uint64 `protobuf:"varint,3,opt,name=byte_size,json=byteSize,proto3" json:"byte_size,omitempty"` +} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) Reset() { + *x = CudaSharedMemoryStatusResponse_RegionStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[61] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CudaSharedMemoryStatusResponse_RegionStatus) ProtoMessage() {} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[61] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CudaSharedMemoryStatusResponse_RegionStatus.ProtoReflect.Descriptor instead. 
+func (*CudaSharedMemoryStatusResponse_RegionStatus) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{38, 0} +} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) GetDeviceId() uint64 { + if x != nil { + return x.DeviceId + } + return 0 +} + +func (x *CudaSharedMemoryStatusResponse_RegionStatus) GetByteSize() uint64 { + if x != nil { + return x.ByteSize + } + return 0 +} + +// @@ +// @@ .. cpp:var:: message SettingValue +// @@ +// @@ The values to be associated with a trace setting. +// @@ If no value is provided, the setting will be clear and +// @@ the global setting value will be used. +// @@ +type TraceSettingRequest_SettingValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string value (repeated) + // @@ + // @@ The value. 
+ // @@ + Value []string `protobuf:"bytes,1,rep,name=value,proto3" json:"value,omitempty"` +} + +func (x *TraceSettingRequest_SettingValue) Reset() { + *x = TraceSettingRequest_SettingValue{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[63] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TraceSettingRequest_SettingValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TraceSettingRequest_SettingValue) ProtoMessage() {} + +func (x *TraceSettingRequest_SettingValue) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[63] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TraceSettingRequest_SettingValue.ProtoReflect.Descriptor instead. +func (*TraceSettingRequest_SettingValue) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{43, 0} +} + +func (x *TraceSettingRequest_SettingValue) GetValue() []string { + if x != nil { + return x.Value + } + return nil +} + +// @@ +// @@ .. cpp:var:: message SettingValue +// @@ +// @@ The values to be associated with a trace setting. +// @@ +type TraceSettingResponse_SettingValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: string value (repeated) + // @@ + // @@ The value. 
+ // @@ + Value []string `protobuf:"bytes,1,rep,name=value,proto3" json:"value,omitempty"` +} + +func (x *TraceSettingResponse_SettingValue) Reset() { + *x = TraceSettingResponse_SettingValue{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[65] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TraceSettingResponse_SettingValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TraceSettingResponse_SettingValue) ProtoMessage() {} + +func (x *TraceSettingResponse_SettingValue) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[65] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TraceSettingResponse_SettingValue.ProtoReflect.Descriptor instead. 
+func (*TraceSettingResponse_SettingValue) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{44, 0} +} + +func (x *TraceSettingResponse_SettingValue) GetValue() []string { + if x != nil { + return x.Value + } + return nil +} + +type LogSettingsRequest_SettingValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to ParameterChoice: + // + // *LogSettingsRequest_SettingValue_BoolParam + // *LogSettingsRequest_SettingValue_Uint32Param + // *LogSettingsRequest_SettingValue_StringParam + ParameterChoice isLogSettingsRequest_SettingValue_ParameterChoice `protobuf_oneof:"parameter_choice"` +} + +func (x *LogSettingsRequest_SettingValue) Reset() { + *x = LogSettingsRequest_SettingValue{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[67] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LogSettingsRequest_SettingValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LogSettingsRequest_SettingValue) ProtoMessage() {} + +func (x *LogSettingsRequest_SettingValue) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[67] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LogSettingsRequest_SettingValue.ProtoReflect.Descriptor instead. 
+func (*LogSettingsRequest_SettingValue) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{45, 0} +} + +func (m *LogSettingsRequest_SettingValue) GetParameterChoice() isLogSettingsRequest_SettingValue_ParameterChoice { + if m != nil { + return m.ParameterChoice + } + return nil +} + +func (x *LogSettingsRequest_SettingValue) GetBoolParam() bool { + if x, ok := x.GetParameterChoice().(*LogSettingsRequest_SettingValue_BoolParam); ok { + return x.BoolParam + } + return false +} + +func (x *LogSettingsRequest_SettingValue) GetUint32Param() uint32 { + if x, ok := x.GetParameterChoice().(*LogSettingsRequest_SettingValue_Uint32Param); ok { + return x.Uint32Param + } + return 0 +} + +func (x *LogSettingsRequest_SettingValue) GetStringParam() string { + if x, ok := x.GetParameterChoice().(*LogSettingsRequest_SettingValue_StringParam); ok { + return x.StringParam + } + return "" +} + +type isLogSettingsRequest_SettingValue_ParameterChoice interface { + isLogSettingsRequest_SettingValue_ParameterChoice() +} + +type LogSettingsRequest_SettingValue_BoolParam struct { + // @@ .. cpp:var:: bool bool_param + // @@ + // @@ A boolean parameter value. + // @@ + BoolParam bool `protobuf:"varint,1,opt,name=bool_param,json=boolParam,proto3,oneof"` +} + +type LogSettingsRequest_SettingValue_Uint32Param struct { + // @@ .. cpp:var:: uint32 uint32_param + // @@ + // @@ An uint32 parameter value. + // @@ + Uint32Param uint32 `protobuf:"varint,2,opt,name=uint32_param,json=uint32Param,proto3,oneof"` +} + +type LogSettingsRequest_SettingValue_StringParam struct { + // @@ .. cpp:var:: string string_param + // @@ + // @@ A string parameter value. 
+ // @@ + StringParam string `protobuf:"bytes,3,opt,name=string_param,json=stringParam,proto3,oneof"` +} + +func (*LogSettingsRequest_SettingValue_BoolParam) isLogSettingsRequest_SettingValue_ParameterChoice() { +} + +func (*LogSettingsRequest_SettingValue_Uint32Param) isLogSettingsRequest_SettingValue_ParameterChoice() { +} + +func (*LogSettingsRequest_SettingValue_StringParam) isLogSettingsRequest_SettingValue_ParameterChoice() { +} + +type LogSettingsResponse_SettingValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to ParameterChoice: + // + // *LogSettingsResponse_SettingValue_BoolParam + // *LogSettingsResponse_SettingValue_Uint32Param + // *LogSettingsResponse_SettingValue_StringParam + ParameterChoice isLogSettingsResponse_SettingValue_ParameterChoice `protobuf_oneof:"parameter_choice"` +} + +func (x *LogSettingsResponse_SettingValue) Reset() { + *x = LogSettingsResponse_SettingValue{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[69] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LogSettingsResponse_SettingValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LogSettingsResponse_SettingValue) ProtoMessage() {} + +func (x *LogSettingsResponse_SettingValue) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[69] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LogSettingsResponse_SettingValue.ProtoReflect.Descriptor instead. 
+func (*LogSettingsResponse_SettingValue) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP(), []int{46, 0} +} + +func (m *LogSettingsResponse_SettingValue) GetParameterChoice() isLogSettingsResponse_SettingValue_ParameterChoice { + if m != nil { + return m.ParameterChoice + } + return nil +} + +func (x *LogSettingsResponse_SettingValue) GetBoolParam() bool { + if x, ok := x.GetParameterChoice().(*LogSettingsResponse_SettingValue_BoolParam); ok { + return x.BoolParam + } + return false +} + +func (x *LogSettingsResponse_SettingValue) GetUint32Param() uint32 { + if x, ok := x.GetParameterChoice().(*LogSettingsResponse_SettingValue_Uint32Param); ok { + return x.Uint32Param + } + return 0 +} + +func (x *LogSettingsResponse_SettingValue) GetStringParam() string { + if x, ok := x.GetParameterChoice().(*LogSettingsResponse_SettingValue_StringParam); ok { + return x.StringParam + } + return "" +} + +type isLogSettingsResponse_SettingValue_ParameterChoice interface { + isLogSettingsResponse_SettingValue_ParameterChoice() +} + +type LogSettingsResponse_SettingValue_BoolParam struct { + // @@ .. cpp:var:: bool bool_param + // @@ + // @@ A boolean parameter value. + // @@ + BoolParam bool `protobuf:"varint,1,opt,name=bool_param,json=boolParam,proto3,oneof"` +} + +type LogSettingsResponse_SettingValue_Uint32Param struct { + // @@ .. cpp:var:: uint32 uint32_param + // @@ + // @@ An int32 parameter value. + // @@ + Uint32Param uint32 `protobuf:"varint,2,opt,name=uint32_param,json=uint32Param,proto3,oneof"` +} + +type LogSettingsResponse_SettingValue_StringParam struct { + // @@ .. cpp:var:: string string_param + // @@ + // @@ A string parameter value. 
+ // @@ + StringParam string `protobuf:"bytes,3,opt,name=string_param,json=stringParam,proto3,oneof"` +} + +func (*LogSettingsResponse_SettingValue_BoolParam) isLogSettingsResponse_SettingValue_ParameterChoice() { +} + +func (*LogSettingsResponse_SettingValue_Uint32Param) isLogSettingsResponse_SettingValue_ParameterChoice() { +} + +func (*LogSettingsResponse_SettingValue_StringParam) isLogSettingsResponse_SettingValue_ParameterChoice() { +} + +var File_pkg_apis_inference_v1_grpc_service_proto protoreflect.FileDescriptor + +var file_pkg_apis_inference_v1_grpc_service_proto_rawDesc = []byte{ + 0x0a, 0x28, 0x70, 0x6b, 0x67, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x28, 0x70, 0x6b, 0x67, 0x2f, 0x61, 0x70, + 0x69, 0x73, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2f, 0x76, 0x31, 0x2f, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x13, 0x0a, 0x11, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4c, 0x69, 0x76, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x28, 0x0a, 0x12, 0x53, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x4c, 0x69, 0x76, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x6c, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x6c, 0x69, 0x76, + 0x65, 0x22, 0x14, 0x0a, 0x12, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x52, 0x65, 0x61, 0x64, 0x79, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x2b, 0x0a, 0x13, 0x53, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, + 0x65, 0x61, 0x64, 0x79, 
0x22, 0x41, 0x0a, 0x11, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x61, + 0x64, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x2a, 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, + 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, + 0x61, 0x64, 0x79, 0x22, 0x17, 0x0a, 0x15, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x66, 0x0a, 0x16, + 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x44, 0x0a, 0x14, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xd5, 0x02, 0x0a, 0x15, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 
0x74, 0x61, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, + 0x12, 0x4a, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x32, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x4c, 0x0a, 0x07, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, + 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x1a, 0x56, 0x0a, 0x0e, 0x54, 0x65, + 0x6e, 0x73, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x03, 0x52, 0x05, 0x73, 0x68, 0x61, + 
0x70, 0x65, 0x22, 0xd7, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, + 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0a, 0x69, + 0x6e, 0x74, 0x36, 0x34, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x23, + 0x0a, 0x0c, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x12, 0x23, 0x0a, 0x0c, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x0b, 0x75, 0x69, 0x6e, + 0x74, 0x36, 0x34, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x42, 0x12, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x22, 0xc3, 0x02, 0x0a, + 0x13, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x0c, 0x62, 0x6f, 0x6f, + 0x6c, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x74, + 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x05, 0x52, + 0x0b, 0x69, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x25, 0x0a, 0x0e, + 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, 
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x03, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x43, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x75, 0x69, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x0c, 0x75, 0x69, 0x6e, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x75, 0x69, 0x6e, 0x74, + 0x36, 0x34, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, + 0x04, 0x52, 0x0e, 0x75, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x70, 0x33, 0x32, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x02, 0x52, 0x0c, 0x66, 0x70, 0x33, 0x32, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x70, 0x36, 0x34, 0x5f, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0c, 0x66, + 0x70, 0x36, 0x34, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x62, + 0x79, 0x74, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x08, 0x20, + 0x03, 0x28, 0x0c, 0x52, 0x0d, 0x62, 0x79, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x73, 0x22, 0xb8, 0x08, 0x0a, 0x11, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, + 0x64, 0x65, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, + 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 
0x12, 0x4f, 0x0a, 0x0a, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x2f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x48, 0x0a, + 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, + 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, + 0x6e, 0x66, 0x65, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, + 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x65, + 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x2c, 0x0a, + 0x12, 0x72, 0x61, 0x77, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x10, 0x72, 0x61, 0x77, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xd6, 0x02, 0x0a, 0x10, + 0x49, 0x6e, 0x66, 0x65, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, + 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6e, 0x61, 
0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x03, 0x52, + 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x60, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, + 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6e, 0x66, 0x65, + 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x3d, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x54, + 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x08, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x5b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xf9, 0x01, 0x0a, 0x1a, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4f, 
0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, + 0x73, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x6a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4a, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x65, 0x64, 0x4f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x1a, 0x5b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x1a, 0x5b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 
0xf1, 0x05, + 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x4e, + 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x50, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x4c, 0x0a, 0x07, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x6e, + 0x66, 0x65, 0x72, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, + 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x72, 0x61, 0x77, 0x5f, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, + 0x06, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x11, 0x72, 0x61, 0x77, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xd9, 0x02, 0x0a, 0x11, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x4f, 0x75, 
0x74, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x03, 0x52, 0x05, 0x73, + 0x68, 0x61, 0x70, 0x65, 0x12, 0x62, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, + 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x3d, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x54, + 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x08, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x5b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x22, 0x88, 0x01, 0x0a, 0x18, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, + 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x47, 0x0a, 0x0e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x5f, 0x72, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x52, 0x0d, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x42, 0x0a, 0x12, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x22, 0x48, 0x0a, 0x13, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, + 
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x46, 0x0a, 0x16, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x22, 0x39, 0x0a, 0x11, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x0e, 0x0a, + 0x02, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x6e, 0x73, 0x22, 0x8a, 0x04, + 0x0a, 0x0f, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, + 0x73, 0x12, 0x39, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x33, 0x0a, 0x04, + 0x66, 0x61, 0x69, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, + 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x66, 0x61, 0x69, + 0x6c, 0x12, 0x35, 0x0a, 0x05, 0x71, 
0x75, 0x65, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x05, 0x71, 0x75, 0x65, 0x75, 0x65, 0x12, 0x44, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x44, + 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x49, + 0x6e, 0x66, 0x65, 0x72, 0x12, 0x46, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x5f, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x63, + 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x3c, 0x0a, 0x09, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x68, 0x69, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x08, 0x63, 0x61, 0x63, 0x68, 0x65, 0x48, 0x69, 0x74, 0x12, 0x3e, 
0x0a, 0x0a, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, + 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x4d, 0x69, 0x73, 0x73, 0x22, 0x89, 0x02, 0x0a, 0x14, 0x49, + 0x6e, 0x66, 0x65, 0x72, 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, + 0x69, 0x63, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, + 0x7a, 0x65, 0x12, 0x44, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x5f, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, + 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x44, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x12, 0x46, + 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x44, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, + 0x4f, 0x75, 
0x74, 0x70, 0x75, 0x74, 0x22, 0x4e, 0x0a, 0x0b, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, + 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x62, 0x79, 0x74, + 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, + 0x74, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x83, 0x03, 0x0a, 0x0f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x6c, 0x61, 0x73, 0x74, + 0x5f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x0d, 0x6c, 0x61, 0x73, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, + 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x65, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x75, 0x6e, + 0x74, 0x12, 0x46, 0x0a, 0x0f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, + 0x74, 0x61, 0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x53, + 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 
0x73, 0x52, 0x0e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x43, 0x0a, 0x0b, 0x62, 0x61, 0x74, + 0x63, 0x68, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, + 0x66, 0x65, 0x72, 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, + 0x63, 0x73, 0x52, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x3c, + 0x0a, 0x0c, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x08, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, + 0x0b, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x59, 0x0a, 0x17, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, + 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x0a, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x73, 0x22, 0xba, 0x01, 0x0a, 0x18, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, 0x6c, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0a, 
0x69, 0x6e, + 0x74, 0x36, 0x34, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, + 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0c, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x42, 0x12, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x63, 0x68, + 0x6f, 0x69, 0x63, 0x65, 0x22, 0x57, 0x0a, 0x16, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, + 0x0a, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0xcd, 0x01, + 0x0a, 0x17, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48, 0x0a, 0x06, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x06, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x73, 0x1a, 0x68, 0x0a, 0x0a, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 
0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x22, 0xa5, 0x02, + 0x0a, 0x1a, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x4c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, + 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, + 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x58, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x1a, 0x65, + 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x3c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 
0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, + 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1d, 0x0a, 0x1b, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa9, 0x02, 0x0a, 0x1c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x55, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x6f, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, + 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, + 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x5a, 0x0a, + 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x3a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x55, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x1a, 0x65, 0x0a, 0x0f, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, + 
0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, + 0x65, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x22, 0x1f, 0x0a, 0x1d, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x55, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x35, 0x0a, 0x1f, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, + 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xdd, 0x02, 0x0a, 0x20, 0x53, 0x79, 0x73, + 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x55, 0x0a, + 0x07, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3b, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, + 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, + 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x72, 0x65, 0x67, + 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x69, 0x0a, 0x0c, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, + 0x66, 0x73, 0x65, 0x74, 0x18, 0x03, 
0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, + 0x65, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x62, 0x79, 0x74, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, 0x74, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x1a, + 0x77, 0x0a, 0x0c, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x51, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x3b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, + 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x2e, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x7e, 0x0a, 0x21, 0x53, 0x79, 0x73, 0x74, + 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x62, + 0x79, 0x74, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, + 0x62, 0x79, 0x74, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x24, 0x0a, 0x22, 0x53, 0x79, 0x73, 0x74, + 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 
0x73, 0x65, 0x22, 0x39, + 0x0a, 0x23, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, + 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x26, 0x0a, 0x24, 0x53, 0x79, 0x73, + 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, + 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x33, 0x0a, 0x1d, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, + 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xca, 0x02, 0x0a, 0x1e, 0x43, 0x75, 0x64, 0x61, 0x53, + 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x07, 0x72, 0x65, 0x67, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, + 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x5c, + 0x0a, 0x0c, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, + 0x02, 0x20, 
0x01, 0x28, 0x04, 0x52, 0x08, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x12, + 0x1b, 0x0a, 0x09, 0x62, 0x79, 0x74, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, 0x74, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x1a, 0x75, 0x0a, 0x0c, + 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4f, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, + 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x67, 0x69, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x22, 0x8e, 0x01, 0x0a, 0x1f, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, + 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x72, + 0x61, 0x77, 0x5f, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x09, 0x72, 0x61, 0x77, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, + 0x76, 0x69, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x64, + 0x65, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x62, 0x79, 0x74, 0x65, 0x5f, + 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, 0x74, 0x65, + 0x53, 0x69, 0x7a, 0x65, 0x22, 0x22, 0x0a, 0x20, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, + 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 
0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, 0x21, 0x43, 0x75, 0x64, 0x61, + 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x24, 0x0a, 0x22, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, + 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x94, 0x02, 0x0a, 0x13, 0x54, 0x72, 0x61, 0x63, + 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x4b, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x1d, 0x0a, 0x0a, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x24, 0x0a, 0x0c, 0x53, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x1a, 0x6b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 
0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xf8, + 0x01, 0x0a, 0x14, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x24, 0x0a, 0x0c, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x6c, 0x0a, 0x0d, 0x53, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x45, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, + 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdc, 0x02, 0x0a, 0x12, 0x4c, 0x6f, + 0x67, 0x53, 0x65, 0x74, 
0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x4a, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x8d, 0x01, 0x0a, + 0x0c, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, + 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x23, + 0x0a, 0x0c, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0b, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x42, 0x12, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x1a, 0x6a, 0x0a, 0x0d, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x43, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, + 0x67, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 
0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdf, 0x02, 0x0a, 0x13, 0x4c, 0x6f, 0x67, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x4b, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x8d, 0x01, + 0x0a, 0x0c, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, + 0x0a, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, + 0x23, 0x0a, 0x0c, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0b, 0x75, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x42, 0x12, 0x0a, 0x10, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x1a, 0x6b, 0x0a, + 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x44, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x2e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4c, + 
0x6f, 0x67, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x32, 0xb1, 0x10, 0x0a, 0x14, 0x47, + 0x52, 0x50, 0x43, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x12, 0x51, 0x0a, 0x0a, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4c, 0x69, 0x76, + 0x65, 0x12, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4c, 0x69, 0x76, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4c, 0x69, 0x76, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x54, 0x0a, 0x0b, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x52, 0x65, 0x61, 0x64, 0x79, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x52, 0x65, 0x61, + 0x64, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x0a, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x1f, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, + 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x5d, 0x0a, 0x0e, 0x53, 0x65, 0x72, 
0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x12, 0x23, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5a, + 0x0a, 0x0d, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x22, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x0a, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x12, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, + 0x66, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x61, 0x0a, + 0x10, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x49, 0x6e, 0x66, 0x65, + 0x72, 0x12, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 
0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x49, 0x6e, 0x66, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x28, 0x01, 0x30, 0x01, + 0x12, 0x54, 0x0a, 0x0b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, + 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x21, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x0f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, + 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x24, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, + 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x25, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x0f, 0x52, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x24, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x25, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x13, 0x52, 0x65, + 0x70, 0x6f, 
0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4c, 0x6f, 0x61, + 0x64, 0x12, 0x28, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x4c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4c, 0x6f, 0x61, 0x64, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x72, 0x0a, 0x15, 0x52, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x55, 0x6e, 0x6c, 0x6f, 0x61, + 0x64, 0x12, 0x2a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x55, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x55, 0x6e, 0x6c, 0x6f, + 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x7b, 0x0a, 0x18, + 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, + 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2d, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, + 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, + 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 
0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x81, 0x01, 0x0a, 0x1a, 0x53, 0x79, + 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, + 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, + 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, + 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, + 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x87, 0x01, + 0x0a, 0x1c, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, + 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x31, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, + 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, + 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x32, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, + 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x75, 0x0a, 0x16, 0x43, 0x75, 0x64, 0x61, 0x53, + 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x2b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 
0x76, 0x31, + 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, + 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, + 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x7b, + 0x0a, 0x18, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, + 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x2d, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, + 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, + 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, + 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x81, 0x01, 0x0a, 0x1a, + 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, + 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, 0x68, + 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, + 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x53, + 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x6e, 0x72, 0x65, 0x67, + 0x69, 0x73, 0x74, 0x65, 
0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x57, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x12, + 0x21, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, + 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x54, 0x0a, 0x0b, 0x4c, 0x6f, 0x67, 0x53, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x2c, + 0x5a, 0x2a, 0x64, 0x37, 0x79, 0x2e, 0x69, 0x6f, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x6b, 0x67, + 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2f, + 0x76, 0x31, 0x3b, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_pkg_apis_inference_v1_grpc_service_proto_rawDescOnce sync.Once + file_pkg_apis_inference_v1_grpc_service_proto_rawDescData = file_pkg_apis_inference_v1_grpc_service_proto_rawDesc +) + +func file_pkg_apis_inference_v1_grpc_service_proto_rawDescGZIP() []byte { + file_pkg_apis_inference_v1_grpc_service_proto_rawDescOnce.Do(func() { + file_pkg_apis_inference_v1_grpc_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_pkg_apis_inference_v1_grpc_service_proto_rawDescData) + }) + return 
file_pkg_apis_inference_v1_grpc_service_proto_rawDescData +} + +var file_pkg_apis_inference_v1_grpc_service_proto_msgTypes = make([]protoimpl.MessageInfo, 71) +var file_pkg_apis_inference_v1_grpc_service_proto_goTypes = []interface{}{ + (*ServerLiveRequest)(nil), // 0: inference.v1.ServerLiveRequest + (*ServerLiveResponse)(nil), // 1: inference.v1.ServerLiveResponse + (*ServerReadyRequest)(nil), // 2: inference.v1.ServerReadyRequest + (*ServerReadyResponse)(nil), // 3: inference.v1.ServerReadyResponse + (*ModelReadyRequest)(nil), // 4: inference.v1.ModelReadyRequest + (*ModelReadyResponse)(nil), // 5: inference.v1.ModelReadyResponse + (*ServerMetadataRequest)(nil), // 6: inference.v1.ServerMetadataRequest + (*ServerMetadataResponse)(nil), // 7: inference.v1.ServerMetadataResponse + (*ModelMetadataRequest)(nil), // 8: inference.v1.ModelMetadataRequest + (*ModelMetadataResponse)(nil), // 9: inference.v1.ModelMetadataResponse + (*InferParameter)(nil), // 10: inference.v1.InferParameter + (*InferTensorContents)(nil), // 11: inference.v1.InferTensorContents + (*ModelInferRequest)(nil), // 12: inference.v1.ModelInferRequest + (*ModelInferResponse)(nil), // 13: inference.v1.ModelInferResponse + (*ModelStreamInferResponse)(nil), // 14: inference.v1.ModelStreamInferResponse + (*ModelConfigRequest)(nil), // 15: inference.v1.ModelConfigRequest + (*ModelConfigResponse)(nil), // 16: inference.v1.ModelConfigResponse + (*ModelStatisticsRequest)(nil), // 17: inference.v1.ModelStatisticsRequest + (*StatisticDuration)(nil), // 18: inference.v1.StatisticDuration + (*InferStatistics)(nil), // 19: inference.v1.InferStatistics + (*InferBatchStatistics)(nil), // 20: inference.v1.InferBatchStatistics + (*MemoryUsage)(nil), // 21: inference.v1.MemoryUsage + (*ModelStatistics)(nil), // 22: inference.v1.ModelStatistics + (*ModelStatisticsResponse)(nil), // 23: inference.v1.ModelStatisticsResponse + (*ModelRepositoryParameter)(nil), // 24: inference.v1.ModelRepositoryParameter + 
(*RepositoryIndexRequest)(nil), // 25: inference.v1.RepositoryIndexRequest + (*RepositoryIndexResponse)(nil), // 26: inference.v1.RepositoryIndexResponse + (*RepositoryModelLoadRequest)(nil), // 27: inference.v1.RepositoryModelLoadRequest + (*RepositoryModelLoadResponse)(nil), // 28: inference.v1.RepositoryModelLoadResponse + (*RepositoryModelUnloadRequest)(nil), // 29: inference.v1.RepositoryModelUnloadRequest + (*RepositoryModelUnloadResponse)(nil), // 30: inference.v1.RepositoryModelUnloadResponse + (*SystemSharedMemoryStatusRequest)(nil), // 31: inference.v1.SystemSharedMemoryStatusRequest + (*SystemSharedMemoryStatusResponse)(nil), // 32: inference.v1.SystemSharedMemoryStatusResponse + (*SystemSharedMemoryRegisterRequest)(nil), // 33: inference.v1.SystemSharedMemoryRegisterRequest + (*SystemSharedMemoryRegisterResponse)(nil), // 34: inference.v1.SystemSharedMemoryRegisterResponse + (*SystemSharedMemoryUnregisterRequest)(nil), // 35: inference.v1.SystemSharedMemoryUnregisterRequest + (*SystemSharedMemoryUnregisterResponse)(nil), // 36: inference.v1.SystemSharedMemoryUnregisterResponse + (*CudaSharedMemoryStatusRequest)(nil), // 37: inference.v1.CudaSharedMemoryStatusRequest + (*CudaSharedMemoryStatusResponse)(nil), // 38: inference.v1.CudaSharedMemoryStatusResponse + (*CudaSharedMemoryRegisterRequest)(nil), // 39: inference.v1.CudaSharedMemoryRegisterRequest + (*CudaSharedMemoryRegisterResponse)(nil), // 40: inference.v1.CudaSharedMemoryRegisterResponse + (*CudaSharedMemoryUnregisterRequest)(nil), // 41: inference.v1.CudaSharedMemoryUnregisterRequest + (*CudaSharedMemoryUnregisterResponse)(nil), // 42: inference.v1.CudaSharedMemoryUnregisterResponse + (*TraceSettingRequest)(nil), // 43: inference.v1.TraceSettingRequest + (*TraceSettingResponse)(nil), // 44: inference.v1.TraceSettingResponse + (*LogSettingsRequest)(nil), // 45: inference.v1.LogSettingsRequest + (*LogSettingsResponse)(nil), // 46: inference.v1.LogSettingsResponse + 
(*ModelMetadataResponse_TensorMetadata)(nil), // 47: inference.v1.ModelMetadataResponse.TensorMetadata + (*ModelInferRequest_InferInputTensor)(nil), // 48: inference.v1.ModelInferRequest.InferInputTensor + (*ModelInferRequest_InferRequestedOutputTensor)(nil), // 49: inference.v1.ModelInferRequest.InferRequestedOutputTensor + nil, // 50: inference.v1.ModelInferRequest.ParametersEntry + nil, // 51: inference.v1.ModelInferRequest.InferInputTensor.ParametersEntry + nil, // 52: inference.v1.ModelInferRequest.InferRequestedOutputTensor.ParametersEntry + (*ModelInferResponse_InferOutputTensor)(nil), // 53: inference.v1.ModelInferResponse.InferOutputTensor + nil, // 54: inference.v1.ModelInferResponse.ParametersEntry + nil, // 55: inference.v1.ModelInferResponse.InferOutputTensor.ParametersEntry + (*RepositoryIndexResponse_ModelIndex)(nil), // 56: inference.v1.RepositoryIndexResponse.ModelIndex + nil, // 57: inference.v1.RepositoryModelLoadRequest.ParametersEntry + nil, // 58: inference.v1.RepositoryModelUnloadRequest.ParametersEntry + (*SystemSharedMemoryStatusResponse_RegionStatus)(nil), // 59: inference.v1.SystemSharedMemoryStatusResponse.RegionStatus + nil, // 60: inference.v1.SystemSharedMemoryStatusResponse.RegionsEntry + (*CudaSharedMemoryStatusResponse_RegionStatus)(nil), // 61: inference.v1.CudaSharedMemoryStatusResponse.RegionStatus + nil, // 62: inference.v1.CudaSharedMemoryStatusResponse.RegionsEntry + (*TraceSettingRequest_SettingValue)(nil), // 63: inference.v1.TraceSettingRequest.SettingValue + nil, // 64: inference.v1.TraceSettingRequest.SettingsEntry + (*TraceSettingResponse_SettingValue)(nil), // 65: inference.v1.TraceSettingResponse.SettingValue + nil, // 66: inference.v1.TraceSettingResponse.SettingsEntry + (*LogSettingsRequest_SettingValue)(nil), // 67: inference.v1.LogSettingsRequest.SettingValue + nil, // 68: inference.v1.LogSettingsRequest.SettingsEntry + (*LogSettingsResponse_SettingValue)(nil), // 69: inference.v1.LogSettingsResponse.SettingValue 
+ nil, // 70: inference.v1.LogSettingsResponse.SettingsEntry + (*ModelConfig)(nil), // 71: inference.v1.ModelConfig +} +var file_pkg_apis_inference_v1_grpc_service_proto_depIdxs = []int32{ + 47, // 0: inference.v1.ModelMetadataResponse.inputs:type_name -> inference.v1.ModelMetadataResponse.TensorMetadata + 47, // 1: inference.v1.ModelMetadataResponse.outputs:type_name -> inference.v1.ModelMetadataResponse.TensorMetadata + 50, // 2: inference.v1.ModelInferRequest.parameters:type_name -> inference.v1.ModelInferRequest.ParametersEntry + 48, // 3: inference.v1.ModelInferRequest.inputs:type_name -> inference.v1.ModelInferRequest.InferInputTensor + 49, // 4: inference.v1.ModelInferRequest.outputs:type_name -> inference.v1.ModelInferRequest.InferRequestedOutputTensor + 54, // 5: inference.v1.ModelInferResponse.parameters:type_name -> inference.v1.ModelInferResponse.ParametersEntry + 53, // 6: inference.v1.ModelInferResponse.outputs:type_name -> inference.v1.ModelInferResponse.InferOutputTensor + 13, // 7: inference.v1.ModelStreamInferResponse.infer_response:type_name -> inference.v1.ModelInferResponse + 71, // 8: inference.v1.ModelConfigResponse.config:type_name -> inference.v1.ModelConfig + 18, // 9: inference.v1.InferStatistics.success:type_name -> inference.v1.StatisticDuration + 18, // 10: inference.v1.InferStatistics.fail:type_name -> inference.v1.StatisticDuration + 18, // 11: inference.v1.InferStatistics.queue:type_name -> inference.v1.StatisticDuration + 18, // 12: inference.v1.InferStatistics.compute_input:type_name -> inference.v1.StatisticDuration + 18, // 13: inference.v1.InferStatistics.compute_infer:type_name -> inference.v1.StatisticDuration + 18, // 14: inference.v1.InferStatistics.compute_output:type_name -> inference.v1.StatisticDuration + 18, // 15: inference.v1.InferStatistics.cache_hit:type_name -> inference.v1.StatisticDuration + 18, // 16: inference.v1.InferStatistics.cache_miss:type_name -> inference.v1.StatisticDuration + 18, // 17: 
inference.v1.InferBatchStatistics.compute_input:type_name -> inference.v1.StatisticDuration + 18, // 18: inference.v1.InferBatchStatistics.compute_infer:type_name -> inference.v1.StatisticDuration + 18, // 19: inference.v1.InferBatchStatistics.compute_output:type_name -> inference.v1.StatisticDuration + 19, // 20: inference.v1.ModelStatistics.inference_stats:type_name -> inference.v1.InferStatistics + 20, // 21: inference.v1.ModelStatistics.batch_stats:type_name -> inference.v1.InferBatchStatistics + 21, // 22: inference.v1.ModelStatistics.memory_usage:type_name -> inference.v1.MemoryUsage + 22, // 23: inference.v1.ModelStatisticsResponse.model_stats:type_name -> inference.v1.ModelStatistics + 56, // 24: inference.v1.RepositoryIndexResponse.models:type_name -> inference.v1.RepositoryIndexResponse.ModelIndex + 57, // 25: inference.v1.RepositoryModelLoadRequest.parameters:type_name -> inference.v1.RepositoryModelLoadRequest.ParametersEntry + 58, // 26: inference.v1.RepositoryModelUnloadRequest.parameters:type_name -> inference.v1.RepositoryModelUnloadRequest.ParametersEntry + 60, // 27: inference.v1.SystemSharedMemoryStatusResponse.regions:type_name -> inference.v1.SystemSharedMemoryStatusResponse.RegionsEntry + 62, // 28: inference.v1.CudaSharedMemoryStatusResponse.regions:type_name -> inference.v1.CudaSharedMemoryStatusResponse.RegionsEntry + 64, // 29: inference.v1.TraceSettingRequest.settings:type_name -> inference.v1.TraceSettingRequest.SettingsEntry + 66, // 30: inference.v1.TraceSettingResponse.settings:type_name -> inference.v1.TraceSettingResponse.SettingsEntry + 68, // 31: inference.v1.LogSettingsRequest.settings:type_name -> inference.v1.LogSettingsRequest.SettingsEntry + 70, // 32: inference.v1.LogSettingsResponse.settings:type_name -> inference.v1.LogSettingsResponse.SettingsEntry + 51, // 33: inference.v1.ModelInferRequest.InferInputTensor.parameters:type_name -> inference.v1.ModelInferRequest.InferInputTensor.ParametersEntry + 11, // 34: 
inference.v1.ModelInferRequest.InferInputTensor.contents:type_name -> inference.v1.InferTensorContents + 52, // 35: inference.v1.ModelInferRequest.InferRequestedOutputTensor.parameters:type_name -> inference.v1.ModelInferRequest.InferRequestedOutputTensor.ParametersEntry + 10, // 36: inference.v1.ModelInferRequest.ParametersEntry.value:type_name -> inference.v1.InferParameter + 10, // 37: inference.v1.ModelInferRequest.InferInputTensor.ParametersEntry.value:type_name -> inference.v1.InferParameter + 10, // 38: inference.v1.ModelInferRequest.InferRequestedOutputTensor.ParametersEntry.value:type_name -> inference.v1.InferParameter + 55, // 39: inference.v1.ModelInferResponse.InferOutputTensor.parameters:type_name -> inference.v1.ModelInferResponse.InferOutputTensor.ParametersEntry + 11, // 40: inference.v1.ModelInferResponse.InferOutputTensor.contents:type_name -> inference.v1.InferTensorContents + 10, // 41: inference.v1.ModelInferResponse.ParametersEntry.value:type_name -> inference.v1.InferParameter + 10, // 42: inference.v1.ModelInferResponse.InferOutputTensor.ParametersEntry.value:type_name -> inference.v1.InferParameter + 24, // 43: inference.v1.RepositoryModelLoadRequest.ParametersEntry.value:type_name -> inference.v1.ModelRepositoryParameter + 24, // 44: inference.v1.RepositoryModelUnloadRequest.ParametersEntry.value:type_name -> inference.v1.ModelRepositoryParameter + 59, // 45: inference.v1.SystemSharedMemoryStatusResponse.RegionsEntry.value:type_name -> inference.v1.SystemSharedMemoryStatusResponse.RegionStatus + 61, // 46: inference.v1.CudaSharedMemoryStatusResponse.RegionsEntry.value:type_name -> inference.v1.CudaSharedMemoryStatusResponse.RegionStatus + 63, // 47: inference.v1.TraceSettingRequest.SettingsEntry.value:type_name -> inference.v1.TraceSettingRequest.SettingValue + 65, // 48: inference.v1.TraceSettingResponse.SettingsEntry.value:type_name -> inference.v1.TraceSettingResponse.SettingValue + 67, // 49: 
inference.v1.LogSettingsRequest.SettingsEntry.value:type_name -> inference.v1.LogSettingsRequest.SettingValue + 69, // 50: inference.v1.LogSettingsResponse.SettingsEntry.value:type_name -> inference.v1.LogSettingsResponse.SettingValue + 0, // 51: inference.v1.GRPCInferenceService.ServerLive:input_type -> inference.v1.ServerLiveRequest + 2, // 52: inference.v1.GRPCInferenceService.ServerReady:input_type -> inference.v1.ServerReadyRequest + 4, // 53: inference.v1.GRPCInferenceService.ModelReady:input_type -> inference.v1.ModelReadyRequest + 6, // 54: inference.v1.GRPCInferenceService.ServerMetadata:input_type -> inference.v1.ServerMetadataRequest + 8, // 55: inference.v1.GRPCInferenceService.ModelMetadata:input_type -> inference.v1.ModelMetadataRequest + 12, // 56: inference.v1.GRPCInferenceService.ModelInfer:input_type -> inference.v1.ModelInferRequest + 12, // 57: inference.v1.GRPCInferenceService.ModelStreamInfer:input_type -> inference.v1.ModelInferRequest + 15, // 58: inference.v1.GRPCInferenceService.ModelConfig:input_type -> inference.v1.ModelConfigRequest + 17, // 59: inference.v1.GRPCInferenceService.ModelStatistics:input_type -> inference.v1.ModelStatisticsRequest + 25, // 60: inference.v1.GRPCInferenceService.RepositoryIndex:input_type -> inference.v1.RepositoryIndexRequest + 27, // 61: inference.v1.GRPCInferenceService.RepositoryModelLoad:input_type -> inference.v1.RepositoryModelLoadRequest + 29, // 62: inference.v1.GRPCInferenceService.RepositoryModelUnload:input_type -> inference.v1.RepositoryModelUnloadRequest + 31, // 63: inference.v1.GRPCInferenceService.SystemSharedMemoryStatus:input_type -> inference.v1.SystemSharedMemoryStatusRequest + 33, // 64: inference.v1.GRPCInferenceService.SystemSharedMemoryRegister:input_type -> inference.v1.SystemSharedMemoryRegisterRequest + 35, // 65: inference.v1.GRPCInferenceService.SystemSharedMemoryUnregister:input_type -> inference.v1.SystemSharedMemoryUnregisterRequest + 37, // 66: 
inference.v1.GRPCInferenceService.CudaSharedMemoryStatus:input_type -> inference.v1.CudaSharedMemoryStatusRequest + 39, // 67: inference.v1.GRPCInferenceService.CudaSharedMemoryRegister:input_type -> inference.v1.CudaSharedMemoryRegisterRequest + 41, // 68: inference.v1.GRPCInferenceService.CudaSharedMemoryUnregister:input_type -> inference.v1.CudaSharedMemoryUnregisterRequest + 43, // 69: inference.v1.GRPCInferenceService.TraceSetting:input_type -> inference.v1.TraceSettingRequest + 45, // 70: inference.v1.GRPCInferenceService.LogSettings:input_type -> inference.v1.LogSettingsRequest + 1, // 71: inference.v1.GRPCInferenceService.ServerLive:output_type -> inference.v1.ServerLiveResponse + 3, // 72: inference.v1.GRPCInferenceService.ServerReady:output_type -> inference.v1.ServerReadyResponse + 5, // 73: inference.v1.GRPCInferenceService.ModelReady:output_type -> inference.v1.ModelReadyResponse + 7, // 74: inference.v1.GRPCInferenceService.ServerMetadata:output_type -> inference.v1.ServerMetadataResponse + 9, // 75: inference.v1.GRPCInferenceService.ModelMetadata:output_type -> inference.v1.ModelMetadataResponse + 13, // 76: inference.v1.GRPCInferenceService.ModelInfer:output_type -> inference.v1.ModelInferResponse + 14, // 77: inference.v1.GRPCInferenceService.ModelStreamInfer:output_type -> inference.v1.ModelStreamInferResponse + 16, // 78: inference.v1.GRPCInferenceService.ModelConfig:output_type -> inference.v1.ModelConfigResponse + 23, // 79: inference.v1.GRPCInferenceService.ModelStatistics:output_type -> inference.v1.ModelStatisticsResponse + 26, // 80: inference.v1.GRPCInferenceService.RepositoryIndex:output_type -> inference.v1.RepositoryIndexResponse + 28, // 81: inference.v1.GRPCInferenceService.RepositoryModelLoad:output_type -> inference.v1.RepositoryModelLoadResponse + 30, // 82: inference.v1.GRPCInferenceService.RepositoryModelUnload:output_type -> inference.v1.RepositoryModelUnloadResponse + 32, // 83: 
inference.v1.GRPCInferenceService.SystemSharedMemoryStatus:output_type -> inference.v1.SystemSharedMemoryStatusResponse + 34, // 84: inference.v1.GRPCInferenceService.SystemSharedMemoryRegister:output_type -> inference.v1.SystemSharedMemoryRegisterResponse + 36, // 85: inference.v1.GRPCInferenceService.SystemSharedMemoryUnregister:output_type -> inference.v1.SystemSharedMemoryUnregisterResponse + 38, // 86: inference.v1.GRPCInferenceService.CudaSharedMemoryStatus:output_type -> inference.v1.CudaSharedMemoryStatusResponse + 40, // 87: inference.v1.GRPCInferenceService.CudaSharedMemoryRegister:output_type -> inference.v1.CudaSharedMemoryRegisterResponse + 42, // 88: inference.v1.GRPCInferenceService.CudaSharedMemoryUnregister:output_type -> inference.v1.CudaSharedMemoryUnregisterResponse + 44, // 89: inference.v1.GRPCInferenceService.TraceSetting:output_type -> inference.v1.TraceSettingResponse + 46, // 90: inference.v1.GRPCInferenceService.LogSettings:output_type -> inference.v1.LogSettingsResponse + 71, // [71:91] is the sub-list for method output_type + 51, // [51:71] is the sub-list for method input_type + 51, // [51:51] is the sub-list for extension type_name + 51, // [51:51] is the sub-list for extension extendee + 0, // [0:51] is the sub-list for field type_name +} + +func init() { file_pkg_apis_inference_v1_grpc_service_proto_init() } +func file_pkg_apis_inference_v1_grpc_service_proto_init() { + if File_pkg_apis_inference_v1_grpc_service_proto != nil { + return + } + file_pkg_apis_inference_v1_model_config_proto_init() + if !protoimpl.UnsafeEnabled { + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerLiveRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*ServerLiveResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerReadyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerReadyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelReadyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelReadyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerMetadataRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerMetadataResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*ModelMetadataRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelMetadataResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InferParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InferTensorContents); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInferRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInferResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelStreamInferResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*ModelConfigRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelConfigResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelStatisticsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StatisticDuration); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InferStatistics); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InferBatchStatistics); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MemoryUsage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*ModelStatistics); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelStatisticsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelRepositoryParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryIndexRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryIndexResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryModelLoadRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryModelLoadResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[29].Exporter = func(v interface{}, i int) 
interface{} { + switch v := v.(*RepositoryModelUnloadRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryModelUnloadResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryStatusRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryStatusResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryRegisterRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryRegisterResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryUnregisterRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryUnregisterResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryStatusRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryStatusResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryRegisterRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryRegisterResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryUnregisterRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryUnregisterResponse); i { + case 0: + return &v.state + case 1: + 
return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TraceSettingRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TraceSettingResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LogSettingsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LogSettingsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelMetadataResponse_TensorMetadata); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInferRequest_InferInputTensor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*ModelInferRequest_InferRequestedOutputTensor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInferResponse_InferOutputTensor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RepositoryIndexResponse_ModelIndex); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SystemSharedMemoryStatusResponse_RegionStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[61].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CudaSharedMemoryStatusResponse_RegionStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TraceSettingRequest_SettingValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TraceSettingResponse_SettingValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + 
} + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LogSettingsRequest_SettingValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LogSettingsResponse_SettingValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[10].OneofWrappers = []interface{}{ + (*InferParameter_BoolParam)(nil), + (*InferParameter_Int64Param)(nil), + (*InferParameter_StringParam)(nil), + (*InferParameter_DoubleParam)(nil), + (*InferParameter_Uint64Param)(nil), + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[24].OneofWrappers = []interface{}{ + (*ModelRepositoryParameter_BoolParam)(nil), + (*ModelRepositoryParameter_Int64Param)(nil), + (*ModelRepositoryParameter_StringParam)(nil), + (*ModelRepositoryParameter_BytesParam)(nil), + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[67].OneofWrappers = []interface{}{ + (*LogSettingsRequest_SettingValue_BoolParam)(nil), + (*LogSettingsRequest_SettingValue_Uint32Param)(nil), + (*LogSettingsRequest_SettingValue_StringParam)(nil), + } + file_pkg_apis_inference_v1_grpc_service_proto_msgTypes[69].OneofWrappers = []interface{}{ + (*LogSettingsResponse_SettingValue_BoolParam)(nil), + (*LogSettingsResponse_SettingValue_Uint32Param)(nil), + (*LogSettingsResponse_SettingValue_StringParam)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_pkg_apis_inference_v1_grpc_service_proto_rawDesc, + NumEnums: 0, + NumMessages: 71, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: 
file_pkg_apis_inference_v1_grpc_service_proto_goTypes, + DependencyIndexes: file_pkg_apis_inference_v1_grpc_service_proto_depIdxs, + MessageInfos: file_pkg_apis_inference_v1_grpc_service_proto_msgTypes, + }.Build() + File_pkg_apis_inference_v1_grpc_service_proto = out.File + file_pkg_apis_inference_v1_grpc_service_proto_rawDesc = nil + file_pkg_apis_inference_v1_grpc_service_proto_goTypes = nil + file_pkg_apis_inference_v1_grpc_service_proto_depIdxs = nil +} diff --git a/pkg/apis/inference/v1/grpc_service.pb.validate.go b/pkg/apis/inference/v1/grpc_service.pb.validate.go new file mode 100644 index 0000000..6a3d814 --- /dev/null +++ b/pkg/apis/inference/v1/grpc_service.pb.validate.go @@ -0,0 +1,7720 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: pkg/apis/inference/v1/grpc_service.proto + +package inference + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on ServerLiveRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ServerLiveRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ServerLiveRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ServerLiveRequestMultiError, or nil if none found. 
+func (m *ServerLiveRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ServerLiveRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ServerLiveRequestMultiError(errors) + } + + return nil +} + +// ServerLiveRequestMultiError is an error wrapping multiple validation errors +// returned by ServerLiveRequest.ValidateAll() if the designated constraints +// aren't met. +type ServerLiveRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ServerLiveRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ServerLiveRequestMultiError) AllErrors() []error { return m } + +// ServerLiveRequestValidationError is the validation error returned by +// ServerLiveRequest.Validate if the designated constraints aren't met. +type ServerLiveRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ServerLiveRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ServerLiveRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ServerLiveRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ServerLiveRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ServerLiveRequestValidationError) ErrorName() string { + return "ServerLiveRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ServerLiveRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sServerLiveRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ServerLiveRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ServerLiveRequestValidationError{} + +// Validate checks the field values on ServerLiveResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ServerLiveResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ServerLiveResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ServerLiveResponseMultiError, or nil if none found. +func (m *ServerLiveResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ServerLiveResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Live + + if len(errors) > 0 { + return ServerLiveResponseMultiError(errors) + } + + return nil +} + +// ServerLiveResponseMultiError is an error wrapping multiple validation errors +// returned by ServerLiveResponse.ValidateAll() if the designated constraints +// aren't met. +type ServerLiveResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ServerLiveResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ServerLiveResponseMultiError) AllErrors() []error { return m } + +// ServerLiveResponseValidationError is the validation error returned by +// ServerLiveResponse.Validate if the designated constraints aren't met. +type ServerLiveResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ServerLiveResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ServerLiveResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ServerLiveResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ServerLiveResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ServerLiveResponseValidationError) ErrorName() string { + return "ServerLiveResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ServerLiveResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sServerLiveResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ServerLiveResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ServerLiveResponseValidationError{} + +// Validate checks the field values on ServerReadyRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *ServerReadyRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ServerReadyRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ServerReadyRequestMultiError, or nil if none found. +func (m *ServerReadyRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ServerReadyRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ServerReadyRequestMultiError(errors) + } + + return nil +} + +// ServerReadyRequestMultiError is an error wrapping multiple validation errors +// returned by ServerReadyRequest.ValidateAll() if the designated constraints +// aren't met. +type ServerReadyRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ServerReadyRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ServerReadyRequestMultiError) AllErrors() []error { return m } + +// ServerReadyRequestValidationError is the validation error returned by +// ServerReadyRequest.Validate if the designated constraints aren't met. +type ServerReadyRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ServerReadyRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ServerReadyRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ServerReadyRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ServerReadyRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ServerReadyRequestValidationError) ErrorName() string { + return "ServerReadyRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ServerReadyRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sServerReadyRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ServerReadyRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ServerReadyRequestValidationError{} + +// Validate checks the field values on ServerReadyResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ServerReadyResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ServerReadyResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ServerReadyResponseMultiError, or nil if none found. +func (m *ServerReadyResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ServerReadyResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Ready + + if len(errors) > 0 { + return ServerReadyResponseMultiError(errors) + } + + return nil +} + +// ServerReadyResponseMultiError is an error wrapping multiple validation +// errors returned by ServerReadyResponse.ValidateAll() if the designated +// constraints aren't met. +type ServerReadyResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ServerReadyResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ServerReadyResponseMultiError) AllErrors() []error { return m } + +// ServerReadyResponseValidationError is the validation error returned by +// ServerReadyResponse.Validate if the designated constraints aren't met. +type ServerReadyResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ServerReadyResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ServerReadyResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ServerReadyResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ServerReadyResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ServerReadyResponseValidationError) ErrorName() string { + return "ServerReadyResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ServerReadyResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sServerReadyResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ServerReadyResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ServerReadyResponseValidationError{} + +// Validate checks the field values on ModelReadyRequest with the rules defined +// in the proto definition for this message. 
If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelReadyRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelReadyRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelReadyRequestMultiError, or nil if none found. +func (m *ModelReadyRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelReadyRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + if len(errors) > 0 { + return ModelReadyRequestMultiError(errors) + } + + return nil +} + +// ModelReadyRequestMultiError is an error wrapping multiple validation errors +// returned by ModelReadyRequest.ValidateAll() if the designated constraints +// aren't met. +type ModelReadyRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelReadyRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelReadyRequestMultiError) AllErrors() []error { return m } + +// ModelReadyRequestValidationError is the validation error returned by +// ModelReadyRequest.Validate if the designated constraints aren't met. +type ModelReadyRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelReadyRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelReadyRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e ModelReadyRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelReadyRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelReadyRequestValidationError) ErrorName() string { + return "ModelReadyRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelReadyRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelReadyRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelReadyRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelReadyRequestValidationError{} + +// Validate checks the field values on ModelReadyResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelReadyResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelReadyResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelReadyResponseMultiError, or nil if none found. +func (m *ModelReadyResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelReadyResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Ready + + if len(errors) > 0 { + return ModelReadyResponseMultiError(errors) + } + + return nil +} + +// ModelReadyResponseMultiError is an error wrapping multiple validation errors +// returned by ModelReadyResponse.ValidateAll() if the designated constraints +// aren't met. 
+type ModelReadyResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelReadyResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelReadyResponseMultiError) AllErrors() []error { return m } + +// ModelReadyResponseValidationError is the validation error returned by +// ModelReadyResponse.Validate if the designated constraints aren't met. +type ModelReadyResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelReadyResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelReadyResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelReadyResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelReadyResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
func (e ModelReadyResponseValidationError) ErrorName() string {
	return "ModelReadyResponseValidationError"
}

// Error satisfies the builtin error interface
func (e ModelReadyResponseValidationError) Error() string {
	// Formats as: invalid [key for ]ModelReadyResponse.<field>: <reason>[ | caused by: <cause>]
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sModelReadyResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

var _ error = ModelReadyResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = ModelReadyResponseValidationError{}

// Validate checks the field values on ServerMetadataRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *ServerMetadataRequest) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on ServerMetadataRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// ServerMetadataRequestMultiError, or nil if none found.
func (m *ServerMetadataRequest) ValidateAll() error {
	return m.validate(true)
}

func (m *ServerMetadataRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// ServerMetadataRequest declares no per-field constraints, so errors is
	// always empty and validation always succeeds.
	if len(errors) > 0 {
		return ServerMetadataRequestMultiError(errors)
	}

	return nil
}

// ServerMetadataRequestMultiError is an error wrapping multiple validation
// errors returned by ServerMetadataRequest.ValidateAll() if the designated
// constraints aren't met.
type ServerMetadataRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ServerMetadataRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ServerMetadataRequestMultiError) AllErrors() []error { return m }

// ServerMetadataRequestValidationError is the validation error returned by
// ServerMetadataRequest.Validate if the designated constraints aren't met.
type ServerMetadataRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ServerMetadataRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ServerMetadataRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ServerMetadataRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e ServerMetadataRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e ServerMetadataRequestValidationError) ErrorName() string {
	return "ServerMetadataRequestValidationError"
}

// Error satisfies the builtin error interface
func (e ServerMetadataRequestValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sServerMetadataRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

var _ error = ServerMetadataRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = ServerMetadataRequestValidationError{}

// Validate checks the field values on ServerMetadataResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *ServerMetadataResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on ServerMetadataResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// ServerMetadataResponseMultiError, or nil if none found.
func (m *ServerMetadataResponse) ValidateAll() error {
	return m.validate(true)
}

func (m *ServerMetadataResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	// no validation rules for Version

	if len(errors) > 0 {
		return ServerMetadataResponseMultiError(errors)
	}

	return nil
}

// ServerMetadataResponseMultiError is an error wrapping multiple validation
// errors returned by ServerMetadataResponse.ValidateAll() if the designated
// constraints aren't met.
type ServerMetadataResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ServerMetadataResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ServerMetadataResponseMultiError) AllErrors() []error { return m }

// ServerMetadataResponseValidationError is the validation error returned by
// ServerMetadataResponse.Validate if the designated constraints aren't met.
type ServerMetadataResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ServerMetadataResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
+func (e ServerMetadataResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ServerMetadataResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ServerMetadataResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ServerMetadataResponseValidationError) ErrorName() string { + return "ServerMetadataResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ServerMetadataResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sServerMetadataResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ServerMetadataResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ServerMetadataResponseValidationError{} + +// Validate checks the field values on ModelMetadataRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelMetadataRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelMetadataRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelMetadataRequestMultiError, or nil if none found. 
+func (m *ModelMetadataRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelMetadataRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + if len(errors) > 0 { + return ModelMetadataRequestMultiError(errors) + } + + return nil +} + +// ModelMetadataRequestMultiError is an error wrapping multiple validation +// errors returned by ModelMetadataRequest.ValidateAll() if the designated +// constraints aren't met. +type ModelMetadataRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelMetadataRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelMetadataRequestMultiError) AllErrors() []error { return m } + +// ModelMetadataRequestValidationError is the validation error returned by +// ModelMetadataRequest.Validate if the designated constraints aren't met. +type ModelMetadataRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelMetadataRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelMetadataRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelMetadataRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelMetadataRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelMetadataRequestValidationError) ErrorName() string { + return "ModelMetadataRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelMetadataRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelMetadataRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelMetadataRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelMetadataRequestValidationError{} + +// Validate checks the field values on ModelMetadataResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelMetadataResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelMetadataResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelMetadataResponseMultiError, or nil if none found. 
func (m *ModelMetadataResponse) ValidateAll() error {
	return m.validate(true)
}

func (m *ModelMetadataResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	// no validation rules for Platform

	// Recursively validate each repeated Inputs element; in ValidateAll mode
	// (all == true) violations are accumulated, otherwise the first one is
	// returned immediately.
	for idx, item := range m.GetInputs() {
		_, _ = idx, item

		if all {
			switch v := interface{}(item).(type) {
			case interface{ ValidateAll() error }:
				if err := v.ValidateAll(); err != nil {
					errors = append(errors, ModelMetadataResponseValidationError{
						field:  fmt.Sprintf("Inputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			case interface{ Validate() error }:
				if err := v.Validate(); err != nil {
					errors = append(errors, ModelMetadataResponseValidationError{
						field:  fmt.Sprintf("Inputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			}
		} else if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return ModelMetadataResponseValidationError{
					field:  fmt.Sprintf("Inputs[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}

	}

	for idx, item := range m.GetOutputs() {
		_, _ = idx, item

		if all {
			switch v := interface{}(item).(type) {
			case interface{ ValidateAll() error }:
				if err := v.ValidateAll(); err != nil {
					errors = append(errors, ModelMetadataResponseValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			case interface{ Validate() error }:
				if err := v.Validate(); err != nil {
					errors = append(errors, ModelMetadataResponseValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			}
		} else if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return ModelMetadataResponseValidationError{
					field:  fmt.Sprintf("Outputs[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}

	}

	if len(errors) > 0 {
		return ModelMetadataResponseMultiError(errors)
	}

	return nil
}

// ModelMetadataResponseMultiError is an error wrapping multiple validation
// errors returned by ModelMetadataResponse.ValidateAll() if the designated
// constraints aren't met.
type ModelMetadataResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ModelMetadataResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ModelMetadataResponseMultiError) AllErrors() []error { return m }

// ModelMetadataResponseValidationError is the validation error returned by
// ModelMetadataResponse.Validate if the designated constraints aren't met.
type ModelMetadataResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ModelMetadataResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ModelMetadataResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ModelMetadataResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e ModelMetadataResponseValidationError) Key() bool { return e.key }

// ErrorName returns error name.
+func (e ModelMetadataResponseValidationError) ErrorName() string { + return "ModelMetadataResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelMetadataResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelMetadataResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelMetadataResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelMetadataResponseValidationError{} + +// Validate checks the field values on InferParameter with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *InferParameter) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on InferParameter with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in InferParameterMultiError, +// or nil if none found. 
+func (m *InferParameter) ValidateAll() error { + return m.validate(true) +} + +func (m *InferParameter) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + switch v := m.ParameterChoice.(type) { + case *InferParameter_BoolParam: + if v == nil { + err := InferParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for BoolParam + case *InferParameter_Int64Param: + if v == nil { + err := InferParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for Int64Param + case *InferParameter_StringParam: + if v == nil { + err := InferParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for StringParam + case *InferParameter_DoubleParam: + if v == nil { + err := InferParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for DoubleParam + case *InferParameter_Uint64Param: + if v == nil { + err := InferParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for Uint64Param + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return InferParameterMultiError(errors) + } + + return nil +} + +// InferParameterMultiError is an error wrapping multiple validation errors +// returned by InferParameter.ValidateAll() if the designated constraints +// aren't met. 
type InferParameterMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m InferParameterMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m InferParameterMultiError) AllErrors() []error { return m }

// InferParameterValidationError is the validation error returned by
// InferParameter.Validate if the designated constraints aren't met.
type InferParameterValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e InferParameterValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e InferParameterValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e InferParameterValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e InferParameterValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e InferParameterValidationError) ErrorName() string { return "InferParameterValidationError" }

// Error satisfies the builtin error interface
func (e InferParameterValidationError) Error() string {
	// Formats as: invalid [key for ]InferParameter.<field>: <reason>[ | caused by: <cause>]
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sInferParameter.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

var _ error = InferParameterValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = InferParameterValidationError{}

// Validate checks the field values on InferTensorContents with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *InferTensorContents) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on InferTensorContents with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// InferTensorContentsMultiError, or nil if none found.
func (m *InferTensorContents) ValidateAll() error {
	return m.validate(true)
}

func (m *InferTensorContents) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// InferTensorContents declares no per-field constraints, so errors is
	// always empty and validation always succeeds.
	if len(errors) > 0 {
		return InferTensorContentsMultiError(errors)
	}

	return nil
}

// InferTensorContentsMultiError is an error wrapping multiple validation
// errors returned by InferTensorContents.ValidateAll() if the designated
// constraints aren't met.
type InferTensorContentsMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m InferTensorContentsMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m InferTensorContentsMultiError) AllErrors() []error { return m }

// InferTensorContentsValidationError is the validation error returned by
// InferTensorContents.Validate if the designated constraints aren't met.
type InferTensorContentsValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e InferTensorContentsValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e InferTensorContentsValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
+func (e InferTensorContentsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e InferTensorContentsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e InferTensorContentsValidationError) ErrorName() string { + return "InferTensorContentsValidationError" +} + +// Error satisfies the builtin error interface +func (e InferTensorContentsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sInferTensorContents.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = InferTensorContentsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = InferTensorContentsValidationError{} + +// Validate checks the field values on ModelInferRequest with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelInferRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelInferRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelInferRequestMultiError, or nil if none found. 
func (m *ModelInferRequest) ValidateAll() error {
	return m.validate(true)
}

func (m *ModelInferRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for ModelName

	// no validation rules for ModelVersion

	// no validation rules for Id

	{
		// Iterate the Parameters map in sorted key order so error output is
		// deterministic across runs (Go map iteration order is random).
		sorted_keys := make([]string, len(m.GetParameters()))
		i := 0
		for key := range m.GetParameters() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetParameters()[key]
			_ = val

			// no validation rules for Parameters[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, ModelInferRequestValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, ModelInferRequestValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return ModelInferRequestValidationError{
						field:  fmt.Sprintf("Parameters[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	for idx, item := range m.GetInputs() {
		_, _ = idx, item

		if all {
			switch v := interface{}(item).(type) {
			case interface{ ValidateAll() error }:
				if err := v.ValidateAll(); err != nil {
					errors = append(errors, ModelInferRequestValidationError{
						field:  fmt.Sprintf("Inputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			case interface{ Validate() error }:
				if err := v.Validate(); err != nil {
					errors = append(errors, ModelInferRequestValidationError{
						field:  fmt.Sprintf("Inputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			}
		} else if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return ModelInferRequestValidationError{
					field:  fmt.Sprintf("Inputs[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}

	}

	for idx, item := range m.GetOutputs() {
		_, _ = idx, item

		if all {
			switch v := interface{}(item).(type) {
			case interface{ ValidateAll() error }:
				if err := v.ValidateAll(); err != nil {
					errors = append(errors, ModelInferRequestValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			case interface{ Validate() error }:
				if err := v.Validate(); err != nil {
					errors = append(errors, ModelInferRequestValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			}
		} else if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return ModelInferRequestValidationError{
					field:  fmt.Sprintf("Outputs[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}

	}

	if len(errors) > 0 {
		return ModelInferRequestMultiError(errors)
	}

	return nil
}

// ModelInferRequestMultiError is an error wrapping multiple validation errors
// returned by ModelInferRequest.ValidateAll() if the designated constraints
// aren't met.
type ModelInferRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ModelInferRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ModelInferRequestMultiError) AllErrors() []error { return m }

// ModelInferRequestValidationError is the validation error returned by
// ModelInferRequest.Validate if the designated constraints aren't met.
type ModelInferRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ModelInferRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ModelInferRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ModelInferRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e ModelInferRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e ModelInferRequestValidationError) ErrorName() string {
	return "ModelInferRequestValidationError"
}

// Error satisfies the builtin error interface
func (e ModelInferRequestValidationError) Error() string {
	// Formats as: invalid [key for ]ModelInferRequest.<field>: <reason>[ | caused by: <cause>]
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sModelInferRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

var _ error = ModelInferRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = ModelInferRequestValidationError{}

// Validate checks the field values on ModelInferResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *ModelInferResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on ModelInferResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// ModelInferResponseMultiError, or nil if none found.
func (m *ModelInferResponse) ValidateAll() error {
	return m.validate(true)
}

func (m *ModelInferResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for ModelName

	// no validation rules for ModelVersion

	// no validation rules for Id

	{
		// Iterate the Parameters map in sorted key order so error output is
		// deterministic across runs (Go map iteration order is random).
		sorted_keys := make([]string, len(m.GetParameters()))
		i := 0
		for key := range m.GetParameters() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetParameters()[key]
			_ = val

			// no validation rules for Parameters[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, ModelInferResponseValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, ModelInferResponseValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return ModelInferResponseValidationError{
						field:  fmt.Sprintf("Parameters[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	for idx, item := range m.GetOutputs() {
		_, _ = idx, item

		if all {
			switch v := interface{}(item).(type) {
			case interface{ ValidateAll() error }:
				if err := v.ValidateAll(); err != nil {
					errors = append(errors, ModelInferResponseValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			case interface{ Validate() error }:
				if err := v.Validate(); err != nil {
					errors = append(errors, ModelInferResponseValidationError{
						field:  fmt.Sprintf("Outputs[%v]", idx),
						reason: "embedded message failed validation",
						cause:  err,
					})
				}
			}
		} else if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return ModelInferResponseValidationError{
					field:  fmt.Sprintf("Outputs[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}

	}

	if len(errors) > 0 {
		return ModelInferResponseMultiError(errors)
	}

	return nil
}

// ModelInferResponseMultiError is an error wrapping multiple validation errors
// returned by ModelInferResponse.ValidateAll() if the designated constraints
// aren't met.
type ModelInferResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ModelInferResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ModelInferResponseMultiError) AllErrors() []error { return m }

// ModelInferResponseValidationError is the validation error returned by
// ModelInferResponse.Validate if the designated constraints aren't met.
type ModelInferResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ModelInferResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ModelInferResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ModelInferResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
+func (e ModelInferResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelInferResponseValidationError) ErrorName() string { + return "ModelInferResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInferResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInferResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInferResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInferResponseValidationError{} + +// Validate checks the field values on ModelStreamInferResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelStreamInferResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelStreamInferResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelStreamInferResponseMultiError, or nil if none found. 
+func (m *ModelStreamInferResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelStreamInferResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for ErrorMessage + + if all { + switch v := interface{}(m.GetInferResponse()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelStreamInferResponseValidationError{ + field: "InferResponse", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelStreamInferResponseValidationError{ + field: "InferResponse", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetInferResponse()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelStreamInferResponseValidationError{ + field: "InferResponse", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ModelStreamInferResponseMultiError(errors) + } + + return nil +} + +// ModelStreamInferResponseMultiError is an error wrapping multiple validation +// errors returned by ModelStreamInferResponse.ValidateAll() if the designated +// constraints aren't met. +type ModelStreamInferResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelStreamInferResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelStreamInferResponseMultiError) AllErrors() []error { return m } + +// ModelStreamInferResponseValidationError is the validation error returned by +// ModelStreamInferResponse.Validate if the designated constraints aren't met. 
+type ModelStreamInferResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelStreamInferResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelStreamInferResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelStreamInferResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelStreamInferResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelStreamInferResponseValidationError) ErrorName() string { + return "ModelStreamInferResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelStreamInferResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelStreamInferResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelStreamInferResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelStreamInferResponseValidationError{} + +// Validate checks the field values on ModelConfigRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelConfigRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelConfigRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelConfigRequestMultiError, or nil if none found. 
+func (m *ModelConfigRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelConfigRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + if len(errors) > 0 { + return ModelConfigRequestMultiError(errors) + } + + return nil +} + +// ModelConfigRequestMultiError is an error wrapping multiple validation errors +// returned by ModelConfigRequest.ValidateAll() if the designated constraints +// aren't met. +type ModelConfigRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelConfigRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelConfigRequestMultiError) AllErrors() []error { return m } + +// ModelConfigRequestValidationError is the validation error returned by +// ModelConfigRequest.Validate if the designated constraints aren't met. +type ModelConfigRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelConfigRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelConfigRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelConfigRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelConfigRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelConfigRequestValidationError) ErrorName() string { + return "ModelConfigRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelConfigRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelConfigRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelConfigRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelConfigRequestValidationError{} + +// Validate checks the field values on ModelConfigResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelConfigResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelConfigResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelConfigResponseMultiError, or nil if none found. 
+func (m *ModelConfigResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelConfigResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetConfig()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigResponseValidationError{ + field: "Config", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigResponseValidationError{ + field: "Config", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetConfig()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigResponseValidationError{ + field: "Config", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ModelConfigResponseMultiError(errors) + } + + return nil +} + +// ModelConfigResponseMultiError is an error wrapping multiple validation +// errors returned by ModelConfigResponse.ValidateAll() if the designated +// constraints aren't met. +type ModelConfigResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelConfigResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelConfigResponseMultiError) AllErrors() []error { return m } + +// ModelConfigResponseValidationError is the validation error returned by +// ModelConfigResponse.Validate if the designated constraints aren't met. +type ModelConfigResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e ModelConfigResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelConfigResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelConfigResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelConfigResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelConfigResponseValidationError) ErrorName() string { + return "ModelConfigResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelConfigResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelConfigResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelConfigResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelConfigResponseValidationError{} + +// Validate checks the field values on ModelStatisticsRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelStatisticsRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelStatisticsRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelStatisticsRequestMultiError, or nil if none found. 
+func (m *ModelStatisticsRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelStatisticsRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + if len(errors) > 0 { + return ModelStatisticsRequestMultiError(errors) + } + + return nil +} + +// ModelStatisticsRequestMultiError is an error wrapping multiple validation +// errors returned by ModelStatisticsRequest.ValidateAll() if the designated +// constraints aren't met. +type ModelStatisticsRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelStatisticsRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelStatisticsRequestMultiError) AllErrors() []error { return m } + +// ModelStatisticsRequestValidationError is the validation error returned by +// ModelStatisticsRequest.Validate if the designated constraints aren't met. +type ModelStatisticsRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelStatisticsRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelStatisticsRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelStatisticsRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelStatisticsRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelStatisticsRequestValidationError) ErrorName() string { + return "ModelStatisticsRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelStatisticsRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelStatisticsRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelStatisticsRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelStatisticsRequestValidationError{} + +// Validate checks the field values on StatisticDuration with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *StatisticDuration) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on StatisticDuration with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// StatisticDurationMultiError, or nil if none found. +func (m *StatisticDuration) ValidateAll() error { + return m.validate(true) +} + +func (m *StatisticDuration) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Count + + // no validation rules for Ns + + if len(errors) > 0 { + return StatisticDurationMultiError(errors) + } + + return nil +} + +// StatisticDurationMultiError is an error wrapping multiple validation errors +// returned by StatisticDuration.ValidateAll() if the designated constraints +// aren't met. +type StatisticDurationMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m StatisticDurationMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m StatisticDurationMultiError) AllErrors() []error { return m } + +// StatisticDurationValidationError is the validation error returned by +// StatisticDuration.Validate if the designated constraints aren't met. +type StatisticDurationValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e StatisticDurationValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e StatisticDurationValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e StatisticDurationValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e StatisticDurationValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e StatisticDurationValidationError) ErrorName() string { + return "StatisticDurationValidationError" +} + +// Error satisfies the builtin error interface +func (e StatisticDurationValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sStatisticDuration.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = StatisticDurationValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = StatisticDurationValidationError{} + +// Validate checks the field values on InferStatistics with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. 
+func (m *InferStatistics) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on InferStatistics with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// InferStatisticsMultiError, or nil if none found. +func (m *InferStatistics) ValidateAll() error { + return m.validate(true) +} + +func (m *InferStatistics) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetSuccess()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Success", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Success", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetSuccess()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "Success", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetFail()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Fail", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Fail", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetFail()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ 
+ field: "Fail", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetQueue()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Queue", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "Queue", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetQueue()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "Queue", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetComputeInput()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeInput()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetComputeInfer()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + }) + } + case 
interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeInfer()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetComputeOutput()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeOutput()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetCacheHit()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "CacheHit", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "CacheHit", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetCacheHit()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return 
InferStatisticsValidationError{ + field: "CacheHit", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetCacheMiss()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "CacheMiss", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferStatisticsValidationError{ + field: "CacheMiss", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetCacheMiss()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferStatisticsValidationError{ + field: "CacheMiss", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return InferStatisticsMultiError(errors) + } + + return nil +} + +// InferStatisticsMultiError is an error wrapping multiple validation errors +// returned by InferStatistics.ValidateAll() if the designated constraints +// aren't met. +type InferStatisticsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m InferStatisticsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m InferStatisticsMultiError) AllErrors() []error { return m } + +// InferStatisticsValidationError is the validation error returned by +// InferStatistics.Validate if the designated constraints aren't met. +type InferStatisticsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e InferStatisticsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e InferStatisticsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e InferStatisticsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e InferStatisticsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e InferStatisticsValidationError) ErrorName() string { return "InferStatisticsValidationError" } + +// Error satisfies the builtin error interface +func (e InferStatisticsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sInferStatistics.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = InferStatisticsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = InferStatisticsValidationError{} + +// Validate checks the field values on InferBatchStatistics with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *InferBatchStatistics) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on InferBatchStatistics with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// InferBatchStatisticsMultiError, or nil if none found. 
+func (m *InferBatchStatistics) ValidateAll() error { + return m.validate(true) +} + +func (m *InferBatchStatistics) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for BatchSize + + if all { + switch v := interface{}(m.GetComputeInput()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeInput()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferBatchStatisticsValidationError{ + field: "ComputeInput", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetComputeInfer()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeInfer()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferBatchStatisticsValidationError{ + field: "ComputeInfer", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetComputeOutput()).(type) { + case interface{ ValidateAll() 
error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, InferBatchStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetComputeOutput()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return InferBatchStatisticsValidationError{ + field: "ComputeOutput", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return InferBatchStatisticsMultiError(errors) + } + + return nil +} + +// InferBatchStatisticsMultiError is an error wrapping multiple validation +// errors returned by InferBatchStatistics.ValidateAll() if the designated +// constraints aren't met. +type InferBatchStatisticsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m InferBatchStatisticsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m InferBatchStatisticsMultiError) AllErrors() []error { return m } + +// InferBatchStatisticsValidationError is the validation error returned by +// InferBatchStatistics.Validate if the designated constraints aren't met. +type InferBatchStatisticsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e InferBatchStatisticsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e InferBatchStatisticsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e InferBatchStatisticsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e InferBatchStatisticsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e InferBatchStatisticsValidationError) ErrorName() string { + return "InferBatchStatisticsValidationError" +} + +// Error satisfies the builtin error interface +func (e InferBatchStatisticsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sInferBatchStatistics.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = InferBatchStatisticsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = InferBatchStatisticsValidationError{} + +// Validate checks the field values on MemoryUsage with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *MemoryUsage) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on MemoryUsage with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in MemoryUsageMultiError, or +// nil if none found. 
+func (m *MemoryUsage) ValidateAll() error { + return m.validate(true) +} + +func (m *MemoryUsage) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Type + + // no validation rules for Id + + // no validation rules for ByteSize + + if len(errors) > 0 { + return MemoryUsageMultiError(errors) + } + + return nil +} + +// MemoryUsageMultiError is an error wrapping multiple validation errors +// returned by MemoryUsage.ValidateAll() if the designated constraints aren't met. +type MemoryUsageMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m MemoryUsageMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m MemoryUsageMultiError) AllErrors() []error { return m } + +// MemoryUsageValidationError is the validation error returned by +// MemoryUsage.Validate if the designated constraints aren't met. +type MemoryUsageValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e MemoryUsageValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e MemoryUsageValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e MemoryUsageValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e MemoryUsageValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e MemoryUsageValidationError) ErrorName() string { return "MemoryUsageValidationError" } + +// Error satisfies the builtin error interface +func (e MemoryUsageValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sMemoryUsage.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = MemoryUsageValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = MemoryUsageValidationError{} + +// Validate checks the field values on ModelStatistics with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelStatistics) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelStatistics with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelStatisticsMultiError, or nil if none found. 
+func (m *ModelStatistics) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelStatistics) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + // no validation rules for LastInference + + // no validation rules for InferenceCount + + // no validation rules for ExecutionCount + + if all { + switch v := interface{}(m.GetInferenceStats()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: "InferenceStats", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: "InferenceStats", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetInferenceStats()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelStatisticsValidationError{ + field: "InferenceStats", + reason: "embedded message failed validation", + cause: err, + } + } + } + + for idx, item := range m.GetBatchStats() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: fmt.Sprintf("BatchStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: fmt.Sprintf("BatchStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return 
ModelStatisticsValidationError{ + field: fmt.Sprintf("BatchStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + for idx, item := range m.GetMemoryUsage() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: fmt.Sprintf("MemoryUsage[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelStatisticsValidationError{ + field: fmt.Sprintf("MemoryUsage[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelStatisticsValidationError{ + field: fmt.Sprintf("MemoryUsage[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelStatisticsMultiError(errors) + } + + return nil +} + +// ModelStatisticsMultiError is an error wrapping multiple validation errors +// returned by ModelStatistics.ValidateAll() if the designated constraints +// aren't met. +type ModelStatisticsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelStatisticsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelStatisticsMultiError) AllErrors() []error { return m } + +// ModelStatisticsValidationError is the validation error returned by +// ModelStatistics.Validate if the designated constraints aren't met. 
+type ModelStatisticsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelStatisticsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelStatisticsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelStatisticsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelStatisticsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelStatisticsValidationError) ErrorName() string { return "ModelStatisticsValidationError" } + +// Error satisfies the builtin error interface +func (e ModelStatisticsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelStatistics.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelStatisticsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelStatisticsValidationError{} + +// Validate checks the field values on ModelStatisticsResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelStatisticsResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelStatisticsResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelStatisticsResponseMultiError, or nil if none found. 
+func (m *ModelStatisticsResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelStatisticsResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetModelStats() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelStatisticsResponseValidationError{ + field: fmt.Sprintf("ModelStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelStatisticsResponseValidationError{ + field: fmt.Sprintf("ModelStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelStatisticsResponseValidationError{ + field: fmt.Sprintf("ModelStats[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelStatisticsResponseMultiError(errors) + } + + return nil +} + +// ModelStatisticsResponseMultiError is an error wrapping multiple validation +// errors returned by ModelStatisticsResponse.ValidateAll() if the designated +// constraints aren't met. +type ModelStatisticsResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelStatisticsResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelStatisticsResponseMultiError) AllErrors() []error { return m } + +// ModelStatisticsResponseValidationError is the validation error returned by +// ModelStatisticsResponse.Validate if the designated constraints aren't met. +type ModelStatisticsResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelStatisticsResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelStatisticsResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelStatisticsResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelStatisticsResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelStatisticsResponseValidationError) ErrorName() string { + return "ModelStatisticsResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelStatisticsResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelStatisticsResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelStatisticsResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelStatisticsResponseValidationError{} + +// Validate checks the field values on ModelRepositoryParameter with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *ModelRepositoryParameter) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelRepositoryParameter with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelRepositoryParameterMultiError, or nil if none found. +func (m *ModelRepositoryParameter) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelRepositoryParameter) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + switch v := m.ParameterChoice.(type) { + case *ModelRepositoryParameter_BoolParam: + if v == nil { + err := ModelRepositoryParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for BoolParam + case *ModelRepositoryParameter_Int64Param: + if v == nil { + err := ModelRepositoryParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for Int64Param + case *ModelRepositoryParameter_StringParam: + if v == nil { + err := ModelRepositoryParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for StringParam + case *ModelRepositoryParameter_BytesParam: + if v == nil { + err := ModelRepositoryParameterValidationError{ + field: "ParameterChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for BytesParam + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelRepositoryParameterMultiError(errors) + } + + return nil +} + +// ModelRepositoryParameterMultiError is an error 
wrapping multiple validation +// errors returned by ModelRepositoryParameter.ValidateAll() if the designated +// constraints aren't met. +type ModelRepositoryParameterMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelRepositoryParameterMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelRepositoryParameterMultiError) AllErrors() []error { return m } + +// ModelRepositoryParameterValidationError is the validation error returned by +// ModelRepositoryParameter.Validate if the designated constraints aren't met. +type ModelRepositoryParameterValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelRepositoryParameterValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelRepositoryParameterValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelRepositoryParameterValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelRepositoryParameterValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelRepositoryParameterValidationError) ErrorName() string { + return "ModelRepositoryParameterValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelRepositoryParameterValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelRepositoryParameter.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelRepositoryParameterValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelRepositoryParameterValidationError{} + +// Validate checks the field values on RepositoryIndexRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *RepositoryIndexRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryIndexRequest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// RepositoryIndexRequestMultiError, or nil if none found. +func (m *RepositoryIndexRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryIndexRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for RepositoryName + + // no validation rules for Ready + + if len(errors) > 0 { + return RepositoryIndexRequestMultiError(errors) + } + + return nil +} + +// RepositoryIndexRequestMultiError is an error wrapping multiple validation +// errors returned by RepositoryIndexRequest.ValidateAll() if the designated +// constraints aren't met. 
+type RepositoryIndexRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m RepositoryIndexRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryIndexRequestMultiError) AllErrors() []error { return m } + +// RepositoryIndexRequestValidationError is the validation error returned by +// RepositoryIndexRequest.Validate if the designated constraints aren't met. +type RepositoryIndexRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryIndexRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryIndexRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryIndexRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryIndexRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e RepositoryIndexRequestValidationError) ErrorName() string { + return "RepositoryIndexRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryIndexRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryIndexRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryIndexRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryIndexRequestValidationError{} + +// Validate checks the field values on RepositoryIndexResponse with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *RepositoryIndexResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryIndexResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// RepositoryIndexResponseMultiError, or nil if none found. 
+func (m *RepositoryIndexResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryIndexResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetModels() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, RepositoryIndexResponseValidationError{ + field: fmt.Sprintf("Models[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, RepositoryIndexResponseValidationError{ + field: fmt.Sprintf("Models[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return RepositoryIndexResponseValidationError{ + field: fmt.Sprintf("Models[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return RepositoryIndexResponseMultiError(errors) + } + + return nil +} + +// RepositoryIndexResponseMultiError is an error wrapping multiple validation +// errors returned by RepositoryIndexResponse.ValidateAll() if the designated +// constraints aren't met. +type RepositoryIndexResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m RepositoryIndexResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m RepositoryIndexResponseMultiError) AllErrors() []error { return m } + +// RepositoryIndexResponseValidationError is the validation error returned by +// RepositoryIndexResponse.Validate if the designated constraints aren't met. +type RepositoryIndexResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryIndexResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryIndexResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryIndexResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryIndexResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e RepositoryIndexResponseValidationError) ErrorName() string { + return "RepositoryIndexResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryIndexResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryIndexResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryIndexResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryIndexResponseValidationError{} + +// Validate checks the field values on RepositoryModelLoadRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. 
+func (m *RepositoryModelLoadRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryModelLoadRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// RepositoryModelLoadRequestMultiError, or nil if none found. +func (m *RepositoryModelLoadRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryModelLoadRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for RepositoryName + + // no validation rules for ModelName + + { + sorted_keys := make([]string, len(m.GetParameters())) + i := 0 + for key := range m.GetParameters() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetParameters()[key] + _ = val + + // no validation rules for Parameters[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, RepositoryModelLoadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, RepositoryModelLoadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return RepositoryModelLoadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return RepositoryModelLoadRequestMultiError(errors) + } + 
+ return nil +} + +// RepositoryModelLoadRequestMultiError is an error wrapping multiple +// validation errors returned by RepositoryModelLoadRequest.ValidateAll() if +// the designated constraints aren't met. +type RepositoryModelLoadRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m RepositoryModelLoadRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryModelLoadRequestMultiError) AllErrors() []error { return m } + +// RepositoryModelLoadRequestValidationError is the validation error returned +// by RepositoryModelLoadRequest.Validate if the designated constraints aren't met. +type RepositoryModelLoadRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryModelLoadRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryModelLoadRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryModelLoadRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryModelLoadRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e RepositoryModelLoadRequestValidationError) ErrorName() string { + return "RepositoryModelLoadRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryModelLoadRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryModelLoadRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryModelLoadRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryModelLoadRequestValidationError{} + +// Validate checks the field values on RepositoryModelLoadResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *RepositoryModelLoadResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryModelLoadResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// RepositoryModelLoadResponseMultiError, or nil if none found. +func (m *RepositoryModelLoadResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryModelLoadResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return RepositoryModelLoadResponseMultiError(errors) + } + + return nil +} + +// RepositoryModelLoadResponseMultiError is an error wrapping multiple +// validation errors returned by RepositoryModelLoadResponse.ValidateAll() if +// the designated constraints aren't met. +type RepositoryModelLoadResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m RepositoryModelLoadResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryModelLoadResponseMultiError) AllErrors() []error { return m } + +// RepositoryModelLoadResponseValidationError is the validation error returned +// by RepositoryModelLoadResponse.Validate if the designated constraints +// aren't met. +type RepositoryModelLoadResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryModelLoadResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryModelLoadResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryModelLoadResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryModelLoadResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e RepositoryModelLoadResponseValidationError) ErrorName() string { + return "RepositoryModelLoadResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryModelLoadResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryModelLoadResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryModelLoadResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryModelLoadResponseValidationError{} + +// Validate checks the field values on RepositoryModelUnloadRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *RepositoryModelUnloadRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryModelUnloadRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// RepositoryModelUnloadRequestMultiError, or nil if none found. 
+func (m *RepositoryModelUnloadRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryModelUnloadRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for RepositoryName + + // no validation rules for ModelName + + { + sorted_keys := make([]string, len(m.GetParameters())) + i := 0 + for key := range m.GetParameters() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetParameters()[key] + _ = val + + // no validation rules for Parameters[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, RepositoryModelUnloadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, RepositoryModelUnloadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return RepositoryModelUnloadRequestValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return RepositoryModelUnloadRequestMultiError(errors) + } + + return nil +} + +// RepositoryModelUnloadRequestMultiError is an error wrapping multiple +// validation errors returned by RepositoryModelUnloadRequest.ValidateAll() if +// the designated constraints aren't met. +type RepositoryModelUnloadRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m RepositoryModelUnloadRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryModelUnloadRequestMultiError) AllErrors() []error { return m } + +// RepositoryModelUnloadRequestValidationError is the validation error returned +// by RepositoryModelUnloadRequest.Validate if the designated constraints +// aren't met. +type RepositoryModelUnloadRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryModelUnloadRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryModelUnloadRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryModelUnloadRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryModelUnloadRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e RepositoryModelUnloadRequestValidationError) ErrorName() string { + return "RepositoryModelUnloadRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryModelUnloadRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryModelUnloadRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryModelUnloadRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryModelUnloadRequestValidationError{} + +// Validate checks the field values on RepositoryModelUnloadResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *RepositoryModelUnloadResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryModelUnloadResponse with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// RepositoryModelUnloadResponseMultiError, or nil if none found. +func (m *RepositoryModelUnloadResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryModelUnloadResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return RepositoryModelUnloadResponseMultiError(errors) + } + + return nil +} + +// RepositoryModelUnloadResponseMultiError is an error wrapping multiple +// validation errors returned by RepositoryModelUnloadResponse.ValidateAll() +// if the designated constraints aren't met. 
+type RepositoryModelUnloadResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m RepositoryModelUnloadResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryModelUnloadResponseMultiError) AllErrors() []error { return m } + +// RepositoryModelUnloadResponseValidationError is the validation error +// returned by RepositoryModelUnloadResponse.Validate if the designated +// constraints aren't met. +type RepositoryModelUnloadResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryModelUnloadResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryModelUnloadResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryModelUnloadResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RepositoryModelUnloadResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e RepositoryModelUnloadResponseValidationError) ErrorName() string { + return "RepositoryModelUnloadResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryModelUnloadResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryModelUnloadResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryModelUnloadResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryModelUnloadResponseValidationError{} + +// Validate checks the field values on SystemSharedMemoryStatusRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *SystemSharedMemoryStatusRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryStatusRequest with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryStatusRequestMultiError, or nil if none found. +func (m *SystemSharedMemoryStatusRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryStatusRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + if len(errors) > 0 { + return SystemSharedMemoryStatusRequestMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryStatusRequestMultiError is an error wrapping multiple +// validation errors returned by SystemSharedMemoryStatusRequest.ValidateAll() +// if the designated constraints aren't met. 
+type SystemSharedMemoryStatusRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryStatusRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryStatusRequestMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryStatusRequestValidationError is the validation error +// returned by SystemSharedMemoryStatusRequest.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryStatusRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryStatusRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryStatusRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryStatusRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryStatusRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SystemSharedMemoryStatusRequestValidationError) ErrorName() string { + return "SystemSharedMemoryStatusRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryStatusRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryStatusRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryStatusRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryStatusRequestValidationError{} + +// Validate checks the field values on SystemSharedMemoryStatusResponse with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *SystemSharedMemoryStatusResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryStatusResponse with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryStatusResponseMultiError, or nil if none found. 
+func (m *SystemSharedMemoryStatusResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryStatusResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + { + sorted_keys := make([]string, len(m.GetRegions())) + i := 0 + for key := range m.GetRegions() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetRegions()[key] + _ = val + + // no validation rules for Regions[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, SystemSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, SystemSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SystemSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return SystemSharedMemoryStatusResponseMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryStatusResponseMultiError is an error wrapping multiple +// validation errors returned by +// SystemSharedMemoryStatusResponse.ValidateAll() if the designated +// constraints aren't met. +type SystemSharedMemoryStatusResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m SystemSharedMemoryStatusResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryStatusResponseMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryStatusResponseValidationError is the validation error +// returned by SystemSharedMemoryStatusResponse.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryStatusResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryStatusResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryStatusResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryStatusResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryStatusResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SystemSharedMemoryStatusResponseValidationError) ErrorName() string { + return "SystemSharedMemoryStatusResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryStatusResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryStatusResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryStatusResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryStatusResponseValidationError{} + +// Validate checks the field values on SystemSharedMemoryRegisterRequest with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *SystemSharedMemoryRegisterRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryRegisterRequest +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryRegisterRequestMultiError, or nil if none found. 
+func (m *SystemSharedMemoryRegisterRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryRegisterRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Key + + // no validation rules for Offset + + // no validation rules for ByteSize + + if len(errors) > 0 { + return SystemSharedMemoryRegisterRequestMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryRegisterRequestMultiError is an error wrapping multiple +// validation errors returned by +// SystemSharedMemoryRegisterRequest.ValidateAll() if the designated +// constraints aren't met. +type SystemSharedMemoryRegisterRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryRegisterRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryRegisterRequestMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryRegisterRequestValidationError is the validation error +// returned by SystemSharedMemoryRegisterRequest.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryRegisterRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryRegisterRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryRegisterRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryRegisterRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e SystemSharedMemoryRegisterRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e SystemSharedMemoryRegisterRequestValidationError) ErrorName() string { + return "SystemSharedMemoryRegisterRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryRegisterRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryRegisterRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryRegisterRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryRegisterRequestValidationError{} + +// Validate checks the field values on SystemSharedMemoryRegisterResponse with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *SystemSharedMemoryRegisterResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryRegisterResponse +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryRegisterResponseMultiError, or nil if none found. 
+func (m *SystemSharedMemoryRegisterResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryRegisterResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return SystemSharedMemoryRegisterResponseMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryRegisterResponseMultiError is an error wrapping multiple +// validation errors returned by +// SystemSharedMemoryRegisterResponse.ValidateAll() if the designated +// constraints aren't met. +type SystemSharedMemoryRegisterResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryRegisterResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryRegisterResponseMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryRegisterResponseValidationError is the validation error +// returned by SystemSharedMemoryRegisterResponse.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryRegisterResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryRegisterResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryRegisterResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryRegisterResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryRegisterResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SystemSharedMemoryRegisterResponseValidationError) ErrorName() string { + return "SystemSharedMemoryRegisterResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryRegisterResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryRegisterResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryRegisterResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryRegisterResponseValidationError{} + +// Validate checks the field values on SystemSharedMemoryUnregisterRequest with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *SystemSharedMemoryUnregisterRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryUnregisterRequest +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryUnregisterRequestMultiError, or nil if none found. 
+func (m *SystemSharedMemoryUnregisterRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryUnregisterRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + if len(errors) > 0 { + return SystemSharedMemoryUnregisterRequestMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryUnregisterRequestMultiError is an error wrapping multiple +// validation errors returned by +// SystemSharedMemoryUnregisterRequest.ValidateAll() if the designated +// constraints aren't met. +type SystemSharedMemoryUnregisterRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryUnregisterRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryUnregisterRequestMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryUnregisterRequestValidationError is the validation error +// returned by SystemSharedMemoryUnregisterRequest.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryUnregisterRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryUnregisterRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryUnregisterRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryUnregisterRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryUnregisterRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SystemSharedMemoryUnregisterRequestValidationError) ErrorName() string { + return "SystemSharedMemoryUnregisterRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryUnregisterRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryUnregisterRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryUnregisterRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryUnregisterRequestValidationError{} + +// Validate checks the field values on SystemSharedMemoryUnregisterResponse +// with the rules defined in the proto definition for this message. If any +// rules are violated, the first error encountered is returned, or nil if +// there are no violations. +func (m *SystemSharedMemoryUnregisterResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on SystemSharedMemoryUnregisterResponse +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// SystemSharedMemoryUnregisterResponseMultiError, or nil if none found. 
+func (m *SystemSharedMemoryUnregisterResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryUnregisterResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return SystemSharedMemoryUnregisterResponseMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryUnregisterResponseMultiError is an error wrapping multiple +// validation errors returned by +// SystemSharedMemoryUnregisterResponse.ValidateAll() if the designated +// constraints aren't met. +type SystemSharedMemoryUnregisterResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryUnregisterResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryUnregisterResponseMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryUnregisterResponseValidationError is the validation error +// returned by SystemSharedMemoryUnregisterResponse.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryUnregisterResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryUnregisterResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryUnregisterResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SystemSharedMemoryUnregisterResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryUnregisterResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SystemSharedMemoryUnregisterResponseValidationError) ErrorName() string { + return "SystemSharedMemoryUnregisterResponseValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryUnregisterResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryUnregisterResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryUnregisterResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryUnregisterResponseValidationError{} + +// Validate checks the field values on CudaSharedMemoryStatusRequest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *CudaSharedMemoryStatusRequest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CudaSharedMemoryStatusRequest with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// CudaSharedMemoryStatusRequestMultiError, or nil if none found. +func (m *CudaSharedMemoryStatusRequest) ValidateAll() error { + return m.validate(true) +} + +func (m *CudaSharedMemoryStatusRequest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + if len(errors) > 0 { + return CudaSharedMemoryStatusRequestMultiError(errors) + } + + return nil +} + +// CudaSharedMemoryStatusRequestMultiError is an error wrapping multiple +// validation errors returned by CudaSharedMemoryStatusRequest.ValidateAll() +// if the designated constraints aren't met. 
+type CudaSharedMemoryStatusRequestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CudaSharedMemoryStatusRequestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CudaSharedMemoryStatusRequestMultiError) AllErrors() []error { return m } + +// CudaSharedMemoryStatusRequestValidationError is the validation error +// returned by CudaSharedMemoryStatusRequest.Validate if the designated +// constraints aren't met. +type CudaSharedMemoryStatusRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CudaSharedMemoryStatusRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CudaSharedMemoryStatusRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CudaSharedMemoryStatusRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CudaSharedMemoryStatusRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e CudaSharedMemoryStatusRequestValidationError) ErrorName() string { + return "CudaSharedMemoryStatusRequestValidationError" +} + +// Error satisfies the builtin error interface +func (e CudaSharedMemoryStatusRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCudaSharedMemoryStatusRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CudaSharedMemoryStatusRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CudaSharedMemoryStatusRequestValidationError{} + +// Validate checks the field values on CudaSharedMemoryStatusResponse with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *CudaSharedMemoryStatusResponse) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on CudaSharedMemoryStatusResponse with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// CudaSharedMemoryStatusResponseMultiError, or nil if none found. 
+func (m *CudaSharedMemoryStatusResponse) ValidateAll() error { + return m.validate(true) +} + +func (m *CudaSharedMemoryStatusResponse) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + { + sorted_keys := make([]string, len(m.GetRegions())) + i := 0 + for key := range m.GetRegions() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetRegions()[key] + _ = val + + // no validation rules for Regions[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, CudaSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, CudaSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return CudaSharedMemoryStatusResponseValidationError{ + field: fmt.Sprintf("Regions[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return CudaSharedMemoryStatusResponseMultiError(errors) + } + + return nil +} + +// CudaSharedMemoryStatusResponseMultiError is an error wrapping multiple +// validation errors returned by CudaSharedMemoryStatusResponse.ValidateAll() +// if the designated constraints aren't met. +type CudaSharedMemoryStatusResponseMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m CudaSharedMemoryStatusResponseMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CudaSharedMemoryStatusResponseMultiError) AllErrors() []error { return m } + +// CudaSharedMemoryStatusResponseValidationError is the validation error +// returned by CudaSharedMemoryStatusResponse.Validate if the designated +// constraints aren't met. +type CudaSharedMemoryStatusResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CudaSharedMemoryStatusResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CudaSharedMemoryStatusResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e CudaSharedMemoryStatusResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CudaSharedMemoryStatusResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
func (e CudaSharedMemoryStatusResponseValidationError) ErrorName() string {
	return "CudaSharedMemoryStatusResponseValidationError"
}

// Error satisfies the builtin error interface
func (e CudaSharedMemoryStatusResponseValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sCudaSharedMemoryStatusResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time assertions that the error type satisfies both the builtin
// error interface and the protoc-gen-validate accessor interface.
var _ error = CudaSharedMemoryStatusResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = CudaSharedMemoryStatusResponseValidationError{}

// Validate checks the field values on CudaSharedMemoryRegisterRequest with the
// rules defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *CudaSharedMemoryRegisterRequest) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on CudaSharedMemoryRegisterRequest with
// the rules defined in the proto definition for this message. If any rules
// are violated, the result is a list of violation errors wrapped in
// CudaSharedMemoryRegisterRequestMultiError, or nil if none found.
func (m *CudaSharedMemoryRegisterRequest) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message. When all is true every
// violation is collected and returned as a MultiError; otherwise the first
// violation is returned immediately (fail-fast). A nil receiver is valid.
func (m *CudaSharedMemoryRegisterRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	// no validation rules for RawHandle

	// no validation rules for DeviceId

	// no validation rules for ByteSize

	if len(errors) > 0 {
		return CudaSharedMemoryRegisterRequestMultiError(errors)
	}

	return nil
}

// CudaSharedMemoryRegisterRequestMultiError is an error wrapping multiple
// validation errors returned by CudaSharedMemoryRegisterRequest.ValidateAll()
// if the designated constraints aren't met.
type CudaSharedMemoryRegisterRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m CudaSharedMemoryRegisterRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m CudaSharedMemoryRegisterRequestMultiError) AllErrors() []error { return m }

// CudaSharedMemoryRegisterRequestValidationError is the validation error
// returned by CudaSharedMemoryRegisterRequest.Validate if the designated
// constraints aren't met.
type CudaSharedMemoryRegisterRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e CudaSharedMemoryRegisterRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e CudaSharedMemoryRegisterRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e CudaSharedMemoryRegisterRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e CudaSharedMemoryRegisterRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e CudaSharedMemoryRegisterRequestValidationError) ErrorName() string {
	return "CudaSharedMemoryRegisterRequestValidationError"
}

// Error satisfies the builtin error interface
func (e CudaSharedMemoryRegisterRequestValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sCudaSharedMemoryRegisterRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

var _ error = CudaSharedMemoryRegisterRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = CudaSharedMemoryRegisterRequestValidationError{}

// Validate checks the field values on CudaSharedMemoryRegisterResponse with
// the rules defined in the proto definition for this message. If any rules
// are violated, the first error encountered is returned, or nil if there are
// no violations.
func (m *CudaSharedMemoryRegisterResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on CudaSharedMemoryRegisterResponse with
// the rules defined in the proto definition for this message. If any rules
// are violated, the result is a list of violation errors wrapped in
// CudaSharedMemoryRegisterResponseMultiError, or nil if none found.
func (m *CudaSharedMemoryRegisterResponse) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message; the proto declares no
// field constraints, so the only check is the nil-receiver guard.
func (m *CudaSharedMemoryRegisterResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	if len(errors) > 0 {
		return CudaSharedMemoryRegisterResponseMultiError(errors)
	}

	return nil
}

// CudaSharedMemoryRegisterResponseMultiError is an error wrapping multiple
// validation errors returned by
// CudaSharedMemoryRegisterResponse.ValidateAll() if the designated
// constraints aren't met.
type CudaSharedMemoryRegisterResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m CudaSharedMemoryRegisterResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m CudaSharedMemoryRegisterResponseMultiError) AllErrors() []error { return m }

// CudaSharedMemoryRegisterResponseValidationError is the validation error
// returned by CudaSharedMemoryRegisterResponse.Validate if the designated
// constraints aren't met.
type CudaSharedMemoryRegisterResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e CudaSharedMemoryRegisterResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e CudaSharedMemoryRegisterResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e CudaSharedMemoryRegisterResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e CudaSharedMemoryRegisterResponseValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e CudaSharedMemoryRegisterResponseValidationError) ErrorName() string {
	return "CudaSharedMemoryRegisterResponseValidationError"
}

// Error satisfies the builtin error interface
func (e CudaSharedMemoryRegisterResponseValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sCudaSharedMemoryRegisterResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = CudaSharedMemoryRegisterResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = CudaSharedMemoryRegisterResponseValidationError{}

// Validate checks the field values on CudaSharedMemoryUnregisterRequest with
// the rules defined in the proto definition for this message. If any rules
// are violated, the first error encountered is returned, or nil if there are
// no violations.
func (m *CudaSharedMemoryUnregisterRequest) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on CudaSharedMemoryUnregisterRequest
// with the rules defined in the proto definition for this message. If any
// rules are violated, the result is a list of violation errors wrapped in
// CudaSharedMemoryUnregisterRequestMultiError, or nil if none found.
func (m *CudaSharedMemoryUnregisterRequest) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message. A nil receiver is valid.
func (m *CudaSharedMemoryUnregisterRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	if len(errors) > 0 {
		return CudaSharedMemoryUnregisterRequestMultiError(errors)
	}

	return nil
}

// CudaSharedMemoryUnregisterRequestMultiError is an error wrapping multiple
// validation errors returned by
// CudaSharedMemoryUnregisterRequest.ValidateAll() if the designated
// constraints aren't met.
type CudaSharedMemoryUnregisterRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m CudaSharedMemoryUnregisterRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m CudaSharedMemoryUnregisterRequestMultiError) AllErrors() []error { return m }

// CudaSharedMemoryUnregisterRequestValidationError is the validation error
// returned by CudaSharedMemoryUnregisterRequest.Validate if the designated
// constraints aren't met.
type CudaSharedMemoryUnregisterRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e CudaSharedMemoryUnregisterRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e CudaSharedMemoryUnregisterRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e CudaSharedMemoryUnregisterRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e CudaSharedMemoryUnregisterRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e CudaSharedMemoryUnregisterRequestValidationError) ErrorName() string {
	return "CudaSharedMemoryUnregisterRequestValidationError"
}

// Error satisfies the builtin error interface
func (e CudaSharedMemoryUnregisterRequestValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sCudaSharedMemoryUnregisterRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = CudaSharedMemoryUnregisterRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = CudaSharedMemoryUnregisterRequestValidationError{}

// Validate checks the field values on CudaSharedMemoryUnregisterResponse with
// the rules defined in the proto definition for this message. If any rules
// are violated, the first error encountered is returned, or nil if there are
// no violations.
func (m *CudaSharedMemoryUnregisterResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on CudaSharedMemoryUnregisterResponse
// with the rules defined in the proto definition for this message. If any
// rules are violated, the result is a list of violation errors wrapped in
// CudaSharedMemoryUnregisterResponseMultiError, or nil if none found.
func (m *CudaSharedMemoryUnregisterResponse) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message; the proto declares no
// field constraints, so the only check is the nil-receiver guard.
func (m *CudaSharedMemoryUnregisterResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	if len(errors) > 0 {
		return CudaSharedMemoryUnregisterResponseMultiError(errors)
	}

	return nil
}

// CudaSharedMemoryUnregisterResponseMultiError is an error wrapping multiple
// validation errors returned by
// CudaSharedMemoryUnregisterResponse.ValidateAll() if the designated
// constraints aren't met.
type CudaSharedMemoryUnregisterResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m CudaSharedMemoryUnregisterResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m CudaSharedMemoryUnregisterResponseMultiError) AllErrors() []error { return m }

// CudaSharedMemoryUnregisterResponseValidationError is the validation error
// returned by CudaSharedMemoryUnregisterResponse.Validate if the designated
// constraints aren't met.
type CudaSharedMemoryUnregisterResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e CudaSharedMemoryUnregisterResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e CudaSharedMemoryUnregisterResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e CudaSharedMemoryUnregisterResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e CudaSharedMemoryUnregisterResponseValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e CudaSharedMemoryUnregisterResponseValidationError) ErrorName() string {
	return "CudaSharedMemoryUnregisterResponseValidationError"
}

// Error satisfies the builtin error interface
func (e CudaSharedMemoryUnregisterResponseValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sCudaSharedMemoryUnregisterResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = CudaSharedMemoryUnregisterResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = CudaSharedMemoryUnregisterResponseValidationError{}

// Validate checks the field values on TraceSettingRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *TraceSettingRequest) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on TraceSettingRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// TraceSettingRequestMultiError, or nil if none found.
func (m *TraceSettingRequest) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message. When all is true every
// violation is collected; otherwise the first violation is returned
// immediately (fail-fast). A nil receiver is valid.
func (m *TraceSettingRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	{
		// Map iteration order is random in Go, so keys are sorted first to
		// make the order of reported violations deterministic.
		sorted_keys := make([]string, len(m.GetSettings()))
		i := 0
		for key := range m.GetSettings() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetSettings()[key]
			_ = val

			// no validation rules for Settings[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, TraceSettingRequestValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, TraceSettingRequestValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return TraceSettingRequestValidationError{
						field:  fmt.Sprintf("Settings[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	// no validation rules for ModelName

	if len(errors) > 0 {
		return TraceSettingRequestMultiError(errors)
	}

	return nil
}

// TraceSettingRequestMultiError is an error wrapping multiple validation
// errors returned by TraceSettingRequest.ValidateAll() if the designated
// constraints aren't met.
type TraceSettingRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m TraceSettingRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m TraceSettingRequestMultiError) AllErrors() []error { return m }

// TraceSettingRequestValidationError is the validation error returned by
// TraceSettingRequest.Validate if the designated constraints aren't met.
type TraceSettingRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e TraceSettingRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e TraceSettingRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e TraceSettingRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e TraceSettingRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e TraceSettingRequestValidationError) ErrorName() string {
	return "TraceSettingRequestValidationError"
}

// Error satisfies the builtin error interface
func (e TraceSettingRequestValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sTraceSettingRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = TraceSettingRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = TraceSettingRequestValidationError{}

// Validate checks the field values on TraceSettingResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *TraceSettingResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on TraceSettingResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// TraceSettingResponseMultiError, or nil if none found.
func (m *TraceSettingResponse) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message, recursing into any
// map values that themselves implement Validate/ValidateAll.
func (m *TraceSettingResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	{
		// Sort keys so violations are reported in a deterministic order.
		sorted_keys := make([]string, len(m.GetSettings()))
		i := 0
		for key := range m.GetSettings() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetSettings()[key]
			_ = val

			// no validation rules for Settings[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, TraceSettingResponseValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, TraceSettingResponseValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return TraceSettingResponseValidationError{
						field:  fmt.Sprintf("Settings[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	if len(errors) > 0 {
		return TraceSettingResponseMultiError(errors)
	}

	return nil
}

// TraceSettingResponseMultiError is an error wrapping multiple validation
// errors returned by TraceSettingResponse.ValidateAll() if the designated
// constraints aren't met.
type TraceSettingResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m TraceSettingResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m TraceSettingResponseMultiError) AllErrors() []error { return m }

// TraceSettingResponseValidationError is the validation error returned by
// TraceSettingResponse.Validate if the designated constraints aren't met.
type TraceSettingResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e TraceSettingResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e TraceSettingResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e TraceSettingResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e TraceSettingResponseValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e TraceSettingResponseValidationError) ErrorName() string {
	return "TraceSettingResponseValidationError"
}

// Error satisfies the builtin error interface
func (e TraceSettingResponseValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sTraceSettingResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = TraceSettingResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = TraceSettingResponseValidationError{}

// Validate checks the field values on LogSettingsRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *LogSettingsRequest) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on LogSettingsRequest with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// LogSettingsRequestMultiError, or nil if none found.
func (m *LogSettingsRequest) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message, recursing into any
// map values that themselves implement Validate/ValidateAll.
func (m *LogSettingsRequest) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	{
		// Sort keys so violations are reported in a deterministic order.
		sorted_keys := make([]string, len(m.GetSettings()))
		i := 0
		for key := range m.GetSettings() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetSettings()[key]
			_ = val

			// no validation rules for Settings[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, LogSettingsRequestValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, LogSettingsRequestValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return LogSettingsRequestValidationError{
						field:  fmt.Sprintf("Settings[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	if len(errors) > 0 {
		return LogSettingsRequestMultiError(errors)
	}

	return nil
}

// LogSettingsRequestMultiError is an error wrapping multiple validation errors
// returned by LogSettingsRequest.ValidateAll() if the designated constraints
// aren't met.
type LogSettingsRequestMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m LogSettingsRequestMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m LogSettingsRequestMultiError) AllErrors() []error { return m }

// LogSettingsRequestValidationError is the validation error returned by
// LogSettingsRequest.Validate if the designated constraints aren't met.
type LogSettingsRequestValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e LogSettingsRequestValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e LogSettingsRequestValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e LogSettingsRequestValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e LogSettingsRequestValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e LogSettingsRequestValidationError) ErrorName() string {
	return "LogSettingsRequestValidationError"
}

// Error satisfies the builtin error interface
func (e LogSettingsRequestValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sLogSettingsRequest.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = LogSettingsRequestValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = LogSettingsRequestValidationError{}

// Validate checks the field values on LogSettingsResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the first error encountered is returned, or nil if there are no violations.
func (m *LogSettingsResponse) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on LogSettingsResponse with the rules
// defined in the proto definition for this message. If any rules are
// violated, the result is a list of violation errors wrapped in
// LogSettingsResponseMultiError, or nil if none found.
func (m *LogSettingsResponse) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message, recursing into any
// map values that themselves implement Validate/ValidateAll.
func (m *LogSettingsResponse) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	{
		// Sort keys so violations are reported in a deterministic order.
		sorted_keys := make([]string, len(m.GetSettings()))
		i := 0
		for key := range m.GetSettings() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetSettings()[key]
			_ = val

			// no validation rules for Settings[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, LogSettingsResponseValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, LogSettingsResponseValidationError{
							field:  fmt.Sprintf("Settings[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return LogSettingsResponseValidationError{
						field:  fmt.Sprintf("Settings[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	if len(errors) > 0 {
		return LogSettingsResponseMultiError(errors)
	}

	return nil
}

// LogSettingsResponseMultiError is an error wrapping multiple validation
// errors returned by LogSettingsResponse.ValidateAll() if the designated
// constraints aren't met.
type LogSettingsResponseMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m LogSettingsResponseMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m LogSettingsResponseMultiError) AllErrors() []error { return m }

// LogSettingsResponseValidationError is the validation error returned by
// LogSettingsResponse.Validate if the designated constraints aren't met.
type LogSettingsResponseValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e LogSettingsResponseValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e LogSettingsResponseValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e LogSettingsResponseValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e LogSettingsResponseValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e LogSettingsResponseValidationError) ErrorName() string {
	return "LogSettingsResponseValidationError"
}

// Error satisfies the builtin error interface
func (e LogSettingsResponseValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sLogSettingsResponse.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = LogSettingsResponseValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = LogSettingsResponseValidationError{}

// Validate checks the field values on ModelMetadataResponse_TensorMetadata
// with the rules defined in the proto definition for this message. If any
// rules are violated, the first error encountered is returned, or nil if
// there are no violations.
func (m *ModelMetadataResponse_TensorMetadata) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on ModelMetadataResponse_TensorMetadata
// with the rules defined in the proto definition for this message. If any
// rules are violated, the result is a list of violation errors wrapped in
// ModelMetadataResponse_TensorMetadataMultiError, or nil if none found.
func (m *ModelMetadataResponse_TensorMetadata) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message. A nil receiver is valid.
func (m *ModelMetadataResponse_TensorMetadata) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	// no validation rules for Datatype

	if len(errors) > 0 {
		return ModelMetadataResponse_TensorMetadataMultiError(errors)
	}

	return nil
}

// ModelMetadataResponse_TensorMetadataMultiError is an error wrapping multiple
// validation errors returned by
// ModelMetadataResponse_TensorMetadata.ValidateAll() if the designated
// constraints aren't met.
type ModelMetadataResponse_TensorMetadataMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ModelMetadataResponse_TensorMetadataMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ModelMetadataResponse_TensorMetadataMultiError) AllErrors() []error { return m }

// ModelMetadataResponse_TensorMetadataValidationError is the validation error
// returned by ModelMetadataResponse_TensorMetadata.Validate if the designated
// constraints aren't met.
type ModelMetadataResponse_TensorMetadataValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ModelMetadataResponse_TensorMetadataValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ModelMetadataResponse_TensorMetadataValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ModelMetadataResponse_TensorMetadataValidationError) Cause() error { return e.cause }

// Key function returns key value.
func (e ModelMetadataResponse_TensorMetadataValidationError) Key() bool { return e.key }

// ErrorName returns error name.
func (e ModelMetadataResponse_TensorMetadataValidationError) ErrorName() string {
	return "ModelMetadataResponse_TensorMetadataValidationError"
}

// Error satisfies the builtin error interface
func (e ModelMetadataResponse_TensorMetadataValidationError) Error() string {
	cause := ""
	if e.cause != nil {
		cause = fmt.Sprintf(" | caused by: %v", e.cause)
	}

	key := ""
	if e.key {
		key = "key for "
	}

	return fmt.Sprintf(
		"invalid %sModelMetadataResponse_TensorMetadata.%s: %s%s",
		key,
		e.field,
		e.reason,
		cause)
}

// Compile-time interface conformance checks.
var _ error = ModelMetadataResponse_TensorMetadataValidationError{}

var _ interface {
	Field() string
	Reason() string
	Key() bool
	Cause() error
	ErrorName() string
} = ModelMetadataResponse_TensorMetadataValidationError{}

// Validate checks the field values on ModelInferRequest_InferInputTensor with
// the rules defined in the proto definition for this message. If any rules
// are violated, the first error encountered is returned, or nil if there are
// no violations.
func (m *ModelInferRequest_InferInputTensor) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on ModelInferRequest_InferInputTensor
// with the rules defined in the proto definition for this message. If any
// rules are violated, the result is a list of violation errors wrapped in
// ModelInferRequest_InferInputTensorMultiError, or nil if none found.
func (m *ModelInferRequest_InferInputTensor) ValidateAll() error {
	return m.validate(true)
}

// validate runs all validation rules for the message, recursing into the
// Parameters map values and the Contents submessage. When all is true every
// violation is collected; otherwise the first violation is returned
// immediately (fail-fast).
func (m *ModelInferRequest_InferInputTensor) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Name

	// no validation rules for Datatype

	{
		// Sort keys so violations are reported in a deterministic order.
		sorted_keys := make([]string, len(m.GetParameters()))
		i := 0
		for key := range m.GetParameters() {
			sorted_keys[i] = key
			i++
		}
		sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] })
		for _, key := range sorted_keys {
			val := m.GetParameters()[key]
			_ = val

			// no validation rules for Parameters[key]

			if all {
				switch v := interface{}(val).(type) {
				case interface{ ValidateAll() error }:
					if err := v.ValidateAll(); err != nil {
						errors = append(errors, ModelInferRequest_InferInputTensorValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				case interface{ Validate() error }:
					if err := v.Validate(); err != nil {
						errors = append(errors, ModelInferRequest_InferInputTensorValidationError{
							field:  fmt.Sprintf("Parameters[%v]", key),
							reason: "embedded message failed validation",
							cause:  err,
						})
					}
				}
			} else if v, ok := interface{}(val).(interface{ Validate() error }); ok {
				if err := v.Validate(); err != nil {
					return ModelInferRequest_InferInputTensorValidationError{
						field:  fmt.Sprintf("Parameters[%v]", key),
						reason: "embedded message failed validation",
						cause:  err,
					}
				}
			}

		}
	}

	if all {
		switch v := interface{}(m.GetContents()).(type) {
		case interface{ ValidateAll() error }:
			if err := v.ValidateAll(); err != nil {
				errors = append(errors, ModelInferRequest_InferInputTensorValidationError{
					field:  "Contents",
					reason: "embedded message failed validation",
					cause:  err,
				})
			}
		case interface{ Validate() error }:
			if err := v.Validate(); err != nil {
				errors = append(errors, ModelInferRequest_InferInputTensorValidationError{
					field:  "Contents",
					reason: "embedded message failed validation",
					cause:  err,
				})
			}
		}
	} else if v, ok := interface{}(m.GetContents()).(interface{ Validate() error }); ok {
		if err := v.Validate(); err != nil {
			return ModelInferRequest_InferInputTensorValidationError{
				field:  "Contents",
				reason: "embedded message failed validation",
				cause:  err,
			}
		}
	}

	if len(errors) > 0 {
		return ModelInferRequest_InferInputTensorMultiError(errors)
	}

	return nil
}

// ModelInferRequest_InferInputTensorMultiError is an error wrapping multiple
// validation errors returned by
// ModelInferRequest_InferInputTensor.ValidateAll() if the designated
// constraints aren't met.
type ModelInferRequest_InferInputTensorMultiError []error

// Error returns a concatenation of all the error messages it wraps.
func (m ModelInferRequest_InferInputTensorMultiError) Error() string {
	var msgs []string
	for _, err := range m {
		msgs = append(msgs, err.Error())
	}
	return strings.Join(msgs, "; ")
}

// AllErrors returns a list of validation violation errors.
func (m ModelInferRequest_InferInputTensorMultiError) AllErrors() []error { return m }

// ModelInferRequest_InferInputTensorValidationError is the validation error
// returned by ModelInferRequest_InferInputTensor.Validate if the designated
// constraints aren't met.
type ModelInferRequest_InferInputTensorValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Field function returns field value.
func (e ModelInferRequest_InferInputTensorValidationError) Field() string { return e.field }

// Reason function returns reason value.
func (e ModelInferRequest_InferInputTensorValidationError) Reason() string { return e.reason }

// Cause function returns cause value.
func (e ModelInferRequest_InferInputTensorValidationError) Cause() error { return e.cause }

// Key function returns key value.
+func (e ModelInferRequest_InferInputTensorValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelInferRequest_InferInputTensorValidationError) ErrorName() string { + return "ModelInferRequest_InferInputTensorValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInferRequest_InferInputTensorValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInferRequest_InferInputTensor.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInferRequest_InferInputTensorValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInferRequest_InferInputTensorValidationError{} + +// Validate checks the field values on +// ModelInferRequest_InferRequestedOutputTensor with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelInferRequest_InferRequestedOutputTensor) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelInferRequest_InferRequestedOutputTensor with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// ModelInferRequest_InferRequestedOutputTensorMultiError, or nil if none found. 
+func (m *ModelInferRequest_InferRequestedOutputTensor) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelInferRequest_InferRequestedOutputTensor) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + { + sorted_keys := make([]string, len(m.GetParameters())) + i := 0 + for key := range m.GetParameters() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetParameters()[key] + _ = val + + // no validation rules for Parameters[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInferRequest_InferRequestedOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelInferRequest_InferRequestedOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInferRequest_InferRequestedOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return ModelInferRequest_InferRequestedOutputTensorMultiError(errors) + } + + return nil +} + +// ModelInferRequest_InferRequestedOutputTensorMultiError is an error wrapping +// multiple validation errors returned by +// ModelInferRequest_InferRequestedOutputTensor.ValidateAll() if the +// designated constraints aren't met. 
+type ModelInferRequest_InferRequestedOutputTensorMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelInferRequest_InferRequestedOutputTensorMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelInferRequest_InferRequestedOutputTensorMultiError) AllErrors() []error { return m } + +// ModelInferRequest_InferRequestedOutputTensorValidationError is the +// validation error returned by +// ModelInferRequest_InferRequestedOutputTensor.Validate if the designated +// constraints aren't met. +type ModelInferRequest_InferRequestedOutputTensorValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelInferRequest_InferRequestedOutputTensorValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelInferRequest_InferRequestedOutputTensorValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelInferRequest_InferRequestedOutputTensorValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelInferRequest_InferRequestedOutputTensorValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelInferRequest_InferRequestedOutputTensorValidationError) ErrorName() string { + return "ModelInferRequest_InferRequestedOutputTensorValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInferRequest_InferRequestedOutputTensorValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInferRequest_InferRequestedOutputTensor.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInferRequest_InferRequestedOutputTensorValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInferRequest_InferRequestedOutputTensorValidationError{} + +// Validate checks the field values on ModelInferResponse_InferOutputTensor +// with the rules defined in the proto definition for this message. If any +// rules are violated, the first error encountered is returned, or nil if +// there are no violations. +func (m *ModelInferResponse_InferOutputTensor) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelInferResponse_InferOutputTensor +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelInferResponse_InferOutputTensorMultiError, or nil if none found. 
+func (m *ModelInferResponse_InferOutputTensor) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelInferResponse_InferOutputTensor) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Datatype + + { + sorted_keys := make([]string, len(m.GetParameters())) + i := 0 + for key := range m.GetParameters() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetParameters()[key] + _ = val + + // no validation rules for Parameters[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInferResponse_InferOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelInferResponse_InferOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInferResponse_InferOutputTensorValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if all { + switch v := interface{}(m.GetContents()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInferResponse_InferOutputTensorValidationError{ + field: "Contents", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = 
append(errors, ModelInferResponse_InferOutputTensorValidationError{ + field: "Contents", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetContents()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInferResponse_InferOutputTensorValidationError{ + field: "Contents", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ModelInferResponse_InferOutputTensorMultiError(errors) + } + + return nil +} + +// ModelInferResponse_InferOutputTensorMultiError is an error wrapping multiple +// validation errors returned by +// ModelInferResponse_InferOutputTensor.ValidateAll() if the designated +// constraints aren't met. +type ModelInferResponse_InferOutputTensorMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelInferResponse_InferOutputTensorMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelInferResponse_InferOutputTensorMultiError) AllErrors() []error { return m } + +// ModelInferResponse_InferOutputTensorValidationError is the validation error +// returned by ModelInferResponse_InferOutputTensor.Validate if the designated +// constraints aren't met. +type ModelInferResponse_InferOutputTensorValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelInferResponse_InferOutputTensorValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelInferResponse_InferOutputTensorValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e ModelInferResponse_InferOutputTensorValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelInferResponse_InferOutputTensorValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelInferResponse_InferOutputTensorValidationError) ErrorName() string { + return "ModelInferResponse_InferOutputTensorValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInferResponse_InferOutputTensorValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInferResponse_InferOutputTensor.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInferResponse_InferOutputTensorValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInferResponse_InferOutputTensorValidationError{} + +// Validate checks the field values on RepositoryIndexResponse_ModelIndex with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *RepositoryIndexResponse_ModelIndex) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on RepositoryIndexResponse_ModelIndex +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// RepositoryIndexResponse_ModelIndexMultiError, or nil if none found. 
+func (m *RepositoryIndexResponse_ModelIndex) ValidateAll() error { + return m.validate(true) +} + +func (m *RepositoryIndexResponse_ModelIndex) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Version + + // no validation rules for State + + // no validation rules for Reason + + if len(errors) > 0 { + return RepositoryIndexResponse_ModelIndexMultiError(errors) + } + + return nil +} + +// RepositoryIndexResponse_ModelIndexMultiError is an error wrapping multiple +// validation errors returned by +// RepositoryIndexResponse_ModelIndex.ValidateAll() if the designated +// constraints aren't met. +type RepositoryIndexResponse_ModelIndexMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m RepositoryIndexResponse_ModelIndexMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m RepositoryIndexResponse_ModelIndexMultiError) AllErrors() []error { return m } + +// RepositoryIndexResponse_ModelIndexValidationError is the validation error +// returned by RepositoryIndexResponse_ModelIndex.Validate if the designated +// constraints aren't met. +type RepositoryIndexResponse_ModelIndexValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RepositoryIndexResponse_ModelIndexValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RepositoryIndexResponse_ModelIndexValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RepositoryIndexResponse_ModelIndexValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e RepositoryIndexResponse_ModelIndexValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e RepositoryIndexResponse_ModelIndexValidationError) ErrorName() string { + return "RepositoryIndexResponse_ModelIndexValidationError" +} + +// Error satisfies the builtin error interface +func (e RepositoryIndexResponse_ModelIndexValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRepositoryIndexResponse_ModelIndex.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RepositoryIndexResponse_ModelIndexValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RepositoryIndexResponse_ModelIndexValidationError{} + +// Validate checks the field values on +// SystemSharedMemoryStatusResponse_RegionStatus with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *SystemSharedMemoryStatusResponse_RegionStatus) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// SystemSharedMemoryStatusResponse_RegionStatus with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// SystemSharedMemoryStatusResponse_RegionStatusMultiError, or nil if none found. 
+func (m *SystemSharedMemoryStatusResponse_RegionStatus) ValidateAll() error { + return m.validate(true) +} + +func (m *SystemSharedMemoryStatusResponse_RegionStatus) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Key + + // no validation rules for Offset + + // no validation rules for ByteSize + + if len(errors) > 0 { + return SystemSharedMemoryStatusResponse_RegionStatusMultiError(errors) + } + + return nil +} + +// SystemSharedMemoryStatusResponse_RegionStatusMultiError is an error wrapping +// multiple validation errors returned by +// SystemSharedMemoryStatusResponse_RegionStatus.ValidateAll() if the +// designated constraints aren't met. +type SystemSharedMemoryStatusResponse_RegionStatusMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m SystemSharedMemoryStatusResponse_RegionStatusMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m SystemSharedMemoryStatusResponse_RegionStatusMultiError) AllErrors() []error { return m } + +// SystemSharedMemoryStatusResponse_RegionStatusValidationError is the +// validation error returned by +// SystemSharedMemoryStatusResponse_RegionStatus.Validate if the designated +// constraints aren't met. +type SystemSharedMemoryStatusResponse_RegionStatusValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) Reason() string { + return e.reason +} + +// Cause function returns cause value. 
+func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) ErrorName() string { + return "SystemSharedMemoryStatusResponse_RegionStatusValidationError" +} + +// Error satisfies the builtin error interface +func (e SystemSharedMemoryStatusResponse_RegionStatusValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSystemSharedMemoryStatusResponse_RegionStatus.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SystemSharedMemoryStatusResponse_RegionStatusValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SystemSharedMemoryStatusResponse_RegionStatusValidationError{} + +// Validate checks the field values on +// CudaSharedMemoryStatusResponse_RegionStatus with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *CudaSharedMemoryStatusResponse_RegionStatus) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// CudaSharedMemoryStatusResponse_RegionStatus with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// CudaSharedMemoryStatusResponse_RegionStatusMultiError, or nil if none found. 
+func (m *CudaSharedMemoryStatusResponse_RegionStatus) ValidateAll() error { + return m.validate(true) +} + +func (m *CudaSharedMemoryStatusResponse_RegionStatus) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for DeviceId + + // no validation rules for ByteSize + + if len(errors) > 0 { + return CudaSharedMemoryStatusResponse_RegionStatusMultiError(errors) + } + + return nil +} + +// CudaSharedMemoryStatusResponse_RegionStatusMultiError is an error wrapping +// multiple validation errors returned by +// CudaSharedMemoryStatusResponse_RegionStatus.ValidateAll() if the designated +// constraints aren't met. +type CudaSharedMemoryStatusResponse_RegionStatusMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m CudaSharedMemoryStatusResponse_RegionStatusMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m CudaSharedMemoryStatusResponse_RegionStatusMultiError) AllErrors() []error { return m } + +// CudaSharedMemoryStatusResponse_RegionStatusValidationError is the validation +// error returned by CudaSharedMemoryStatusResponse_RegionStatus.Validate if +// the designated constraints aren't met. +type CudaSharedMemoryStatusResponse_RegionStatusValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) ErrorName() string { + return "CudaSharedMemoryStatusResponse_RegionStatusValidationError" +} + +// Error satisfies the builtin error interface +func (e CudaSharedMemoryStatusResponse_RegionStatusValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sCudaSharedMemoryStatusResponse_RegionStatus.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = CudaSharedMemoryStatusResponse_RegionStatusValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = CudaSharedMemoryStatusResponse_RegionStatusValidationError{} + +// Validate checks the field values on TraceSettingRequest_SettingValue with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *TraceSettingRequest_SettingValue) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on TraceSettingRequest_SettingValue with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// TraceSettingRequest_SettingValueMultiError, or nil if none found. 
+func (m *TraceSettingRequest_SettingValue) ValidateAll() error { + return m.validate(true) +} + +func (m *TraceSettingRequest_SettingValue) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return TraceSettingRequest_SettingValueMultiError(errors) + } + + return nil +} + +// TraceSettingRequest_SettingValueMultiError is an error wrapping multiple +// validation errors returned by +// TraceSettingRequest_SettingValue.ValidateAll() if the designated +// constraints aren't met. +type TraceSettingRequest_SettingValueMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m TraceSettingRequest_SettingValueMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m TraceSettingRequest_SettingValueMultiError) AllErrors() []error { return m } + +// TraceSettingRequest_SettingValueValidationError is the validation error +// returned by TraceSettingRequest_SettingValue.Validate if the designated +// constraints aren't met. +type TraceSettingRequest_SettingValueValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e TraceSettingRequest_SettingValueValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e TraceSettingRequest_SettingValueValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e TraceSettingRequest_SettingValueValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e TraceSettingRequest_SettingValueValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e TraceSettingRequest_SettingValueValidationError) ErrorName() string { + return "TraceSettingRequest_SettingValueValidationError" +} + +// Error satisfies the builtin error interface +func (e TraceSettingRequest_SettingValueValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sTraceSettingRequest_SettingValue.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = TraceSettingRequest_SettingValueValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = TraceSettingRequest_SettingValueValidationError{} + +// Validate checks the field values on TraceSettingResponse_SettingValue with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *TraceSettingResponse_SettingValue) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on TraceSettingResponse_SettingValue +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// TraceSettingResponse_SettingValueMultiError, or nil if none found. +func (m *TraceSettingResponse_SettingValue) ValidateAll() error { + return m.validate(true) +} + +func (m *TraceSettingResponse_SettingValue) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return TraceSettingResponse_SettingValueMultiError(errors) + } + + return nil +} + +// TraceSettingResponse_SettingValueMultiError is an error wrapping multiple +// validation errors returned by +// TraceSettingResponse_SettingValue.ValidateAll() if the designated +// constraints aren't met. 
+type TraceSettingResponse_SettingValueMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m TraceSettingResponse_SettingValueMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m TraceSettingResponse_SettingValueMultiError) AllErrors() []error { return m } + +// TraceSettingResponse_SettingValueValidationError is the validation error +// returned by TraceSettingResponse_SettingValue.Validate if the designated +// constraints aren't met. +type TraceSettingResponse_SettingValueValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e TraceSettingResponse_SettingValueValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e TraceSettingResponse_SettingValueValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e TraceSettingResponse_SettingValueValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e TraceSettingResponse_SettingValueValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e TraceSettingResponse_SettingValueValidationError) ErrorName() string { + return "TraceSettingResponse_SettingValueValidationError" +} + +// Error satisfies the builtin error interface +func (e TraceSettingResponse_SettingValueValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sTraceSettingResponse_SettingValue.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = TraceSettingResponse_SettingValueValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = TraceSettingResponse_SettingValueValidationError{} + +// Validate checks the field values on LogSettingsRequest_SettingValue with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *LogSettingsRequest_SettingValue) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on LogSettingsRequest_SettingValue with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// LogSettingsRequest_SettingValueMultiError, or nil if none found. 
+func (m *LogSettingsRequest_SettingValue) ValidateAll() error {
+	return m.validate(true)
+}
+
+func (m *LogSettingsRequest_SettingValue) validate(all bool) error {
+	if m == nil {
+		return nil
+	}
+
+	var errors []error
+
+	// A oneof wrapper may be present yet hold a typed-nil pointer; each case
+	// rejects that explicitly before any per-field rules would apply.
+	switch v := m.ParameterChoice.(type) {
+	case *LogSettingsRequest_SettingValue_BoolParam:
+		if v == nil {
+			err := LogSettingsRequest_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for BoolParam
+	case *LogSettingsRequest_SettingValue_Uint32Param:
+		if v == nil {
+			err := LogSettingsRequest_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for Uint32Param
+	case *LogSettingsRequest_SettingValue_StringParam:
+		if v == nil {
+			err := LogSettingsRequest_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for StringParam
+	default:
+		_ = v // ensures v is used
+	}
+
+	if len(errors) > 0 {
+		return LogSettingsRequest_SettingValueMultiError(errors)
+	}
+
+	return nil
+}
+
+// LogSettingsRequest_SettingValueMultiError is an error wrapping multiple
+// validation errors returned by LogSettingsRequest_SettingValue.ValidateAll()
+// if the designated constraints aren't met.
+type LogSettingsRequest_SettingValueMultiError []error
+
+// Error returns a concatenation of all the error messages it wraps.
+func (m LogSettingsRequest_SettingValueMultiError) Error() string {
+	var msgs []string
+	for _, err := range m {
+		msgs = append(msgs, err.Error())
+	}
+	return strings.Join(msgs, "; ")
+}
+
+// AllErrors returns a list of validation violation errors.
+func (m LogSettingsRequest_SettingValueMultiError) AllErrors() []error { return m }
+
+// LogSettingsRequest_SettingValueValidationError is the validation error
+// returned by LogSettingsRequest_SettingValue.Validate if the designated
+// constraints aren't met.
+type LogSettingsRequest_SettingValueValidationError struct {
+	field  string
+	reason string
+	cause  error
+	key    bool
+}
+
+// Field function returns field value.
+func (e LogSettingsRequest_SettingValueValidationError) Field() string { return e.field }
+
+// Reason function returns reason value.
+func (e LogSettingsRequest_SettingValueValidationError) Reason() string { return e.reason }
+
+// Cause function returns cause value.
+func (e LogSettingsRequest_SettingValueValidationError) Cause() error { return e.cause }
+
+// Key function returns key value.
+func (e LogSettingsRequest_SettingValueValidationError) Key() bool { return e.key }
+
+// ErrorName returns error name.
+func (e LogSettingsRequest_SettingValueValidationError) ErrorName() string {
+	return "LogSettingsRequest_SettingValueValidationError"
+}
+
+// Error satisfies the builtin error interface
+func (e LogSettingsRequest_SettingValueValidationError) Error() string {
+	cause := ""
+	if e.cause != nil {
+		cause = fmt.Sprintf(" | caused by: %v", e.cause)
+	}
+
+	key := ""
+	if e.key {
+		key = "key for "
+	}
+
+	return fmt.Sprintf(
+		"invalid %sLogSettingsRequest_SettingValue.%s: %s%s",
+		key,
+		e.field,
+		e.reason,
+		cause)
+}
+
+var _ error = LogSettingsRequest_SettingValueValidationError{}
+
+var _ interface {
+	Field() string
+	Reason() string
+	Key() bool
+	Cause() error
+	ErrorName() string
+} = LogSettingsRequest_SettingValueValidationError{}
+
+// Validate checks the field values on LogSettingsResponse_SettingValue with
+// the rules defined in the proto definition for this message. If any rules
+// are violated, the first error encountered is returned, or nil if there are
+// no violations.
+func (m *LogSettingsResponse_SettingValue) Validate() error {
+	return m.validate(false)
+}
+
+// ValidateAll checks the field values on LogSettingsResponse_SettingValue with
+// the rules defined in the proto definition for this message. If any rules
+// are violated, the result is a list of violation errors wrapped in
+// LogSettingsResponse_SettingValueMultiError, or nil if none found.
+func (m *LogSettingsResponse_SettingValue) ValidateAll() error {
+	return m.validate(true)
+}
+
+func (m *LogSettingsResponse_SettingValue) validate(all bool) error {
+	if m == nil {
+		return nil
+	}
+
+	var errors []error
+
+	// A oneof wrapper may be present yet hold a typed-nil pointer; each case
+	// rejects that explicitly before any per-field rules would apply.
+	switch v := m.ParameterChoice.(type) {
+	case *LogSettingsResponse_SettingValue_BoolParam:
+		if v == nil {
+			err := LogSettingsResponse_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for BoolParam
+	case *LogSettingsResponse_SettingValue_Uint32Param:
+		if v == nil {
+			err := LogSettingsResponse_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for Uint32Param
+	case *LogSettingsResponse_SettingValue_StringParam:
+		if v == nil {
+			err := LogSettingsResponse_SettingValueValidationError{
+				field:  "ParameterChoice",
+				reason: "oneof value cannot be a typed-nil",
+			}
+			if !all {
+				return err
+			}
+			errors = append(errors, err)
+		}
+		// no validation rules for StringParam
+	default:
+		_ = v // ensures v is used
+	}
+
+	if len(errors) > 0 {
+		return LogSettingsResponse_SettingValueMultiError(errors)
+	}
+
+	return nil
+}
+
+// LogSettingsResponse_SettingValueMultiError is an error wrapping multiple
+// validation errors returned by
+// LogSettingsResponse_SettingValue.ValidateAll() if the designated
+// constraints aren't met.
+type LogSettingsResponse_SettingValueMultiError []error
+
+// Error returns a concatenation of all the error messages it wraps.
+func (m LogSettingsResponse_SettingValueMultiError) Error() string {
+	var msgs []string
+	for _, err := range m {
+		msgs = append(msgs, err.Error())
+	}
+	return strings.Join(msgs, "; ")
+}
+
+// AllErrors returns a list of validation violation errors.
+func (m LogSettingsResponse_SettingValueMultiError) AllErrors() []error { return m }
+
+// LogSettingsResponse_SettingValueValidationError is the validation error
+// returned by LogSettingsResponse_SettingValue.Validate if the designated
+// constraints aren't met.
+type LogSettingsResponse_SettingValueValidationError struct {
+	field  string
+	reason string
+	cause  error
+	key    bool
+}
+
+// Field function returns field value.
+func (e LogSettingsResponse_SettingValueValidationError) Field() string { return e.field }
+
+// Reason function returns reason value.
+func (e LogSettingsResponse_SettingValueValidationError) Reason() string { return e.reason }
+
+// Cause function returns cause value.
+func (e LogSettingsResponse_SettingValueValidationError) Cause() error { return e.cause }
+
+// Key function returns key value.
+func (e LogSettingsResponse_SettingValueValidationError) Key() bool { return e.key }
+
+// ErrorName returns error name.
+func (e LogSettingsResponse_SettingValueValidationError) ErrorName() string { + return "LogSettingsResponse_SettingValueValidationError" +} + +// Error satisfies the builtin error interface +func (e LogSettingsResponse_SettingValueValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sLogSettingsResponse_SettingValue.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = LogSettingsResponse_SettingValueValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = LogSettingsResponse_SettingValueValidationError{} diff --git a/pkg/apis/inference/v1/grpc_service.proto b/pkg/apis/inference/v1/grpc_service.proto new file mode 100644 index 0000000..9033fb7 --- /dev/null +++ b/pkg/apis/inference/v1/grpc_service.proto @@ -0,0 +1,1759 @@ +// Copyright 2020-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of NVIDIA CORPORATION nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package inference.v1; + +//@@.. cpp:namespace:: inference + +import "pkg/apis/inference/v1/model_config.proto"; + +option go_package = "d7y.io/api/pkg/apis/inference/v1;inference"; + +//@@ +//@@.. cpp:var:: service InferenceService +//@@ +//@@ Inference Server GRPC endpoints. +//@@ +service GRPCInferenceService +{ + //@@ .. cpp:var:: rpc ServerLive(ServerLiveRequest) returns + //@@ (ServerLiveResponse) + //@@ + //@@ Check liveness of the inference server. + //@@ + rpc ServerLive(ServerLiveRequest) returns (ServerLiveResponse) {} + + //@@ .. cpp:var:: rpc ServerReady(ServerReadyRequest) returns + //@@ (ServerReadyResponse) + //@@ + //@@ Check readiness of the inference server. + //@@ + rpc ServerReady(ServerReadyRequest) returns (ServerReadyResponse) {} + + //@@ .. cpp:var:: rpc ModelReady(ModelReadyRequest) returns + //@@ (ModelReadyResponse) + //@@ + //@@ Check readiness of a model in the inference server. + //@@ + rpc ModelReady(ModelReadyRequest) returns (ModelReadyResponse) {} + + //@@ .. cpp:var:: rpc ServerMetadata(ServerMetadataRequest) returns + //@@ (ServerMetadataResponse) + //@@ + //@@ Get server metadata. 
+ //@@ + rpc ServerMetadata(ServerMetadataRequest) returns (ServerMetadataResponse) {} + + //@@ .. cpp:var:: rpc ModelMetadata(ModelMetadataRequest) returns + //@@ (ModelMetadataResponse) + //@@ + //@@ Get model metadata. + //@@ + rpc ModelMetadata(ModelMetadataRequest) returns (ModelMetadataResponse) {} + + //@@ .. cpp:var:: rpc ModelInfer(ModelInferRequest) returns + //@@ (ModelInferResponse) + //@@ + //@@ Perform inference using a specific model. + //@@ + rpc ModelInfer(ModelInferRequest) returns (ModelInferResponse) {} + + //@@ .. cpp:var:: rpc ModelStreamInfer(stream ModelInferRequest) returns + //@@ (stream ModelStreamInferResponse) + //@@ + //@@ Perform streaming inference. + //@@ + rpc ModelStreamInfer(stream ModelInferRequest) + returns (stream ModelStreamInferResponse) + { + } + + //@@ .. cpp:var:: rpc ModelConfig(ModelConfigRequest) returns + //@@ (ModelConfigResponse) + //@@ + //@@ Get model configuration. + //@@ + rpc ModelConfig(ModelConfigRequest) returns (ModelConfigResponse) {} + + //@@ .. cpp:var:: rpc ModelStatistics( + //@@ ModelStatisticsRequest) + //@@ returns (ModelStatisticsResponse) + //@@ + //@@ Get the cumulative inference statistics for a model. + //@@ + rpc ModelStatistics(ModelStatisticsRequest) returns (ModelStatisticsResponse) + { + } + + //@@ .. cpp:var:: rpc RepositoryIndex(RepositoryIndexRequest) returns + //@@ (RepositoryIndexResponse) + //@@ + //@@ Get the index of model repository contents. + //@@ + rpc RepositoryIndex(RepositoryIndexRequest) returns (RepositoryIndexResponse) + { + } + + //@@ .. cpp:var:: rpc RepositoryModelLoad(RepositoryModelLoadRequest) returns + //@@ (RepositoryModelLoadResponse) + //@@ + //@@ Load or reload a model from a repository. + //@@ + rpc RepositoryModelLoad(RepositoryModelLoadRequest) + returns (RepositoryModelLoadResponse) + { + } + + //@@ .. cpp:var:: rpc RepositoryModelUnload(RepositoryModelUnloadRequest) + //@@ returns (RepositoryModelUnloadResponse) + //@@ + //@@ Unload a model. 
+  //@@
+  rpc RepositoryModelUnload(RepositoryModelUnloadRequest)
+      returns (RepositoryModelUnloadResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc SystemSharedMemoryStatus(
+  //@@       SystemSharedMemoryStatusRequest)
+  //@@     returns (SystemSharedMemoryStatusResponse)
+  //@@
+  //@@     Get the status of all registered system-shared-memory regions.
+  //@@
+  rpc SystemSharedMemoryStatus(SystemSharedMemoryStatusRequest)
+      returns (SystemSharedMemoryStatusResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc SystemSharedMemoryRegister(
+  //@@       SystemSharedMemoryRegisterRequest)
+  //@@     returns (SystemSharedMemoryRegisterResponse)
+  //@@
+  //@@     Register a system-shared-memory region.
+  //@@
+  rpc SystemSharedMemoryRegister(SystemSharedMemoryRegisterRequest)
+      returns (SystemSharedMemoryRegisterResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc SystemSharedMemoryUnregister(
+  //@@       SystemSharedMemoryUnregisterRequest)
+  //@@     returns (SystemSharedMemoryUnregisterResponse)
+  //@@
+  //@@     Unregister a system-shared-memory region.
+  //@@
+  rpc SystemSharedMemoryUnregister(SystemSharedMemoryUnregisterRequest)
+      returns (SystemSharedMemoryUnregisterResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc CudaSharedMemoryStatus(
+  //@@       CudaSharedMemoryStatusRequest)
+  //@@     returns (CudaSharedMemoryStatusResponse)
+  //@@
+  //@@     Get the status of all registered CUDA-shared-memory regions.
+  //@@
+  rpc CudaSharedMemoryStatus(CudaSharedMemoryStatusRequest)
+      returns (CudaSharedMemoryStatusResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc CudaSharedMemoryRegister(
+  //@@       CudaSharedMemoryRegisterRequest)
+  //@@     returns (CudaSharedMemoryRegisterResponse)
+  //@@
+  //@@     Register a CUDA-shared-memory region.
+  //@@
+  rpc CudaSharedMemoryRegister(CudaSharedMemoryRegisterRequest)
+      returns (CudaSharedMemoryRegisterResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc CudaSharedMemoryUnregister(
+  //@@       CudaSharedMemoryUnregisterRequest)
+  //@@     returns (CudaSharedMemoryUnregisterResponse)
+  //@@
+  //@@     Unregister a CUDA-shared-memory region.
+  //@@
+  rpc CudaSharedMemoryUnregister(CudaSharedMemoryUnregisterRequest)
+      returns (CudaSharedMemoryUnregisterResponse)
+  {
+  }
+
+  //@@  .. cpp:var:: rpc TraceSetting(TraceSettingRequest)
+  //@@     returns (TraceSettingResponse)
+  //@@
+  //@@     Update and get the trace setting of the Triton server.
+  //@@
+  rpc TraceSetting(TraceSettingRequest) returns (TraceSettingResponse) {}
+
+  //@@  .. cpp:var:: rpc LogSettings(LogSettingsRequest)
+  //@@     returns (LogSettingsResponse)
+  //@@
+  //@@     Update and get the log settings of the Triton server.
+  //@@
+  rpc LogSettings(LogSettingsRequest) returns (LogSettingsResponse) {}
+}
+
+//@@
+//@@.. cpp:var:: message ServerLiveRequest
+//@@
+//@@   Request message for ServerLive.
+//@@
+message ServerLiveRequest {}
+
+//@@
+//@@.. cpp:var:: message ServerLiveResponse
+//@@
+//@@   Response message for ServerLive.
+//@@
+message ServerLiveResponse
+{
+  //@@
+  //@@  .. cpp:var:: bool live
+  //@@
+  //@@     True if the inference server is live, false if not live.
+  //@@
+  bool live = 1;
+}
+
+//@@
+//@@.. cpp:var:: message ServerReadyRequest
+//@@
+//@@   Request message for ServerReady.
+//@@
+message ServerReadyRequest {}
+
+//@@
+//@@.. cpp:var:: message ServerReadyResponse
+//@@
+//@@   Response message for ServerReady.
+//@@
+message ServerReadyResponse
+{
+  //@@
+  //@@  .. cpp:var:: bool ready
+  //@@
+  //@@     True if the inference server is ready, false if not ready.
+  //@@
+  bool ready = 1;
+}
+
+//@@
+//@@.. cpp:var:: message ModelReadyRequest
+//@@
+//@@   Request message for ModelReady.
+//@@
+message ModelReadyRequest
+{
+  //@@
+  //@@  .. cpp:var:: string name
+  //@@
+  //@@     The name of the model to check for readiness.
+  //@@
+  string name = 1;
+
+  //@@  .. cpp:var:: string version
+  //@@
+  //@@     The version of the model to check for readiness. If not given the
+  //@@     server will choose a version based on the model and internal policy.
+  //@@
+  string version = 2;
+}
+
+//@@
+//@@.. cpp:var:: message ModelReadyResponse
+//@@
+//@@   Response message for ModelReady.
+//@@
+message ModelReadyResponse
+{
+  //@@
+  //@@  .. cpp:var:: bool ready
+  //@@
+  //@@     True if the model is ready, false if not ready.
+  //@@
+  bool ready = 1;
+}
+
+//@@
+//@@.. cpp:var:: message ServerMetadataRequest
+//@@
+//@@   Request message for ServerMetadata.
+//@@
+message ServerMetadataRequest {}
+
+//@@
+//@@.. cpp:var:: message ServerMetadataResponse
+//@@
+//@@   Response message for ServerMetadata.
+//@@
+message ServerMetadataResponse
+{
+  //@@
+  //@@  .. cpp:var:: string name
+  //@@
+  //@@     The server name.
+  //@@
+  string name = 1;
+
+  //@@
+  //@@  .. cpp:var:: string version
+  //@@
+  //@@     The server version.
+  //@@
+  string version = 2;
+
+  //@@
+  //@@  .. cpp:var:: string extensions (repeated)
+  //@@
+  //@@     The extensions supported by the server.
+  //@@
+  repeated string extensions = 3;
+}
+
+//@@
+//@@.. cpp:var:: message ModelMetadataRequest
+//@@
+//@@   Request message for ModelMetadata.
+//@@
+message ModelMetadataRequest
+{
+  //@@
+  //@@  .. cpp:var:: string name
+  //@@
+  //@@     The name of the model.
+  //@@
+  string name = 1;
+
+  //@@  .. cpp:var:: string version
+  //@@
+  //@@     The version of the model to check for readiness. If not
+  //@@     given the server will choose a version based on the
+  //@@     model and internal policy.
+  //@@
+  string version = 2;
+}
+
+//@@
+//@@.. cpp:var:: message ModelMetadataResponse
+//@@
+//@@   Response message for ModelMetadata.
+//@@
+message ModelMetadataResponse
+{
+  //@@
+  //@@  .. cpp:var:: message TensorMetadata
+  //@@
+  //@@     Metadata for a tensor.
+  //@@
+  message TensorMetadata
+  {
+    //@@
+    //@@    .. cpp:var:: string name
+    //@@
+    //@@       The tensor name.
+    //@@
+    string name = 1;
+
+    //@@
+    //@@    .. cpp:var:: string datatype
+    //@@
+    //@@       The tensor data type.
+    //@@
+    string datatype = 2;
+
+    //@@
+    //@@    .. cpp:var:: int64 shape (repeated)
+    //@@
+    //@@       The tensor shape. A variable-size dimension is represented
+    //@@       by a -1 value.
+ //@@ + repeated int64 shape = 3; + } + + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The model name. + //@@ + string name = 1; + + //@@ + //@@ .. cpp:var:: string versions (repeated) + //@@ + //@@ The versions of the model. + //@@ + repeated string versions = 2; + + //@@ + //@@ .. cpp:var:: string platform + //@@ + //@@ The model's platform. + //@@ + string platform = 3; + + //@@ + //@@ .. cpp:var:: TensorMetadata inputs (repeated) + //@@ + //@@ The model's inputs. + //@@ + repeated TensorMetadata inputs = 4; + + //@@ + //@@ .. cpp:var:: TensorMetadata outputs (repeated) + //@@ + //@@ The model's outputs. + //@@ + repeated TensorMetadata outputs = 5; +} + +//@@ +//@@.. cpp:var:: message InferParameter +//@@ +//@@ An inference parameter value. +//@@ +message InferParameter +{ + //@@ .. cpp:var:: oneof parameter_choice + //@@ + //@@ The parameter value can be a string, an int64, + //@@ an uint64, a double, or a boolean + //@@ + //@@ Note: double and uint64 are currently + //@@ placeholders for future use and + //@@ are not supported for custom parameters + //@@ + oneof parameter_choice + { + //@@ .. cpp:var:: bool bool_param + //@@ + //@@ A boolean parameter value. + //@@ + bool bool_param = 1; + + //@@ .. cpp:var:: int64 int64_param + //@@ + //@@ An int64 parameter value. + //@@ + int64 int64_param = 2; + + //@@ .. cpp:var:: string string_param + //@@ + //@@ A string parameter value. + //@@ + string string_param = 3; + + //@@ .. cpp:var:: double double_param + //@@ + //@@ A double parameter value. + //@@ + //@@ Not supported for custom parameters + //@@ + double double_param = 4; + + //@@ .. cpp:var:: uint64 uint64_param + //@@ + //@@ A uint64 parameter value. + //@@ + //@@ Not supported for custom parameters + //@@ + uint64 uint64_param = 5; + } +} + +//@@ +//@@.. cpp:var:: message InferTensorContents +//@@ +//@@ The data contained in a tensor represented by the repeated type +//@@ that matches the tensor's data type. 
Protobuf oneof is not used +//@@ because oneofs cannot contain repeated fields. +//@@ +message InferTensorContents +{ + //@@ + //@@ .. cpp:var:: bool bool_contents (repeated) + //@@ + //@@ Representation for BOOL data type. The size must match what is + //@@ expected by the tensor's shape. The contents must be the flattened, + //@@ one-dimensional, row-major order of the tensor elements. + //@@ + repeated bool bool_contents = 1; + + //@@ + //@@ .. cpp:var:: int32 int_contents (repeated) + //@@ + //@@ Representation for INT8, INT16, and INT32 data types. The size + //@@ must match what is expected by the tensor's shape. The contents + //@@ must be the flattened, one-dimensional, row-major order of the + //@@ tensor elements. + //@@ + repeated int32 int_contents = 2; + + //@@ + //@@ .. cpp:var:: int64 int64_contents (repeated) + //@@ + //@@ Representation for INT64 data types. The size must match what + //@@ is expected by the tensor's shape. The contents must be the + //@@ flattened, one-dimensional, row-major order of the tensor elements. + //@@ + repeated int64 int64_contents = 3; + + //@@ + //@@ .. cpp:var:: uint32 uint_contents (repeated) + //@@ + //@@ Representation for UINT8, UINT16, and UINT32 data types. The size + //@@ must match what is expected by the tensor's shape. The contents + //@@ must be the flattened, one-dimensional, row-major order of the + //@@ tensor elements. + //@@ + repeated uint32 uint_contents = 4; + + //@@ + //@@ .. cpp:var:: uint64 uint64_contents (repeated) + //@@ + //@@ Representation for UINT64 data types. The size must match what + //@@ is expected by the tensor's shape. The contents must be the + //@@ flattened, one-dimensional, row-major order of the tensor elements. + //@@ + repeated uint64 uint64_contents = 5; + + //@@ + //@@ .. cpp:var:: float fp32_contents (repeated) + //@@ + //@@ Representation for FP32 data type. The size must match what is + //@@ expected by the tensor's shape. 
The contents must be the flattened, + //@@ one-dimensional, row-major order of the tensor elements. + //@@ + repeated float fp32_contents = 6; + + //@@ + //@@ .. cpp:var:: double fp64_contents (repeated) + //@@ + //@@ Representation for FP64 data type. The size must match what is + //@@ expected by the tensor's shape. The contents must be the flattened, + //@@ one-dimensional, row-major order of the tensor elements. + //@@ + repeated double fp64_contents = 7; + + //@@ + //@@ .. cpp:var:: bytes bytes_contents (repeated) + //@@ + //@@ Representation for BYTES data type. The size must match what is + //@@ expected by the tensor's shape. The contents must be the flattened, + //@@ one-dimensional, row-major order of the tensor elements. + //@@ + repeated bytes bytes_contents = 8; +} + +//@@ +//@@.. cpp:var:: message ModelInferRequest +//@@ +//@@ Request message for ModelInfer. +//@@ +message ModelInferRequest +{ + //@@ + //@@ .. cpp:var:: message InferInputTensor + //@@ + //@@ An input tensor for an inference request. + //@@ + message InferInputTensor + { + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The tensor name. + //@@ + string name = 1; + + //@@ + //@@ .. cpp:var:: string datatype + //@@ + //@@ The tensor data type. + //@@ + string datatype = 2; + + //@@ + //@@ .. cpp:var:: int64 shape (repeated) + //@@ + //@@ The tensor shape. + //@@ + repeated int64 shape = 3; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional inference input tensor parameters. + //@@ + map parameters = 4; + + //@@ .. cpp:var:: InferTensorContents contents + //@@ + //@@ The tensor contents using a data-type format. This field + //@@ must not be specified if tensor contents are being specified + //@@ in ModelInferRequest.raw_input_contents. + //@@ + InferTensorContents contents = 5; + } + + //@@ + //@@ .. cpp:var:: message InferRequestedOutputTensor + //@@ + //@@ An output tensor requested for an inference request. + //@@ + message InferRequestedOutputTensor + { + //@@ + //@@ .. 
cpp:var:: string name + //@@ + //@@ The tensor name. + //@@ + string name = 1; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional requested output tensor parameters. + //@@ + map parameters = 2; + } + + //@@ .. cpp:var:: string model_name + //@@ + //@@ The name of the model to use for inferencing. + //@@ + string model_name = 1; + + //@@ .. cpp:var:: string model_version + //@@ + //@@ The version of the model to use for inference. If not + //@@ given the latest/most-recent version of the model is used. + //@@ + string model_version = 2; + + //@@ .. cpp:var:: string id + //@@ + //@@ Optional identifier for the request. If specified will be + //@@ returned in the response. + //@@ + string id = 3; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional inference parameters. + //@@ + map parameters = 4; + + //@@ + //@@ .. cpp:var:: InferInputTensor inputs (repeated) + //@@ + //@@ The input tensors for the inference. + //@@ + repeated InferInputTensor inputs = 5; + + //@@ + //@@ .. cpp:var:: InferRequestedOutputTensor outputs (repeated) + //@@ + //@@ The requested output tensors for the inference. Optional, if not + //@@ specified all outputs specified in the model config will be + //@@ returned. + //@@ + repeated InferRequestedOutputTensor outputs = 6; + + //@@ + //@@ .. cpp:var:: bytes raw_input_contents + //@@ + //@@ The data contained in an input tensor can be represented in + //@@ "raw" bytes form or in the repeated type that matches the + //@@ tensor's data type. Using the "raw" bytes form will + //@@ typically allow higher performance due to the way protobuf + //@@ allocation and reuse interacts with GRPC. For example, see + //@@ https://github.com/grpc/grpc/issues/23231. + //@@ + //@@ To use the raw representation 'raw_input_contents' must be + //@@ initialized with data for each tensor in the same order as + //@@ 'inputs'. For each tensor, the size of this content must + //@@ match what is expected by the tensor's shape and data + //@@ type. 
The raw data must be the flattened, one-dimensional, + //@@ row-major order of the tensor elements without any stride + //@@ or padding between the elements. Note that the FP16 and BF16 data + //@@ types must be represented as raw content as there is no + //@@ specific data type for a 16-bit float type. + //@@ + //@@ If this field is specified then InferInputTensor::contents + //@@ must not be specified for any input tensor. + //@@ + repeated bytes raw_input_contents = 7; +} + +//@@ +//@@.. cpp:var:: message ModelInferResponse +//@@ +//@@ Response message for ModelInfer. +//@@ +message ModelInferResponse +{ + //@@ + //@@ .. cpp:var:: message InferOutputTensor + //@@ + //@@ An output tensor returned for an inference request. + //@@ + message InferOutputTensor + { + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The tensor name. + //@@ + string name = 1; + + //@@ + //@@ .. cpp:var:: string datatype + //@@ + //@@ The tensor data type. + //@@ + string datatype = 2; + + //@@ + //@@ .. cpp:var:: int64 shape (repeated) + //@@ + //@@ The tensor shape. + //@@ + repeated int64 shape = 3; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional output tensor parameters. + //@@ + map parameters = 4; + + //@@ .. cpp:var:: InferTensorContents contents + //@@ + //@@ The tensor contents using a data-type format. This field + //@@ must not be specified if tensor contents are being specified + //@@ in ModelInferResponse.raw_output_contents. + //@@ + InferTensorContents contents = 5; + } + + //@@ .. cpp:var:: string model_name + //@@ + //@@ The name of the model used for inference. + //@@ + string model_name = 1; + + //@@ .. cpp:var:: string model_version + //@@ + //@@ The version of the model used for inference. + //@@ + string model_version = 2; + + //@@ .. cpp:var:: string id + //@@ + //@@ The id of the inference request if one was specified. + //@@ + string id = 3; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional inference response parameters. 
+ //@@ + map parameters = 4; + + //@@ + //@@ .. cpp:var:: InferOutputTensor outputs (repeated) + //@@ + //@@ The output tensors holding inference results. + //@@ + repeated InferOutputTensor outputs = 5; + + //@@ + //@@ .. cpp:var:: bytes raw_output_contents + //@@ + //@@ The data contained in an output tensor can be represented in + //@@ "raw" bytes form or in the repeated type that matches the + //@@ tensor's data type. Using the "raw" bytes form will + //@@ typically allow higher performance due to the way protobuf + //@@ allocation and reuse interacts with GRPC. For example, see + //@@ https://github.com/grpc/grpc/issues/23231. + //@@ + //@@ To use the raw representation 'raw_output_contents' must be + //@@ initialized with data for each tensor in the same order as + //@@ 'outputs'. For each tensor, the size of this content must + //@@ match what is expected by the tensor's shape and data + //@@ type. The raw data must be the flattened, one-dimensional, + //@@ row-major order of the tensor elements without any stride + //@@ or padding between the elements. Note that the FP16 and BF16 data + //@@ types must be represented as raw content as there is no + //@@ specific data type for a 16-bit float type. + //@@ + //@@ If this field is specified then InferOutputTensor::contents + //@@ must not be specified for any output tensor. + //@@ + repeated bytes raw_output_contents = 6; +} + +//@@ +//@@.. cpp:var:: message ModelStreamInferResponse +//@@ +//@@ Response message for ModelStreamInfer. +//@@ +message ModelStreamInferResponse +{ + //@@ + //@@ .. cpp:var:: string error_message + //@@ + //@@ The message describing the error. The empty message + //@@ indicates the inference was successful without errors. + //@@ + string error_message = 1; + + //@@ + //@@ .. cpp:var:: ModelInferResponse infer_response + //@@ + //@@ Holds the results of the request. + //@@ + ModelInferResponse infer_response = 2; +} + +//@@ +//@@.. 
cpp:var:: message ModelConfigRequest +//@@ +//@@ Request message for ModelConfig. +//@@ +message ModelConfigRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the model. + //@@ + string name = 1; + + //@@ .. cpp:var:: string version + //@@ + //@@ The version of the model. If not given the model version + //@@ is selected automatically based on the version policy. + //@@ + string version = 2; +} + +//@@ +//@@.. cpp:var:: message ModelConfigResponse +//@@ +//@@ Response message for ModelConfig. +//@@ +message ModelConfigResponse +{ + //@@ + //@@ .. cpp:var:: ModelConfig config + //@@ + //@@ The model configuration. + //@@ + ModelConfig config = 1; +} + +//@@ +//@@.. cpp:var:: message ModelStatisticsRequest +//@@ +//@@ Request message for ModelStatistics. +//@@ +message ModelStatisticsRequest +{ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the model. If not given returns statistics for + //@@ all models. + //@@ + string name = 1; + + //@@ .. cpp:var:: string version + //@@ + //@@ The version of the model. If not given returns statistics for + //@@ all model versions. + //@@ + string version = 2; +} + + +//@@ +//@@.. cpp:var:: message StatisticDuration +//@@ +//@@ Statistic recording a cumulative duration metric. +//@@ +message StatisticDuration +{ + //@@ .. cpp:var:: uint64 count + //@@ + //@@ Cumulative number of times this metric occurred. + //@@ + uint64 count = 1; + + //@@ .. cpp:var:: uint64 total_time_ns + //@@ + //@@ Total collected duration of this metric in nanoseconds. + //@@ + uint64 ns = 2; +} + +//@@ +//@@.. cpp:var:: message InferStatistics +//@@ +//@@ Inference statistics. +//@@ +message InferStatistics +{ + //@@ .. cpp:var:: StatisticDuration success + //@@ + //@@ Cumulative count and duration for successful inference + //@@ request. The "success" count and cumulative duration includes + //@@ cache hits. + //@@ + StatisticDuration success = 1; + + //@@ .. 
cpp:var:: StatisticDuration fail + //@@ + //@@ Cumulative count and duration for failed inference + //@@ request. + //@@ + StatisticDuration fail = 2; + + //@@ .. cpp:var:: StatisticDuration queue + //@@ + //@@ The count and cumulative duration that inference requests wait in + //@@ scheduling or other queues. The "queue" count and cumulative + //@@ duration includes cache hits. + //@@ + StatisticDuration queue = 3; + + //@@ .. cpp:var:: StatisticDuration compute_input + //@@ + //@@ The count and cumulative duration to prepare input tensor data as + //@@ required by the model framework / backend. For example, this duration + //@@ should include the time to copy input tensor data to the GPU. + //@@ The "compute_input" count and cumulative duration do not account for + //@@ requests that were a cache hit. See the "cache_hit" field for more + //@@ info. + //@@ + StatisticDuration compute_input = 4; + + //@@ .. cpp:var:: StatisticDuration compute_infer + //@@ + //@@ The count and cumulative duration to execute the model. + //@@ The "compute_infer" count and cumulative duration do not account for + //@@ requests that were a cache hit. See the "cache_hit" field for more + //@@ info. + //@@ + StatisticDuration compute_infer = 5; + + //@@ .. cpp:var:: StatisticDuration compute_output + //@@ + //@@ The count and cumulative duration to extract output tensor data + //@@ produced by the model framework / backend. For example, this duration + //@@ should include the time to copy output tensor data from the GPU. + //@@ The "compute_output" count and cumulative duration do not account for + //@@ requests that were a cache hit. See the "cache_hit" field for more + //@@ info. + //@@ + StatisticDuration compute_output = 6; + + //@@ .. cpp:var:: StatisticDuration cache_hit + //@@ + //@@ The count of response cache hits and cumulative duration to lookup + //@@ and extract output tensor data from the Response Cache on a cache + //@@ hit. 
For example, this duration should include the time to copy + //@@ output tensor data from the Response Cache to the response object. + //@@ On cache hits, triton does not need to go to the model/backend + //@@ for the output tensor data, so the "compute_input", "compute_infer", + //@@ and "compute_output" fields are not updated. Assuming the response + //@@ cache is enabled for a given model, a cache hit occurs for a + //@@ request to that model when the request metadata (model name, + //@@ model version, model inputs) hashes to an existing entry in the + //@@ cache. On a cache miss, the request hash and response output tensor + //@@ data is added to the cache. See response cache docs for more info: + //@@ + //https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md + //@@ + StatisticDuration cache_hit = 7; + + //@@ .. cpp:var:: StatisticDuration cache_miss + //@@ + //@@ The count of response cache misses and cumulative duration to lookup + //@@ and insert output tensor data from the computed response to the + //cache. + //@@ For example, this duration should include the time to copy + //@@ output tensor data from the response object to the Response Cache. + //@@ Assuming the response cache is enabled for a given model, a cache + //@@ miss occurs for a request to that model when the request metadata + //@@ does NOT hash to an existing entry in the cache. See the response + //@@ cache docs for more info: + //@@ + //https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md + //@@ + StatisticDuration cache_miss = 8; +} + +//@@ +//@@.. cpp:var:: message InferBatchStatistics +//@@ +//@@ Inference batch statistics. +//@@ +message InferBatchStatistics +{ + //@@ .. cpp:var:: uint64 batch_size + //@@ + //@@ The size of the batch. + //@@ + uint64 batch_size = 1; + + //@@ .. 
cpp:var:: StatisticDuration compute_input + //@@ + //@@ The count and cumulative duration to prepare input tensor data as + //@@ required by the model framework / backend with the given batch size. + //@@ For example, this duration should include the time to copy input + //@@ tensor data to the GPU. + //@@ + StatisticDuration compute_input = 2; + + //@@ .. cpp:var:: StatisticDuration compute_infer + //@@ + //@@ The count and cumulative duration to execute the model with the given + //@@ batch size. + //@@ + StatisticDuration compute_infer = 3; + + //@@ .. cpp:var:: StatisticDuration compute_output + //@@ + //@@ The count and cumulative duration to extract output tensor data + //@@ produced by the model framework / backend with the given batch size. + //@@ For example, this duration should include the time to copy output + //@@ tensor data from the GPU. + //@@ + StatisticDuration compute_output = 4; +} + +//@@ +//@@.. cpp:var:: message MemoryUsage +//@@ +//@@ Memory usage. +//@@ +message MemoryUsage +{ + //@@ .. cpp:var:: string type + //@@ + //@@ The type of memory, the value can be "CPU", "CPU_PINNED", "GPU". + //@@ + string type = 1; + + //@@ .. cpp:var:: int64 id + //@@ + //@@ The id of the memory, typically used with "type" to identify + //@@ a device that hosts the memory. + //@@ + int64 id = 2; + + //@@ .. cpp:var:: uint64 byte_size + //@@ + //@@ The byte size of the memory. + //@@ + uint64 byte_size = 3; +} + +//@@ +//@@.. cpp:var:: message ModelStatistics +//@@ +//@@ Statistics for a specific model and version. +//@@ +message ModelStatistics +{ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the model. If not given returns statistics for all + //@@ + string name = 1; + + //@@ .. cpp:var:: string version + //@@ + //@@ The version of the model. + //@@ + string version = 2; + + //@@ .. cpp:var:: uint64 last_inference + //@@ + //@@ The timestamp of the last inference request made for this model, + //@@ as milliseconds since the epoch. 
+ //@@ + uint64 last_inference = 3; + + //@@ .. cpp:var:: uint64 last_inference + //@@ + //@@ The cumulative count of successful inference requests made for this + //@@ model. Each inference in a batched request is counted as an + //@@ individual inference. For example, if a client sends a single + //@@ inference request with batch size 64, "inference_count" will be + //@@ incremented by 64. Similarly, if a clients sends 64 individual + //@@ requests each with batch size 1, "inference_count" will be + //@@ incremented by 64. The "inference_count" value DOES NOT include + //@@ cache hits. + //@@ + uint64 inference_count = 4; + + //@@ .. cpp:var:: uint64 last_inference + //@@ + //@@ The cumulative count of the number of successful inference executions + //@@ performed for the model. When dynamic batching is enabled, a single + //@@ model execution can perform inferencing for more than one inference + //@@ request. For example, if a clients sends 64 individual requests each + //@@ with batch size 1 and the dynamic batcher batches them into a single + //@@ large batch for model execution then "execution_count" will be + //@@ incremented by 1. If, on the other hand, the dynamic batcher is not + //@@ enabled for that each of the 64 individual requests is executed + //@@ independently, then "execution_count" will be incremented by 64. + //@@ The "execution_count" value DOES NOT include cache hits. + //@@ + uint64 execution_count = 5; + + //@@ .. cpp:var:: InferStatistics inference_stats + //@@ + //@@ The aggregate statistics for the model/version. + //@@ + InferStatistics inference_stats = 6; + + //@@ .. cpp:var:: InferBatchStatistics batch_stats (repeated) + //@@ + //@@ The aggregate statistics for each different batch size that is + //@@ executed in the model. The batch statistics indicate how many actual + //@@ model executions were performed and show differences due to different + //@@ batch size (for example, larger batches typically take longer to + //@@ compute). 
+ //@@ + repeated InferBatchStatistics batch_stats = 7; + + //@@ .. cpp:var:: MemoryUsage memory_usage (repeated) + //@@ + //@@ The memory usage detected during model loading, which may be used to + //@@ estimate the memory to be released once the model is unloaded. Note + //@@ that the estimation is inferenced by the profiling tools and + //@@ framework's memory schema, therefore it is advised to perform + //@@ experiments to understand the scenario that the reported memory usage + //@@ can be relied on. As a starting point, the GPU memory usage for + //@@ models in ONNX Runtime backend and TensorRT backend is usually + //@@ aligned. + //@@ + repeated MemoryUsage memory_usage = 8; +} + +//@@ +//@@.. cpp:var:: message ModelStatisticsResponse +//@@ +//@@ Response message for ModelStatistics. +//@@ +message ModelStatisticsResponse +{ + //@@ .. cpp:var:: ModelStatistics model_stats (repeated) + //@@ + //@@ Statistics for each requested model. + //@@ + repeated ModelStatistics model_stats = 1; +} + +//@@ +//@@.. cpp:var:: message ModelRepositoryParameter +//@@ +//@@ An model repository parameter value. +//@@ +message ModelRepositoryParameter +{ + //@@ .. cpp:var:: oneof parameter_choice + //@@ + //@@ The parameter value can be a string, an int64 or + //@@ a boolean + //@@ + oneof parameter_choice + { + //@@ .. cpp:var:: bool bool_param + //@@ + //@@ A boolean parameter value. + //@@ + bool bool_param = 1; + + //@@ .. cpp:var:: int64 int64_param + //@@ + //@@ An int64 parameter value. + //@@ + int64 int64_param = 2; + + //@@ .. cpp:var:: string string_param + //@@ + //@@ A string parameter value. + //@@ + string string_param = 3; + + //@@ .. cpp:var:: bytes bytes_param + //@@ + //@@ A bytes parameter value. + //@@ + bytes bytes_param = 4; + } +} + +//@@ +//@@.. cpp:var:: message RepositoryIndexRequest +//@@ +//@@ Request message for RepositoryIndex. +//@@ +message RepositoryIndexRequest +{ + //@@ .. 
cpp:var:: string repository_name + //@@ + //@@ The name of the repository. If empty the index is returned + //@@ for all repositories. + //@@ + string repository_name = 1; + + //@@ .. cpp:var:: bool ready + //@@ + //@@ If true returned only models currently ready for inferencing. + //@@ + bool ready = 2; +} + +//@@ +//@@.. cpp:var:: message RepositoryIndexResponse +//@@ +//@@ Response message for RepositoryIndex. +//@@ +message RepositoryIndexResponse +{ + //@@ + //@@ .. cpp:var:: message ModelIndex + //@@ + //@@ Index entry for a model. + //@@ + message ModelIndex + { + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the model. + //@@ + string name = 1; + + //@@ .. cpp:var:: string version + //@@ + //@@ The version of the model. + //@@ + string version = 2; + + //@@ + //@@ .. cpp:var:: string state + //@@ + //@@ The state of the model. + //@@ + string state = 3; + + //@@ + //@@ .. cpp:var:: string reason + //@@ + //@@ The reason, if any, that the model is in the given state. + //@@ + string reason = 4; + } + + //@@ + //@@ .. cpp:var:: ModelIndex models (repeated) + //@@ + //@@ An index entry for each model. + //@@ + repeated ModelIndex models = 1; +} + +//@@ +//@@.. cpp:var:: message RepositoryModelLoadRequest +//@@ +//@@ Request message for RepositoryModelLoad. +//@@ +message RepositoryModelLoadRequest +{ + //@@ .. cpp:var:: string repository_name + //@@ + //@@ The name of the repository to load from. If empty the model + //@@ is loaded from any repository. + //@@ + string repository_name = 1; + + //@@ .. cpp:var:: string repository_name + //@@ + //@@ The name of the model to load, or reload. + //@@ + string model_name = 2; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional model repository request parameters. + //@@ + map parameters = 3; +} + +//@@ +//@@.. cpp:var:: message RepositoryModelLoadResponse +//@@ +//@@ Response message for RepositoryModelLoad. +//@@ +message RepositoryModelLoadResponse {} + +//@@ +//@@.. 
cpp:var:: message RepositoryModelUnloadRequest +//@@ +//@@ Request message for RepositoryModelUnload. +//@@ +message RepositoryModelUnloadRequest +{ + //@@ .. cpp:var:: string repository_name + //@@ + //@@ The name of the repository from which the model was originally + //@@ loaded. If empty the repository is not considered. + //@@ + string repository_name = 1; + + //@@ .. cpp:var:: string repository_name + //@@ + //@@ The name of the model to unload. + //@@ + string model_name = 2; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Optional model repository request parameters. + //@@ + map parameters = 3; +} + +//@@ +//@@.. cpp:var:: message RepositoryModelUnloadResponse +//@@ +//@@ Response message for RepositoryModelUnload. +//@@ +message RepositoryModelUnloadResponse {} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryStatusRequest +//@@ +//@@ Request message for SystemSharedMemoryStatus. +//@@ +message SystemSharedMemoryStatusRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the region to get status for. If empty the + //@@ status is returned for all registered regions. + //@@ + string name = 1; +} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryStatusResponse +//@@ +//@@ Response message for SystemSharedMemoryStatus. +//@@ +message SystemSharedMemoryStatusResponse +{ + //@@ + //@@ .. cpp:var:: message RegionStatus + //@@ + //@@ Status for a shared memory region. + //@@ + message RegionStatus + { + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name for the shared memory region. + //@@ + string name = 1; + + //@@ .. cpp:var:: string shared_memory_key + //@@ + //@@ The key of the underlying memory object that contains the + //@@ shared memory region. + //@@ + string key = 2; + + //@@ .. cpp:var:: uint64 offset + //@@ + //@@ Offset, in bytes, within the underlying memory object to + //@@ the start of the shared memory region. + //@@ + uint64 offset = 3; + + //@@ .. 
cpp:var:: uint64 byte_size + //@@ + //@@ Size of the shared memory region, in bytes. + //@@ + uint64 byte_size = 4; + } + + //@@ + //@@ .. cpp:var:: map regions + //@@ + //@@ Status for each of the registered regions, indexed by + //@@ region name. + //@@ + map regions = 1; +} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryRegisterRequest +//@@ +//@@ Request message for SystemSharedMemoryRegister. +//@@ +message SystemSharedMemoryRegisterRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the region to register. + //@@ + string name = 1; + + //@@ .. cpp:var:: string shared_memory_key + //@@ + //@@ The key of the underlying memory object that contains the + //@@ shared memory region. + //@@ + string key = 2; + + //@@ .. cpp:var:: uint64 offset + //@@ + //@@ Offset, in bytes, within the underlying memory object to + //@@ the start of the shared memory region. + //@@ + uint64 offset = 3; + + //@@ .. cpp:var:: uint64 byte_size + //@@ + //@@ Size of the shared memory region, in bytes. + //@@ + uint64 byte_size = 4; +} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryRegisterResponse +//@@ +//@@ Response message for SystemSharedMemoryRegister. +//@@ +message SystemSharedMemoryRegisterResponse {} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryUnregisterRequest +//@@ +//@@ Request message for SystemSharedMemoryUnregister. +//@@ +message SystemSharedMemoryUnregisterRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the system region to unregister. If empty + //@@ all system shared-memory regions are unregistered. + //@@ + string name = 1; +} + +//@@ +//@@.. cpp:var:: message SystemSharedMemoryUnregisterResponse +//@@ +//@@ Response message for SystemSharedMemoryUnregister. +//@@ +message SystemSharedMemoryUnregisterResponse {} + +//@@ +//@@.. cpp:var:: message CudaSharedMemoryStatusRequest +//@@ +//@@ Request message for CudaSharedMemoryStatus. +//@@ +message CudaSharedMemoryStatusRequest +{ + //@@ + //@@ .. 
cpp:var:: string name + //@@ + //@@ The name of the region to get status for. If empty the + //@@ status is returned for all registered regions. + //@@ + string name = 1; +} + +//@@ +//@@.. cpp:var:: message CudaSharedMemoryStatusResponse +//@@ +//@@ Response message for CudaSharedMemoryStatus. +//@@ +message CudaSharedMemoryStatusResponse +{ + //@@ + //@@ .. cpp:var:: message RegionStatus + //@@ + //@@ Status for a shared memory region. + //@@ + message RegionStatus + { + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name for the shared memory region. + //@@ + string name = 1; + + //@@ .. cpp:var:: uin64 device_id + //@@ + //@@ The GPU device ID where the cudaIPC handle was created. + //@@ + uint64 device_id = 2; + + //@@ .. cpp:var:: uint64 byte_size + //@@ + //@@ Size of the shared memory region, in bytes. + //@@ + uint64 byte_size = 3; + } + + //@@ + //@@ .. cpp:var:: map regions + //@@ + //@@ Status for each of the registered regions, indexed by + //@@ region name. + //@@ + map regions = 1; +} + +//@@ +//@@.. cpp:var:: message CudaSharedMemoryRegisterRequest +//@@ +//@@ Request message for CudaSharedMemoryRegister. +//@@ +message CudaSharedMemoryRegisterRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the region to register. + //@@ + string name = 1; + + //@@ .. cpp:var:: bytes raw_handle + //@@ + //@@ The raw serialized cudaIPC handle. + //@@ + bytes raw_handle = 2; + + //@@ .. cpp:var:: int64 device_id + //@@ + //@@ The GPU device ID on which the cudaIPC handle was created. + //@@ + int64 device_id = 3; + + //@@ .. cpp:var:: uint64 byte_size + //@@ + //@@ Size of the shared memory block, in bytes. + //@@ + uint64 byte_size = 4; +} + +//@@ +//@@.. cpp:var:: message CudaSharedMemoryRegisterResponse +//@@ +//@@ Response message for CudaSharedMemoryRegister. +//@@ +message CudaSharedMemoryRegisterResponse {} + +//@@ +//@@.. 
cpp:var:: message CudaSharedMemoryUnregisterRequest +//@@ +//@@ Request message for CudaSharedMemoryUnregister. +//@@ +message CudaSharedMemoryUnregisterRequest +{ + //@@ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the cuda region to unregister. If empty + //@@ all cuda shared-memory regions are unregistered. + //@@ + string name = 1; +} + +//@@ +//@@.. cpp:var:: message CudaSharedMemoryUnregisterResponse +//@@ +//@@ Response message for CudaSharedMemoryUnregister. +//@@ +message CudaSharedMemoryUnregisterResponse {} + +//@@ +//@@.. cpp:var:: message TraceSettingRequest +//@@ +//@@ Request message for TraceSetting. +//@@ +message TraceSettingRequest +{ + //@@ + //@@ .. cpp:var:: message SettingValue + //@@ + //@@ The values to be associated with a trace setting. + //@@ If no value is provided, the setting will be clear and + //@@ the global setting value will be used. + //@@ + message SettingValue + { + //@@ + //@@ .. cpp:var:: string value (repeated) + //@@ + //@@ The value. + //@@ + repeated string value = 1; + } + + //@@ .. cpp:var:: map settings + //@@ + //@@ The new setting values to be updated, + //@@ settings that are not specified will remain unchanged. + //@@ + map settings = 1; + + //@@ + //@@ .. cpp:var:: string model_name + //@@ + //@@ The name of the model to apply the new trace settings. + //@@ If not given, the new settings will be applied globally. + //@@ + string model_name = 2; +} + +//@@ +//@@.. cpp:var:: message TraceSettingResponse +//@@ +//@@ Response message for TraceSetting. +//@@ +message TraceSettingResponse +{ + //@@ + //@@ .. cpp:var:: message SettingValue + //@@ + //@@ The values to be associated with a trace setting. + //@@ + message SettingValue + { + //@@ + //@@ .. cpp:var:: string value (repeated) + //@@ + //@@ The value. + //@@ + repeated string value = 1; + } + + //@@ .. cpp:var:: map settings + //@@ + //@@ The current trace settings, including any changes specified + //@@ by TraceSettingRequest. 
+ //@@ + map settings = 1; +} + +//@@ +//@@.. cpp:var:: message LogSettingsRequest +//@@ +//@@ Request message for LogSettings. +//@@ +message LogSettingsRequest +{ + message SettingValue + { + oneof parameter_choice + { + //@@ .. cpp:var:: bool bool_param + //@@ + //@@ A boolean parameter value. + //@@ + bool bool_param = 1; + + //@@ .. cpp:var:: uint32 uint32_param + //@@ + //@@ An uint32 parameter value. + //@@ + uint32 uint32_param = 2; + + //@@ .. cpp:var:: string string_param + //@@ + //@@ A string parameter value. + //@@ + string string_param = 3; + } + } + //@@ .. cpp:var:: map settings + //@@ + //@@ The current log settings. + //@@ + map settings = 1; +} + +//@@ +//@@.. cpp:var:: message LogSettingsResponse +//@@ +//@@ Response message for LogSettings. +//@@ +message LogSettingsResponse +{ + message SettingValue + { + oneof parameter_choice + { + //@@ .. cpp:var:: bool bool_param + //@@ + //@@ A boolean parameter value. + //@@ + bool bool_param = 1; + + //@@ .. cpp:var:: uint32 uint32_param + //@@ + //@@ An int32 parameter value. + //@@ + uint32 uint32_param = 2; + + //@@ .. cpp:var:: string string_param + //@@ + //@@ A string parameter value. + //@@ + string string_param = 3; + } + } + //@@ .. cpp:var:: map settings + //@@ + //@@ The current log settings. + //@@ + map settings = 1; +} diff --git a/pkg/apis/inference/v1/grpc_service_grpc.pb.go b/pkg/apis/inference/v1/grpc_service_grpc.pb.go new file mode 100644 index 0000000..8d37085 --- /dev/null +++ b/pkg/apis/inference/v1/grpc_service_grpc.pb.go @@ -0,0 +1,1034 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.6 +// source: pkg/apis/inference/v1/grpc_service.proto + +package inference + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// GRPCInferenceServiceClient is the client API for GRPCInferenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type GRPCInferenceServiceClient interface { + // @@ .. cpp:var:: rpc ServerLive(ServerLiveRequest) returns + // @@ (ServerLiveResponse) + // @@ + // @@ Check liveness of the inference server. + // @@ + ServerLive(ctx context.Context, in *ServerLiveRequest, opts ...grpc.CallOption) (*ServerLiveResponse, error) + // @@ .. cpp:var:: rpc ServerReady(ServerReadyRequest) returns + // @@ (ServerReadyResponse) + // @@ + // @@ Check readiness of the inference server. + // @@ + ServerReady(ctx context.Context, in *ServerReadyRequest, opts ...grpc.CallOption) (*ServerReadyResponse, error) + // @@ .. cpp:var:: rpc ModelReady(ModelReadyRequest) returns + // @@ (ModelReadyResponse) + // @@ + // @@ Check readiness of a model in the inference server. + // @@ + ModelReady(ctx context.Context, in *ModelReadyRequest, opts ...grpc.CallOption) (*ModelReadyResponse, error) + // @@ .. cpp:var:: rpc ServerMetadata(ServerMetadataRequest) returns + // @@ (ServerMetadataResponse) + // @@ + // @@ Get server metadata. + // @@ + ServerMetadata(ctx context.Context, in *ServerMetadataRequest, opts ...grpc.CallOption) (*ServerMetadataResponse, error) + // @@ .. 
cpp:var:: rpc ModelMetadata(ModelMetadataRequest) returns + // @@ (ModelMetadataResponse) + // @@ + // @@ Get model metadata. + // @@ + ModelMetadata(ctx context.Context, in *ModelMetadataRequest, opts ...grpc.CallOption) (*ModelMetadataResponse, error) + // @@ .. cpp:var:: rpc ModelInfer(ModelInferRequest) returns + // @@ (ModelInferResponse) + // @@ + // @@ Perform inference using a specific model. + // @@ + ModelInfer(ctx context.Context, in *ModelInferRequest, opts ...grpc.CallOption) (*ModelInferResponse, error) + // @@ .. cpp:var:: rpc ModelStreamInfer(stream ModelInferRequest) returns + // @@ (stream ModelStreamInferResponse) + // @@ + // @@ Perform streaming inference. + // @@ + ModelStreamInfer(ctx context.Context, opts ...grpc.CallOption) (GRPCInferenceService_ModelStreamInferClient, error) + // @@ .. cpp:var:: rpc ModelConfig(ModelConfigRequest) returns + // @@ (ModelConfigResponse) + // @@ + // @@ Get model configuration. + // @@ + ModelConfig(ctx context.Context, in *ModelConfigRequest, opts ...grpc.CallOption) (*ModelConfigResponse, error) + // @@ .. cpp:var:: rpc ModelStatistics( + // @@ ModelStatisticsRequest) + // @@ returns (ModelStatisticsResponse) + // @@ + // @@ Get the cumulative inference statistics for a model. + // @@ + ModelStatistics(ctx context.Context, in *ModelStatisticsRequest, opts ...grpc.CallOption) (*ModelStatisticsResponse, error) + // @@ .. cpp:var:: rpc RepositoryIndex(RepositoryIndexRequest) returns + // @@ (RepositoryIndexResponse) + // @@ + // @@ Get the index of model repository contents. + // @@ + RepositoryIndex(ctx context.Context, in *RepositoryIndexRequest, opts ...grpc.CallOption) (*RepositoryIndexResponse, error) + // @@ .. cpp:var:: rpc RepositoryModelLoad(RepositoryModelLoadRequest) returns + // @@ (RepositoryModelLoadResponse) + // @@ + // @@ Load or reload a model from a repository. 
+ // @@ + RepositoryModelLoad(ctx context.Context, in *RepositoryModelLoadRequest, opts ...grpc.CallOption) (*RepositoryModelLoadResponse, error) + // @@ .. cpp:var:: rpc RepositoryModelUnload(RepositoryModelUnloadRequest) + // @@ returns (RepositoryModelUnloadResponse) + // @@ + // @@ Unload a model. + // @@ + RepositoryModelUnload(ctx context.Context, in *RepositoryModelUnloadRequest, opts ...grpc.CallOption) (*RepositoryModelUnloadResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryStatus( + // @@ SystemSharedMemoryStatusRequest) + // @@ returns (SystemSharedMemoryStatusRespose) + // @@ + // @@ Get the status of all registered system-shared-memory regions. + // @@ + SystemSharedMemoryStatus(ctx context.Context, in *SystemSharedMemoryStatusRequest, opts ...grpc.CallOption) (*SystemSharedMemoryStatusResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryRegister( + // @@ SystemSharedMemoryRegisterRequest) + // @@ returns (SystemSharedMemoryRegisterResponse) + // @@ + // @@ Register a system-shared-memory region. + // @@ + SystemSharedMemoryRegister(ctx context.Context, in *SystemSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*SystemSharedMemoryRegisterResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryUnregister( + // @@ SystemSharedMemoryUnregisterRequest) + // @@ returns (SystemSharedMemoryUnregisterResponse) + // @@ + // @@ Unregister a system-shared-memory region. + // @@ + SystemSharedMemoryUnregister(ctx context.Context, in *SystemSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*SystemSharedMemoryUnregisterResponse, error) + // @@ .. cpp:var:: rpc CudaSharedMemoryStatus( + // @@ CudaSharedMemoryStatusRequest) + // @@ returns (CudaSharedMemoryStatusRespose) + // @@ + // @@ Get the status of all registered CUDA-shared-memory regions. + // @@ + CudaSharedMemoryStatus(ctx context.Context, in *CudaSharedMemoryStatusRequest, opts ...grpc.CallOption) (*CudaSharedMemoryStatusResponse, error) + // @@ .. 
cpp:var:: rpc CudaSharedMemoryRegister( + // @@ CudaSharedMemoryRegisterRequest) + // @@ returns (CudaSharedMemoryRegisterResponse) + // @@ + // @@ Register a CUDA-shared-memory region. + // @@ + CudaSharedMemoryRegister(ctx context.Context, in *CudaSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*CudaSharedMemoryRegisterResponse, error) + // @@ .. cpp:var:: rpc CudaSharedMemoryUnregister( + // @@ CudaSharedMemoryUnregisterRequest) + // @@ returns (CudaSharedMemoryUnregisterResponse) + // @@ + // @@ Unregister a CUDA-shared-memory region. + // @@ + CudaSharedMemoryUnregister(ctx context.Context, in *CudaSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*CudaSharedMemoryUnregisterResponse, error) + // @@ .. cpp:var:: rpc TraceSetting(TraceSettingRequest) + // @@ returns (TraceSettingResponse) + // @@ + // @@ Update and get the trace setting of the Triton server. + // @@ + TraceSetting(ctx context.Context, in *TraceSettingRequest, opts ...grpc.CallOption) (*TraceSettingResponse, error) + // @@ .. cpp:var:: rpc LogSettings(LogSettingsRequest) + // @@ returns (LogSettingsResponse) + // @@ + // @@ Update and get the log settings of the Triton server. + // @@ + LogSettings(ctx context.Context, in *LogSettingsRequest, opts ...grpc.CallOption) (*LogSettingsResponse, error) +} + +type gRPCInferenceServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewGRPCInferenceServiceClient(cc grpc.ClientConnInterface) GRPCInferenceServiceClient { + return &gRPCInferenceServiceClient{cc} +} + +func (c *gRPCInferenceServiceClient) ServerLive(ctx context.Context, in *ServerLiveRequest, opts ...grpc.CallOption) (*ServerLiveResponse, error) { + out := new(ServerLiveResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ServerLive", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ServerReady(ctx context.Context, in *ServerReadyRequest, opts ...grpc.CallOption) (*ServerReadyResponse, error) { + out := new(ServerReadyResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ServerReady", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ModelReady(ctx context.Context, in *ModelReadyRequest, opts ...grpc.CallOption) (*ModelReadyResponse, error) { + out := new(ModelReadyResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ModelReady", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ServerMetadata(ctx context.Context, in *ServerMetadataRequest, opts ...grpc.CallOption) (*ServerMetadataResponse, error) { + out := new(ServerMetadataResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ServerMetadata", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ModelMetadata(ctx context.Context, in *ModelMetadataRequest, opts ...grpc.CallOption) (*ModelMetadataResponse, error) { + out := new(ModelMetadataResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ModelMetadata", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ModelInfer(ctx context.Context, in *ModelInferRequest, opts ...grpc.CallOption) (*ModelInferResponse, error) { + out := new(ModelInferResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ModelInfer", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ModelStreamInfer(ctx context.Context, opts ...grpc.CallOption) (GRPCInferenceService_ModelStreamInferClient, error) { + stream, err := c.cc.NewStream(ctx, &GRPCInferenceService_ServiceDesc.Streams[0], "/inference.v1.GRPCInferenceService/ModelStreamInfer", opts...) + if err != nil { + return nil, err + } + x := &gRPCInferenceServiceModelStreamInferClient{stream} + return x, nil +} + +type GRPCInferenceService_ModelStreamInferClient interface { + Send(*ModelInferRequest) error + Recv() (*ModelStreamInferResponse, error) + grpc.ClientStream +} + +type gRPCInferenceServiceModelStreamInferClient struct { + grpc.ClientStream +} + +func (x *gRPCInferenceServiceModelStreamInferClient) Send(m *ModelInferRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *gRPCInferenceServiceModelStreamInferClient) Recv() (*ModelStreamInferResponse, error) { + m := new(ModelStreamInferResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *gRPCInferenceServiceClient) ModelConfig(ctx context.Context, in *ModelConfigRequest, opts ...grpc.CallOption) (*ModelConfigResponse, error) { + out := new(ModelConfigResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ModelConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) ModelStatistics(ctx context.Context, in *ModelStatisticsRequest, opts ...grpc.CallOption) (*ModelStatisticsResponse, error) { + out := new(ModelStatisticsResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/ModelStatistics", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) RepositoryIndex(ctx context.Context, in *RepositoryIndexRequest, opts ...grpc.CallOption) (*RepositoryIndexResponse, error) { + out := new(RepositoryIndexResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/RepositoryIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) RepositoryModelLoad(ctx context.Context, in *RepositoryModelLoadRequest, opts ...grpc.CallOption) (*RepositoryModelLoadResponse, error) { + out := new(RepositoryModelLoadResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/RepositoryModelLoad", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) RepositoryModelUnload(ctx context.Context, in *RepositoryModelUnloadRequest, opts ...grpc.CallOption) (*RepositoryModelUnloadResponse, error) { + out := new(RepositoryModelUnloadResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/RepositoryModelUnload", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) SystemSharedMemoryStatus(ctx context.Context, in *SystemSharedMemoryStatusRequest, opts ...grpc.CallOption) (*SystemSharedMemoryStatusResponse, error) { + out := new(SystemSharedMemoryStatusResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/SystemSharedMemoryStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) SystemSharedMemoryRegister(ctx context.Context, in *SystemSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*SystemSharedMemoryRegisterResponse, error) { + out := new(SystemSharedMemoryRegisterResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/SystemSharedMemoryRegister", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) SystemSharedMemoryUnregister(ctx context.Context, in *SystemSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*SystemSharedMemoryUnregisterResponse, error) { + out := new(SystemSharedMemoryUnregisterResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/SystemSharedMemoryUnregister", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) CudaSharedMemoryStatus(ctx context.Context, in *CudaSharedMemoryStatusRequest, opts ...grpc.CallOption) (*CudaSharedMemoryStatusResponse, error) { + out := new(CudaSharedMemoryStatusResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/CudaSharedMemoryStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) CudaSharedMemoryRegister(ctx context.Context, in *CudaSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*CudaSharedMemoryRegisterResponse, error) { + out := new(CudaSharedMemoryRegisterResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/CudaSharedMemoryRegister", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) CudaSharedMemoryUnregister(ctx context.Context, in *CudaSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*CudaSharedMemoryUnregisterResponse, error) { + out := new(CudaSharedMemoryUnregisterResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/CudaSharedMemoryUnregister", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) TraceSetting(ctx context.Context, in *TraceSettingRequest, opts ...grpc.CallOption) (*TraceSettingResponse, error) { + out := new(TraceSettingResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/TraceSetting", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *gRPCInferenceServiceClient) LogSettings(ctx context.Context, in *LogSettingsRequest, opts ...grpc.CallOption) (*LogSettingsResponse, error) { + out := new(LogSettingsResponse) + err := c.cc.Invoke(ctx, "/inference.v1.GRPCInferenceService/LogSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GRPCInferenceServiceServer is the server API for GRPCInferenceService service. +// All implementations should embed UnimplementedGRPCInferenceServiceServer +// for forward compatibility +type GRPCInferenceServiceServer interface { + // @@ .. cpp:var:: rpc ServerLive(ServerLiveRequest) returns + // @@ (ServerLiveResponse) + // @@ + // @@ Check liveness of the inference server. + // @@ + ServerLive(context.Context, *ServerLiveRequest) (*ServerLiveResponse, error) + // @@ .. cpp:var:: rpc ServerReady(ServerReadyRequest) returns + // @@ (ServerReadyResponse) + // @@ + // @@ Check readiness of the inference server. + // @@ + ServerReady(context.Context, *ServerReadyRequest) (*ServerReadyResponse, error) + // @@ .. cpp:var:: rpc ModelReady(ModelReadyRequest) returns + // @@ (ModelReadyResponse) + // @@ + // @@ Check readiness of a model in the inference server. + // @@ + ModelReady(context.Context, *ModelReadyRequest) (*ModelReadyResponse, error) + // @@ .. cpp:var:: rpc ServerMetadata(ServerMetadataRequest) returns + // @@ (ServerMetadataResponse) + // @@ + // @@ Get server metadata. + // @@ + ServerMetadata(context.Context, *ServerMetadataRequest) (*ServerMetadataResponse, error) + // @@ .. cpp:var:: rpc ModelMetadata(ModelMetadataRequest) returns + // @@ (ModelMetadataResponse) + // @@ + // @@ Get model metadata. + // @@ + ModelMetadata(context.Context, *ModelMetadataRequest) (*ModelMetadataResponse, error) + // @@ .. cpp:var:: rpc ModelInfer(ModelInferRequest) returns + // @@ (ModelInferResponse) + // @@ + // @@ Perform inference using a specific model. 
+ // @@ + ModelInfer(context.Context, *ModelInferRequest) (*ModelInferResponse, error) + // @@ .. cpp:var:: rpc ModelStreamInfer(stream ModelInferRequest) returns + // @@ (stream ModelStreamInferResponse) + // @@ + // @@ Perform streaming inference. + // @@ + ModelStreamInfer(GRPCInferenceService_ModelStreamInferServer) error + // @@ .. cpp:var:: rpc ModelConfig(ModelConfigRequest) returns + // @@ (ModelConfigResponse) + // @@ + // @@ Get model configuration. + // @@ + ModelConfig(context.Context, *ModelConfigRequest) (*ModelConfigResponse, error) + // @@ .. cpp:var:: rpc ModelStatistics( + // @@ ModelStatisticsRequest) + // @@ returns (ModelStatisticsResponse) + // @@ + // @@ Get the cumulative inference statistics for a model. + // @@ + ModelStatistics(context.Context, *ModelStatisticsRequest) (*ModelStatisticsResponse, error) + // @@ .. cpp:var:: rpc RepositoryIndex(RepositoryIndexRequest) returns + // @@ (RepositoryIndexResponse) + // @@ + // @@ Get the index of model repository contents. + // @@ + RepositoryIndex(context.Context, *RepositoryIndexRequest) (*RepositoryIndexResponse, error) + // @@ .. cpp:var:: rpc RepositoryModelLoad(RepositoryModelLoadRequest) returns + // @@ (RepositoryModelLoadResponse) + // @@ + // @@ Load or reload a model from a repository. + // @@ + RepositoryModelLoad(context.Context, *RepositoryModelLoadRequest) (*RepositoryModelLoadResponse, error) + // @@ .. cpp:var:: rpc RepositoryModelUnload(RepositoryModelUnloadRequest) + // @@ returns (RepositoryModelUnloadResponse) + // @@ + // @@ Unload a model. + // @@ + RepositoryModelUnload(context.Context, *RepositoryModelUnloadRequest) (*RepositoryModelUnloadResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryStatus( + // @@ SystemSharedMemoryStatusRequest) + // @@ returns (SystemSharedMemoryStatusRespose) + // @@ + // @@ Get the status of all registered system-shared-memory regions. 
+ // @@ + SystemSharedMemoryStatus(context.Context, *SystemSharedMemoryStatusRequest) (*SystemSharedMemoryStatusResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryRegister( + // @@ SystemSharedMemoryRegisterRequest) + // @@ returns (SystemSharedMemoryRegisterResponse) + // @@ + // @@ Register a system-shared-memory region. + // @@ + SystemSharedMemoryRegister(context.Context, *SystemSharedMemoryRegisterRequest) (*SystemSharedMemoryRegisterResponse, error) + // @@ .. cpp:var:: rpc SystemSharedMemoryUnregister( + // @@ SystemSharedMemoryUnregisterRequest) + // @@ returns (SystemSharedMemoryUnregisterResponse) + // @@ + // @@ Unregister a system-shared-memory region. + // @@ + SystemSharedMemoryUnregister(context.Context, *SystemSharedMemoryUnregisterRequest) (*SystemSharedMemoryUnregisterResponse, error) + // @@ .. cpp:var:: rpc CudaSharedMemoryStatus( + // @@ CudaSharedMemoryStatusRequest) + // @@ returns (CudaSharedMemoryStatusRespose) + // @@ + // @@ Get the status of all registered CUDA-shared-memory regions. + // @@ + CudaSharedMemoryStatus(context.Context, *CudaSharedMemoryStatusRequest) (*CudaSharedMemoryStatusResponse, error) + // @@ .. cpp:var:: rpc CudaSharedMemoryRegister( + // @@ CudaSharedMemoryRegisterRequest) + // @@ returns (CudaSharedMemoryRegisterResponse) + // @@ + // @@ Register a CUDA-shared-memory region. + // @@ + CudaSharedMemoryRegister(context.Context, *CudaSharedMemoryRegisterRequest) (*CudaSharedMemoryRegisterResponse, error) + // @@ .. cpp:var:: rpc CudaSharedMemoryUnregister( + // @@ CudaSharedMemoryUnregisterRequest) + // @@ returns (CudaSharedMemoryUnregisterResponse) + // @@ + // @@ Unregister a CUDA-shared-memory region. + // @@ + CudaSharedMemoryUnregister(context.Context, *CudaSharedMemoryUnregisterRequest) (*CudaSharedMemoryUnregisterResponse, error) + // @@ .. cpp:var:: rpc TraceSetting(TraceSettingRequest) + // @@ returns (TraceSettingResponse) + // @@ + // @@ Update and get the trace setting of the Triton server. 
+ // @@ + TraceSetting(context.Context, *TraceSettingRequest) (*TraceSettingResponse, error) + // @@ .. cpp:var:: rpc LogSettings(LogSettingsRequest) + // @@ returns (LogSettingsResponse) + // @@ + // @@ Update and get the log settings of the Triton server. + // @@ + LogSettings(context.Context, *LogSettingsRequest) (*LogSettingsResponse, error) +} + +// UnimplementedGRPCInferenceServiceServer should be embedded to have forward compatible implementations. +type UnimplementedGRPCInferenceServiceServer struct { +} + +func (UnimplementedGRPCInferenceServiceServer) ServerLive(context.Context, *ServerLiveRequest) (*ServerLiveResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ServerLive not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ServerReady(context.Context, *ServerReadyRequest) (*ServerReadyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ServerReady not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelReady(context.Context, *ModelReadyRequest) (*ModelReadyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModelReady not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ServerMetadata(context.Context, *ServerMetadataRequest) (*ServerMetadataResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ServerMetadata not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelMetadata(context.Context, *ModelMetadataRequest) (*ModelMetadataResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModelMetadata not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelInfer(context.Context, *ModelInferRequest) (*ModelInferResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModelInfer not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelStreamInfer(GRPCInferenceService_ModelStreamInferServer) error { + return 
status.Errorf(codes.Unimplemented, "method ModelStreamInfer not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelConfig(context.Context, *ModelConfigRequest) (*ModelConfigResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModelConfig not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) ModelStatistics(context.Context, *ModelStatisticsRequest) (*ModelStatisticsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModelStatistics not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) RepositoryIndex(context.Context, *RepositoryIndexRequest) (*RepositoryIndexResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RepositoryIndex not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) RepositoryModelLoad(context.Context, *RepositoryModelLoadRequest) (*RepositoryModelLoadResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RepositoryModelLoad not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) RepositoryModelUnload(context.Context, *RepositoryModelUnloadRequest) (*RepositoryModelUnloadResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RepositoryModelUnload not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) SystemSharedMemoryStatus(context.Context, *SystemSharedMemoryStatusRequest) (*SystemSharedMemoryStatusResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SystemSharedMemoryStatus not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) SystemSharedMemoryRegister(context.Context, *SystemSharedMemoryRegisterRequest) (*SystemSharedMemoryRegisterResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SystemSharedMemoryRegister not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) SystemSharedMemoryUnregister(context.Context, *SystemSharedMemoryUnregisterRequest) 
(*SystemSharedMemoryUnregisterResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SystemSharedMemoryUnregister not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) CudaSharedMemoryStatus(context.Context, *CudaSharedMemoryStatusRequest) (*CudaSharedMemoryStatusResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CudaSharedMemoryStatus not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) CudaSharedMemoryRegister(context.Context, *CudaSharedMemoryRegisterRequest) (*CudaSharedMemoryRegisterResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CudaSharedMemoryRegister not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) CudaSharedMemoryUnregister(context.Context, *CudaSharedMemoryUnregisterRequest) (*CudaSharedMemoryUnregisterResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CudaSharedMemoryUnregister not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) TraceSetting(context.Context, *TraceSettingRequest) (*TraceSettingResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method TraceSetting not implemented") +} +func (UnimplementedGRPCInferenceServiceServer) LogSettings(context.Context, *LogSettingsRequest) (*LogSettingsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method LogSettings not implemented") +} + +// UnsafeGRPCInferenceServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to GRPCInferenceServiceServer will +// result in compilation errors. 
+type UnsafeGRPCInferenceServiceServer interface { + mustEmbedUnimplementedGRPCInferenceServiceServer() +} + +func RegisterGRPCInferenceServiceServer(s grpc.ServiceRegistrar, srv GRPCInferenceServiceServer) { + s.RegisterService(&GRPCInferenceService_ServiceDesc, srv) +} + +func _GRPCInferenceService_ServerLive_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ServerLiveRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ServerLive(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ServerLive", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ServerLive(ctx, req.(*ServerLiveRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ServerReady_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ServerReadyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ServerReady(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ServerReady", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ServerReady(ctx, req.(*ServerReadyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ModelReady_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModelReadyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(GRPCInferenceServiceServer).ModelReady(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ModelReady", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ModelReady(ctx, req.(*ModelReadyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ServerMetadata_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ServerMetadataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ServerMetadata(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ServerMetadata", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ServerMetadata(ctx, req.(*ServerMetadataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ModelMetadata_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModelMetadataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ModelMetadata(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ModelMetadata", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ModelMetadata(ctx, req.(*ModelMetadataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ModelInfer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(ModelInferRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ModelInfer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ModelInfer", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ModelInfer(ctx, req.(*ModelInferRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_ModelStreamInfer_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(GRPCInferenceServiceServer).ModelStreamInfer(&gRPCInferenceServiceModelStreamInferServer{stream}) +} + +type GRPCInferenceService_ModelStreamInferServer interface { + Send(*ModelStreamInferResponse) error + Recv() (*ModelInferRequest, error) + grpc.ServerStream +} + +type gRPCInferenceServiceModelStreamInferServer struct { + grpc.ServerStream +} + +func (x *gRPCInferenceServiceModelStreamInferServer) Send(m *ModelStreamInferResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *gRPCInferenceServiceModelStreamInferServer) Recv() (*ModelInferRequest, error) { + m := new(ModelInferRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _GRPCInferenceService_ModelConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModelConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ModelConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ModelConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ModelConfig(ctx, req.(*ModelConfigRequest)) + } + return interceptor(ctx, in, 
info, handler) +} + +func _GRPCInferenceService_ModelStatistics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModelStatisticsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).ModelStatistics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/ModelStatistics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).ModelStatistics(ctx, req.(*ModelStatisticsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_RepositoryIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RepositoryIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).RepositoryIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/RepositoryIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).RepositoryIndex(ctx, req.(*RepositoryIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_RepositoryModelLoad_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RepositoryModelLoadRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).RepositoryModelLoad(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/RepositoryModelLoad", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).RepositoryModelLoad(ctx, req.(*RepositoryModelLoadRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_RepositoryModelUnload_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RepositoryModelUnloadRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).RepositoryModelUnload(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/RepositoryModelUnload", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).RepositoryModelUnload(ctx, req.(*RepositoryModelUnloadRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_SystemSharedMemoryStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SystemSharedMemoryStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/SystemSharedMemoryStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryStatus(ctx, req.(*SystemSharedMemoryStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_SystemSharedMemoryRegister_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SystemSharedMemoryRegisterRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryRegister(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/SystemSharedMemoryRegister", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryRegister(ctx, req.(*SystemSharedMemoryRegisterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_SystemSharedMemoryUnregister_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SystemSharedMemoryUnregisterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryUnregister(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/SystemSharedMemoryUnregister", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).SystemSharedMemoryUnregister(ctx, req.(*SystemSharedMemoryUnregisterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_CudaSharedMemoryStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CudaSharedMemoryStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/CudaSharedMemoryStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryStatus(ctx, 
req.(*CudaSharedMemoryStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_CudaSharedMemoryRegister_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CudaSharedMemoryRegisterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryRegister(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/CudaSharedMemoryRegister", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryRegister(ctx, req.(*CudaSharedMemoryRegisterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_CudaSharedMemoryUnregister_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CudaSharedMemoryUnregisterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryUnregister(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/CudaSharedMemoryUnregister", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).CudaSharedMemoryUnregister(ctx, req.(*CudaSharedMemoryUnregisterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_TraceSetting_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TraceSettingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(GRPCInferenceServiceServer).TraceSetting(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/TraceSetting", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).TraceSetting(ctx, req.(*TraceSettingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GRPCInferenceService_LogSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LogSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCInferenceServiceServer).LogSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/inference.v1.GRPCInferenceService/LogSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCInferenceServiceServer).LogSettings(ctx, req.(*LogSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// GRPCInferenceService_ServiceDesc is the grpc.ServiceDesc for GRPCInferenceService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var GRPCInferenceService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "inference.v1.GRPCInferenceService", + HandlerType: (*GRPCInferenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ServerLive", + Handler: _GRPCInferenceService_ServerLive_Handler, + }, + { + MethodName: "ServerReady", + Handler: _GRPCInferenceService_ServerReady_Handler, + }, + { + MethodName: "ModelReady", + Handler: _GRPCInferenceService_ModelReady_Handler, + }, + { + MethodName: "ServerMetadata", + Handler: _GRPCInferenceService_ServerMetadata_Handler, + }, + { + MethodName: "ModelMetadata", + Handler: _GRPCInferenceService_ModelMetadata_Handler, + }, + { + MethodName: "ModelInfer", + Handler: _GRPCInferenceService_ModelInfer_Handler, + }, + { + MethodName: "ModelConfig", + Handler: _GRPCInferenceService_ModelConfig_Handler, + }, + { + MethodName: "ModelStatistics", + Handler: _GRPCInferenceService_ModelStatistics_Handler, + }, + { + MethodName: "RepositoryIndex", + Handler: _GRPCInferenceService_RepositoryIndex_Handler, + }, + { + MethodName: "RepositoryModelLoad", + Handler: _GRPCInferenceService_RepositoryModelLoad_Handler, + }, + { + MethodName: "RepositoryModelUnload", + Handler: _GRPCInferenceService_RepositoryModelUnload_Handler, + }, + { + MethodName: "SystemSharedMemoryStatus", + Handler: _GRPCInferenceService_SystemSharedMemoryStatus_Handler, + }, + { + MethodName: "SystemSharedMemoryRegister", + Handler: _GRPCInferenceService_SystemSharedMemoryRegister_Handler, + }, + { + MethodName: "SystemSharedMemoryUnregister", + Handler: _GRPCInferenceService_SystemSharedMemoryUnregister_Handler, + }, + { + MethodName: "CudaSharedMemoryStatus", + Handler: _GRPCInferenceService_CudaSharedMemoryStatus_Handler, + }, + { + MethodName: "CudaSharedMemoryRegister", + Handler: _GRPCInferenceService_CudaSharedMemoryRegister_Handler, + }, + { + MethodName: 
"CudaSharedMemoryUnregister", + Handler: _GRPCInferenceService_CudaSharedMemoryUnregister_Handler, + }, + { + MethodName: "TraceSetting", + Handler: _GRPCInferenceService_TraceSetting_Handler, + }, + { + MethodName: "LogSettings", + Handler: _GRPCInferenceService_LogSettings_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ModelStreamInfer", + Handler: _GRPCInferenceService_ModelStreamInfer_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "pkg/apis/inference/v1/grpc_service.proto", +} diff --git a/pkg/apis/inference/v1/mocks/grpc_service_mock.go b/pkg/apis/inference/v1/mocks/grpc_service_mock.go new file mode 100644 index 0000000..0a489b0 --- /dev/null +++ b/pkg/apis/inference/v1/mocks/grpc_service_mock.go @@ -0,0 +1,1066 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ../grpc_service_grpc.pb.go + +// Package mocks is a generated GoMock package. +package mocks + +import ( + context "context" + reflect "reflect" + + inference "d7y.io/api/pkg/apis/inference/v1" + gomock "github.com/golang/mock/gomock" + grpc "google.golang.org/grpc" + metadata "google.golang.org/grpc/metadata" +) + +// MockGRPCInferenceServiceClient is a mock of GRPCInferenceServiceClient interface. +type MockGRPCInferenceServiceClient struct { + ctrl *gomock.Controller + recorder *MockGRPCInferenceServiceClientMockRecorder +} + +// MockGRPCInferenceServiceClientMockRecorder is the mock recorder for MockGRPCInferenceServiceClient. +type MockGRPCInferenceServiceClientMockRecorder struct { + mock *MockGRPCInferenceServiceClient +} + +// NewMockGRPCInferenceServiceClient creates a new mock instance. +func NewMockGRPCInferenceServiceClient(ctrl *gomock.Controller) *MockGRPCInferenceServiceClient { + mock := &MockGRPCInferenceServiceClient{ctrl: ctrl} + mock.recorder = &MockGRPCInferenceServiceClientMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockGRPCInferenceServiceClient) EXPECT() *MockGRPCInferenceServiceClientMockRecorder { + return m.recorder +} + +// CudaSharedMemoryRegister mocks base method. +func (m *MockGRPCInferenceServiceClient) CudaSharedMemoryRegister(ctx context.Context, in *inference.CudaSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*inference.CudaSharedMemoryRegisterResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CudaSharedMemoryRegister", varargs...) + ret0, _ := ret[0].(*inference.CudaSharedMemoryRegisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryRegister indicates an expected call of CudaSharedMemoryRegister. +func (mr *MockGRPCInferenceServiceClientMockRecorder) CudaSharedMemoryRegister(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryRegister", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).CudaSharedMemoryRegister), varargs...) +} + +// CudaSharedMemoryStatus mocks base method. +func (m *MockGRPCInferenceServiceClient) CudaSharedMemoryStatus(ctx context.Context, in *inference.CudaSharedMemoryStatusRequest, opts ...grpc.CallOption) (*inference.CudaSharedMemoryStatusResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CudaSharedMemoryStatus", varargs...) + ret0, _ := ret[0].(*inference.CudaSharedMemoryStatusResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryStatus indicates an expected call of CudaSharedMemoryStatus. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) CudaSharedMemoryStatus(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryStatus", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).CudaSharedMemoryStatus), varargs...) +} + +// CudaSharedMemoryUnregister mocks base method. +func (m *MockGRPCInferenceServiceClient) CudaSharedMemoryUnregister(ctx context.Context, in *inference.CudaSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*inference.CudaSharedMemoryUnregisterResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CudaSharedMemoryUnregister", varargs...) + ret0, _ := ret[0].(*inference.CudaSharedMemoryUnregisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryUnregister indicates an expected call of CudaSharedMemoryUnregister. +func (mr *MockGRPCInferenceServiceClientMockRecorder) CudaSharedMemoryUnregister(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryUnregister", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).CudaSharedMemoryUnregister), varargs...) +} + +// LogSettings mocks base method. +func (m *MockGRPCInferenceServiceClient) LogSettings(ctx context.Context, in *inference.LogSettingsRequest, opts ...grpc.CallOption) (*inference.LogSettingsResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "LogSettings", varargs...) 
+ ret0, _ := ret[0].(*inference.LogSettingsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LogSettings indicates an expected call of LogSettings. +func (mr *MockGRPCInferenceServiceClientMockRecorder) LogSettings(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LogSettings", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).LogSettings), varargs...) +} + +// ModelConfig mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelConfig(ctx context.Context, in *inference.ModelConfigRequest, opts ...grpc.CallOption) (*inference.ModelConfigResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelConfig", varargs...) + ret0, _ := ret[0].(*inference.ModelConfigResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelConfig indicates an expected call of ModelConfig. +func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelConfig(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelConfig", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelConfig), varargs...) +} + +// ModelInfer mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelInfer(ctx context.Context, in *inference.ModelInferRequest, opts ...grpc.CallOption) (*inference.ModelInferResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelInfer", varargs...) + ret0, _ := ret[0].(*inference.ModelInferResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelInfer indicates an expected call of ModelInfer. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelInfer(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelInfer", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelInfer), varargs...) +} + +// ModelMetadata mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelMetadata(ctx context.Context, in *inference.ModelMetadataRequest, opts ...grpc.CallOption) (*inference.ModelMetadataResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelMetadata", varargs...) + ret0, _ := ret[0].(*inference.ModelMetadataResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelMetadata indicates an expected call of ModelMetadata. +func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelMetadata(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelMetadata", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelMetadata), varargs...) +} + +// ModelReady mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelReady(ctx context.Context, in *inference.ModelReadyRequest, opts ...grpc.CallOption) (*inference.ModelReadyResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelReady", varargs...) + ret0, _ := ret[0].(*inference.ModelReadyResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelReady indicates an expected call of ModelReady. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelReady(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelReady", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelReady), varargs...) +} + +// ModelStatistics mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelStatistics(ctx context.Context, in *inference.ModelStatisticsRequest, opts ...grpc.CallOption) (*inference.ModelStatisticsResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelStatistics", varargs...) + ret0, _ := ret[0].(*inference.ModelStatisticsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelStatistics indicates an expected call of ModelStatistics. +func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelStatistics(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelStatistics", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelStatistics), varargs...) +} + +// ModelStreamInfer mocks base method. +func (m *MockGRPCInferenceServiceClient) ModelStreamInfer(ctx context.Context, opts ...grpc.CallOption) (inference.GRPCInferenceService_ModelStreamInferClient, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ModelStreamInfer", varargs...) + ret0, _ := ret[0].(inference.GRPCInferenceService_ModelStreamInferClient) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelStreamInfer indicates an expected call of ModelStreamInfer. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) ModelStreamInfer(ctx interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelStreamInfer", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ModelStreamInfer), varargs...) +} + +// RepositoryIndex mocks base method. +func (m *MockGRPCInferenceServiceClient) RepositoryIndex(ctx context.Context, in *inference.RepositoryIndexRequest, opts ...grpc.CallOption) (*inference.RepositoryIndexResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RepositoryIndex", varargs...) + ret0, _ := ret[0].(*inference.RepositoryIndexResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryIndex indicates an expected call of RepositoryIndex. +func (mr *MockGRPCInferenceServiceClientMockRecorder) RepositoryIndex(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryIndex", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).RepositoryIndex), varargs...) +} + +// RepositoryModelLoad mocks base method. +func (m *MockGRPCInferenceServiceClient) RepositoryModelLoad(ctx context.Context, in *inference.RepositoryModelLoadRequest, opts ...grpc.CallOption) (*inference.RepositoryModelLoadResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RepositoryModelLoad", varargs...) + ret0, _ := ret[0].(*inference.RepositoryModelLoadResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryModelLoad indicates an expected call of RepositoryModelLoad. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) RepositoryModelLoad(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryModelLoad", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).RepositoryModelLoad), varargs...) +} + +// RepositoryModelUnload mocks base method. +func (m *MockGRPCInferenceServiceClient) RepositoryModelUnload(ctx context.Context, in *inference.RepositoryModelUnloadRequest, opts ...grpc.CallOption) (*inference.RepositoryModelUnloadResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "RepositoryModelUnload", varargs...) + ret0, _ := ret[0].(*inference.RepositoryModelUnloadResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryModelUnload indicates an expected call of RepositoryModelUnload. +func (mr *MockGRPCInferenceServiceClientMockRecorder) RepositoryModelUnload(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryModelUnload", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).RepositoryModelUnload), varargs...) +} + +// ServerLive mocks base method. +func (m *MockGRPCInferenceServiceClient) ServerLive(ctx context.Context, in *inference.ServerLiveRequest, opts ...grpc.CallOption) (*inference.ServerLiveResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ServerLive", varargs...) + ret0, _ := ret[0].(*inference.ServerLiveResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerLive indicates an expected call of ServerLive. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) ServerLive(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerLive", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ServerLive), varargs...) +} + +// ServerMetadata mocks base method. +func (m *MockGRPCInferenceServiceClient) ServerMetadata(ctx context.Context, in *inference.ServerMetadataRequest, opts ...grpc.CallOption) (*inference.ServerMetadataResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ServerMetadata", varargs...) + ret0, _ := ret[0].(*inference.ServerMetadataResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerMetadata indicates an expected call of ServerMetadata. +func (mr *MockGRPCInferenceServiceClientMockRecorder) ServerMetadata(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerMetadata", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ServerMetadata), varargs...) +} + +// ServerReady mocks base method. +func (m *MockGRPCInferenceServiceClient) ServerReady(ctx context.Context, in *inference.ServerReadyRequest, opts ...grpc.CallOption) (*inference.ServerReadyResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ServerReady", varargs...) + ret0, _ := ret[0].(*inference.ServerReadyResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerReady indicates an expected call of ServerReady. 
+func (mr *MockGRPCInferenceServiceClientMockRecorder) ServerReady(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerReady", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).ServerReady), varargs...) +} + +// SystemSharedMemoryRegister mocks base method. +func (m *MockGRPCInferenceServiceClient) SystemSharedMemoryRegister(ctx context.Context, in *inference.SystemSharedMemoryRegisterRequest, opts ...grpc.CallOption) (*inference.SystemSharedMemoryRegisterResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "SystemSharedMemoryRegister", varargs...) + ret0, _ := ret[0].(*inference.SystemSharedMemoryRegisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryRegister indicates an expected call of SystemSharedMemoryRegister. +func (mr *MockGRPCInferenceServiceClientMockRecorder) SystemSharedMemoryRegister(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryRegister", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).SystemSharedMemoryRegister), varargs...) +} + +// SystemSharedMemoryStatus mocks base method. +func (m *MockGRPCInferenceServiceClient) SystemSharedMemoryStatus(ctx context.Context, in *inference.SystemSharedMemoryStatusRequest, opts ...grpc.CallOption) (*inference.SystemSharedMemoryStatusResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "SystemSharedMemoryStatus", varargs...) 
+ ret0, _ := ret[0].(*inference.SystemSharedMemoryStatusResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryStatus indicates an expected call of SystemSharedMemoryStatus. +func (mr *MockGRPCInferenceServiceClientMockRecorder) SystemSharedMemoryStatus(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryStatus", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).SystemSharedMemoryStatus), varargs...) +} + +// SystemSharedMemoryUnregister mocks base method. +func (m *MockGRPCInferenceServiceClient) SystemSharedMemoryUnregister(ctx context.Context, in *inference.SystemSharedMemoryUnregisterRequest, opts ...grpc.CallOption) (*inference.SystemSharedMemoryUnregisterResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "SystemSharedMemoryUnregister", varargs...) + ret0, _ := ret[0].(*inference.SystemSharedMemoryUnregisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryUnregister indicates an expected call of SystemSharedMemoryUnregister. +func (mr *MockGRPCInferenceServiceClientMockRecorder) SystemSharedMemoryUnregister(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryUnregister", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).SystemSharedMemoryUnregister), varargs...) +} + +// TraceSetting mocks base method. 
+func (m *MockGRPCInferenceServiceClient) TraceSetting(ctx context.Context, in *inference.TraceSettingRequest, opts ...grpc.CallOption) (*inference.TraceSettingResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{ctx, in} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "TraceSetting", varargs...) + ret0, _ := ret[0].(*inference.TraceSettingResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// TraceSetting indicates an expected call of TraceSetting. +func (mr *MockGRPCInferenceServiceClientMockRecorder) TraceSetting(ctx, in interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{ctx, in}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TraceSetting", reflect.TypeOf((*MockGRPCInferenceServiceClient)(nil).TraceSetting), varargs...) +} + +// MockGRPCInferenceService_ModelStreamInferClient is a mock of GRPCInferenceService_ModelStreamInferClient interface. +type MockGRPCInferenceService_ModelStreamInferClient struct { + ctrl *gomock.Controller + recorder *MockGRPCInferenceService_ModelStreamInferClientMockRecorder +} + +// MockGRPCInferenceService_ModelStreamInferClientMockRecorder is the mock recorder for MockGRPCInferenceService_ModelStreamInferClient. +type MockGRPCInferenceService_ModelStreamInferClientMockRecorder struct { + mock *MockGRPCInferenceService_ModelStreamInferClient +} + +// NewMockGRPCInferenceService_ModelStreamInferClient creates a new mock instance. +func NewMockGRPCInferenceService_ModelStreamInferClient(ctrl *gomock.Controller) *MockGRPCInferenceService_ModelStreamInferClient { + mock := &MockGRPCInferenceService_ModelStreamInferClient{ctrl: ctrl} + mock.recorder = &MockGRPCInferenceService_ModelStreamInferClientMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockGRPCInferenceService_ModelStreamInferClient) EXPECT() *MockGRPCInferenceService_ModelStreamInferClientMockRecorder { + return m.recorder +} + +// CloseSend mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferClient) CloseSend() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CloseSend") + ret0, _ := ret[0].(error) + return ret0 +} + +// CloseSend indicates an expected call of CloseSend. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) CloseSend() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CloseSend", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).CloseSend)) +} + +// Context mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferClient) Context() context.Context { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Context") + ret0, _ := ret[0].(context.Context) + return ret0 +} + +// Context indicates an expected call of Context. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) Context() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Context", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).Context)) +} + +// Header mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferClient) Header() (metadata.MD, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Header") + ret0, _ := ret[0].(metadata.MD) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Header indicates an expected call of Header. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) Header() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Header", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).Header)) +} + +// Recv mocks base method. 
+func (m *MockGRPCInferenceService_ModelStreamInferClient) Recv() (*inference.ModelStreamInferResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Recv") + ret0, _ := ret[0].(*inference.ModelStreamInferResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Recv indicates an expected call of Recv. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) Recv() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Recv", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).Recv)) +} + +// RecvMsg mocks base method. +func (m_2 *MockGRPCInferenceService_ModelStreamInferClient) RecvMsg(m interface{}) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "RecvMsg", m) + ret0, _ := ret[0].(error) + return ret0 +} + +// RecvMsg indicates an expected call of RecvMsg. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) RecvMsg(m interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecvMsg", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).RecvMsg), m) +} + +// Send mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferClient) Send(arg0 *inference.ModelInferRequest) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Send", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Send indicates an expected call of Send. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) Send(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Send", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).Send), arg0) +} + +// SendMsg mocks base method. 
+func (m_2 *MockGRPCInferenceService_ModelStreamInferClient) SendMsg(m interface{}) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "SendMsg", m) + ret0, _ := ret[0].(error) + return ret0 +} + +// SendMsg indicates an expected call of SendMsg. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) SendMsg(m interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SendMsg", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).SendMsg), m) +} + +// Trailer mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferClient) Trailer() metadata.MD { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Trailer") + ret0, _ := ret[0].(metadata.MD) + return ret0 +} + +// Trailer indicates an expected call of Trailer. +func (mr *MockGRPCInferenceService_ModelStreamInferClientMockRecorder) Trailer() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Trailer", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferClient)(nil).Trailer)) +} + +// MockGRPCInferenceServiceServer is a mock of GRPCInferenceServiceServer interface. +type MockGRPCInferenceServiceServer struct { + ctrl *gomock.Controller + recorder *MockGRPCInferenceServiceServerMockRecorder +} + +// MockGRPCInferenceServiceServerMockRecorder is the mock recorder for MockGRPCInferenceServiceServer. +type MockGRPCInferenceServiceServerMockRecorder struct { + mock *MockGRPCInferenceServiceServer +} + +// NewMockGRPCInferenceServiceServer creates a new mock instance. +func NewMockGRPCInferenceServiceServer(ctrl *gomock.Controller) *MockGRPCInferenceServiceServer { + mock := &MockGRPCInferenceServiceServer{ctrl: ctrl} + mock.recorder = &MockGRPCInferenceServiceServerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockGRPCInferenceServiceServer) EXPECT() *MockGRPCInferenceServiceServerMockRecorder { + return m.recorder +} + +// CudaSharedMemoryRegister mocks base method. +func (m *MockGRPCInferenceServiceServer) CudaSharedMemoryRegister(arg0 context.Context, arg1 *inference.CudaSharedMemoryRegisterRequest) (*inference.CudaSharedMemoryRegisterResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CudaSharedMemoryRegister", arg0, arg1) + ret0, _ := ret[0].(*inference.CudaSharedMemoryRegisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryRegister indicates an expected call of CudaSharedMemoryRegister. +func (mr *MockGRPCInferenceServiceServerMockRecorder) CudaSharedMemoryRegister(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryRegister", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).CudaSharedMemoryRegister), arg0, arg1) +} + +// CudaSharedMemoryStatus mocks base method. +func (m *MockGRPCInferenceServiceServer) CudaSharedMemoryStatus(arg0 context.Context, arg1 *inference.CudaSharedMemoryStatusRequest) (*inference.CudaSharedMemoryStatusResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CudaSharedMemoryStatus", arg0, arg1) + ret0, _ := ret[0].(*inference.CudaSharedMemoryStatusResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryStatus indicates an expected call of CudaSharedMemoryStatus. +func (mr *MockGRPCInferenceServiceServerMockRecorder) CudaSharedMemoryStatus(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryStatus", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).CudaSharedMemoryStatus), arg0, arg1) +} + +// CudaSharedMemoryUnregister mocks base method. 
+func (m *MockGRPCInferenceServiceServer) CudaSharedMemoryUnregister(arg0 context.Context, arg1 *inference.CudaSharedMemoryUnregisterRequest) (*inference.CudaSharedMemoryUnregisterResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CudaSharedMemoryUnregister", arg0, arg1) + ret0, _ := ret[0].(*inference.CudaSharedMemoryUnregisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CudaSharedMemoryUnregister indicates an expected call of CudaSharedMemoryUnregister. +func (mr *MockGRPCInferenceServiceServerMockRecorder) CudaSharedMemoryUnregister(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CudaSharedMemoryUnregister", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).CudaSharedMemoryUnregister), arg0, arg1) +} + +// LogSettings mocks base method. +func (m *MockGRPCInferenceServiceServer) LogSettings(arg0 context.Context, arg1 *inference.LogSettingsRequest) (*inference.LogSettingsResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "LogSettings", arg0, arg1) + ret0, _ := ret[0].(*inference.LogSettingsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LogSettings indicates an expected call of LogSettings. +func (mr *MockGRPCInferenceServiceServerMockRecorder) LogSettings(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LogSettings", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).LogSettings), arg0, arg1) +} + +// ModelConfig mocks base method. +func (m *MockGRPCInferenceServiceServer) ModelConfig(arg0 context.Context, arg1 *inference.ModelConfigRequest) (*inference.ModelConfigResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelConfig", arg0, arg1) + ret0, _ := ret[0].(*inference.ModelConfigResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelConfig indicates an expected call of ModelConfig. 
+func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelConfig(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelConfig", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelConfig), arg0, arg1) +} + +// ModelInfer mocks base method. +func (m *MockGRPCInferenceServiceServer) ModelInfer(arg0 context.Context, arg1 *inference.ModelInferRequest) (*inference.ModelInferResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelInfer", arg0, arg1) + ret0, _ := ret[0].(*inference.ModelInferResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelInfer indicates an expected call of ModelInfer. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelInfer(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelInfer", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelInfer), arg0, arg1) +} + +// ModelMetadata mocks base method. +func (m *MockGRPCInferenceServiceServer) ModelMetadata(arg0 context.Context, arg1 *inference.ModelMetadataRequest) (*inference.ModelMetadataResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelMetadata", arg0, arg1) + ret0, _ := ret[0].(*inference.ModelMetadataResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelMetadata indicates an expected call of ModelMetadata. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelMetadata(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelMetadata", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelMetadata), arg0, arg1) +} + +// ModelReady mocks base method. 
+func (m *MockGRPCInferenceServiceServer) ModelReady(arg0 context.Context, arg1 *inference.ModelReadyRequest) (*inference.ModelReadyResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelReady", arg0, arg1) + ret0, _ := ret[0].(*inference.ModelReadyResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelReady indicates an expected call of ModelReady. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelReady(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelReady", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelReady), arg0, arg1) +} + +// ModelStatistics mocks base method. +func (m *MockGRPCInferenceServiceServer) ModelStatistics(arg0 context.Context, arg1 *inference.ModelStatisticsRequest) (*inference.ModelStatisticsResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelStatistics", arg0, arg1) + ret0, _ := ret[0].(*inference.ModelStatisticsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ModelStatistics indicates an expected call of ModelStatistics. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelStatistics(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelStatistics", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelStatistics), arg0, arg1) +} + +// ModelStreamInfer mocks base method. +func (m *MockGRPCInferenceServiceServer) ModelStreamInfer(arg0 inference.GRPCInferenceService_ModelStreamInferServer) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ModelStreamInfer", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// ModelStreamInfer indicates an expected call of ModelStreamInfer. 
+func (mr *MockGRPCInferenceServiceServerMockRecorder) ModelStreamInfer(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModelStreamInfer", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ModelStreamInfer), arg0) +} + +// RepositoryIndex mocks base method. +func (m *MockGRPCInferenceServiceServer) RepositoryIndex(arg0 context.Context, arg1 *inference.RepositoryIndexRequest) (*inference.RepositoryIndexResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RepositoryIndex", arg0, arg1) + ret0, _ := ret[0].(*inference.RepositoryIndexResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryIndex indicates an expected call of RepositoryIndex. +func (mr *MockGRPCInferenceServiceServerMockRecorder) RepositoryIndex(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryIndex", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).RepositoryIndex), arg0, arg1) +} + +// RepositoryModelLoad mocks base method. +func (m *MockGRPCInferenceServiceServer) RepositoryModelLoad(arg0 context.Context, arg1 *inference.RepositoryModelLoadRequest) (*inference.RepositoryModelLoadResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RepositoryModelLoad", arg0, arg1) + ret0, _ := ret[0].(*inference.RepositoryModelLoadResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryModelLoad indicates an expected call of RepositoryModelLoad. +func (mr *MockGRPCInferenceServiceServerMockRecorder) RepositoryModelLoad(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryModelLoad", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).RepositoryModelLoad), arg0, arg1) +} + +// RepositoryModelUnload mocks base method. 
+func (m *MockGRPCInferenceServiceServer) RepositoryModelUnload(arg0 context.Context, arg1 *inference.RepositoryModelUnloadRequest) (*inference.RepositoryModelUnloadResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RepositoryModelUnload", arg0, arg1) + ret0, _ := ret[0].(*inference.RepositoryModelUnloadResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RepositoryModelUnload indicates an expected call of RepositoryModelUnload. +func (mr *MockGRPCInferenceServiceServerMockRecorder) RepositoryModelUnload(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RepositoryModelUnload", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).RepositoryModelUnload), arg0, arg1) +} + +// ServerLive mocks base method. +func (m *MockGRPCInferenceServiceServer) ServerLive(arg0 context.Context, arg1 *inference.ServerLiveRequest) (*inference.ServerLiveResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ServerLive", arg0, arg1) + ret0, _ := ret[0].(*inference.ServerLiveResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerLive indicates an expected call of ServerLive. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ServerLive(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerLive", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ServerLive), arg0, arg1) +} + +// ServerMetadata mocks base method. +func (m *MockGRPCInferenceServiceServer) ServerMetadata(arg0 context.Context, arg1 *inference.ServerMetadataRequest) (*inference.ServerMetadataResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ServerMetadata", arg0, arg1) + ret0, _ := ret[0].(*inference.ServerMetadataResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerMetadata indicates an expected call of ServerMetadata. 
+func (mr *MockGRPCInferenceServiceServerMockRecorder) ServerMetadata(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerMetadata", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ServerMetadata), arg0, arg1) +} + +// ServerReady mocks base method. +func (m *MockGRPCInferenceServiceServer) ServerReady(arg0 context.Context, arg1 *inference.ServerReadyRequest) (*inference.ServerReadyResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ServerReady", arg0, arg1) + ret0, _ := ret[0].(*inference.ServerReadyResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerReady indicates an expected call of ServerReady. +func (mr *MockGRPCInferenceServiceServerMockRecorder) ServerReady(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerReady", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).ServerReady), arg0, arg1) +} + +// SystemSharedMemoryRegister mocks base method. +func (m *MockGRPCInferenceServiceServer) SystemSharedMemoryRegister(arg0 context.Context, arg1 *inference.SystemSharedMemoryRegisterRequest) (*inference.SystemSharedMemoryRegisterResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SystemSharedMemoryRegister", arg0, arg1) + ret0, _ := ret[0].(*inference.SystemSharedMemoryRegisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryRegister indicates an expected call of SystemSharedMemoryRegister. +func (mr *MockGRPCInferenceServiceServerMockRecorder) SystemSharedMemoryRegister(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryRegister", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).SystemSharedMemoryRegister), arg0, arg1) +} + +// SystemSharedMemoryStatus mocks base method. 
+func (m *MockGRPCInferenceServiceServer) SystemSharedMemoryStatus(arg0 context.Context, arg1 *inference.SystemSharedMemoryStatusRequest) (*inference.SystemSharedMemoryStatusResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SystemSharedMemoryStatus", arg0, arg1) + ret0, _ := ret[0].(*inference.SystemSharedMemoryStatusResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryStatus indicates an expected call of SystemSharedMemoryStatus. +func (mr *MockGRPCInferenceServiceServerMockRecorder) SystemSharedMemoryStatus(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryStatus", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).SystemSharedMemoryStatus), arg0, arg1) +} + +// SystemSharedMemoryUnregister mocks base method. +func (m *MockGRPCInferenceServiceServer) SystemSharedMemoryUnregister(arg0 context.Context, arg1 *inference.SystemSharedMemoryUnregisterRequest) (*inference.SystemSharedMemoryUnregisterResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SystemSharedMemoryUnregister", arg0, arg1) + ret0, _ := ret[0].(*inference.SystemSharedMemoryUnregisterResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SystemSharedMemoryUnregister indicates an expected call of SystemSharedMemoryUnregister. +func (mr *MockGRPCInferenceServiceServerMockRecorder) SystemSharedMemoryUnregister(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SystemSharedMemoryUnregister", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).SystemSharedMemoryUnregister), arg0, arg1) +} + +// TraceSetting mocks base method. 
+func (m *MockGRPCInferenceServiceServer) TraceSetting(arg0 context.Context, arg1 *inference.TraceSettingRequest) (*inference.TraceSettingResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "TraceSetting", arg0, arg1) + ret0, _ := ret[0].(*inference.TraceSettingResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// TraceSetting indicates an expected call of TraceSetting. +func (mr *MockGRPCInferenceServiceServerMockRecorder) TraceSetting(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TraceSetting", reflect.TypeOf((*MockGRPCInferenceServiceServer)(nil).TraceSetting), arg0, arg1) +} + +// MockUnsafeGRPCInferenceServiceServer is a mock of UnsafeGRPCInferenceServiceServer interface. +type MockUnsafeGRPCInferenceServiceServer struct { + ctrl *gomock.Controller + recorder *MockUnsafeGRPCInferenceServiceServerMockRecorder +} + +// MockUnsafeGRPCInferenceServiceServerMockRecorder is the mock recorder for MockUnsafeGRPCInferenceServiceServer. +type MockUnsafeGRPCInferenceServiceServerMockRecorder struct { + mock *MockUnsafeGRPCInferenceServiceServer +} + +// NewMockUnsafeGRPCInferenceServiceServer creates a new mock instance. +func NewMockUnsafeGRPCInferenceServiceServer(ctrl *gomock.Controller) *MockUnsafeGRPCInferenceServiceServer { + mock := &MockUnsafeGRPCInferenceServiceServer{ctrl: ctrl} + mock.recorder = &MockUnsafeGRPCInferenceServiceServerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockUnsafeGRPCInferenceServiceServer) EXPECT() *MockUnsafeGRPCInferenceServiceServerMockRecorder { + return m.recorder +} + +// mustEmbedUnimplementedGRPCInferenceServiceServer mocks base method. 
+func (m *MockUnsafeGRPCInferenceServiceServer) mustEmbedUnimplementedGRPCInferenceServiceServer() { + m.ctrl.T.Helper() + m.ctrl.Call(m, "mustEmbedUnimplementedGRPCInferenceServiceServer") +} + +// mustEmbedUnimplementedGRPCInferenceServiceServer indicates an expected call of mustEmbedUnimplementedGRPCInferenceServiceServer. +func (mr *MockUnsafeGRPCInferenceServiceServerMockRecorder) mustEmbedUnimplementedGRPCInferenceServiceServer() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "mustEmbedUnimplementedGRPCInferenceServiceServer", reflect.TypeOf((*MockUnsafeGRPCInferenceServiceServer)(nil).mustEmbedUnimplementedGRPCInferenceServiceServer)) +} + +// MockGRPCInferenceService_ModelStreamInferServer is a mock of GRPCInferenceService_ModelStreamInferServer interface. +type MockGRPCInferenceService_ModelStreamInferServer struct { + ctrl *gomock.Controller + recorder *MockGRPCInferenceService_ModelStreamInferServerMockRecorder +} + +// MockGRPCInferenceService_ModelStreamInferServerMockRecorder is the mock recorder for MockGRPCInferenceService_ModelStreamInferServer. +type MockGRPCInferenceService_ModelStreamInferServerMockRecorder struct { + mock *MockGRPCInferenceService_ModelStreamInferServer +} + +// NewMockGRPCInferenceService_ModelStreamInferServer creates a new mock instance. +func NewMockGRPCInferenceService_ModelStreamInferServer(ctrl *gomock.Controller) *MockGRPCInferenceService_ModelStreamInferServer { + mock := &MockGRPCInferenceService_ModelStreamInferServer{ctrl: ctrl} + mock.recorder = &MockGRPCInferenceService_ModelStreamInferServerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockGRPCInferenceService_ModelStreamInferServer) EXPECT() *MockGRPCInferenceService_ModelStreamInferServerMockRecorder { + return m.recorder +} + +// Context mocks base method. 
+func (m *MockGRPCInferenceService_ModelStreamInferServer) Context() context.Context { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Context") + ret0, _ := ret[0].(context.Context) + return ret0 +} + +// Context indicates an expected call of Context. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) Context() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Context", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).Context)) +} + +// Recv mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferServer) Recv() (*inference.ModelInferRequest, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Recv") + ret0, _ := ret[0].(*inference.ModelInferRequest) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Recv indicates an expected call of Recv. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) Recv() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Recv", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).Recv)) +} + +// RecvMsg mocks base method. +func (m_2 *MockGRPCInferenceService_ModelStreamInferServer) RecvMsg(m interface{}) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "RecvMsg", m) + ret0, _ := ret[0].(error) + return ret0 +} + +// RecvMsg indicates an expected call of RecvMsg. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) RecvMsg(m interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecvMsg", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).RecvMsg), m) +} + +// Send mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferServer) Send(arg0 *inference.ModelStreamInferResponse) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Send", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Send indicates an expected call of Send. 
+func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) Send(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Send", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).Send), arg0) +} + +// SendHeader mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferServer) SendHeader(arg0 metadata.MD) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SendHeader", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// SendHeader indicates an expected call of SendHeader. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) SendHeader(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SendHeader", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).SendHeader), arg0) +} + +// SendMsg mocks base method. +func (m_2 *MockGRPCInferenceService_ModelStreamInferServer) SendMsg(m interface{}) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "SendMsg", m) + ret0, _ := ret[0].(error) + return ret0 +} + +// SendMsg indicates an expected call of SendMsg. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) SendMsg(m interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SendMsg", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).SendMsg), m) +} + +// SetHeader mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferServer) SetHeader(arg0 metadata.MD) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SetHeader", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// SetHeader indicates an expected call of SetHeader. 
+func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) SetHeader(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetHeader", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).SetHeader), arg0) +} + +// SetTrailer mocks base method. +func (m *MockGRPCInferenceService_ModelStreamInferServer) SetTrailer(arg0 metadata.MD) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetTrailer", arg0) +} + +// SetTrailer indicates an expected call of SetTrailer. +func (mr *MockGRPCInferenceService_ModelStreamInferServerMockRecorder) SetTrailer(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetTrailer", reflect.TypeOf((*MockGRPCInferenceService_ModelStreamInferServer)(nil).SetTrailer), arg0) +} diff --git a/pkg/apis/inference/v1/mocks/mocks.go b/pkg/apis/inference/v1/mocks/mocks.go new file mode 100644 index 0000000..287027a --- /dev/null +++ b/pkg/apis/inference/v1/mocks/mocks.go @@ -0,0 +1,19 @@ +/* + * Copyright 2023 The Dragonfly Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package mocks + +//go:generate mockgen -destination grpc_service_mock.go -source ../grpc_service_grpc.pb.go -package mocks diff --git a/pkg/apis/inference/v1/model_config.pb.go b/pkg/apis/inference/v1/model_config.pb.go new file mode 100644 index 0000000..25b5913 --- /dev/null +++ b/pkg/apis/inference/v1/model_config.pb.go @@ -0,0 +1,6333 @@ +// Copyright 2018-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of NVIDIA CORPORATION nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Copyright (c) 2018, TensorFlow Authors. All rights reserved. 
+ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.6 +// source: pkg/apis/inference/v1/model_config.proto + +package inference + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// @@ +// @@.. cpp:enum:: DataType +// @@ +// @@ Data types supported for input and output tensors. +// @@ +type DataType int32 + +const ( + // @@ .. cpp:enumerator:: DataType::INVALID = 0 + DataType_TYPE_INVALID DataType = 0 + // @@ .. cpp:enumerator:: DataType::BOOL = 1 + DataType_TYPE_BOOL DataType = 1 + // @@ .. cpp:enumerator:: DataType::UINT8 = 2 + DataType_TYPE_UINT8 DataType = 2 + // @@ .. cpp:enumerator:: DataType::UINT16 = 3 + DataType_TYPE_UINT16 DataType = 3 + // @@ .. cpp:enumerator:: DataType::UINT32 = 4 + DataType_TYPE_UINT32 DataType = 4 + // @@ .. cpp:enumerator:: DataType::UINT64 = 5 + DataType_TYPE_UINT64 DataType = 5 + // @@ .. cpp:enumerator:: DataType::INT8 = 6 + DataType_TYPE_INT8 DataType = 6 + // @@ .. cpp:enumerator:: DataType::INT16 = 7 + DataType_TYPE_INT16 DataType = 7 + // @@ .. cpp:enumerator:: DataType::INT32 = 8 + DataType_TYPE_INT32 DataType = 8 + // @@ .. cpp:enumerator:: DataType::INT64 = 9 + DataType_TYPE_INT64 DataType = 9 + // @@ .. cpp:enumerator:: DataType::FP16 = 10 + DataType_TYPE_FP16 DataType = 10 + // @@ .. cpp:enumerator:: DataType::FP32 = 11 + DataType_TYPE_FP32 DataType = 11 + // @@ .. cpp:enumerator:: DataType::FP64 = 12 + DataType_TYPE_FP64 DataType = 12 + // @@ .. cpp:enumerator:: DataType::STRING = 13 + DataType_TYPE_STRING DataType = 13 + // @@ .. 
cpp:enumerator:: DataType::BF16 = 14 + DataType_TYPE_BF16 DataType = 14 +) + +// Enum value maps for DataType. +var ( + DataType_name = map[int32]string{ + 0: "TYPE_INVALID", + 1: "TYPE_BOOL", + 2: "TYPE_UINT8", + 3: "TYPE_UINT16", + 4: "TYPE_UINT32", + 5: "TYPE_UINT64", + 6: "TYPE_INT8", + 7: "TYPE_INT16", + 8: "TYPE_INT32", + 9: "TYPE_INT64", + 10: "TYPE_FP16", + 11: "TYPE_FP32", + 12: "TYPE_FP64", + 13: "TYPE_STRING", + 14: "TYPE_BF16", + } + DataType_value = map[string]int32{ + "TYPE_INVALID": 0, + "TYPE_BOOL": 1, + "TYPE_UINT8": 2, + "TYPE_UINT16": 3, + "TYPE_UINT32": 4, + "TYPE_UINT64": 5, + "TYPE_INT8": 6, + "TYPE_INT16": 7, + "TYPE_INT32": 8, + "TYPE_INT64": 9, + "TYPE_FP16": 10, + "TYPE_FP32": 11, + "TYPE_FP64": 12, + "TYPE_STRING": 13, + "TYPE_BF16": 14, + } +) + +func (x DataType) Enum() *DataType { + p := new(DataType) + *p = x + return p +} + +func (x DataType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (DataType) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[0].Descriptor() +} + +func (DataType) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[0] +} + +func (x DataType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use DataType.Descriptor instead. +func (DataType) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{0} +} + +// @@ +// @@ .. cpp:enum:: Kind +// @@ +// @@ Kind of this instance group. +// @@ +type ModelInstanceGroup_Kind int32 + +const ( + // @@ .. cpp:enumerator:: Kind::KIND_AUTO = 0 + // @@ + // @@ This instance group represents instances that can run on either + // @@ CPU or GPU. If all GPUs listed in 'gpus' are available then + // @@ instances will be created on GPU(s), otherwise instances will + // @@ be created on CPU. 
+ // @@ + ModelInstanceGroup_KIND_AUTO ModelInstanceGroup_Kind = 0 + // @@ .. cpp:enumerator:: Kind::KIND_GPU = 1 + // @@ + // @@ This instance group represents instances that must run on the + // @@ GPU. + // @@ + ModelInstanceGroup_KIND_GPU ModelInstanceGroup_Kind = 1 + // @@ .. cpp:enumerator:: Kind::KIND_CPU = 2 + // @@ + // @@ This instance group represents instances that must run on the + // @@ CPU. + // @@ + ModelInstanceGroup_KIND_CPU ModelInstanceGroup_Kind = 2 + // @@ .. cpp:enumerator:: Kind::KIND_MODEL = 3 + // @@ + // @@ This instance group represents instances that should run on the + // @@ CPU and/or GPU(s) as specified by the model or backend itself. + // @@ The inference server will not override the model/backend + // @@ settings. + // @@ + ModelInstanceGroup_KIND_MODEL ModelInstanceGroup_Kind = 3 +) + +// Enum value maps for ModelInstanceGroup_Kind. +var ( + ModelInstanceGroup_Kind_name = map[int32]string{ + 0: "KIND_AUTO", + 1: "KIND_GPU", + 2: "KIND_CPU", + 3: "KIND_MODEL", + } + ModelInstanceGroup_Kind_value = map[string]int32{ + "KIND_AUTO": 0, + "KIND_GPU": 1, + "KIND_CPU": 2, + "KIND_MODEL": 3, + } +) + +func (x ModelInstanceGroup_Kind) Enum() *ModelInstanceGroup_Kind { + p := new(ModelInstanceGroup_Kind) + *p = x + return p +} + +func (x ModelInstanceGroup_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelInstanceGroup_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[1].Descriptor() +} + +func (ModelInstanceGroup_Kind) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[1] +} + +func (x ModelInstanceGroup_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelInstanceGroup_Kind.Descriptor instead. 
+func (ModelInstanceGroup_Kind) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{1, 0} +} + +// @@ +// @@ .. cpp:enum:: SecondaryDeviceKind +// @@ +// @@ The kind of the secondary device. +// @@ +type ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind int32 + +const ( + // @@ .. cpp:enumerator:: SecondaryDeviceKind::KIND_NVDLA = 0 + // @@ + // @@ An NVDLA core. http://nvdla.org + // @@ Currently KIND_NVDLA is only supported by the TensorRT backend. + // @@ + ModelInstanceGroup_SecondaryDevice_KIND_NVDLA ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind = 0 +) + +// Enum value maps for ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind. +var ( + ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind_name = map[int32]string{ + 0: "KIND_NVDLA", + } + ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind_value = map[string]int32{ + "KIND_NVDLA": 0, + } +) + +func (x ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) Enum() *ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind { + p := new(ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) + *p = x + return p +} + +func (x ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[2].Descriptor() +} + +func (ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[2] +} + +func (x ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind.Descriptor instead. 
+func (ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{1, 0, 0} +} + +// @@ +// @@ .. cpp:enum:: Format +// @@ +// @@ The format for the input. +// @@ +type ModelInput_Format int32 + +const ( + // @@ .. cpp:enumerator:: Format::FORMAT_NONE = 0 + // @@ + // @@ The input has no specific format. This is the default. + // @@ + ModelInput_FORMAT_NONE ModelInput_Format = 0 + // @@ .. cpp:enumerator:: Format::FORMAT_NHWC = 1 + // @@ + // @@ HWC image format. Tensors with this format require 3 dimensions + // @@ if the model does not support batching (max_batch_size = 0) or 4 + // @@ dimensions if the model does support batching (max_batch_size + // @@ >= 1). In either case the 'dims' below should only specify the + // @@ 3 non-batch dimensions (i.e. HWC or CHW). + // @@ + ModelInput_FORMAT_NHWC ModelInput_Format = 1 + // @@ .. cpp:enumerator:: Format::FORMAT_NCHW = 2 + // @@ + // @@ CHW image format. Tensors with this format require 3 dimensions + // @@ if the model does not support batching (max_batch_size = 0) or 4 + // @@ dimensions if the model does support batching (max_batch_size + // @@ >= 1). In either case the 'dims' below should only specify the + // @@ 3 non-batch dimensions (i.e. HWC or CHW). + // @@ + ModelInput_FORMAT_NCHW ModelInput_Format = 2 +) + +// Enum value maps for ModelInput_Format. 
+var ( + ModelInput_Format_name = map[int32]string{ + 0: "FORMAT_NONE", + 1: "FORMAT_NHWC", + 2: "FORMAT_NCHW", + } + ModelInput_Format_value = map[string]int32{ + "FORMAT_NONE": 0, + "FORMAT_NHWC": 1, + "FORMAT_NCHW": 2, + } +) + +func (x ModelInput_Format) Enum() *ModelInput_Format { + p := new(ModelInput_Format) + *p = x + return p +} + +func (x ModelInput_Format) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelInput_Format) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[3].Descriptor() +} + +func (ModelInput_Format) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[3] +} + +func (x ModelInput_Format) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelInput_Format.Descriptor instead. +func (ModelInput_Format) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{3, 0} +} + +// @@ +// @@ .. cpp:enum:: Kind +// @@ +// @@ The kind of the batch input. +// @@ +type BatchInput_Kind int32 + +const ( + // @@ .. cpp:enumerator:: Kind::BATCH_ELEMENT_COUNT = 0 + // @@ + // @@ The element count of the 'source_input' will be added as + // @@ input with shape [1]. + // @@ + BatchInput_BATCH_ELEMENT_COUNT BatchInput_Kind = 0 + // @@ .. cpp:enumerator:: Kind::BATCH_ACCUMULATED_ELEMENT_COUNT = 1 + // @@ + // @@ The accumulated element count of the 'source_input' will be + // @@ added as input with shape [1]. For example, if there is a + // @@ batch of two request, each with 2 elements, an input of value + // @@ 2 will be added to the first request, and an input of value + // @@ 4 will be added to the second request. + // @@ + BatchInput_BATCH_ACCUMULATED_ELEMENT_COUNT BatchInput_Kind = 1 + // @@ .. 
cpp:enumerator:: + // @@ Kind::BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO = 2 + // @@ + // @@ The accumulated element count of the 'source_input' will be + // @@ added as input with shape [1], except for the first request + // @@ in the batch. For the first request in the batch, the input + // @@ will have shape [2] where the first element is value 0. + // @@ + BatchInput_BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO BatchInput_Kind = 2 + // @@ .. cpp:enumerator:: Kind::BATCH_MAX_ELEMENT_COUNT_AS_SHAPE = 3 + // @@ + // @@ Among the requests in the batch, the max element count of the + // @@ 'source_input' will be added as input with shape + // @@ [max_element_count] for the first request in the batch. + // @@ For other requests, such input will be with shape [0]. + // @@ The data of the tensor will be uninitialized. + // @@ + BatchInput_BATCH_MAX_ELEMENT_COUNT_AS_SHAPE BatchInput_Kind = 3 + // @@ .. cpp:enumerator:: Kind::BATCH_ITEM_SHAPE = 4 + // @@ + // @@ Among the requests in the batch, the shape of the + // @@ 'source_input' will be added as input with shape + // @@ [batch_size, len(input_dim)]. For example, if one + // @@ batch-2 input with shape [3, 1] and batch-1 input + // @@ with shape [2, 2] are batched, the batch input will + // @@ have shape [3, 2] and value [ [3, 1], [3, 1], [2, 2]]. + // @@ + BatchInput_BATCH_ITEM_SHAPE BatchInput_Kind = 4 + // @@ .. cpp:enumerator:: Kind::BATCH_ITEM_SHAPE_FLATTEN = 5 + // @@ + // @@ Among the requests in the batch, the shape of the + // @@ 'source_input' will be added as input with single dimensional + // @@ shape [batch_size * len(input_dim)]. For example, if one + // @@ batch-2 input with shape [3, 1] and batch-1 input + // @@ with shape [2, 2] are batched, the batch input will + // @@ have shape [6] and value [3, 1, 3, 1, 2, 2]. + // @@ + BatchInput_BATCH_ITEM_SHAPE_FLATTEN BatchInput_Kind = 5 +) + +// Enum value maps for BatchInput_Kind. 
+var ( + BatchInput_Kind_name = map[int32]string{ + 0: "BATCH_ELEMENT_COUNT", + 1: "BATCH_ACCUMULATED_ELEMENT_COUNT", + 2: "BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO", + 3: "BATCH_MAX_ELEMENT_COUNT_AS_SHAPE", + 4: "BATCH_ITEM_SHAPE", + 5: "BATCH_ITEM_SHAPE_FLATTEN", + } + BatchInput_Kind_value = map[string]int32{ + "BATCH_ELEMENT_COUNT": 0, + "BATCH_ACCUMULATED_ELEMENT_COUNT": 1, + "BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO": 2, + "BATCH_MAX_ELEMENT_COUNT_AS_SHAPE": 3, + "BATCH_ITEM_SHAPE": 4, + "BATCH_ITEM_SHAPE_FLATTEN": 5, + } +) + +func (x BatchInput_Kind) Enum() *BatchInput_Kind { + p := new(BatchInput_Kind) + *p = x + return p +} + +func (x BatchInput_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (BatchInput_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[4].Descriptor() +} + +func (BatchInput_Kind) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[4] +} + +func (x BatchInput_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use BatchInput_Kind.Descriptor instead. +func (BatchInput_Kind) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{5, 0} +} + +// @@ +// @@ .. cpp:enum:: Kind +// @@ +// @@ The kind of the batch output. +// @@ +type BatchOutput_Kind int32 + +const ( + // @@ .. cpp:enumerator:: Kind::BATCH_SCATTER_WITH_INPUT_SHAPE = 0 + // @@ + // @@ The output should be scattered according to the shape of + // @@ 'source_input'. The dynamic dimension of the output will + // @@ be set to the value of the same dimension in the input. + // @@ + BatchOutput_BATCH_SCATTER_WITH_INPUT_SHAPE BatchOutput_Kind = 0 +) + +// Enum value maps for BatchOutput_Kind. 
+var ( + BatchOutput_Kind_name = map[int32]string{ + 0: "BATCH_SCATTER_WITH_INPUT_SHAPE", + } + BatchOutput_Kind_value = map[string]int32{ + "BATCH_SCATTER_WITH_INPUT_SHAPE": 0, + } +) + +func (x BatchOutput_Kind) Enum() *BatchOutput_Kind { + p := new(BatchOutput_Kind) + *p = x + return p +} + +func (x BatchOutput_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (BatchOutput_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[5].Descriptor() +} + +func (BatchOutput_Kind) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[5] +} + +func (x BatchOutput_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use BatchOutput_Kind.Descriptor instead. +func (BatchOutput_Kind) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{6, 0} +} + +// @@ +// @@ .. cpp:enum:: ModelPriority +// @@ +// @@ Model priorities. A model will be given scheduling and execution +// @@ preference over models at lower priorities. Current model +// @@ priorities only work for TensorRT models. +// @@ +type ModelOptimizationPolicy_ModelPriority int32 + +const ( + // @@ .. cpp:enumerator:: ModelPriority::PRIORITY_DEFAULT = 0 + // @@ + // @@ The default model priority. + // @@ + ModelOptimizationPolicy_PRIORITY_DEFAULT ModelOptimizationPolicy_ModelPriority = 0 + // @@ .. cpp:enumerator:: ModelPriority::PRIORITY_MAX = 1 + // @@ + // @@ The maximum model priority. + // @@ + ModelOptimizationPolicy_PRIORITY_MAX ModelOptimizationPolicy_ModelPriority = 1 + // @@ .. cpp:enumerator:: ModelPriority::PRIORITY_MIN = 2 + // @@ + // @@ The minimum model priority. + // @@ + ModelOptimizationPolicy_PRIORITY_MIN ModelOptimizationPolicy_ModelPriority = 2 +) + +// Enum value maps for ModelOptimizationPolicy_ModelPriority. 
+var ( + ModelOptimizationPolicy_ModelPriority_name = map[int32]string{ + 0: "PRIORITY_DEFAULT", + 1: "PRIORITY_MAX", + 2: "PRIORITY_MIN", + } + ModelOptimizationPolicy_ModelPriority_value = map[string]int32{ + "PRIORITY_DEFAULT": 0, + "PRIORITY_MAX": 1, + "PRIORITY_MIN": 2, + } +) + +func (x ModelOptimizationPolicy_ModelPriority) Enum() *ModelOptimizationPolicy_ModelPriority { + p := new(ModelOptimizationPolicy_ModelPriority) + *p = x + return p +} + +func (x ModelOptimizationPolicy_ModelPriority) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelOptimizationPolicy_ModelPriority) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[6].Descriptor() +} + +func (ModelOptimizationPolicy_ModelPriority) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[6] +} + +func (x ModelOptimizationPolicy_ModelPriority) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelOptimizationPolicy_ModelPriority.Descriptor instead. +func (ModelOptimizationPolicy_ModelPriority) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 0} +} + +// @@ +// @@ .. cpp:enum:: TimeoutAction +// @@ +// @@ The action applied to timed-out requests. +// @@ +type ModelQueuePolicy_TimeoutAction int32 + +const ( + // @@ .. cpp:enumerator:: Action::REJECT = 0 + // @@ + // @@ Reject the request and return error message accordingly. + // @@ + ModelQueuePolicy_REJECT ModelQueuePolicy_TimeoutAction = 0 + // @@ .. cpp:enumerator:: Action::DELAY = 1 + // @@ + // @@ Delay the request until all other requests at the same + // @@ (or higher) priority levels that have not reached their timeouts + // @@ are processed. A delayed request will eventually be processed, + // @@ but may be delayed indefinitely due to newly arriving requests. 
+ // @@ + ModelQueuePolicy_DELAY ModelQueuePolicy_TimeoutAction = 1 +) + +// Enum value maps for ModelQueuePolicy_TimeoutAction. +var ( + ModelQueuePolicy_TimeoutAction_name = map[int32]string{ + 0: "REJECT", + 1: "DELAY", + } + ModelQueuePolicy_TimeoutAction_value = map[string]int32{ + "REJECT": 0, + "DELAY": 1, + } +) + +func (x ModelQueuePolicy_TimeoutAction) Enum() *ModelQueuePolicy_TimeoutAction { + p := new(ModelQueuePolicy_TimeoutAction) + *p = x + return p +} + +func (x ModelQueuePolicy_TimeoutAction) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelQueuePolicy_TimeoutAction) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[7].Descriptor() +} + +func (ModelQueuePolicy_TimeoutAction) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[7] +} + +func (x ModelQueuePolicy_TimeoutAction) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelQueuePolicy_TimeoutAction.Descriptor instead. +func (ModelQueuePolicy_TimeoutAction) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{9, 0} +} + +// @@ +// @@ .. cpp:enum:: Kind +// @@ +// @@ The kind of the control. +// @@ +type ModelSequenceBatching_Control_Kind int32 + +const ( + // @@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_START = 0 + // @@ + // @@ A new sequence is/is-not starting. If true a sequence is + // @@ starting, if false a sequence is continuing. Must + // @@ specify either int32_false_true, fp32_false_true or + // @@ bool_false_true for this control. This control is optional. + // @@ + ModelSequenceBatching_Control_CONTROL_SEQUENCE_START ModelSequenceBatching_Control_Kind = 0 + // @@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_READY = 1 + // @@ + // @@ A sequence is/is-not ready for inference. 
If true the + // @@ input tensor data is valid and should be used. If false + // @@ the input tensor data is invalid and inferencing should + // @@ be "skipped". Must specify either int32_false_true, + // @@ fp32_false_true or bool_false_true for this control. This + // @@ control is optional. + // @@ + ModelSequenceBatching_Control_CONTROL_SEQUENCE_READY ModelSequenceBatching_Control_Kind = 1 + // @@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_END = 2 + // @@ + // @@ A sequence is/is-not ending. If true a sequence is + // @@ ending, if false a sequence is continuing. Must specify + // @@ either int32_false_true, fp32_false_true or bool_false_true + // @@ for this control. This control is optional. + // @@ + ModelSequenceBatching_Control_CONTROL_SEQUENCE_END ModelSequenceBatching_Control_Kind = 2 + // @@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_CORRID = 3 + // @@ + // @@ The correlation ID of the sequence. The correlation ID + // @@ is an uint64_t value that is communicated in whole or + // @@ in part by the tensor. The tensor's datatype must be + // @@ specified by data_type and must be TYPE_UINT64, TYPE_INT64, + // @@ TYPE_UINT32 or TYPE_INT32. If a 32-bit datatype is specified + // @@ the correlation ID will be truncated to the low-order 32 + // @@ bits. This control is optional. + // @@ + ModelSequenceBatching_Control_CONTROL_SEQUENCE_CORRID ModelSequenceBatching_Control_Kind = 3 +) + +// Enum value maps for ModelSequenceBatching_Control_Kind. 
+var ( + ModelSequenceBatching_Control_Kind_name = map[int32]string{ + 0: "CONTROL_SEQUENCE_START", + 1: "CONTROL_SEQUENCE_READY", + 2: "CONTROL_SEQUENCE_END", + 3: "CONTROL_SEQUENCE_CORRID", + } + ModelSequenceBatching_Control_Kind_value = map[string]int32{ + "CONTROL_SEQUENCE_START": 0, + "CONTROL_SEQUENCE_READY": 1, + "CONTROL_SEQUENCE_END": 2, + "CONTROL_SEQUENCE_CORRID": 3, + } +) + +func (x ModelSequenceBatching_Control_Kind) Enum() *ModelSequenceBatching_Control_Kind { + p := new(ModelSequenceBatching_Control_Kind) + *p = x + return p +} + +func (x ModelSequenceBatching_Control_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ModelSequenceBatching_Control_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_pkg_apis_inference_v1_model_config_proto_enumTypes[8].Descriptor() +} + +func (ModelSequenceBatching_Control_Kind) Type() protoreflect.EnumType { + return &file_pkg_apis_inference_v1_model_config_proto_enumTypes[8] +} + +func (x ModelSequenceBatching_Control_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ModelSequenceBatching_Control_Kind.Descriptor instead. +func (ModelSequenceBatching_Control_Kind) EnumDescriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 0, 0} +} + +// @@ +// @@ .. cpp:var:: message ModelRateLimiter +// @@ +// @@ The specifications required by the rate limiter to properly +// @@ schedule the inference requests across the different models +// @@ and their instances. +// @@ +type ModelRateLimiter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Resource resources (repeated) + // @@ + // @@ The resources required to execute the request on a model instance. + // @@ Resources are just names with a corresponding count. 
The execution + // @@ of the instance will be blocked until the specificied resources are + // @@ available. By default an instance uses no rate-limiter resources. + // @@ + Resources []*ModelRateLimiter_Resource `protobuf:"bytes,1,rep,name=resources,proto3" json:"resources,omitempty"` + // @@ .. cpp:var:: uint32 priority + // @@ + // @@ The optional weighting value to be used for prioritizing across + // @@ instances. An instance with priority 2 will be given 1/2 the + // @@ number of scheduling chances as an instance_group with priority + // @@ 1. The default priority is 1. The priority of value 0 will be + // @@ treated as priority 1. + // @@ + Priority uint32 `protobuf:"varint,2,opt,name=priority,proto3" json:"priority,omitempty"` +} + +func (x *ModelRateLimiter) Reset() { + *x = ModelRateLimiter{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelRateLimiter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelRateLimiter) ProtoMessage() {} + +func (x *ModelRateLimiter) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelRateLimiter.ProtoReflect.Descriptor instead. +func (*ModelRateLimiter) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{0} +} + +func (x *ModelRateLimiter) GetResources() []*ModelRateLimiter_Resource { + if x != nil { + return x.Resources + } + return nil +} + +func (x *ModelRateLimiter) GetPriority() uint32 { + if x != nil { + return x.Priority + } + return 0 +} + +// @@ +// @@.. 
cpp:var:: message ModelInstanceGroup +// @@ +// @@ A group of one or more instances of a model and resources made +// @@ available for those instances. +// @@ +type ModelInstanceGroup struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ Optional name of this group of instances. If not specified the + // @@ name will be formed as _. The name of + // @@ individual instances will be further formed by a unique instance + // @@ number and GPU index: + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: Kind kind + // @@ + // @@ The kind of this instance group. Default is KIND_AUTO. If + // @@ KIND_AUTO or KIND_GPU then both 'count' and 'gpu' are valid and + // @@ may be specified. If KIND_CPU or KIND_MODEL only 'count' is valid + // @@ and 'gpu' cannot be specified. + // @@ + Kind ModelInstanceGroup_Kind `protobuf:"varint,4,opt,name=kind,proto3,enum=inference.v1.ModelInstanceGroup_Kind" json:"kind,omitempty"` + // @@ .. cpp:var:: int32 count + // @@ + // @@ For a group assigned to GPU, the number of instances created for + // @@ each GPU listed in 'gpus'. For a group assigned to CPU the number + // @@ of instances created. Default is 1. + Count int32 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + // @@ .. cpp:var:: ModelRateLimiter rate_limiter + // @@ + // @@ The rate limiter specific settings to be associated with this + // @@ instance group. Optional, if not specified no rate limiting + // @@ will be applied to this instance group. + // @@ + RateLimiter *ModelRateLimiter `protobuf:"bytes,6,opt,name=rate_limiter,json=rateLimiter,proto3" json:"rate_limiter,omitempty"` + // @@ .. cpp:var:: int32 gpus (repeated) + // @@ + // @@ GPU(s) where instances should be available. For each GPU listed, + // @@ 'count' instances of the model will be available. 
Setting 'gpus' + // @@ to empty (or not specifying at all) is eqivalent to listing all + // @@ available GPUs. + // @@ + Gpus []int32 `protobuf:"varint,3,rep,packed,name=gpus,proto3" json:"gpus,omitempty"` + // @@ .. cpp:var:: SecondaryDevice secondary_devices (repeated) + // @@ + // @@ Secondary devices that are required by instances specified by this + // @@ instance group. Optional. + // @@ + SecondaryDevices []*ModelInstanceGroup_SecondaryDevice `protobuf:"bytes,8,rep,name=secondary_devices,json=secondaryDevices,proto3" json:"secondary_devices,omitempty"` + // @@ .. cpp:var:: string profile (repeated) + // @@ + // @@ For TensorRT models containing multiple optimization profile, this + // @@ parameter specifies a set of optimization profiles available to this + // @@ instance group. The inference server will choose the optimal profile + // @@ based on the shapes of the input tensors. This field should lie + // @@ between 0 and - 1 + // @@ and be specified only for TensorRT backend, otherwise an error will + // @@ be generated. If not specified, the server will select the first + // @@ optimization profile by default. + // @@ + Profile []string `protobuf:"bytes,5,rep,name=profile,proto3" json:"profile,omitempty"` + // @@ .. cpp:var:: bool passive + // @@ + // @@ Whether the instances within this instance group will be accepting + // @@ inference requests from the scheduler. If true, the instances will + // @@ not be added to the scheduler. Default value is false. + // @@ + Passive bool `protobuf:"varint,7,opt,name=passive,proto3" json:"passive,omitempty"` + // @@ .. cpp:var:: string host_policy + // @@ + // @@ The host policy name that the instance to be associated with. + // @@ The default value is set to reflect the device kind of the instance, + // @@ for instance, KIND_CPU is "cpu", KIND_MODEL is "model" and + // @@ KIND_GPU is "gpu_". 
+ // @@ + HostPolicy string `protobuf:"bytes,9,opt,name=host_policy,json=hostPolicy,proto3" json:"host_policy,omitempty"` +} + +func (x *ModelInstanceGroup) Reset() { + *x = ModelInstanceGroup{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInstanceGroup) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInstanceGroup) ProtoMessage() {} + +func (x *ModelInstanceGroup) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInstanceGroup.ProtoReflect.Descriptor instead. +func (*ModelInstanceGroup) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{1} +} + +func (x *ModelInstanceGroup) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelInstanceGroup) GetKind() ModelInstanceGroup_Kind { + if x != nil { + return x.Kind + } + return ModelInstanceGroup_KIND_AUTO +} + +func (x *ModelInstanceGroup) GetCount() int32 { + if x != nil { + return x.Count + } + return 0 +} + +func (x *ModelInstanceGroup) GetRateLimiter() *ModelRateLimiter { + if x != nil { + return x.RateLimiter + } + return nil +} + +func (x *ModelInstanceGroup) GetGpus() []int32 { + if x != nil { + return x.Gpus + } + return nil +} + +func (x *ModelInstanceGroup) GetSecondaryDevices() []*ModelInstanceGroup_SecondaryDevice { + if x != nil { + return x.SecondaryDevices + } + return nil +} + +func (x *ModelInstanceGroup) GetProfile() []string { + if x != nil { + return x.Profile + } + return nil +} + +func (x *ModelInstanceGroup) GetPassive() bool { + if x != 
nil { + return x.Passive + } + return false +} + +func (x *ModelInstanceGroup) GetHostPolicy() string { + if x != nil { + return x.HostPolicy + } + return "" +} + +// @@ +// @@.. cpp:var:: message ModelTensorReshape +// @@ +// @@ Reshape specification for input and output tensors. +// @@ +type ModelTensorReshape struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int64 shape (repeated) + // @@ + // @@ The shape to use for reshaping. + // @@ + Shape []int64 `protobuf:"varint,1,rep,packed,name=shape,proto3" json:"shape,omitempty"` +} + +func (x *ModelTensorReshape) Reset() { + *x = ModelTensorReshape{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelTensorReshape) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelTensorReshape) ProtoMessage() {} + +func (x *ModelTensorReshape) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelTensorReshape.ProtoReflect.Descriptor instead. +func (*ModelTensorReshape) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{2} +} + +func (x *ModelTensorReshape) GetShape() []int64 { + if x != nil { + return x.Shape + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelInput +// @@ +// @@ An input required by the model. +// @@ +type ModelInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the input. 
+ // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The data-type of the input. + // @@ + DataType DataType `protobuf:"varint,2,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. cpp:var:: Format format + // @@ + // @@ The format of the input. Optional. + // @@ + Format ModelInput_Format `protobuf:"varint,3,opt,name=format,proto3,enum=inference.v1.ModelInput_Format" json:"format,omitempty"` + // @@ .. cpp:var:: int64 dims (repeated) + // @@ + // @@ The dimensions/shape of the input tensor that must be provided + // @@ when invoking the inference API for this model. + // @@ + Dims []int64 `protobuf:"varint,4,rep,packed,name=dims,proto3" json:"dims,omitempty"` + // @@ .. cpp:var:: ModelTensorReshape reshape + // @@ + // @@ The shape expected for this input by the backend. The input will + // @@ be reshaped to this before being presented to the backend. The + // @@ reshape must have the same number of elements as the input shape + // @@ specified by 'dims'. Optional. + // @@ + Reshape *ModelTensorReshape `protobuf:"bytes,5,opt,name=reshape,proto3" json:"reshape,omitempty"` + // @@ .. cpp:var:: bool is_shape_tensor + // @@ + // @@ Whether or not the input is a shape tensor to the model. This field + // @@ is currently supported only for the TensorRT model. An error will be + // @@ generated if this specification does not comply with underlying + // @@ model. + // @@ + IsShapeTensor bool `protobuf:"varint,6,opt,name=is_shape_tensor,json=isShapeTensor,proto3" json:"is_shape_tensor,omitempty"` + // @@ .. cpp:var:: bool allow_ragged_batch + // @@ + // @@ Whether or not the input is allowed to be "ragged" in a dynamically + // @@ created batch. Default is false indicating that two requests will + // @@ only be batched if this tensor has the same shape in both requests. 
+ // @@ True indicates that two requests can be batched even if this tensor + // @@ has a different shape in each request. + // @@ + AllowRaggedBatch bool `protobuf:"varint,7,opt,name=allow_ragged_batch,json=allowRaggedBatch,proto3" json:"allow_ragged_batch,omitempty"` + // @@ .. cpp:var:: bool optional + // @@ + // @@ Whether or not the input is optional for the model execution. + // @@ If true, the input is not required in the inference request. + // @@ Default value is false. + // @@ + Optional bool `protobuf:"varint,8,opt,name=optional,proto3" json:"optional,omitempty"` +} + +func (x *ModelInput) Reset() { + *x = ModelInput{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInput) ProtoMessage() {} + +func (x *ModelInput) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInput.ProtoReflect.Descriptor instead. 
+func (*ModelInput) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{3} +} + +func (x *ModelInput) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelInput) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *ModelInput) GetFormat() ModelInput_Format { + if x != nil { + return x.Format + } + return ModelInput_FORMAT_NONE +} + +func (x *ModelInput) GetDims() []int64 { + if x != nil { + return x.Dims + } + return nil +} + +func (x *ModelInput) GetReshape() *ModelTensorReshape { + if x != nil { + return x.Reshape + } + return nil +} + +func (x *ModelInput) GetIsShapeTensor() bool { + if x != nil { + return x.IsShapeTensor + } + return false +} + +func (x *ModelInput) GetAllowRaggedBatch() bool { + if x != nil { + return x.AllowRaggedBatch + } + return false +} + +func (x *ModelInput) GetOptional() bool { + if x != nil { + return x.Optional + } + return false +} + +// @@ +// @@.. cpp:var:: message ModelOutput +// @@ +// @@ An output produced by the model. +// @@ +type ModelOutput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the output. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The data-type of the output. + // @@ + DataType DataType `protobuf:"varint,2,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. cpp:var:: int64 dims (repeated) + // @@ + // @@ The dimensions/shape of the output tensor. + // @@ + Dims []int64 `protobuf:"varint,3,rep,packed,name=dims,proto3" json:"dims,omitempty"` + // @@ .. cpp:var:: ModelTensorReshape reshape + // @@ + // @@ The shape produced for this output by the backend. 
The output will + // @@ be reshaped from this to the shape specifed in 'dims' before being + // @@ returned in the inference response. The reshape must have the same + // @@ number of elements as the output shape specified by 'dims'. Optional. + // @@ + Reshape *ModelTensorReshape `protobuf:"bytes,5,opt,name=reshape,proto3" json:"reshape,omitempty"` + // @@ .. cpp:var:: string label_filename + // @@ + // @@ The label file associated with this output. Should be specified only + // @@ for outputs that represent classifications. Optional. + // @@ + LabelFilename string `protobuf:"bytes,4,opt,name=label_filename,json=labelFilename,proto3" json:"label_filename,omitempty"` + // @@ .. cpp:var:: bool is_shape_tensor + // @@ + // @@ Whether or not the output is a shape tensor to the model. This field + // @@ is currently supported only for the TensorRT model. An error will be + // @@ generated if this specification does not comply with underlying + // @@ model. + // @@ + IsShapeTensor bool `protobuf:"varint,6,opt,name=is_shape_tensor,json=isShapeTensor,proto3" json:"is_shape_tensor,omitempty"` +} + +func (x *ModelOutput) Reset() { + *x = ModelOutput{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOutput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOutput) ProtoMessage() {} + +func (x *ModelOutput) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOutput.ProtoReflect.Descriptor instead. 
+func (*ModelOutput) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{4} +} + +func (x *ModelOutput) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelOutput) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *ModelOutput) GetDims() []int64 { + if x != nil { + return x.Dims + } + return nil +} + +func (x *ModelOutput) GetReshape() *ModelTensorReshape { + if x != nil { + return x.Reshape + } + return nil +} + +func (x *ModelOutput) GetLabelFilename() string { + if x != nil { + return x.LabelFilename + } + return "" +} + +func (x *ModelOutput) GetIsShapeTensor() bool { + if x != nil { + return x.IsShapeTensor + } + return false +} + +// @@ .. cpp:var:: message BatchInput +// @@ +// @@ A batch input is an additional input that must be added by +// @@ the backend based on all the requests in a batch. +// @@ +type BatchInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Kind kind + // @@ + // @@ The kind of this batch input. + // @@ + Kind BatchInput_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=inference.v1.BatchInput_Kind" json:"kind,omitempty"` + // @@ .. cpp:var:: string target_name (repeated) + // @@ + // @@ The name of the model inputs that the backend will create + // @@ for this batch input. + // @@ + TargetName []string `protobuf:"bytes,2,rep,name=target_name,json=targetName,proto3" json:"target_name,omitempty"` + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The input's datatype. The data type can be TYPE_INT32 or + // @@ TYPE_FP32. + // @@ + DataType DataType `protobuf:"varint,3,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. 
cpp:var:: string source_input (repeated) + // @@ + // @@ The backend derives the value for each batch input from one or + // @@ more other inputs. 'source_input' gives the names of those + // @@ inputs. + // @@ + SourceInput []string `protobuf:"bytes,4,rep,name=source_input,json=sourceInput,proto3" json:"source_input,omitempty"` +} + +func (x *BatchInput) Reset() { + *x = BatchInput{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BatchInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInput) ProtoMessage() {} + +func (x *BatchInput) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInput.ProtoReflect.Descriptor instead. +func (*BatchInput) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{5} +} + +func (x *BatchInput) GetKind() BatchInput_Kind { + if x != nil { + return x.Kind + } + return BatchInput_BATCH_ELEMENT_COUNT +} + +func (x *BatchInput) GetTargetName() []string { + if x != nil { + return x.TargetName + } + return nil +} + +func (x *BatchInput) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *BatchInput) GetSourceInput() []string { + if x != nil { + return x.SourceInput + } + return nil +} + +// @@.. cpp:var:: message BatchOutput +// @@ +// @@ A batch output is an output produced by the model that must be handled +// @@ differently by the backend based on all the requests in a batch. 
+// @@ +type BatchOutput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string target_name (repeated) + // @@ + // @@ The name of the outputs to be produced by this batch output + // @@ specification. + // @@ + TargetName []string `protobuf:"bytes,1,rep,name=target_name,json=targetName,proto3" json:"target_name,omitempty"` + // @@ .. cpp:var:: Kind kind + // @@ + // @@ The kind of this batch output. + // @@ + Kind BatchOutput_Kind `protobuf:"varint,2,opt,name=kind,proto3,enum=inference.v1.BatchOutput_Kind" json:"kind,omitempty"` + // @@ .. cpp:var:: string source_input (repeated) + // @@ + // @@ The backend derives each batch output from one or more inputs. + // @@ 'source_input' gives the names of those inputs. + // @@ + SourceInput []string `protobuf:"bytes,3,rep,name=source_input,json=sourceInput,proto3" json:"source_input,omitempty"` +} + +func (x *BatchOutput) Reset() { + *x = BatchOutput{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BatchOutput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchOutput) ProtoMessage() {} + +func (x *BatchOutput) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchOutput.ProtoReflect.Descriptor instead. 
+func (*BatchOutput) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{6} +} + +func (x *BatchOutput) GetTargetName() []string { + if x != nil { + return x.TargetName + } + return nil +} + +func (x *BatchOutput) GetKind() BatchOutput_Kind { + if x != nil { + return x.Kind + } + return BatchOutput_BATCH_SCATTER_WITH_INPUT_SHAPE +} + +func (x *BatchOutput) GetSourceInput() []string { + if x != nil { + return x.SourceInput + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelVersionPolicy +// @@ +// @@ Policy indicating which versions of a model should be made +// @@ available by the inference server. +// @@ +type ModelVersionPolicy struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: oneof policy_choice + // @@ + // @@ Each model must implement only a single version policy. The + // @@ default policy is 'Latest'. + // @@ + // + // Types that are assignable to PolicyChoice: + // + // *ModelVersionPolicy_Latest_ + // *ModelVersionPolicy_All_ + // *ModelVersionPolicy_Specific_ + PolicyChoice isModelVersionPolicy_PolicyChoice `protobuf_oneof:"policy_choice"` +} + +func (x *ModelVersionPolicy) Reset() { + *x = ModelVersionPolicy{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelVersionPolicy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelVersionPolicy) ProtoMessage() {} + +func (x *ModelVersionPolicy) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
ModelVersionPolicy.ProtoReflect.Descriptor instead. +func (*ModelVersionPolicy) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{7} +} + +func (m *ModelVersionPolicy) GetPolicyChoice() isModelVersionPolicy_PolicyChoice { + if m != nil { + return m.PolicyChoice + } + return nil +} + +func (x *ModelVersionPolicy) GetLatest() *ModelVersionPolicy_Latest { + if x, ok := x.GetPolicyChoice().(*ModelVersionPolicy_Latest_); ok { + return x.Latest + } + return nil +} + +func (x *ModelVersionPolicy) GetAll() *ModelVersionPolicy_All { + if x, ok := x.GetPolicyChoice().(*ModelVersionPolicy_All_); ok { + return x.All + } + return nil +} + +func (x *ModelVersionPolicy) GetSpecific() *ModelVersionPolicy_Specific { + if x, ok := x.GetPolicyChoice().(*ModelVersionPolicy_Specific_); ok { + return x.Specific + } + return nil +} + +type isModelVersionPolicy_PolicyChoice interface { + isModelVersionPolicy_PolicyChoice() +} + +type ModelVersionPolicy_Latest_ struct { + // @@ .. cpp:var:: Latest latest + // @@ + // @@ Serve only latest version(s) of the model. + // @@ + Latest *ModelVersionPolicy_Latest `protobuf:"bytes,1,opt,name=latest,proto3,oneof"` +} + +type ModelVersionPolicy_All_ struct { + // @@ .. cpp:var:: All all + // @@ + // @@ Serve all versions of the model. + // @@ + All *ModelVersionPolicy_All `protobuf:"bytes,2,opt,name=all,proto3,oneof"` +} + +type ModelVersionPolicy_Specific_ struct { + // @@ .. cpp:var:: Specific specific + // @@ + // @@ Serve only specific version(s) of the model. + // @@ + Specific *ModelVersionPolicy_Specific `protobuf:"bytes,3,opt,name=specific,proto3,oneof"` +} + +func (*ModelVersionPolicy_Latest_) isModelVersionPolicy_PolicyChoice() {} + +func (*ModelVersionPolicy_All_) isModelVersionPolicy_PolicyChoice() {} + +func (*ModelVersionPolicy_Specific_) isModelVersionPolicy_PolicyChoice() {} + +// @@ +// @@.. 
cpp:var:: message ModelOptimizationPolicy +// @@ +// @@ Optimization settings for a model. These settings control if/how a +// @@ model is optimized and prioritized by the backend framework when +// @@ it is loaded. +// @@ +type ModelOptimizationPolicy struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Graph graph + // @@ + // @@ The graph optimization setting for the model. Optional. + // @@ + Graph *ModelOptimizationPolicy_Graph `protobuf:"bytes,1,opt,name=graph,proto3" json:"graph,omitempty"` + // @@ .. cpp:var:: ModelPriority priority + // @@ + // @@ The priority setting for the model. Optional. + // @@ + Priority ModelOptimizationPolicy_ModelPriority `protobuf:"varint,2,opt,name=priority,proto3,enum=inference.v1.ModelOptimizationPolicy_ModelPriority" json:"priority,omitempty"` + // @@ .. cpp:var:: Cuda cuda + // @@ + // @@ CUDA-specific optimization settings. Optional. + // @@ + Cuda *ModelOptimizationPolicy_Cuda `protobuf:"bytes,3,opt,name=cuda,proto3" json:"cuda,omitempty"` + // @@ .. cpp:var:: ExecutionAccelerators execution_accelerators + // @@ + // @@ The accelerators used for the model. Optional. + // @@ + ExecutionAccelerators *ModelOptimizationPolicy_ExecutionAccelerators `protobuf:"bytes,4,opt,name=execution_accelerators,json=executionAccelerators,proto3" json:"execution_accelerators,omitempty"` + // @@ .. cpp:var:: PinnedMemoryBuffer input_pinned_memory + // @@ + // @@ Use pinned memory buffer when the data transfer for inputs + // @@ is between GPU memory and non-pinned system memory. + // @@ Default is true. + // @@ + InputPinnedMemory *ModelOptimizationPolicy_PinnedMemoryBuffer `protobuf:"bytes,5,opt,name=input_pinned_memory,json=inputPinnedMemory,proto3" json:"input_pinned_memory,omitempty"` + // @@ .. 
cpp:var:: PinnedMemoryBuffer output_pinned_memory + // @@ + // @@ Use pinned memory buffer when the data transfer for outputs + // @@ is between GPU memory and non-pinned system memory. + // @@ Default is true. + // @@ + OutputPinnedMemory *ModelOptimizationPolicy_PinnedMemoryBuffer `protobuf:"bytes,6,opt,name=output_pinned_memory,json=outputPinnedMemory,proto3" json:"output_pinned_memory,omitempty"` + // @@ .. cpp:var:: uint32 gather_kernel_buffer_threshold + // @@ + // @@ The backend may use a gather kernel to gather input data if the + // @@ device has direct access to the source buffer and the destination + // @@ buffer. In such case, the gather kernel will be used only if the + // @@ number of buffers to be gathered is greater or equal to + // @@ the specifed value. If 0, the gather kernel will be disabled. + // @@ Default value is 0. + // @@ Currently only recognized by TensorRT backend. + // @@ + GatherKernelBufferThreshold uint32 `protobuf:"varint,7,opt,name=gather_kernel_buffer_threshold,json=gatherKernelBufferThreshold,proto3" json:"gather_kernel_buffer_threshold,omitempty"` + // @@ .. cpp:var:: bool eager_batching + // @@ + // @@ Start preparing the next batch before the model instance is ready + // @@ for the next inference. This option can be used to overlap the + // @@ batch preparation with model execution, with the trade-off that + // @@ the next batch might be smaller than what it could have been. + // @@ Default value is false. + // @@ Currently only recognized by TensorRT backend. 
+ // @@ + EagerBatching bool `protobuf:"varint,8,opt,name=eager_batching,json=eagerBatching,proto3" json:"eager_batching,omitempty"` +} + +func (x *ModelOptimizationPolicy) Reset() { + *x = ModelOptimizationPolicy{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy) ProtoMessage() {} + +func (x *ModelOptimizationPolicy) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8} +} + +func (x *ModelOptimizationPolicy) GetGraph() *ModelOptimizationPolicy_Graph { + if x != nil { + return x.Graph + } + return nil +} + +func (x *ModelOptimizationPolicy) GetPriority() ModelOptimizationPolicy_ModelPriority { + if x != nil { + return x.Priority + } + return ModelOptimizationPolicy_PRIORITY_DEFAULT +} + +func (x *ModelOptimizationPolicy) GetCuda() *ModelOptimizationPolicy_Cuda { + if x != nil { + return x.Cuda + } + return nil +} + +func (x *ModelOptimizationPolicy) GetExecutionAccelerators() *ModelOptimizationPolicy_ExecutionAccelerators { + if x != nil { + return x.ExecutionAccelerators + } + return nil +} + +func (x *ModelOptimizationPolicy) GetInputPinnedMemory() *ModelOptimizationPolicy_PinnedMemoryBuffer { + if x != nil { + return x.InputPinnedMemory + } + return nil +} + +func (x *ModelOptimizationPolicy) GetOutputPinnedMemory() 
*ModelOptimizationPolicy_PinnedMemoryBuffer { + if x != nil { + return x.OutputPinnedMemory + } + return nil +} + +func (x *ModelOptimizationPolicy) GetGatherKernelBufferThreshold() uint32 { + if x != nil { + return x.GatherKernelBufferThreshold + } + return 0 +} + +func (x *ModelOptimizationPolicy) GetEagerBatching() bool { + if x != nil { + return x.EagerBatching + } + return false +} + +// @@ +// @@.. cpp:var:: message ModelQueuePolicy +// @@ +// @@ Queue policy for inference requests. +// @@ +type ModelQueuePolicy struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: TimeoutAction timeout_action + // @@ + // @@ The action applied to timed-out request. + // @@ The default action is REJECT. + // @@ + TimeoutAction ModelQueuePolicy_TimeoutAction `protobuf:"varint,1,opt,name=timeout_action,json=timeoutAction,proto3,enum=inference.v1.ModelQueuePolicy_TimeoutAction" json:"timeout_action,omitempty"` + // @@ + // @@ .. cpp:var:: uint64 default_timeout_microseconds + // @@ + // @@ The default timeout for every request, in microseconds. + // @@ The default value is 0 which indicates that no timeout is set. + // @@ + DefaultTimeoutMicroseconds uint64 `protobuf:"varint,2,opt,name=default_timeout_microseconds,json=defaultTimeoutMicroseconds,proto3" json:"default_timeout_microseconds,omitempty"` + // @@ + // @@ .. cpp:var:: bool allow_timeout_override + // @@ + // @@ Whether individual request can override the default timeout value. + // @@ When true, individual requests can set a timeout that is less than + // @@ the default timeout value but may not increase the timeout. + // @@ The default value is false. + // @@ + AllowTimeoutOverride bool `protobuf:"varint,3,opt,name=allow_timeout_override,json=allowTimeoutOverride,proto3" json:"allow_timeout_override,omitempty"` + // @@ + // @@ .. cpp:var:: uint32 max_queue_size + // @@ + // @@ The maximum queue size for holding requests. 
A request will be + // @@ rejected immediately if it can't be enqueued because the queue is + // @@ full. The default value is 0 which indicates that no maximum + // @@ queue size is enforced. + // @@ + MaxQueueSize uint32 `protobuf:"varint,4,opt,name=max_queue_size,json=maxQueueSize,proto3" json:"max_queue_size,omitempty"` +} + +func (x *ModelQueuePolicy) Reset() { + *x = ModelQueuePolicy{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelQueuePolicy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelQueuePolicy) ProtoMessage() {} + +func (x *ModelQueuePolicy) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelQueuePolicy.ProtoReflect.Descriptor instead. +func (*ModelQueuePolicy) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{9} +} + +func (x *ModelQueuePolicy) GetTimeoutAction() ModelQueuePolicy_TimeoutAction { + if x != nil { + return x.TimeoutAction + } + return ModelQueuePolicy_REJECT +} + +func (x *ModelQueuePolicy) GetDefaultTimeoutMicroseconds() uint64 { + if x != nil { + return x.DefaultTimeoutMicroseconds + } + return 0 +} + +func (x *ModelQueuePolicy) GetAllowTimeoutOverride() bool { + if x != nil { + return x.AllowTimeoutOverride + } + return false +} + +func (x *ModelQueuePolicy) GetMaxQueueSize() uint32 { + if x != nil { + return x.MaxQueueSize + } + return 0 +} + +// @@ +// @@.. cpp:var:: message ModelDynamicBatching +// @@ +// @@ Dynamic batching configuration. 
These settings control how dynamic +// @@ batching operates for the model. +// @@ +type ModelDynamicBatching struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int32 preferred_batch_size (repeated) + // @@ + // @@ Preferred batch sizes for dynamic batching. If a batch of one of + // @@ these sizes can be formed it will be executed immediately. If + // @@ not specified a preferred batch size will be chosen automatically + // @@ based on model and GPU characteristics. + // @@ + PreferredBatchSize []int32 `protobuf:"varint,1,rep,packed,name=preferred_batch_size,json=preferredBatchSize,proto3" json:"preferred_batch_size,omitempty"` + // @@ .. cpp:var:: uint64 max_queue_delay_microseconds + // @@ + // @@ The maximum time, in microseconds, a request will be delayed in + // @@ the scheduling queue to wait for additional requests for + // @@ batching. Default is 0. + // @@ + MaxQueueDelayMicroseconds uint64 `protobuf:"varint,2,opt,name=max_queue_delay_microseconds,json=maxQueueDelayMicroseconds,proto3" json:"max_queue_delay_microseconds,omitempty"` + // @@ .. cpp:var:: bool preserve_ordering + // @@ + // @@ Should the dynamic batcher preserve the ordering of responses to + // @@ match the order of requests received by the scheduler. Default is + // @@ false. If true, the responses will be returned in the same order as + // @@ the order of requests sent to the scheduler. If false, the responses + // @@ may be returned in arbitrary order. This option is specifically + // @@ needed when a sequence of related inference requests (i.e. inference + // @@ requests with the same correlation ID) are sent to the dynamic + // @@ batcher to ensure that the sequence responses are in the correct + // @@ order. + // @@ + PreserveOrdering bool `protobuf:"varint,3,opt,name=preserve_ordering,json=preserveOrdering,proto3" json:"preserve_ordering,omitempty"` + // @@ .. 
cpp:var:: uint64 priority_levels + // @@ + // @@ The number of priority levels to be enabled for the model, + // @@ the priority level starts from 1 and 1 is the highest priority. + // @@ Requests are handled in priority order with all priority 1 requests + // @@ processed before priority 2, all priority 2 requests processed before + // @@ priority 3, etc. Requests with the same priority level will be + // @@ handled in the order that they are received. + // @@ + PriorityLevels uint64 `protobuf:"varint,4,opt,name=priority_levels,json=priorityLevels,proto3" json:"priority_levels,omitempty"` + // @@ .. cpp:var:: uint64 default_priority_level + // @@ + // @@ The priority level used for requests that don't specify their + // @@ priority. The value must be in the range [ 1, 'priority_levels' ]. + // @@ + DefaultPriorityLevel uint64 `protobuf:"varint,5,opt,name=default_priority_level,json=defaultPriorityLevel,proto3" json:"default_priority_level,omitempty"` + // @@ .. cpp:var:: ModelQueuePolicy default_queue_policy + // @@ + // @@ The default queue policy used for requests that don't require + // @@ priority handling and requests that specify priority levels where + // @@ there is no specific policy given. If not specified, a policy with + // @@ default field values will be used. + // @@ + DefaultQueuePolicy *ModelQueuePolicy `protobuf:"bytes,6,opt,name=default_queue_policy,json=defaultQueuePolicy,proto3" json:"default_queue_policy,omitempty"` + // @@ .. cpp:var:: map priority_queue_policy + // @@ + // @@ Specify the queue policy for the priority level. The default queue + // @@ policy will be used if a priority level doesn't specify a queue + // @@ policy. 
+ // @@ + PriorityQueuePolicy map[uint64]*ModelQueuePolicy `protobuf:"bytes,7,rep,name=priority_queue_policy,json=priorityQueuePolicy,proto3" json:"priority_queue_policy,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ModelDynamicBatching) Reset() { + *x = ModelDynamicBatching{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelDynamicBatching) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelDynamicBatching) ProtoMessage() {} + +func (x *ModelDynamicBatching) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelDynamicBatching.ProtoReflect.Descriptor instead. 
+func (*ModelDynamicBatching) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{10} +} + +func (x *ModelDynamicBatching) GetPreferredBatchSize() []int32 { + if x != nil { + return x.PreferredBatchSize + } + return nil +} + +func (x *ModelDynamicBatching) GetMaxQueueDelayMicroseconds() uint64 { + if x != nil { + return x.MaxQueueDelayMicroseconds + } + return 0 +} + +func (x *ModelDynamicBatching) GetPreserveOrdering() bool { + if x != nil { + return x.PreserveOrdering + } + return false +} + +func (x *ModelDynamicBatching) GetPriorityLevels() uint64 { + if x != nil { + return x.PriorityLevels + } + return 0 +} + +func (x *ModelDynamicBatching) GetDefaultPriorityLevel() uint64 { + if x != nil { + return x.DefaultPriorityLevel + } + return 0 +} + +func (x *ModelDynamicBatching) GetDefaultQueuePolicy() *ModelQueuePolicy { + if x != nil { + return x.DefaultQueuePolicy + } + return nil +} + +func (x *ModelDynamicBatching) GetPriorityQueuePolicy() map[uint64]*ModelQueuePolicy { + if x != nil { + return x.PriorityQueuePolicy + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelSequenceBatching +// @@ +// @@ Sequence batching configuration. These settings control how sequence +// @@ batching operates for the model. +// @@ +type ModelSequenceBatching struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: oneof strategy_choice + // @@ + // @@ The strategy used by the sequence batcher. Default strategy + // @@ is 'direct'. + // @@ + // + // Types that are assignable to StrategyChoice: + // + // *ModelSequenceBatching_Direct + // *ModelSequenceBatching_Oldest + StrategyChoice isModelSequenceBatching_StrategyChoice `protobuf_oneof:"strategy_choice"` + // @@ .. cpp:var:: uint64 max_sequence_idle_microseconds + // @@ + // @@ The maximum time, in microseconds, that a sequence is allowed to + // @@ be idle before it is aborted. 
The inference server considers a + // @@ sequence idle when it does not have any inference request queued + // @@ for the sequence. If this limit is exceeded, the inference server + // @@ will free the sequence slot allocated by the sequence and make it + // @@ available for another sequence. If not specified (or specified as + // @@ zero) a default value of 1000000 (1 second) is used. + // @@ + MaxSequenceIdleMicroseconds uint64 `protobuf:"varint,1,opt,name=max_sequence_idle_microseconds,json=maxSequenceIdleMicroseconds,proto3" json:"max_sequence_idle_microseconds,omitempty"` + // @@ .. cpp:var:: ControlInput control_input (repeated) + // @@ + // @@ The model input(s) that the server should use to communicate + // @@ sequence start, stop, ready and similar control values to the + // @@ model. + // @@ + ControlInput []*ModelSequenceBatching_ControlInput `protobuf:"bytes,2,rep,name=control_input,json=controlInput,proto3" json:"control_input,omitempty"` + // @@ .. cpp:var:: State state (repeated) + // @@ + // @@ The optional state that can be stored in Triton for performing + // @@ inference requests on a sequence. Each sequence holds an implicit + // @@ state local to itself. The output state tensor provided by the + // @@ model in 'output_name' field of the current inference request will + // @@ be transferred as an input tensor named 'input_name' in the next + // @@ request of the same sequence. The input state of the first request + // @@ in the sequence contains garbage data. 
+ // @@ + State []*ModelSequenceBatching_State `protobuf:"bytes,5,rep,name=state,proto3" json:"state,omitempty"` +} + +func (x *ModelSequenceBatching) Reset() { + *x = ModelSequenceBatching{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching) ProtoMessage() {} + +func (x *ModelSequenceBatching) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching.ProtoReflect.Descriptor instead. +func (*ModelSequenceBatching) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11} +} + +func (m *ModelSequenceBatching) GetStrategyChoice() isModelSequenceBatching_StrategyChoice { + if m != nil { + return m.StrategyChoice + } + return nil +} + +func (x *ModelSequenceBatching) GetDirect() *ModelSequenceBatching_StrategyDirect { + if x, ok := x.GetStrategyChoice().(*ModelSequenceBatching_Direct); ok { + return x.Direct + } + return nil +} + +func (x *ModelSequenceBatching) GetOldest() *ModelSequenceBatching_StrategyOldest { + if x, ok := x.GetStrategyChoice().(*ModelSequenceBatching_Oldest); ok { + return x.Oldest + } + return nil +} + +func (x *ModelSequenceBatching) GetMaxSequenceIdleMicroseconds() uint64 { + if x != nil { + return x.MaxSequenceIdleMicroseconds + } + return 0 +} + +func (x *ModelSequenceBatching) GetControlInput() []*ModelSequenceBatching_ControlInput { + if x != nil { + return x.ControlInput + } + return nil +} + +func (x *ModelSequenceBatching) 
GetState() []*ModelSequenceBatching_State { + if x != nil { + return x.State + } + return nil +} + +type isModelSequenceBatching_StrategyChoice interface { + isModelSequenceBatching_StrategyChoice() +} + +type ModelSequenceBatching_Direct struct { + // @@ .. cpp:var:: StrategyDirect direct + // @@ + // @@ StrategyDirect scheduling strategy. + // @@ + Direct *ModelSequenceBatching_StrategyDirect `protobuf:"bytes,3,opt,name=direct,proto3,oneof"` +} + +type ModelSequenceBatching_Oldest struct { + // @@ .. cpp:var:: StrategyOldest oldest + // @@ + // @@ StrategyOldest scheduling strategy. + // @@ + Oldest *ModelSequenceBatching_StrategyOldest `protobuf:"bytes,4,opt,name=oldest,proto3,oneof"` +} + +func (*ModelSequenceBatching_Direct) isModelSequenceBatching_StrategyChoice() {} + +func (*ModelSequenceBatching_Oldest) isModelSequenceBatching_StrategyChoice() {} + +// @@ +// @@.. cpp:var:: message ModelEnsembling +// @@ +// @@ Model ensembling configuration. These settings specify the models that +// @@ compose the ensemble and how data flows between the models. +// @@ +type ModelEnsembling struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Step step (repeated) + // @@ + // @@ The models and the input / output mappings used within the ensemble. 
+ // @@ + Step []*ModelEnsembling_Step `protobuf:"bytes,1,rep,name=step,proto3" json:"step,omitempty"` +} + +func (x *ModelEnsembling) Reset() { + *x = ModelEnsembling{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelEnsembling) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelEnsembling) ProtoMessage() {} + +func (x *ModelEnsembling) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelEnsembling.ProtoReflect.Descriptor instead. +func (*ModelEnsembling) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{12} +} + +func (x *ModelEnsembling) GetStep() []*ModelEnsembling_Step { + if x != nil { + return x.Step + } + return nil +} + +// @@ +// @@.. cpp:var:: message ModelParameter +// @@ +// @@ A model parameter. +// @@ +type ModelParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string string_value + // @@ + // @@ The string value of the parameter. 
+ // @@ + StringValue string `protobuf:"bytes,1,opt,name=string_value,json=stringValue,proto3" json:"string_value,omitempty"` +} + +func (x *ModelParameter) Reset() { + *x = ModelParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelParameter) ProtoMessage() {} + +func (x *ModelParameter) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelParameter.ProtoReflect.Descriptor instead. +func (*ModelParameter) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{13} +} + +func (x *ModelParameter) GetStringValue() string { + if x != nil { + return x.StringValue + } + return "" +} + +// @@ +// @@.. cpp:var:: message ModelWarmup +// @@ +// @@ Settings used to construct the request sample for model warmup. +// @@ +type ModelWarmup struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the request sample. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: uint32 batch_size + // @@ + // @@ The batch size of the inference request. This must be >= 1. For + // @@ models that don't support batching, batch_size must be 1. If + // @@ batch_size > 1, the 'inputs' specified below will be duplicated to + // @@ match the batch size requested. 
+ // @@ + BatchSize uint32 `protobuf:"varint,2,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + // @@ .. cpp:var:: map inputs + // @@ + // @@ The warmup meta data associated with every model input, including + // @@ control tensors. + // @@ + Inputs map[string]*ModelWarmup_Input `protobuf:"bytes,3,rep,name=inputs,proto3" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: uint32 count + // @@ + // @@ The number of iterations that this warmup sample will be executed. + // @@ For example, if this field is set to 2, 2 model executions using this + // @@ sample will be scheduled for warmup. Default value is 0 which + // @@ indicates that this sample will be used only once. + // @@ Note that for sequence model, 'count' may not work well + // @@ because the model often expect a valid sequence of requests which + // @@ should be represented by a series of warmup samples. 'count > 1' + // @@ essentially "resends" one of the sample, which may invalidate the + // @@ sequence and result in unexpected warmup failure. + // @@ + Count uint32 `protobuf:"varint,4,opt,name=count,proto3" json:"count,omitempty"` +} + +func (x *ModelWarmup) Reset() { + *x = ModelWarmup{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelWarmup) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelWarmup) ProtoMessage() {} + +func (x *ModelWarmup) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelWarmup.ProtoReflect.Descriptor instead. 
+func (*ModelWarmup) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{14} +} + +func (x *ModelWarmup) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelWarmup) GetBatchSize() uint32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *ModelWarmup) GetInputs() map[string]*ModelWarmup_Input { + if x != nil { + return x.Inputs + } + return nil +} + +func (x *ModelWarmup) GetCount() uint32 { + if x != nil { + return x.Count + } + return 0 +} + +// @@ +// @@ .. cpp:var:: message ModelOperations +// @@ +// @@ The metadata of libraries providing custom operations for this model. +// @@ +type ModelOperations struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string op_library_filename (repeated) + // @@ + // @@ Optional paths of the libraries providing custom operations for + // @@ this model. Valid only for ONNX models. + // @@ + OpLibraryFilename []string `protobuf:"bytes,1,rep,name=op_library_filename,json=opLibraryFilename,proto3" json:"op_library_filename,omitempty"` +} + +func (x *ModelOperations) Reset() { + *x = ModelOperations{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOperations) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOperations) ProtoMessage() {} + +func (x *ModelOperations) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOperations.ProtoReflect.Descriptor instead. 
+func (*ModelOperations) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{15} +} + +func (x *ModelOperations) GetOpLibraryFilename() []string { + if x != nil { + return x.OpLibraryFilename + } + return nil +} + +// @@ +// @@ .. cpp:var:: message ModelTransactionPolicy +// @@ +// @@ The specification that describes the nature of transactions +// @@ to be expected from the model. +// @@ +type ModelTransactionPolicy struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: bool decoupled + // @@ + // @@ Indicates whether responses generated by the model are decoupled with + // @@ the requests issued to it, which means the number of responses + // @@ generated by model may differ from number of requests issued, and + // @@ that the responses may be out of order relative to the order of + // @@ requests. The default is false, which means the model will generate + // @@ exactly one response for each request. + // @@ + Decoupled bool `protobuf:"varint,1,opt,name=decoupled,proto3" json:"decoupled,omitempty"` +} + +func (x *ModelTransactionPolicy) Reset() { + *x = ModelTransactionPolicy{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelTransactionPolicy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelTransactionPolicy) ProtoMessage() {} + +func (x *ModelTransactionPolicy) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelTransactionPolicy.ProtoReflect.Descriptor instead. 
+func (*ModelTransactionPolicy) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{16} +} + +func (x *ModelTransactionPolicy) GetDecoupled() bool { + if x != nil { + return x.Decoupled + } + return false +} + +// @@ +// @@.. cpp:var:: message ModelRepositoryAgents +// @@ +// @@ The repository agents for the model. +// @@ +type ModelRepositoryAgents struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp:var:: Agent agents (repeated) + // @@ + // @@ The ordered list of agents for the model. These agents will be + // @@ invoked in order to respond to repository actions occuring for the + // @@ model. + // @@ + Agents []*ModelRepositoryAgents_Agent `protobuf:"bytes,1,rep,name=agents,proto3" json:"agents,omitempty"` +} + +func (x *ModelRepositoryAgents) Reset() { + *x = ModelRepositoryAgents{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelRepositoryAgents) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelRepositoryAgents) ProtoMessage() {} + +func (x *ModelRepositoryAgents) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelRepositoryAgents.ProtoReflect.Descriptor instead. +func (*ModelRepositoryAgents) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{17} +} + +func (x *ModelRepositoryAgents) GetAgents() []*ModelRepositoryAgents_Agent { + if x != nil { + return x.Agents + } + return nil +} + +// @@ +// @@.. 
cpp:var:: message ModelResponseCache +// @@ +// @@ The response cache setting for the model. +// @@ +type ModelResponseCache struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ + // @@ .. cpp::var:: bool enable + // @@ + // @@ Whether or not to use response cache for the model. If True, the + // @@ responses from the model are cached and when identical request + // @@ is encountered, instead of going through the model execution, + // @@ the response from the cache is utilized. By default, response + // @@ cache is disabled for the models. + // @@ + Enable bool `protobuf:"varint,1,opt,name=enable,proto3" json:"enable,omitempty"` +} + +func (x *ModelResponseCache) Reset() { + *x = ModelResponseCache{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelResponseCache) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelResponseCache) ProtoMessage() {} + +func (x *ModelResponseCache) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelResponseCache.ProtoReflect.Descriptor instead. +func (*ModelResponseCache) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{18} +} + +func (x *ModelResponseCache) GetEnable() bool { + if x != nil { + return x.Enable + } + return false +} + +// @@ +// @@.. cpp:var:: message ModelConfig +// @@ +// @@ A model configuration. 
+// @@ +type ModelConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: string platform + // @@ + // @@ Additional backend-specific configuration for the model. + // @@ Please refer to the backend documentation on whether this field + // @@ should be specified. + // @@ + Platform string `protobuf:"bytes,2,opt,name=platform,proto3" json:"platform,omitempty"` + // @@ .. cpp:var:: string backend + // @@ + // @@ The backend used by the model. + // @@ + Backend string `protobuf:"bytes,17,opt,name=backend,proto3" json:"backend,omitempty"` + // @@ .. cpp:var:: ModelVersionPolicy version_policy + // @@ + // @@ Policy indicating which version(s) of the model will be served. + // @@ + VersionPolicy *ModelVersionPolicy `protobuf:"bytes,3,opt,name=version_policy,json=versionPolicy,proto3" json:"version_policy,omitempty"` + // @@ .. cpp:var:: int32 max_batch_size + // @@ + // @@ Maximum batch size allowed for inference. This can only decrease + // @@ what is allowed by the model itself. A max_batch_size value of 0 + // @@ indicates that batching is not allowed for the model and the + // @@ dimension/shape of the input and output tensors must exactly + // @@ match what is specified in the input and output configuration. A + // @@ max_batch_size value > 0 indicates that batching is allowed and + // @@ so the model expects the input tensors to have an additional + // @@ initial dimension for the batching that is not specified in the + // @@ input (for example, if the model supports batched inputs of + // @@ 2-dimensional tensors then the model configuration will specify + // @@ the input shape as [ X, Y ] but the model will expect the actual + // @@ input tensors to have shape [ N, X, Y ]). 
For max_batch_size > 0 + // @@ returned outputs will also have an additional initial dimension + // @@ for the batch. + // @@ + MaxBatchSize int32 `protobuf:"varint,4,opt,name=max_batch_size,json=maxBatchSize,proto3" json:"max_batch_size,omitempty"` + // @@ .. cpp:var:: ModelInput input (repeated) + // @@ + // @@ The inputs request by the model. + // @@ + Input []*ModelInput `protobuf:"bytes,5,rep,name=input,proto3" json:"input,omitempty"` + // @@ .. cpp:var:: ModelOutput output (repeated) + // @@ + // @@ The outputs produced by the model. + // @@ + Output []*ModelOutput `protobuf:"bytes,6,rep,name=output,proto3" json:"output,omitempty"` + // @@ .. cpp:var:: BatchInput batch_input (repeated) + // @@ + // @@ The model input(s) that the server should use to communicate + // @@ batch related values to the model. + // @@ + BatchInput []*BatchInput `protobuf:"bytes,20,rep,name=batch_input,json=batchInput,proto3" json:"batch_input,omitempty"` + // @@ .. cpp:var:: BatchOutput batch_output (repeated) + // @@ + // @@ The outputs produced by the model that requires special handling + // @@ by the model backend. + // @@ + BatchOutput []*BatchOutput `protobuf:"bytes,21,rep,name=batch_output,json=batchOutput,proto3" json:"batch_output,omitempty"` + // @@ .. cpp:var:: ModelOptimizationPolicy optimization + // @@ + // @@ Optimization configuration for the model. If not specified + // @@ then default optimization policy is used. + // @@ + Optimization *ModelOptimizationPolicy `protobuf:"bytes,12,opt,name=optimization,proto3" json:"optimization,omitempty"` + // @@ .. cpp:var:: oneof scheduling_choice + // @@ + // @@ The scheduling policy for the model. If not specified the + // @@ default scheduling policy is used for the model. The default + // @@ policy is to execute each inference request independently. 
+ // @@ + // + // Types that are assignable to SchedulingChoice: + // + // *ModelConfig_DynamicBatching + // *ModelConfig_SequenceBatching + // *ModelConfig_EnsembleScheduling + SchedulingChoice isModelConfig_SchedulingChoice `protobuf_oneof:"scheduling_choice"` + // @@ .. cpp:var:: ModelInstanceGroup instance_group (repeated) + // @@ + // @@ Instances of this model. If not specified, one instance + // @@ of the model will be instantiated on each available GPU. + // @@ + InstanceGroup []*ModelInstanceGroup `protobuf:"bytes,7,rep,name=instance_group,json=instanceGroup,proto3" json:"instance_group,omitempty"` + // @@ .. cpp:var:: string default_model_filename + // @@ + // @@ Optional filename of the model file to use if a + // @@ compute-capability specific model is not specified in + // @@ :cpp:var:`cc_model_filenames`. If not specified the default name + // @@ is 'model.graphdef', 'model.savedmodel', 'model.plan' or + // @@ 'model.pt' depending on the model type. + // @@ + DefaultModelFilename string `protobuf:"bytes,8,opt,name=default_model_filename,json=defaultModelFilename,proto3" json:"default_model_filename,omitempty"` + // @@ .. cpp:var:: map cc_model_filenames + // @@ + // @@ Optional map from CUDA compute capability to the filename of + // @@ the model that supports that compute capability. The filename + // @@ refers to a file within the model version directory. + // @@ + CcModelFilenames map[string]string `protobuf:"bytes,9,rep,name=cc_model_filenames,json=ccModelFilenames,proto3" json:"cc_model_filenames,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: map metric_tags + // @@ + // @@ Optional metric tags. User-specific key-value pairs for metrics + // @@ reported for this model. These tags are applied to the metrics + // @@ reported on the HTTP metrics port. 
+ // @@ + MetricTags map[string]string `protobuf:"bytes,10,rep,name=metric_tags,json=metricTags,proto3" json:"metric_tags,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Optional model parameters. User-specified parameter values. + // @@ + Parameters map[string]*ModelParameter `protobuf:"bytes,14,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: ModelWarmup model_warmup (repeated) + // @@ + // @@ Warmup setting of this model. If specified, all instances + // @@ will be run with the request samples in sequence before + // @@ serving the model. + // @@ This field can only be specified if the model is not an ensemble + // @@ model. + // @@ + ModelWarmup []*ModelWarmup `protobuf:"bytes,16,rep,name=model_warmup,json=modelWarmup,proto3" json:"model_warmup,omitempty"` + // @@ .. cpp:var:: ModelOperations model_operations + // @@ + // @@ Optional metadata of the libraries providing custom operations for + // @@ this model. + // @@ + ModelOperations *ModelOperations `protobuf:"bytes,18,opt,name=model_operations,json=modelOperations,proto3" json:"model_operations,omitempty"` + // @@ .. cpp:var:: ModelTransactionPolicy model_transaction_policy + // @@ + // @@ Optional specification that describes the nature of transactions + // @@ to be expected from the model. + // @@ + ModelTransactionPolicy *ModelTransactionPolicy `protobuf:"bytes,19,opt,name=model_transaction_policy,json=modelTransactionPolicy,proto3" json:"model_transaction_policy,omitempty"` + // @@ .. cpp:var:: ModelRepositoryAgents model_repository_agents + // @@ + // @@ Optional specification of the agent(s) that should be invoked + // @@ with repository actions are performed for this model. 
+ // @@ + ModelRepositoryAgents *ModelRepositoryAgents `protobuf:"bytes,23,opt,name=model_repository_agents,json=modelRepositoryAgents,proto3" json:"model_repository_agents,omitempty"` + // @@ .. cpp:var:: ModelResponseCache response_cache + // @@ + // @@ Optional setting for utilizing the response cache for this + // @@ model. + // @@ + ResponseCache *ModelResponseCache `protobuf:"bytes,24,opt,name=response_cache,json=responseCache,proto3" json:"response_cache,omitempty"` +} + +func (x *ModelConfig) Reset() { + *x = ModelConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelConfig) ProtoMessage() {} + +func (x *ModelConfig) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelConfig.ProtoReflect.Descriptor instead. 
+func (*ModelConfig) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{19} +} + +func (x *ModelConfig) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelConfig) GetPlatform() string { + if x != nil { + return x.Platform + } + return "" +} + +func (x *ModelConfig) GetBackend() string { + if x != nil { + return x.Backend + } + return "" +} + +func (x *ModelConfig) GetVersionPolicy() *ModelVersionPolicy { + if x != nil { + return x.VersionPolicy + } + return nil +} + +func (x *ModelConfig) GetMaxBatchSize() int32 { + if x != nil { + return x.MaxBatchSize + } + return 0 +} + +func (x *ModelConfig) GetInput() []*ModelInput { + if x != nil { + return x.Input + } + return nil +} + +func (x *ModelConfig) GetOutput() []*ModelOutput { + if x != nil { + return x.Output + } + return nil +} + +func (x *ModelConfig) GetBatchInput() []*BatchInput { + if x != nil { + return x.BatchInput + } + return nil +} + +func (x *ModelConfig) GetBatchOutput() []*BatchOutput { + if x != nil { + return x.BatchOutput + } + return nil +} + +func (x *ModelConfig) GetOptimization() *ModelOptimizationPolicy { + if x != nil { + return x.Optimization + } + return nil +} + +func (m *ModelConfig) GetSchedulingChoice() isModelConfig_SchedulingChoice { + if m != nil { + return m.SchedulingChoice + } + return nil +} + +func (x *ModelConfig) GetDynamicBatching() *ModelDynamicBatching { + if x, ok := x.GetSchedulingChoice().(*ModelConfig_DynamicBatching); ok { + return x.DynamicBatching + } + return nil +} + +func (x *ModelConfig) GetSequenceBatching() *ModelSequenceBatching { + if x, ok := x.GetSchedulingChoice().(*ModelConfig_SequenceBatching); ok { + return x.SequenceBatching + } + return nil +} + +func (x *ModelConfig) GetEnsembleScheduling() *ModelEnsembling { + if x, ok := x.GetSchedulingChoice().(*ModelConfig_EnsembleScheduling); ok { + return x.EnsembleScheduling + } + return nil +} + +func (x *ModelConfig) 
GetInstanceGroup() []*ModelInstanceGroup { + if x != nil { + return x.InstanceGroup + } + return nil +} + +func (x *ModelConfig) GetDefaultModelFilename() string { + if x != nil { + return x.DefaultModelFilename + } + return "" +} + +func (x *ModelConfig) GetCcModelFilenames() map[string]string { + if x != nil { + return x.CcModelFilenames + } + return nil +} + +func (x *ModelConfig) GetMetricTags() map[string]string { + if x != nil { + return x.MetricTags + } + return nil +} + +func (x *ModelConfig) GetParameters() map[string]*ModelParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *ModelConfig) GetModelWarmup() []*ModelWarmup { + if x != nil { + return x.ModelWarmup + } + return nil +} + +func (x *ModelConfig) GetModelOperations() *ModelOperations { + if x != nil { + return x.ModelOperations + } + return nil +} + +func (x *ModelConfig) GetModelTransactionPolicy() *ModelTransactionPolicy { + if x != nil { + return x.ModelTransactionPolicy + } + return nil +} + +func (x *ModelConfig) GetModelRepositoryAgents() *ModelRepositoryAgents { + if x != nil { + return x.ModelRepositoryAgents + } + return nil +} + +func (x *ModelConfig) GetResponseCache() *ModelResponseCache { + if x != nil { + return x.ResponseCache + } + return nil +} + +type isModelConfig_SchedulingChoice interface { + isModelConfig_SchedulingChoice() +} + +type ModelConfig_DynamicBatching struct { + // @@ .. cpp:var:: ModelDynamicBatching dynamic_batching + // @@ + // @@ If specified, enables the dynamic-batching scheduling + // @@ policy. With dynamic-batching the scheduler may group + // @@ together independent requests into a single batch to + // @@ improve inference throughput. + // @@ + DynamicBatching *ModelDynamicBatching `protobuf:"bytes,11,opt,name=dynamic_batching,json=dynamicBatching,proto3,oneof"` +} + +type ModelConfig_SequenceBatching struct { + // @@ .. 
cpp:var:: ModelSequenceBatching sequence_batching + // @@ + // @@ If specified, enables the sequence-batching scheduling + // @@ policy. With sequence-batching, inference requests + // @@ with the same correlation ID are routed to the same + // @@ model instance. Multiple sequences of inference requests + // @@ may be batched together into a single batch to + // @@ improve inference throughput. + // @@ + SequenceBatching *ModelSequenceBatching `protobuf:"bytes,13,opt,name=sequence_batching,json=sequenceBatching,proto3,oneof"` +} + +type ModelConfig_EnsembleScheduling struct { + // @@ .. cpp:var:: ModelEnsembling ensemble_scheduling + // @@ + // @@ If specified, enables the model-ensembling scheduling + // @@ policy. With model-ensembling, inference requests + // @@ will be processed according to the specification, such as an + // @@ execution sequence of models. The input specified in this model + // @@ config will be the input for the ensemble, and the output + // @@ specified will be the output of the ensemble. + // @@ + EnsembleScheduling *ModelEnsembling `protobuf:"bytes,15,opt,name=ensemble_scheduling,json=ensembleScheduling,proto3,oneof"` +} + +func (*ModelConfig_DynamicBatching) isModelConfig_SchedulingChoice() {} + +func (*ModelConfig_SequenceBatching) isModelConfig_SchedulingChoice() {} + +func (*ModelConfig_EnsembleScheduling) isModelConfig_SchedulingChoice() {} + +// @@ .. cpp:var:: message Resource +// @@ +// @@ The resource property. +// @@ +type ModelRateLimiter_Resource struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name associated with the resource. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: bool global + // @@ + // @@ Whether or not the resource is global. 
If true then the resource + // @@ is assumed to be shared among the devices otherwise specified + // @@ count of the resource is assumed for each device associated + // @@ with the instance. + // @@ + Global bool `protobuf:"varint,2,opt,name=global,proto3" json:"global,omitempty"` + // @@ .. cpp:var:: uint32 count + // @@ + // @@ The number of resources required for the execution of the model + // @@ instance. + // @@ + Count uint32 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"` +} + +func (x *ModelRateLimiter_Resource) Reset() { + *x = ModelRateLimiter_Resource{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelRateLimiter_Resource) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelRateLimiter_Resource) ProtoMessage() {} + +func (x *ModelRateLimiter_Resource) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelRateLimiter_Resource.ProtoReflect.Descriptor instead. +func (*ModelRateLimiter_Resource) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *ModelRateLimiter_Resource) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelRateLimiter_Resource) GetGlobal() bool { + if x != nil { + return x.Global + } + return false +} + +func (x *ModelRateLimiter_Resource) GetCount() uint32 { + if x != nil { + return x.Count + } + return 0 +} + +// @@ +// @@ .. cpp:var:: message SecondaryDevice +// @@ +// @@ A secondary device required for a model instance. 
+// @@ +type ModelInstanceGroup_SecondaryDevice struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: SecondaryDeviceKind kind + // @@ + // @@ The secondary device kind. + // @@ + Kind ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind `protobuf:"varint,1,opt,name=kind,proto3,enum=inference.v1.ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind" json:"kind,omitempty"` + // @@ .. cpp:var:: int64 device_id + // @@ + // @@ Identifier for the secondary device. + // @@ + DeviceId int64 `protobuf:"varint,2,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` +} + +func (x *ModelInstanceGroup_SecondaryDevice) Reset() { + *x = ModelInstanceGroup_SecondaryDevice{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelInstanceGroup_SecondaryDevice) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelInstanceGroup_SecondaryDevice) ProtoMessage() {} + +func (x *ModelInstanceGroup_SecondaryDevice) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelInstanceGroup_SecondaryDevice.ProtoReflect.Descriptor instead. 
+func (*ModelInstanceGroup_SecondaryDevice) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *ModelInstanceGroup_SecondaryDevice) GetKind() ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind { + if x != nil { + return x.Kind + } + return ModelInstanceGroup_SecondaryDevice_KIND_NVDLA +} + +func (x *ModelInstanceGroup_SecondaryDevice) GetDeviceId() int64 { + if x != nil { + return x.DeviceId + } + return 0 +} + +// @@ .. cpp:var:: message Latest +// @@ +// @@ Serve only the latest version(s) of a model. This is +// @@ the default policy. +// @@ +type ModelVersionPolicy_Latest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: uint32 num_versions + // @@ + // @@ Serve only the 'num_versions' highest-numbered versions. T + // @@ The default value of 'num_versions' is 1, indicating that by + // @@ default only the single highest-number version of a + // @@ model will be served. 
+ // @@ + NumVersions uint32 `protobuf:"varint,1,opt,name=num_versions,json=numVersions,proto3" json:"num_versions,omitempty"` +} + +func (x *ModelVersionPolicy_Latest) Reset() { + *x = ModelVersionPolicy_Latest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelVersionPolicy_Latest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelVersionPolicy_Latest) ProtoMessage() {} + +func (x *ModelVersionPolicy_Latest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelVersionPolicy_Latest.ProtoReflect.Descriptor instead. +func (*ModelVersionPolicy_Latest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{7, 0} +} + +func (x *ModelVersionPolicy_Latest) GetNumVersions() uint32 { + if x != nil { + return x.NumVersions + } + return 0 +} + +// @@ .. cpp:var:: message All +// @@ +// @@ Serve all versions of the model. 
+// @@ +type ModelVersionPolicy_All struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ModelVersionPolicy_All) Reset() { + *x = ModelVersionPolicy_All{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelVersionPolicy_All) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelVersionPolicy_All) ProtoMessage() {} + +func (x *ModelVersionPolicy_All) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelVersionPolicy_All.ProtoReflect.Descriptor instead. +func (*ModelVersionPolicy_All) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{7, 1} +} + +// @@ .. cpp:var:: message Specific +// @@ +// @@ Serve only specific versions of the model. +// @@ +type ModelVersionPolicy_Specific struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int64 versions (repeated) + // @@ + // @@ The specific versions of the model that will be served. 
+ // @@ + Versions []int64 `protobuf:"varint,1,rep,packed,name=versions,proto3" json:"versions,omitempty"` +} + +func (x *ModelVersionPolicy_Specific) Reset() { + *x = ModelVersionPolicy_Specific{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelVersionPolicy_Specific) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelVersionPolicy_Specific) ProtoMessage() {} + +func (x *ModelVersionPolicy_Specific) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelVersionPolicy_Specific.ProtoReflect.Descriptor instead. +func (*ModelVersionPolicy_Specific) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{7, 2} +} + +func (x *ModelVersionPolicy_Specific) GetVersions() []int64 { + if x != nil { + return x.Versions + } + return nil +} + +// @@ +// @@ .. cpp:var:: message Graph +// @@ +// @@ Enable generic graph optimization of the model. If not specified +// @@ the framework's default level of optimization is used. Supports +// @@ TensorFlow graphdef and savedmodel and Onnx models. For TensorFlow +// @@ causes XLA to be enabled/disabled for the model. For Onnx defaults +// @@ to enabling all optimizations, -1 enables only basic optimizations, +// @@ +1 enables only basic and extended optimizations. +// @@ +type ModelOptimizationPolicy_Graph struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int32 level + // @@ + // @@ The optimization level. 
Defaults to 0 (zero) if not specified. + // @@ + // @@ - -1: Disabled + // @@ - 0: Framework default + // @@ - 1+: Enable optimization level (greater values indicate + // @@ higher optimization levels) + // @@ + Level int32 `protobuf:"varint,1,opt,name=level,proto3" json:"level,omitempty"` +} + +func (x *ModelOptimizationPolicy_Graph) Reset() { + *x = ModelOptimizationPolicy_Graph{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_Graph) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_Graph) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_Graph) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_Graph.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_Graph) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 0} +} + +func (x *ModelOptimizationPolicy_Graph) GetLevel() int32 { + if x != nil { + return x.Level + } + return 0 +} + +// @@ +// @@ .. cpp:var:: message Cuda +// @@ +// @@ CUDA-specific optimization settings. +// @@ +type ModelOptimizationPolicy_Cuda struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: bool graphs + // @@ + // @@ Use CUDA graphs API to capture model operations and execute + // @@ them more efficiently. Default value is false. + // @@ Currently only recognized by TensorRT backend. 
+ // @@ + Graphs bool `protobuf:"varint,1,opt,name=graphs,proto3" json:"graphs,omitempty"` + // @@ .. cpp:var:: bool busy_wait_events + // @@ + // @@ Use busy-waiting to synchronize CUDA events to achieve minimum + // @@ latency from event complete to host thread to be notified, with + // @@ the cost of high CPU load. Default value is false. + // @@ Currently only recognized by TensorRT backend. + // @@ + BusyWaitEvents bool `protobuf:"varint,2,opt,name=busy_wait_events,json=busyWaitEvents,proto3" json:"busy_wait_events,omitempty"` + // @@ .. cpp:var:: GraphSpec graph_spec (repeated) + // @@ + // @@ Specification of the CUDA graph to be captured. If not specified + // @@ and 'graphs' is true, the default CUDA graphs will be captured + // @@ based on model settings. + // @@ Currently only recognized by TensorRT backend. + // @@ + GraphSpec []*ModelOptimizationPolicy_Cuda_GraphSpec `protobuf:"bytes,3,rep,name=graph_spec,json=graphSpec,proto3" json:"graph_spec,omitempty"` + // @@ .. cpp:var:: bool output_copy_stream + // @@ + // @@ Uses a CUDA stream separate from the inference stream to copy the + // @@ output to host. However, be aware that setting this option to + // @@ true will lead to an increase in the memory consumption of the + // @@ model as Triton will allocate twice as much GPU memory for its + // @@ I/O tensor buffers. Default value is false. + // @@ Currently only recognized by TensorRT backend. 
+ // @@ + OutputCopyStream bool `protobuf:"varint,4,opt,name=output_copy_stream,json=outputCopyStream,proto3" json:"output_copy_stream,omitempty"` +} + +func (x *ModelOptimizationPolicy_Cuda) Reset() { + *x = ModelOptimizationPolicy_Cuda{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_Cuda) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_Cuda) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_Cuda) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_Cuda.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_Cuda) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 1} +} + +func (x *ModelOptimizationPolicy_Cuda) GetGraphs() bool { + if x != nil { + return x.Graphs + } + return false +} + +func (x *ModelOptimizationPolicy_Cuda) GetBusyWaitEvents() bool { + if x != nil { + return x.BusyWaitEvents + } + return false +} + +func (x *ModelOptimizationPolicy_Cuda) GetGraphSpec() []*ModelOptimizationPolicy_Cuda_GraphSpec { + if x != nil { + return x.GraphSpec + } + return nil +} + +func (x *ModelOptimizationPolicy_Cuda) GetOutputCopyStream() bool { + if x != nil { + return x.OutputCopyStream + } + return false +} + +// @@ +// @@ .. cpp:var:: message ExecutionAccelerators +// @@ +// @@ Specify the preferred execution accelerators to be used to execute +// @@ the model. Currently only recognized by ONNX Runtime backend and +// @@ TensorFlow backend. 
+// @@ +// @@ For ONNX Runtime backend, it will deploy the model with the execution +// @@ accelerators by priority, the priority is determined based on the +// @@ order that they are set, i.e. the provider at the front has highest +// @@ priority. Overall, the priority will be in the following order: +// @@ (if instance is on GPU) +// @@ CUDA Execution Provider (if instance is on GPU) +// @@ +// @@ Default CPU Execution Provider +// @@ +type ModelOptimizationPolicy_ExecutionAccelerators struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Accelerator gpu_execution_accelerator (repeated) + // @@ + // @@ The preferred execution provider to be used if the model instance + // @@ is deployed on GPU. + // @@ + // @@ For ONNX Runtime backend, possible value is "tensorrt" as name, + // @@ and no parameters are required. + // @@ + // @@ For TensorFlow backend, possible values are "tensorrt", + // @@ "auto_mixed_precision", "gpu_io". + // @@ + // @@ For "tensorrt", the following parameters can be specified: + // @@ "precision_mode": The precision used for optimization. + // @@ Allowed values are "FP32" and "FP16". Default value is "FP32". + // @@ + // @@ "max_cached_engines": The maximum number of cached TensorRT + // @@ engines in dynamic TensorRT ops. Default value is 100. + // @@ + // @@ "minimum_segment_size": The smallest model subgraph that will + // @@ be considered for optimization by TensorRT. Default value is 3. + // @@ + // @@ "max_workspace_size_bytes": The maximum GPU memory the model + // @@ can use temporarily during execution. Default value is 1GB. + // @@ + // @@ For "auto_mixed_precision", no parameters are required. If set, + // @@ the model will try to use FP16 for better performance. + // @@ This optimization can not be set with "tensorrt". + // @@ + // @@ For "gpu_io", no parameters are required. 
If set, the model will + // @@ be executed using TensorFlow Callable API to set input and output + // @@ tensors in GPU memory if possible, which can reduce data transfer + // @@ overhead if the model is used in ensemble. However, the Callable + // @@ object will be created on model creation and it will request all + // @@ outputs for every model execution, which may impact the + // @@ performance if a request does not require all outputs. This + // @@ optimization will only take affect if the model instance is + // @@ created with KIND_GPU. + // @@ + GpuExecutionAccelerator []*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator `protobuf:"bytes,1,rep,name=gpu_execution_accelerator,json=gpuExecutionAccelerator,proto3" json:"gpu_execution_accelerator,omitempty"` + // @@ .. cpp:var:: Accelerator cpu_execution_accelerator (repeated) + // @@ + // @@ The preferred execution provider to be used if the model instance + // @@ is deployed on CPU. + // @@ + // @@ For ONNX Runtime backend, possible value is "openvino" as name, + // @@ and no parameters are required. 
+ // @@ + CpuExecutionAccelerator []*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator `protobuf:"bytes,2,rep,name=cpu_execution_accelerator,json=cpuExecutionAccelerator,proto3" json:"cpu_execution_accelerator,omitempty"` +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators) Reset() { + *x = ModelOptimizationPolicy_ExecutionAccelerators{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_ExecutionAccelerators) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_ExecutionAccelerators.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_ExecutionAccelerators) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 2} +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators) GetGpuExecutionAccelerator() []*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator { + if x != nil { + return x.GpuExecutionAccelerator + } + return nil +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators) GetCpuExecutionAccelerator() []*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator { + if x != nil { + return x.CpuExecutionAccelerator + } + return nil +} + +// @@ +// @@ .. 
cpp:var:: message PinnedMemoryBuffer +// @@ +// @@ Specify whether to use a pinned memory buffer when transferring data +// @@ between non-pinned system memory and GPU memory. Using a pinned +// @@ memory buffer for system from/to GPU transfers will typically provide +// @@ increased performance. For example, in the common use case where the +// @@ request provides inputs and delivers outputs via non-pinned system +// @@ memory, if the model instance accepts GPU IOs, the inputs will be +// @@ processed by two copies: from non-pinned system memory to pinned +// @@ memory, and from pinned memory to GPU memory. Similarly, pinned +// @@ memory will be used for delivering the outputs. +// @@ +type ModelOptimizationPolicy_PinnedMemoryBuffer struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: bool enable + // @@ + // @@ Use pinned memory buffer. Default is true. + // @@ + Enable bool `protobuf:"varint,1,opt,name=enable,proto3" json:"enable,omitempty"` +} + +func (x *ModelOptimizationPolicy_PinnedMemoryBuffer) Reset() { + *x = ModelOptimizationPolicy_PinnedMemoryBuffer{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_PinnedMemoryBuffer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_PinnedMemoryBuffer) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_PinnedMemoryBuffer) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
ModelOptimizationPolicy_PinnedMemoryBuffer.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_PinnedMemoryBuffer) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 3} +} + +func (x *ModelOptimizationPolicy_PinnedMemoryBuffer) GetEnable() bool { + if x != nil { + return x.Enable + } + return false +} + +// @@ .. cpp:var:: message GraphSpec +// @@ +// @@ Specification of the CUDA graph to be captured. +// @@ +type ModelOptimizationPolicy_Cuda_GraphSpec struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int32 batch_size + // @@ + // @@ The batch size of the CUDA graph. If 'max_batch_size' is 0, + // @@ 'batch_size' must be set to 0. Otherwise, 'batch_size' must + // @@ be set to value between 1 and 'max_batch_size'. + // @@ + BatchSize int32 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + // @@ .. cpp:var:: map input + // @@ + // @@ The specification of the inputs. 'Shape' is the shape of the + // @@ input without batching dimension. + // @@ + Input map[string]*ModelOptimizationPolicy_Cuda_GraphSpec_Shape `protobuf:"bytes,2,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: LowerBound graph_lower_bound + // @@ + // @@ Specify the lower bound of the CUDA graph. Optional. + // @@ If specified, the graph can be used for input shapes and + // @@ batch sizes that are in closed interval between the lower + // @@ bound specification and graph specification. For dynamic + // @@ shape model, this allows CUDA graphs to be launched + // @@ frequently without capturing all possible shape combinations. 
+ // @@ However, using graph for shape combinations different from + // @@ the one used for capturing introduces uninitialized data for + // @@ execution and it may distort the inference result if + // @@ the model is sensitive to uninitialized data. + // @@ + GraphLowerBound *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound `protobuf:"bytes,3,opt,name=graph_lower_bound,json=graphLowerBound,proto3" json:"graph_lower_bound,omitempty"` +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) Reset() { + *x = ModelOptimizationPolicy_Cuda_GraphSpec{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_Cuda_GraphSpec) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_Cuda_GraphSpec.ProtoReflect.Descriptor instead. 
+func (*ModelOptimizationPolicy_Cuda_GraphSpec) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 1, 0} +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) GetBatchSize() int32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) GetInput() map[string]*ModelOptimizationPolicy_Cuda_GraphSpec_Shape { + if x != nil { + return x.Input + } + return nil +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec) GetGraphLowerBound() *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound { + if x != nil { + return x.GraphLowerBound + } + return nil +} + +// @@ .. cpp:var:: message Dims +// @@ +// @@ Specification of tensor dimension. +// @@ +type ModelOptimizationPolicy_Cuda_GraphSpec_Shape struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int64 dim (repeated) + // @@ + // @@ The dimension. + // @@ + Dim []int64 `protobuf:"varint,1,rep,packed,name=dim,proto3" json:"dim,omitempty"` +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) Reset() { + *x = ModelOptimizationPolicy_Cuda_GraphSpec_Shape{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_Cuda_GraphSpec_Shape) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
ModelOptimizationPolicy_Cuda_GraphSpec_Shape.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_Cuda_GraphSpec_Shape) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 1, 0, 0} +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) GetDim() []int64 { + if x != nil { + return x.Dim + } + return nil +} + +type ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int32 batch_size + // @@ + // @@ The batch size of the CUDA graph. If 'max_batch_size' is 0, + // @@ 'batch_size' must be set to 0. Otherwise, 'batch_size' must + // @@ be set to value between 1 and 'max_batch_size'. + // @@ + BatchSize int32 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + // @@ .. cpp:var:: map input + // @@ + // @@ The specification of the inputs. 'Shape' is the shape of + // @@ the input without batching dimension. 
+ // @@ + Input map[string]*ModelOptimizationPolicy_Cuda_GraphSpec_Shape `protobuf:"bytes,2,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) Reset() { + *x = ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound.ProtoReflect.Descriptor instead. +func (*ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 1, 0, 1} +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) GetBatchSize() int32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) GetInput() map[string]*ModelOptimizationPolicy_Cuda_GraphSpec_Shape { + if x != nil { + return x.Input + } + return nil +} + +// @@ +// @@ .. cpp:var:: message Accelerator +// @@ +// @@ Specify the accelerator to be used to execute the model. +// @@ Accelerator with the same name may accept different parameters +// @@ depending on the backends. 
+// @@ +type ModelOptimizationPolicy_ExecutionAccelerators_Accelerator struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the execution accelerator. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ Additional paremeters used to configure the accelerator. + // @@ + Parameters map[string]string `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) Reset() { + *x = ModelOptimizationPolicy_ExecutionAccelerators_Accelerator{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) ProtoMessage() {} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelOptimizationPolicy_ExecutionAccelerators_Accelerator.ProtoReflect.Descriptor instead. 
+func (*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{8, 2, 0} +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) GetParameters() map[string]string { + if x != nil { + return x.Parameters + } + return nil +} + +// @@ .. cpp:var:: message Control +// @@ +// @@ A control is a signal that the sequence batcher uses to +// @@ communicate with a backend. +// @@ +type ModelSequenceBatching_Control struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: Kind kind + // @@ + // @@ The kind of this control. + // @@ + Kind ModelSequenceBatching_Control_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=inference.v1.ModelSequenceBatching_Control_Kind" json:"kind,omitempty"` + // @@ .. cpp:var:: int32 int32_false_true (repeated) + // @@ + // @@ The control's true and false setting is indicated by setting + // @@ a value in an int32 tensor. The tensor must be a + // @@ 1-dimensional tensor with size equal to the batch size of + // @@ the request. 'int32_false_true' must have two entries: the + // @@ first the false value and the second the true value. + // @@ + Int32FalseTrue []int32 `protobuf:"varint,2,rep,packed,name=int32_false_true,json=int32FalseTrue,proto3" json:"int32_false_true,omitempty"` + // @@ .. cpp:var:: float fp32_false_true (repeated) + // @@ + // @@ The control's true and false setting is indicated by setting + // @@ a value in a fp32 tensor. The tensor must be a + // @@ 1-dimensional tensor with size equal to the batch size of + // @@ the request. 'fp32_false_true' must have two entries: the + // @@ first the false value and the second the true value. 
+ // @@ + Fp32FalseTrue []float32 `protobuf:"fixed32,3,rep,packed,name=fp32_false_true,json=fp32FalseTrue,proto3" json:"fp32_false_true,omitempty"` + // @@ .. cpp:var:: bool bool_false_true (repeated) + // @@ + // @@ The control's true and false setting is indicated by setting + // @@ a value in a bool tensor. The tensor must be a + // @@ 1-dimensional tensor with size equal to the batch size of + // @@ the request. 'bool_false_true' must have two entries: the + // @@ first the false value and the second the true value. + // @@ + BoolFalseTrue []bool `protobuf:"varint,5,rep,packed,name=bool_false_true,json=boolFalseTrue,proto3" json:"bool_false_true,omitempty"` + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The control's datatype. + // @@ + DataType DataType `protobuf:"varint,4,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` +} + +func (x *ModelSequenceBatching_Control) Reset() { + *x = ModelSequenceBatching_Control{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_Control) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_Control) ProtoMessage() {} + +func (x *ModelSequenceBatching_Control) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_Control.ProtoReflect.Descriptor instead. 
+func (*ModelSequenceBatching_Control) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 0} +} + +func (x *ModelSequenceBatching_Control) GetKind() ModelSequenceBatching_Control_Kind { + if x != nil { + return x.Kind + } + return ModelSequenceBatching_Control_CONTROL_SEQUENCE_START +} + +func (x *ModelSequenceBatching_Control) GetInt32FalseTrue() []int32 { + if x != nil { + return x.Int32FalseTrue + } + return nil +} + +func (x *ModelSequenceBatching_Control) GetFp32FalseTrue() []float32 { + if x != nil { + return x.Fp32FalseTrue + } + return nil +} + +func (x *ModelSequenceBatching_Control) GetBoolFalseTrue() []bool { + if x != nil { + return x.BoolFalseTrue + } + return nil +} + +func (x *ModelSequenceBatching_Control) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +// @@ .. cpp:var:: message ControlInput +// @@ +// @@ The sequence control values to communicate by a model input. +// @@ +type ModelSequenceBatching_ControlInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the model input. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: Control control (repeated) + // @@ + // @@ The control value(s) that should be communicated to the + // @@ model using this model input. 
+ // @@ + Control []*ModelSequenceBatching_Control `protobuf:"bytes,2,rep,name=control,proto3" json:"control,omitempty"` +} + +func (x *ModelSequenceBatching_ControlInput) Reset() { + *x = ModelSequenceBatching_ControlInput{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_ControlInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_ControlInput) ProtoMessage() {} + +func (x *ModelSequenceBatching_ControlInput) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_ControlInput.ProtoReflect.Descriptor instead. +func (*ModelSequenceBatching_ControlInput) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 1} +} + +func (x *ModelSequenceBatching_ControlInput) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelSequenceBatching_ControlInput) GetControl() []*ModelSequenceBatching_Control { + if x != nil { + return x.Control + } + return nil +} + +// @@ +// @@ .. cpp:var:: message InitialState +// @@ +// @@ Settings used to initialize data for implicit state. +// @@ +type ModelSequenceBatching_InitialState struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The data-type of the state. + // @@ + DataType DataType `protobuf:"varint,1,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. 
cpp:var:: int64 dims (repeated) + // @@ + // @@ The shape of the state tensor, not including the batch + // @@ dimension. + // @@ + Dims []int64 `protobuf:"varint,2,rep,packed,name=dims,proto3" json:"dims,omitempty"` + // @@ .. cpp:var:: oneof state_data + // @@ + // @@ Specify how the initial state data is generated. + // @@ + // + // Types that are assignable to StateData: + // + // *ModelSequenceBatching_InitialState_ZeroData + // *ModelSequenceBatching_InitialState_DataFile + StateData isModelSequenceBatching_InitialState_StateData `protobuf_oneof:"state_data"` + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the state initialization. + // @@ + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *ModelSequenceBatching_InitialState) Reset() { + *x = ModelSequenceBatching_InitialState{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_InitialState) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_InitialState) ProtoMessage() {} + +func (x *ModelSequenceBatching_InitialState) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[39] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_InitialState.ProtoReflect.Descriptor instead. 
+func (*ModelSequenceBatching_InitialState) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 2} +} + +func (x *ModelSequenceBatching_InitialState) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *ModelSequenceBatching_InitialState) GetDims() []int64 { + if x != nil { + return x.Dims + } + return nil +} + +func (m *ModelSequenceBatching_InitialState) GetStateData() isModelSequenceBatching_InitialState_StateData { + if m != nil { + return m.StateData + } + return nil +} + +func (x *ModelSequenceBatching_InitialState) GetZeroData() bool { + if x, ok := x.GetStateData().(*ModelSequenceBatching_InitialState_ZeroData); ok { + return x.ZeroData + } + return false +} + +func (x *ModelSequenceBatching_InitialState) GetDataFile() string { + if x, ok := x.GetStateData().(*ModelSequenceBatching_InitialState_DataFile); ok { + return x.DataFile + } + return "" +} + +func (x *ModelSequenceBatching_InitialState) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type isModelSequenceBatching_InitialState_StateData interface { + isModelSequenceBatching_InitialState_StateData() +} + +type ModelSequenceBatching_InitialState_ZeroData struct { + // @@ + // @@ .. cpp:var:: bool zero_data + // @@ + // @@ The identifier for using zeros as initial state data. + // @@ Note that the value of 'zero_data' will not be checked, + // @@ instead, zero data will be used as long as the field is set. + // @@ + ZeroData bool `protobuf:"varint,3,opt,name=zero_data,json=zeroData,proto3,oneof"` +} + +type ModelSequenceBatching_InitialState_DataFile struct { + // @@ .. cpp:var:: string data_file + // @@ + // @@ The file whose content will be used as the initial data for + // @@ the state in row-major order. The file must be provided in + // @@ sub-directory 'initial_state' under the model directory. 
+ // @@ + DataFile string `protobuf:"bytes,4,opt,name=data_file,json=dataFile,proto3,oneof"` +} + +func (*ModelSequenceBatching_InitialState_ZeroData) isModelSequenceBatching_InitialState_StateData() { +} + +func (*ModelSequenceBatching_InitialState_DataFile) isModelSequenceBatching_InitialState_StateData() { +} + +// @@ .. cpp:var:: message State +// @@ +// @@ An input / output pair of tensors that carry state for the sequence. +// @@ +type ModelSequenceBatching_State struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string input_name + // @@ + // @@ The name of the model state input. + // @@ + InputName string `protobuf:"bytes,1,opt,name=input_name,json=inputName,proto3" json:"input_name,omitempty"` + // @@ .. cpp:var:: string output_name + // @@ + // @@ The name of the model state output. + // @@ + OutputName string `protobuf:"bytes,2,opt,name=output_name,json=outputName,proto3" json:"output_name,omitempty"` + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The data-type of the state. + // @@ + DataType DataType `protobuf:"varint,3,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. cpp:var:: int64 dim (repeated) + // @@ + // @@ The dimension. + // @@ + Dims []int64 `protobuf:"varint,4,rep,packed,name=dims,proto3" json:"dims,omitempty"` + // @@ .. cpp:var:: InitialState initial_state (repeated) + // @@ + // @@ The optional field to specify the initial state for the model. 
+ // @@ + InitialState []*ModelSequenceBatching_InitialState `protobuf:"bytes,5,rep,name=initial_state,json=initialState,proto3" json:"initial_state,omitempty"` +} + +func (x *ModelSequenceBatching_State) Reset() { + *x = ModelSequenceBatching_State{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_State) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_State) ProtoMessage() {} + +func (x *ModelSequenceBatching_State) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[40] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_State.ProtoReflect.Descriptor instead. +func (*ModelSequenceBatching_State) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 3} +} + +func (x *ModelSequenceBatching_State) GetInputName() string { + if x != nil { + return x.InputName + } + return "" +} + +func (x *ModelSequenceBatching_State) GetOutputName() string { + if x != nil { + return x.OutputName + } + return "" +} + +func (x *ModelSequenceBatching_State) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *ModelSequenceBatching_State) GetDims() []int64 { + if x != nil { + return x.Dims + } + return nil +} + +func (x *ModelSequenceBatching_State) GetInitialState() []*ModelSequenceBatching_InitialState { + if x != nil { + return x.InitialState + } + return nil +} + +// @@ .. cpp:var:: message StrategyDirect +// @@ +// @@ The sequence batcher uses a specific, unique batch +// @@ slot for each sequence. 
All inference requests in a +// @@ sequence are directed to the same batch slot in the same +// @@ model instance over the lifetime of the sequence. This +// @@ is the default strategy. +// @@ +type ModelSequenceBatching_StrategyDirect struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: uint64 max_queue_delay_microseconds + // @@ + // @@ The maximum time, in microseconds, a candidate request + // @@ will be delayed in the sequence batch scheduling queue to + // @@ wait for additional requests for batching. Default is 0. + // @@ + MaxQueueDelayMicroseconds uint64 `protobuf:"varint,1,opt,name=max_queue_delay_microseconds,json=maxQueueDelayMicroseconds,proto3" json:"max_queue_delay_microseconds,omitempty"` + // @@ .. cpp:var:: float minimum_slot_utilization + // @@ + // @@ The minimum slot utilization that must be satisfied to + // @@ execute the batch before 'max_queue_delay_microseconds' expires. + // @@ For example, a value of 0.5 indicates that the batch should be + // @@ executed as soon as 50% or more of the slots are ready even if + // @@ the 'max_queue_delay_microseconds' timeout has not expired. + // @@ The default is 0.0, indicating that a batch will be executed + // @@ before 'max_queue_delay_microseconds' timeout expires if at least + // @@ one batch slot is ready. 'max_queue_delay_microseconds' will be + // @@ ignored unless minimum_slot_utilization is set to a non-zero + // @@ value. 
+ // @@ + MinimumSlotUtilization float32 `protobuf:"fixed32,2,opt,name=minimum_slot_utilization,json=minimumSlotUtilization,proto3" json:"minimum_slot_utilization,omitempty"` +} + +func (x *ModelSequenceBatching_StrategyDirect) Reset() { + *x = ModelSequenceBatching_StrategyDirect{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_StrategyDirect) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_StrategyDirect) ProtoMessage() {} + +func (x *ModelSequenceBatching_StrategyDirect) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[41] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_StrategyDirect.ProtoReflect.Descriptor instead. +func (*ModelSequenceBatching_StrategyDirect) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 4} +} + +func (x *ModelSequenceBatching_StrategyDirect) GetMaxQueueDelayMicroseconds() uint64 { + if x != nil { + return x.MaxQueueDelayMicroseconds + } + return 0 +} + +func (x *ModelSequenceBatching_StrategyDirect) GetMinimumSlotUtilization() float32 { + if x != nil { + return x.MinimumSlotUtilization + } + return 0 +} + +// @@ .. cpp:var:: message StrategyOldest +// @@ +// @@ The sequence batcher maintains up to 'max_candidate_sequences' +// @@ candidate sequences. 'max_candidate_sequences' can be greater +// @@ than the model's 'max_batch_size'. For inferencing the batcher +// @@ chooses from the candidate sequences up to 'max_batch_size' +// @@ inference requests. 
Requests are chosen in an oldest-first +// @@ manner across all candidate sequences. A given sequence is +// @@ not guaranteed to be assigned to the same batch slot for +// @@ all inference requests of that sequence. +// @@ +type ModelSequenceBatching_StrategyOldest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: int32 max_candidate_sequences + // @@ + // @@ Maximum number of candidate sequences that the batcher + // @@ maintains. Excess seqences are kept in an ordered backlog + // @@ and become candidates when existing candidate sequences + // @@ complete. + // @@ + MaxCandidateSequences int32 `protobuf:"varint,1,opt,name=max_candidate_sequences,json=maxCandidateSequences,proto3" json:"max_candidate_sequences,omitempty"` + // @@ .. cpp:var:: int32 preferred_batch_size (repeated) + // @@ + // @@ Preferred batch sizes for dynamic batching of candidate + // @@ sequences. If a batch of one of these sizes can be formed + // @@ it will be executed immediately. If not specified a + // @@ preferred batch size will be chosen automatically + // @@ based on model and GPU characteristics. + // @@ + PreferredBatchSize []int32 `protobuf:"varint,2,rep,packed,name=preferred_batch_size,json=preferredBatchSize,proto3" json:"preferred_batch_size,omitempty"` + // @@ .. cpp:var:: uint64 max_queue_delay_microseconds + // @@ + // @@ The maximum time, in microseconds, a candidate request + // @@ will be delayed in the dynamic batch scheduling queue to + // @@ wait for additional requests for batching. Default is 0. 
+ // @@ + MaxQueueDelayMicroseconds uint64 `protobuf:"varint,3,opt,name=max_queue_delay_microseconds,json=maxQueueDelayMicroseconds,proto3" json:"max_queue_delay_microseconds,omitempty"` +} + +func (x *ModelSequenceBatching_StrategyOldest) Reset() { + *x = ModelSequenceBatching_StrategyOldest{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelSequenceBatching_StrategyOldest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelSequenceBatching_StrategyOldest) ProtoMessage() {} + +func (x *ModelSequenceBatching_StrategyOldest) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[42] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelSequenceBatching_StrategyOldest.ProtoReflect.Descriptor instead. +func (*ModelSequenceBatching_StrategyOldest) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{11, 5} +} + +func (x *ModelSequenceBatching_StrategyOldest) GetMaxCandidateSequences() int32 { + if x != nil { + return x.MaxCandidateSequences + } + return 0 +} + +func (x *ModelSequenceBatching_StrategyOldest) GetPreferredBatchSize() []int32 { + if x != nil { + return x.PreferredBatchSize + } + return nil +} + +func (x *ModelSequenceBatching_StrategyOldest) GetMaxQueueDelayMicroseconds() uint64 { + if x != nil { + return x.MaxQueueDelayMicroseconds + } + return 0 +} + +// @@ .. 
cpp:var:: message Step +// @@ +// @@ Each step specifies a model included in the ensemble, +// @@ maps ensemble tensor names to the model input tensors, +// @@ and maps model output tensors to ensemble tensor names +// @@ +type ModelEnsembling_Step struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string model_name + // @@ + // @@ The name of the model to execute for this step of the ensemble. + // @@ + ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // @@ .. cpp:var:: int64 model_version + // @@ + // @@ The version of the model to use for inference. If -1 + // @@ the latest/most-recent version of the model is used. + // @@ + ModelVersion int64 `protobuf:"varint,2,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"` + // @@ .. cpp:var:: map input_map + // @@ + // @@ Map from name of an input tensor on this step's model to ensemble + // @@ tensor name. The ensemble tensor must have the same data type and + // @@ shape as the model input. Each model input must be assigned to + // @@ one ensemble tensor, but the same ensemble tensor can be assigned + // @@ to multiple model inputs. + // @@ + InputMap map[string]string `protobuf:"bytes,3,rep,name=input_map,json=inputMap,proto3" json:"input_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: map output_map + // @@ + // @@ Map from name of an output tensor on this step's model to ensemble + // @@ tensor name. The data type and shape of the ensemble tensor will + // @@ be inferred from the model output. It is optional to assign all + // @@ model outputs to ensemble tensors. One ensemble tensor name + // @@ can appear in an output map only once. 
+ // @@ + OutputMap map[string]string `protobuf:"bytes,4,rep,name=output_map,json=outputMap,proto3" json:"output_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // @@ .. cpp:var:: string model_namespace + // @@ + // @@ [RESERVED] currently this field is reserved for internal use, users + // @@ must not set any value to this field to avoid unexpected behavior. + // @@ + ModelNamespace string `protobuf:"bytes,5,opt,name=model_namespace,json=modelNamespace,proto3" json:"model_namespace,omitempty"` +} + +func (x *ModelEnsembling_Step) Reset() { + *x = ModelEnsembling_Step{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelEnsembling_Step) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelEnsembling_Step) ProtoMessage() {} + +func (x *ModelEnsembling_Step) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[43] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelEnsembling_Step.ProtoReflect.Descriptor instead. 
+func (*ModelEnsembling_Step) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{12, 0} +} + +func (x *ModelEnsembling_Step) GetModelName() string { + if x != nil { + return x.ModelName + } + return "" +} + +func (x *ModelEnsembling_Step) GetModelVersion() int64 { + if x != nil { + return x.ModelVersion + } + return 0 +} + +func (x *ModelEnsembling_Step) GetInputMap() map[string]string { + if x != nil { + return x.InputMap + } + return nil +} + +func (x *ModelEnsembling_Step) GetOutputMap() map[string]string { + if x != nil { + return x.OutputMap + } + return nil +} + +func (x *ModelEnsembling_Step) GetModelNamespace() string { + if x != nil { + return x.ModelNamespace + } + return "" +} + +// @@ +// @@ .. cpp:var:: message Input +// @@ +// @@ Meta data associated with an input. +// @@ +type ModelWarmup_Input struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: DataType data_type + // @@ + // @@ The data-type of the input. + // @@ + DataType DataType `protobuf:"varint,1,opt,name=data_type,json=dataType,proto3,enum=inference.v1.DataType" json:"data_type,omitempty"` + // @@ .. cpp:var:: int64 dims (repeated) + // @@ + // @@ The shape of the input tensor, not including the batch dimension. + // @@ + Dims []int64 `protobuf:"varint,2,rep,packed,name=dims,proto3" json:"dims,omitempty"` + // @@ .. cpp:var:: oneof input_data_type + // @@ + // @@ Specify how the input data is generated. If the input has STRING + // @@ data type and 'random_data' is set, the data generation will fall + // @@ back to 'zero_data'. 
+ // @@ + // + // Types that are assignable to InputDataType: + // + // *ModelWarmup_Input_ZeroData + // *ModelWarmup_Input_RandomData + // *ModelWarmup_Input_InputDataFile + InputDataType isModelWarmup_Input_InputDataType `protobuf_oneof:"input_data_type"` +} + +func (x *ModelWarmup_Input) Reset() { + *x = ModelWarmup_Input{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelWarmup_Input) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelWarmup_Input) ProtoMessage() {} + +func (x *ModelWarmup_Input) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[46] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelWarmup_Input.ProtoReflect.Descriptor instead. 
+func (*ModelWarmup_Input) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{14, 0} +} + +func (x *ModelWarmup_Input) GetDataType() DataType { + if x != nil { + return x.DataType + } + return DataType_TYPE_INVALID +} + +func (x *ModelWarmup_Input) GetDims() []int64 { + if x != nil { + return x.Dims + } + return nil +} + +func (m *ModelWarmup_Input) GetInputDataType() isModelWarmup_Input_InputDataType { + if m != nil { + return m.InputDataType + } + return nil +} + +func (x *ModelWarmup_Input) GetZeroData() bool { + if x, ok := x.GetInputDataType().(*ModelWarmup_Input_ZeroData); ok { + return x.ZeroData + } + return false +} + +func (x *ModelWarmup_Input) GetRandomData() bool { + if x, ok := x.GetInputDataType().(*ModelWarmup_Input_RandomData); ok { + return x.RandomData + } + return false +} + +func (x *ModelWarmup_Input) GetInputDataFile() string { + if x, ok := x.GetInputDataType().(*ModelWarmup_Input_InputDataFile); ok { + return x.InputDataFile + } + return "" +} + +type isModelWarmup_Input_InputDataType interface { + isModelWarmup_Input_InputDataType() +} + +type ModelWarmup_Input_ZeroData struct { + // @@ + // @@ .. cpp:var:: bool zero_data + // @@ + // @@ The identifier for using zeros as input data. Note that the + // @@ value of 'zero_data' will not be checked, instead, zero data + // @@ will be used as long as the field is set. + // @@ + ZeroData bool `protobuf:"varint,3,opt,name=zero_data,json=zeroData,proto3,oneof"` +} + +type ModelWarmup_Input_RandomData struct { + // @@ + // @@ .. cpp:var:: bool random_data + // @@ + // @@ The identifier for using random data as input data. Note that + // @@ the value of 'random_data' will not be checked, instead, + // @@ random data will be used as long as the field is set. + // @@ + RandomData bool `protobuf:"varint,4,opt,name=random_data,json=randomData,proto3,oneof"` +} + +type ModelWarmup_Input_InputDataFile struct { + // @@ .. 
cpp:var:: string input_data_file + // @@ + // @@ The file whose content will be used as raw input data in + // @@ row-major order. The file must be provided in a sub-directory + // @@ 'warmup' under the model directory. The file contents should be + // @@ in binary format. For TYPE_STRING data-type, an element is + // @@ represented by a 4-byte unsigned integer giving the length + // @@ followed by the actual bytes. + // @@ + InputDataFile string `protobuf:"bytes,5,opt,name=input_data_file,json=inputDataFile,proto3,oneof"` +} + +func (*ModelWarmup_Input_ZeroData) isModelWarmup_Input_InputDataType() {} + +func (*ModelWarmup_Input_RandomData) isModelWarmup_Input_InputDataType() {} + +func (*ModelWarmup_Input_InputDataFile) isModelWarmup_Input_InputDataType() {} + +// @@ +// @@ .. cpp:var:: message Agent +// @@ +// @@ A repository agent that should be invoked for the specified +// @@ repository actions for this model. +// @@ +type ModelRepositoryAgents_Agent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // @@ .. cpp:var:: string name + // @@ + // @@ The name of the agent. + // @@ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @@ .. cpp:var:: map parameters + // @@ + // @@ The parameters for the agent. 
+ // @@ + Parameters map[string]string `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ModelRepositoryAgents_Agent) Reset() { + *x = ModelRepositoryAgents_Agent{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[48] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ModelRepositoryAgents_Agent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModelRepositoryAgents_Agent) ProtoMessage() {} + +func (x *ModelRepositoryAgents_Agent) ProtoReflect() protoreflect.Message { + mi := &file_pkg_apis_inference_v1_model_config_proto_msgTypes[48] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModelRepositoryAgents_Agent.ProtoReflect.Descriptor instead. 
+func (*ModelRepositoryAgents_Agent) Descriptor() ([]byte, []int) { + return file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP(), []int{17, 0} +} + +func (x *ModelRepositoryAgents_Agent) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ModelRepositoryAgents_Agent) GetParameters() map[string]string { + if x != nil { + return x.Parameters + } + return nil +} + +var File_pkg_apis_inference_v1_model_config_proto protoreflect.FileDescriptor + +var file_pkg_apis_inference_v1_model_config_proto_rawDesc = []byte{ + 0x0a, 0x28, 0x70, 0x6b, 0x67, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x22, 0xc3, 0x01, 0x0a, 0x10, 0x4d, 0x6f, 0x64, + 0x65, 0x6c, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x12, 0x45, 0x0a, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x27, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, + 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, + 0x1a, 0x4c, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x16, 0x0a, 0x06, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x06, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 
0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xf9, + 0x04, 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, + 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x04, 0x6b, 0x69, 0x6e, + 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x41, 0x0a, 0x0c, 0x72, 0x61, + 0x74, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, + 0x52, 0x0b, 0x72, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, + 0x04, 0x67, 0x70, 0x75, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x05, 0x52, 0x04, 0x67, 0x70, 0x75, + 0x73, 0x12, 0x5d, 0x0a, 0x11, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x5f, 0x64, + 0x65, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x53, + 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x52, 0x10, + 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x73, + 0x12, 0x18, 
0x0a, 0x07, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, + 0x73, 0x73, 0x69, 0x76, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x70, 0x61, 0x73, + 0x73, 0x69, 0x76, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x68, 0x6f, 0x73, 0x74, 0x5f, 0x70, 0x6f, 0x6c, + 0x69, 0x63, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x68, 0x6f, 0x73, 0x74, 0x50, + 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x1a, 0xaf, 0x01, 0x0a, 0x0f, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, + 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x04, 0x6b, 0x69, 0x6e, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x44, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, + 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, + 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, 0x6b, + 0x69, 0x6e, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, + 0x22, 0x25, 0x0a, 0x13, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x44, 0x65, 0x76, + 0x69, 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x0e, 0x0a, 0x0a, 0x4b, 0x49, 0x4e, 0x44, 0x5f, + 0x4e, 0x56, 0x44, 0x4c, 0x41, 0x10, 0x00, 0x22, 0x41, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, + 0x0d, 0x0a, 0x09, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x41, 0x55, 0x54, 0x4f, 0x10, 0x00, 0x12, 0x0c, + 0x0a, 0x08, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x47, 0x50, 0x55, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, + 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x43, 0x50, 0x55, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x4b, 0x49, + 0x4e, 0x44, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x4c, 
0x10, 0x03, 0x22, 0x2a, 0x0a, 0x12, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, + 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x22, 0x8d, 0x03, 0x0a, 0x0a, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x64, 0x61, 0x74, + 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x61, 0x74, 0x61, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x37, + 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, + 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, + 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x03, 0x52, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x12, 0x3a, 0x0a, 0x07, 0x72, + 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x52, 0x07, + 0x72, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x69, 0x73, 0x5f, 0x73, 0x68, + 0x61, 0x70, 0x65, 0x5f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0d, 0x69, 0x73, 0x53, 0x68, 0x61, 0x70, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, + 0x2c, 0x0a, 0x12, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x61, 0x67, 0x67, 0x65, 
0x64, 0x5f, + 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x61, 0x6c, 0x6c, + 0x6f, 0x77, 0x52, 0x61, 0x67, 0x67, 0x65, 0x64, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x1a, 0x0a, + 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0x3b, 0x0a, 0x06, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x12, 0x0f, 0x0a, 0x0b, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x4e, 0x4f, + 0x4e, 0x45, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x4e, + 0x48, 0x57, 0x43, 0x10, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, + 0x4e, 0x43, 0x48, 0x57, 0x10, 0x02, 0x22, 0xf5, 0x01, 0x0a, 0x0b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x64, 0x61, + 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x61, 0x74, + 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x03, 0x52, 0x04, 0x64, + 0x69, 0x6d, 0x73, 0x12, 0x3a, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, + 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x52, 0x07, 0x72, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, + 0x25, 0x0a, 0x0e, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x46, 0x69, + 0x6c, 0x65, 0x6e, 0x61, 
0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x69, 0x73, 0x5f, 0x73, 0x68, 0x61, + 0x70, 0x65, 0x5f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x0d, 0x69, 0x73, 0x53, 0x68, 0x61, 0x70, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x22, 0x88, + 0x03, 0x0a, 0x0a, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x31, 0x0a, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1d, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, + 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x33, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, + 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0xcd, 0x01, 0x0a, 0x04, 0x4b, 0x69, + 0x6e, 0x64, 0x12, 0x17, 0x0a, 0x13, 0x42, 0x41, 0x54, 0x43, 0x48, 0x5f, 0x45, 0x4c, 0x45, 0x4d, + 0x45, 0x4e, 0x54, 0x5f, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x10, 0x00, 0x12, 0x23, 0x0a, 0x1f, 0x42, + 0x41, 0x54, 0x43, 0x48, 0x5f, 0x41, 0x43, 0x43, 0x55, 0x4d, 0x55, 0x4c, 0x41, 0x54, 0x45, 0x44, + 0x5f, 0x45, 0x4c, 0x45, 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x10, 0x01, + 0x12, 0x2d, 0x0a, 0x29, 0x42, 0x41, 0x54, 0x43, 0x48, 0x5f, 0x41, 0x43, 0x43, 0x55, 0x4d, 0x55, + 0x4c, 0x41, 0x54, 0x45, 0x44, 0x5f, 0x45, 0x4c, 0x45, 0x4d, 
0x45, 0x4e, 0x54, 0x5f, 0x43, 0x4f, + 0x55, 0x4e, 0x54, 0x5f, 0x57, 0x49, 0x54, 0x48, 0x5f, 0x5a, 0x45, 0x52, 0x4f, 0x10, 0x02, 0x12, + 0x24, 0x0a, 0x20, 0x42, 0x41, 0x54, 0x43, 0x48, 0x5f, 0x4d, 0x41, 0x58, 0x5f, 0x45, 0x4c, 0x45, + 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x5f, 0x41, 0x53, 0x5f, 0x53, 0x48, + 0x41, 0x50, 0x45, 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x42, 0x41, 0x54, 0x43, 0x48, 0x5f, 0x49, + 0x54, 0x45, 0x4d, 0x5f, 0x53, 0x48, 0x41, 0x50, 0x45, 0x10, 0x04, 0x12, 0x1c, 0x0a, 0x18, 0x42, + 0x41, 0x54, 0x43, 0x48, 0x5f, 0x49, 0x54, 0x45, 0x4d, 0x5f, 0x53, 0x48, 0x41, 0x50, 0x45, 0x5f, + 0x46, 0x4c, 0x41, 0x54, 0x54, 0x45, 0x4e, 0x10, 0x05, 0x22, 0xb1, 0x01, 0x0a, 0x0b, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x04, 0x6b, 0x69, + 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x21, + 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x22, 0x2a, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x22, 0x0a, 0x1e, 0x42, 0x41, 0x54, + 0x43, 0x48, 0x5f, 0x53, 0x43, 0x41, 0x54, 0x54, 0x45, 0x52, 0x5f, 0x57, 0x49, 0x54, 0x48, 0x5f, + 0x49, 0x4e, 0x50, 0x55, 0x54, 0x5f, 0x53, 0x48, 0x41, 0x50, 0x45, 0x10, 0x00, 0x22, 0xc7, 0x02, + 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x6f, + 0x6c, 0x69, 0x63, 0x79, 0x12, 0x41, 0x0a, 0x06, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x18, 0x01, + 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, + 0x06, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x03, 0x61, 0x6c, 0x6c, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x41, 0x6c, 0x6c, 0x48, 0x00, 0x52, 0x03, 0x61, 0x6c, + 0x6c, 0x12, 0x47, 0x0a, 0x08, 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x50, + 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x48, 0x00, + 0x52, 0x08, 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x1a, 0x2b, 0x0a, 0x06, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0b, 0x6e, 0x75, 0x6d, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x05, 0x0a, 0x03, 0x41, 0x6c, 0x6c, 0x1a, 0x26, + 0x0a, 0x08, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x12, 0x1a, 0x0a, 0x08, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x08, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x0f, 0x0a, 0x0d, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, + 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x22, 0x93, 0x11, 0x0a, 0x17, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, + 0x69, 0x63, 0x79, 0x12, 0x41, 0x0a, 
0x05, 0x67, 0x72, 0x61, 0x70, 0x68, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, + 0x05, 0x67, 0x72, 0x61, 0x70, 0x68, 0x12, 0x4f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, + 0x74, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x33, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, + 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x50, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x70, + 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x04, 0x63, 0x75, 0x64, 0x61, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, + 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x43, 0x75, 0x64, + 0x61, 0x52, 0x04, 0x63, 0x75, 0x64, 0x61, 0x12, 0x72, 0x0a, 0x16, 0x65, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, + 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, + 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x73, 0x52, 0x15, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x41, + 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 
0x68, 0x0a, 0x13, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, + 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, + 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, + 0x50, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x42, 0x75, 0x66, 0x66, + 0x65, 0x72, 0x52, 0x11, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x4d, + 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x6a, 0x0a, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, + 0x70, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x50, 0x69, 0x6e, 0x6e, 0x65, + 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x52, 0x12, 0x6f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, + 0x79, 0x12, 0x43, 0x0a, 0x1e, 0x67, 0x61, 0x74, 0x68, 0x65, 0x72, 0x5f, 0x6b, 0x65, 0x72, 0x6e, + 0x65, 0x6c, 0x5f, 0x62, 0x75, 0x66, 0x66, 0x65, 0x72, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, + 0x6f, 0x6c, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x1b, 0x67, 0x61, 0x74, 0x68, 0x65, + 0x72, 0x4b, 0x65, 0x72, 0x6e, 0x65, 0x6c, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x54, 0x68, 0x72, + 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x65, 0x61, 0x67, 0x65, 0x72, 0x5f, + 0x62, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, + 0x65, 0x61, 0x67, 0x65, 0x72, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x1a, 0x1d, 0x0a, + 0x05, 0x47, 
0x72, 0x61, 0x70, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x1a, 0xd3, 0x06, 0x0a, + 0x04, 0x43, 0x75, 0x64, 0x61, 0x12, 0x16, 0x0a, 0x06, 0x67, 0x72, 0x61, 0x70, 0x68, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x67, 0x72, 0x61, 0x70, 0x68, 0x73, 0x12, 0x28, 0x0a, + 0x10, 0x62, 0x75, 0x73, 0x79, 0x5f, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x62, 0x75, 0x73, 0x79, 0x57, 0x61, 0x69, + 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x53, 0x0a, 0x0a, 0x67, 0x72, 0x61, 0x70, 0x68, + 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, + 0x63, 0x52, 0x09, 0x67, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x12, 0x2c, 0x0a, 0x12, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x65, + 0x61, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x43, 0x6f, 0x70, 0x79, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x1a, 0x85, 0x05, 0x0a, 0x09, 0x47, + 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, + 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, + 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x55, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, + 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 
0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x43, 0x75, + 0x64, 0x61, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x6b, + 0x0a, 0x11, 0x67, 0x72, 0x61, 0x70, 0x68, 0x5f, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x5f, 0x62, 0x6f, + 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3f, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, + 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, + 0x2e, 0x43, 0x75, 0x64, 0x61, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x2e, + 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x52, 0x0f, 0x67, 0x72, 0x61, 0x70, + 0x68, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x1a, 0x19, 0x0a, 0x05, 0x53, + 0x68, 0x61, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x69, 0x6d, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x03, 0x52, 0x03, 0x64, 0x69, 0x6d, 0x1a, 0x83, 0x02, 0x0a, 0x0a, 0x4c, 0x6f, 0x77, 0x65, 0x72, + 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, + 0x53, 0x69, 0x7a, 0x65, 0x12, 0x60, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x4a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x2e, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x42, + 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, + 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x1a, 0x74, 0x0a, 0x0a, 0x49, 0x6e, 0x70, 0x75, 
0x74, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x50, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, + 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x43, 0x75, 0x64, + 0x61, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x53, 0x68, 0x61, 0x70, + 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x74, 0x0a, 0x0a, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x50, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x2e, 0x43, 0x75, 0x64, 0x61, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x53, 0x70, 0x65, + 0x63, 0x2e, 0x53, 0x68, 0x61, 0x70, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x1a, 0xff, 0x03, 0x0a, 0x15, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x83, 0x01, 0x0a, + 0x19, 0x67, 0x70, 0x75, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, + 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x47, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x6f, 0x6c, 
0x69, 0x63, 0x79, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x63, + 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x17, 0x67, 0x70, 0x75, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x12, 0x83, 0x01, 0x0a, 0x19, 0x63, 0x70, 0x75, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, + 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, + 0x17, 0x63, 0x70, 0x75, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x63, + 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x1a, 0xd9, 0x01, 0x0a, 0x0b, 0x41, 0x63, 0x63, + 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x77, 0x0a, 0x0a, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x57, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x63, + 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 
0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x1a, 0x3d, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x2c, 0x0a, 0x12, 0x50, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x4d, 0x65, + 0x6d, 0x6f, 0x72, 0x79, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x6e, + 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x65, 0x6e, 0x61, 0x62, + 0x6c, 0x65, 0x22, 0x49, 0x0a, 0x0d, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x50, 0x72, 0x69, 0x6f, 0x72, + 0x69, 0x74, 0x79, 0x12, 0x14, 0x0a, 0x10, 0x50, 0x52, 0x49, 0x4f, 0x52, 0x49, 0x54, 0x59, 0x5f, + 0x44, 0x45, 0x46, 0x41, 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x50, 0x52, 0x49, + 0x4f, 0x52, 0x49, 0x54, 0x59, 0x5f, 0x4d, 0x41, 0x58, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x50, + 0x52, 0x49, 0x4f, 0x52, 0x49, 0x54, 0x59, 0x5f, 0x4d, 0x49, 0x4e, 0x10, 0x02, 0x22, 0xad, 0x02, + 0x0a, 0x10, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x51, 0x75, 0x65, 0x75, 0x65, 0x50, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x12, 0x53, 0x0a, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2c, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x51, + 0x75, 0x65, 0x75, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, + 0x74, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x1c, 0x64, 0x65, 0x66, 0x61, 0x75, + 
0x6c, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, + 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x1a, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x69, 0x63, + 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x34, 0x0a, 0x16, 0x61, 0x6c, 0x6c, + 0x6f, 0x77, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, + 0x69, 0x64, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x61, 0x6c, 0x6c, 0x6f, 0x77, + 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x12, + 0x24, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x6d, 0x61, 0x78, 0x51, 0x75, 0x65, 0x75, + 0x65, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x26, 0x0a, 0x0d, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0a, 0x0a, 0x06, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, + 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x4c, 0x41, 0x59, 0x10, 0x01, 0x22, 0xc0, 0x04, + 0x0a, 0x14, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x14, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x72, 0x65, 0x64, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x05, 0x52, 0x12, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x42, + 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x3f, 0x0a, 0x1c, 0x6d, 0x61, 0x78, 0x5f, + 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x6d, 0x69, 0x63, 0x72, + 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, + 0x6d, 0x61, 0x78, 0x51, 0x75, 0x65, 0x75, 0x65, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x4d, 0x69, 0x63, + 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 
0x6e, 0x64, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x72, 0x65, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x70, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, + 0x74, 0x79, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x0e, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x73, 0x12, + 0x34, 0x0a, 0x16, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, + 0x69, 0x74, 0x79, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x14, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, + 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x50, 0x0a, 0x14, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x5f, 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x51, 0x75, 0x65, 0x75, 0x65, 0x50, 0x6f, 0x6c, + 0x69, 0x63, 0x79, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x51, 0x75, 0x65, 0x75, + 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x6f, 0x0a, 0x15, 0x70, 0x72, 0x69, 0x6f, 0x72, + 0x69, 0x74, 0x79, 0x5f, 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x44, 0x79, 0x6e, 0x61, 0x6d, + 0x69, 0x63, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x72, 0x69, 0x6f, 0x72, + 0x69, 0x74, 0x79, 0x51, 0x75, 0x65, 0x75, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x13, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 
0x79, 0x51, 0x75, 0x65, + 0x75, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x1a, 0x66, 0x0a, 0x18, 0x50, 0x72, 0x69, 0x6f, + 0x72, 0x69, 0x74, 0x79, 0x51, 0x75, 0x65, 0x75, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x51, 0x75, 0x65, 0x75, 0x65, 0x50, + 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x22, 0xf6, 0x0c, 0x0a, 0x15, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, + 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x4c, 0x0a, 0x06, 0x64, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, + 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, + 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x48, 0x00, + 0x52, 0x06, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x12, 0x4c, 0x0a, 0x06, 0x6f, 0x6c, 0x64, 0x65, + 0x73, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x65, 0x71, + 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, + 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x4f, 0x6c, 0x64, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, + 0x6f, 0x6c, 0x64, 0x65, 0x73, 0x74, 0x12, 0x43, 0x0a, 0x1e, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, + 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x6c, 0x65, 0x5f, 0x6d, 0x69, 0x63, 0x72, + 0x6f, 0x73, 
0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x1b, + 0x6d, 0x61, 0x78, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x6c, 0x65, 0x4d, + 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x55, 0x0a, 0x0d, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, + 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x12, 0x3f, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x1a, 0xf5, 0x02, 0x0a, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x12, + 0x44, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x30, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, + 0x65, 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, + 0x6e, 0x67, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x28, 0x0a, 0x10, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x5f, 0x66, + 0x61, 0x6c, 0x73, 0x65, 0x5f, 0x74, 0x72, 0x75, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x05, 0x52, + 0x0e, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x46, 0x61, 0x6c, 0x73, 0x65, 0x54, 0x72, 0x75, 0x65, 0x12, + 0x26, 0x0a, 0x0f, 0x66, 0x70, 0x33, 0x32, 0x5f, 
0x66, 0x61, 0x6c, 0x73, 0x65, 0x5f, 0x74, 0x72, + 0x75, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x02, 0x52, 0x0d, 0x66, 0x70, 0x33, 0x32, 0x46, 0x61, + 0x6c, 0x73, 0x65, 0x54, 0x72, 0x75, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, + 0x66, 0x61, 0x6c, 0x73, 0x65, 0x5f, 0x74, 0x72, 0x75, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, 0x08, + 0x52, 0x0d, 0x62, 0x6f, 0x6f, 0x6c, 0x46, 0x61, 0x6c, 0x73, 0x65, 0x54, 0x72, 0x75, 0x65, 0x12, + 0x33, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, + 0x54, 0x79, 0x70, 0x65, 0x22, 0x75, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x1a, 0x0a, 0x16, + 0x43, 0x4f, 0x4e, 0x54, 0x52, 0x4f, 0x4c, 0x5f, 0x53, 0x45, 0x51, 0x55, 0x45, 0x4e, 0x43, 0x45, + 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x1a, 0x0a, 0x16, 0x43, 0x4f, 0x4e, 0x54, + 0x52, 0x4f, 0x4c, 0x5f, 0x53, 0x45, 0x51, 0x55, 0x45, 0x4e, 0x43, 0x45, 0x5f, 0x52, 0x45, 0x41, + 0x44, 0x59, 0x10, 0x01, 0x12, 0x18, 0x0a, 0x14, 0x43, 0x4f, 0x4e, 0x54, 0x52, 0x4f, 0x4c, 0x5f, + 0x53, 0x45, 0x51, 0x55, 0x45, 0x4e, 0x43, 0x45, 0x5f, 0x45, 0x4e, 0x44, 0x10, 0x02, 0x12, 0x1b, + 0x0a, 0x17, 0x43, 0x4f, 0x4e, 0x54, 0x52, 0x4f, 0x4c, 0x5f, 0x53, 0x45, 0x51, 0x55, 0x45, 0x4e, + 0x43, 0x45, 0x5f, 0x43, 0x4f, 0x52, 0x52, 0x49, 0x44, 0x10, 0x03, 0x1a, 0x69, 0x0a, 0x0c, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x45, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x2b, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 
0x61, 0x74, + 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x52, 0x07, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x1a, 0xb7, 0x01, 0x0a, 0x0c, 0x49, 0x6e, 0x69, 0x74, 0x69, + 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, + 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, + 0x64, 0x69, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x03, 0x52, 0x04, 0x64, 0x69, 0x6d, 0x73, + 0x12, 0x1d, 0x0a, 0x09, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x7a, 0x65, 0x72, 0x6f, 0x44, 0x61, 0x74, 0x61, 0x12, + 0x1d, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x1a, 0xe7, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x64, 0x61, + 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x61, 0x74, + 0x61, 0x54, 0x79, 0x70, 
0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x03, 0x52, 0x04, 0x64, + 0x69, 0x6d, 0x73, 0x12, 0x55, 0x0a, 0x0d, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x53, + 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x2e, + 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x0c, 0x69, 0x6e, + 0x69, 0x74, 0x69, 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x8b, 0x01, 0x0a, 0x0e, 0x53, + 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x12, 0x3f, 0x0a, + 0x1c, 0x6d, 0x61, 0x78, 0x5f, 0x71, 0x75, 0x65, 0x75, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x61, 0x79, + 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x19, 0x6d, 0x61, 0x78, 0x51, 0x75, 0x65, 0x75, 0x65, 0x44, 0x65, 0x6c, + 0x61, 0x79, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x38, + 0x0a, 0x18, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x75, + 0x74, 0x69, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, + 0x52, 0x16, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x53, 0x6c, 0x6f, 0x74, 0x55, 0x74, 0x69, + 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xbb, 0x01, 0x0a, 0x0e, 0x53, 0x74, 0x72, + 0x61, 0x74, 0x65, 0x67, 0x79, 0x4f, 0x6c, 0x64, 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, 0x17, 0x6d, + 0x61, 0x78, 0x5f, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x65, 0x71, + 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x15, 0x6d, 0x61, + 0x78, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 
0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, + 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x14, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, + 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x05, 0x52, 0x12, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x3f, 0x0a, 0x1c, 0x6d, 0x61, 0x78, 0x5f, 0x71, 0x75, 0x65, + 0x75, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x65, + 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x19, 0x6d, 0x61, 0x78, + 0x51, 0x75, 0x65, 0x75, 0x65, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, + 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x42, 0x11, 0x0a, 0x0f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, + 0x67, 0x79, 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, 0x22, 0xdb, 0x03, 0x0a, 0x0f, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x45, 0x6e, 0x73, 0x65, 0x6d, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x12, 0x36, 0x0a, + 0x04, 0x73, 0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x45, 0x6e, 0x73, 0x65, 0x6d, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, + 0x04, 0x73, 0x74, 0x65, 0x70, 0x1a, 0x8f, 0x03, 0x0a, 0x04, 0x53, 0x74, 0x65, 0x70, 0x12, 0x1d, + 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, + 0x0d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x4d, 0x0a, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x61, 0x70, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 
0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x45, 0x6e, 0x73, 0x65, 0x6d, 0x62, + 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x4d, + 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x4d, 0x61, + 0x70, 0x12, 0x50, 0x0a, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6d, 0x61, 0x70, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x45, 0x6e, 0x73, 0x65, 0x6d, 0x62, + 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x4d, 0x61, 0x70, 0x12, 0x27, 0x0a, 0x0f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x6d, 0x6f, + 0x64, 0x65, 0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x1a, 0x3b, 0x0a, 0x0d, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3c, 0x0a, 0x0e, 0x4f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x33, 0x0a, 0x0e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xc3, 0x03, 0x0a, + 0x0b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x57, 0x61, 0x72, 0x6d, 0x75, 0x70, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, + 0x3d, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x25, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x57, 0x61, 0x72, 0x6d, 0x75, 0x70, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x14, + 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x1a, 0xcf, 0x01, 0x0a, 0x05, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x33, + 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x16, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x03, 0x52, 0x04, 0x64, 0x69, 0x6d, 0x73, 0x12, 0x1d, 0x0a, 0x09, 0x7a, 0x65, 0x72, 0x6f, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x7a, 0x65, + 0x72, 0x6f, 0x44, 0x61, 0x74, 0x61, 0x12, 0x21, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x64, 0x6f, 0x6d, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0a, 0x72, + 0x61, 0x6e, 0x64, 0x6f, 0x6d, 0x44, 0x61, 0x74, 0x61, 0x12, 0x28, 0x0a, 
0x0f, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, + 0x69, 0x6c, 0x65, 0x42, 0x11, 0x0a, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, + 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x1a, 0x5a, 0x0a, 0x0b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x35, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x57, 0x61, 0x72, 0x6d, 0x75, + 0x70, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x41, 0x0a, 0x0f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x6f, 0x70, 0x5f, 0x6c, 0x69, 0x62, 0x72, + 0x61, 0x72, 0x79, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x11, 0x6f, 0x70, 0x4c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, 0x46, 0x69, 0x6c, + 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x36, 0x0a, 0x16, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, + 0x1c, 0x0a, 0x09, 0x64, 0x65, 0x63, 0x6f, 0x75, 0x70, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x09, 0x64, 0x65, 0x63, 0x6f, 0x75, 0x70, 0x6c, 0x65, 0x64, 0x22, 0x92, 0x02, + 0x0a, 0x15, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, + 0x79, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x41, 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 
0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x41, 0x67, 0x65, + 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xb5, 0x01, 0x0a, 0x05, 0x41, + 0x67, 0x65, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x59, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x41, 0x67, 0x65, 0x6e, 0x74, + 0x73, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x1a, 0x3d, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x2c, 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x6e, 0x61, 0x62, + 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, + 0x22, 0xdd, 0x0d, 0x0a, 0x0b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 
0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, + 0x12, 0x18, 0x0a, 0x07, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x12, 0x47, 0x0a, 0x0e, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x6f, + 0x6c, 0x69, 0x63, 0x79, 0x52, 0x0d, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, + 0x69, 0x63, 0x79, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, + 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x6d, 0x61, 0x78, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x31, 0x0a, 0x06, 0x6f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x39, 0x0a, 0x0b, + 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x14, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x52, 0x0a, 0x62, 0x61, 0x74, + 0x63, 0x68, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x3c, 0x0a, 0x0c, 0x62, 0x61, 0x74, 0x63, 0x68, + 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 
0x19, 0x2e, + 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x0b, 0x62, 0x61, 0x74, 0x63, 0x68, 0x4f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x49, 0x0a, 0x0c, 0x6f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x69, 0x6e, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x52, 0x0c, 0x6f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x4f, 0x0a, 0x10, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x62, 0x61, 0x74, 0x63, + 0x68, 0x69, 0x6e, 0x67, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x44, + 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x48, 0x00, + 0x52, 0x0f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, + 0x67, 0x12, 0x52, 0x0a, 0x11, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x62, 0x61, + 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x69, + 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, + 0x67, 0x48, 0x00, 0x52, 0x10, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x50, 0x0a, 0x13, 0x65, 0x6e, 0x73, 0x65, 0x6d, 0x62, 0x6c, + 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x69, 0x6e, 0x67, 0x18, 0x0f, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 
0x64, 0x65, 0x6c, 0x45, 0x6e, 0x73, 0x65, 0x6d, 0x62, 0x6c, 0x69, 0x6e, + 0x67, 0x48, 0x00, 0x52, 0x12, 0x65, 0x6e, 0x73, 0x65, 0x6d, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, + 0x65, 0x64, 0x75, 0x6c, 0x69, 0x6e, 0x67, 0x12, 0x47, 0x0a, 0x0e, 0x69, 0x6e, 0x73, 0x74, 0x61, + 0x6e, 0x63, 0x65, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, + 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x47, 0x72, 0x6f, 0x75, + 0x70, 0x52, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x12, 0x34, 0x0a, 0x16, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x14, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x46, 0x69, + 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x5d, 0x0a, 0x12, 0x63, 0x63, 0x5f, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x09, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x43, 0x63, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x10, 0x63, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x46, 0x69, 0x6c, 0x65, + 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x4a, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, + 0x74, 0x61, 0x67, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x69, 0x6e, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x61, 0x67, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x6d, 0x65, 0x74, 
0x72, 0x69, 0x63, 0x54, 0x61, 0x67, + 0x73, 0x12, 0x49, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, + 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x3c, 0x0a, 0x0c, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x77, 0x61, 0x72, 0x6d, 0x75, 0x70, 0x18, 0x10, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x57, 0x61, 0x72, 0x6d, 0x75, 0x70, 0x52, 0x0b, 0x6d, + 0x6f, 0x64, 0x65, 0x6c, 0x57, 0x61, 0x72, 0x6d, 0x75, 0x70, 0x12, 0x48, 0x0a, 0x10, 0x6d, 0x6f, + 0x64, 0x65, 0x6c, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x12, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x5e, 0x0a, 0x18, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, + 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x54, 0x72, 0x61, 0x6e, 0x73, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x52, 0x16, 0x6d, 0x6f, + 0x64, 0x65, 0x6c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f, + 0x6c, 0x69, 0x63, 0x79, 0x12, 0x5b, 0x0a, 0x17, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x72, 0x65, + 
0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, + 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x6f, 0x72, 0x79, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x15, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x41, 0x67, 0x65, 0x6e, 0x74, + 0x73, 0x12, 0x47, 0x0a, 0x0e, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6e, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x52, 0x0d, 0x72, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x1a, 0x43, 0x0a, 0x15, 0x43, 0x63, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, + 0x3d, 0x0a, 0x0f, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5b, + 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x13, 0x0a, 0x11, 0x73, + 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x68, 0x6f, 0x69, 0x63, 0x65, + 0x2a, 0xfa, 0x01, 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, + 0x0c, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, + 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x01, 0x12, 0x0e, + 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, 0x38, 0x10, 0x02, 0x12, 0x0f, + 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, 0x31, 0x36, 0x10, 0x03, 0x12, + 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x04, + 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, 0x36, 0x34, 0x10, + 0x05, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x38, 0x10, 0x06, + 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x31, 0x36, 0x10, 0x07, + 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x08, + 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x36, 0x34, 0x10, 0x09, + 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x50, 0x31, 0x36, 0x10, 0x0a, 0x12, + 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x50, 0x33, 0x32, 0x10, 0x0b, 0x12, 0x0d, + 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x50, 0x36, 0x34, 0x10, 0x0c, 0x12, 0x0f, 0x0a, + 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x0d, 0x12, 0x0d, + 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x46, 0x31, 0x36, 0x10, 
0x0e, 0x42, 0x2c, 0x5a, + 0x2a, 0x64, 0x37, 0x79, 0x2e, 0x69, 0x6f, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x6b, 0x67, 0x2f, + 0x61, 0x70, 0x69, 0x73, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2f, 0x76, + 0x31, 0x3b, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_pkg_apis_inference_v1_model_config_proto_rawDescOnce sync.Once + file_pkg_apis_inference_v1_model_config_proto_rawDescData = file_pkg_apis_inference_v1_model_config_proto_rawDesc +) + +func file_pkg_apis_inference_v1_model_config_proto_rawDescGZIP() []byte { + file_pkg_apis_inference_v1_model_config_proto_rawDescOnce.Do(func() { + file_pkg_apis_inference_v1_model_config_proto_rawDescData = protoimpl.X.CompressGZIP(file_pkg_apis_inference_v1_model_config_proto_rawDescData) + }) + return file_pkg_apis_inference_v1_model_config_proto_rawDescData +} + +var file_pkg_apis_inference_v1_model_config_proto_enumTypes = make([]protoimpl.EnumInfo, 9) +var file_pkg_apis_inference_v1_model_config_proto_msgTypes = make([]protoimpl.MessageInfo, 53) +var file_pkg_apis_inference_v1_model_config_proto_goTypes = []interface{}{ + (DataType)(0), // 0: inference.v1.DataType + (ModelInstanceGroup_Kind)(0), // 1: inference.v1.ModelInstanceGroup.Kind + (ModelInstanceGroup_SecondaryDevice_SecondaryDeviceKind)(0), // 2: inference.v1.ModelInstanceGroup.SecondaryDevice.SecondaryDeviceKind + (ModelInput_Format)(0), // 3: inference.v1.ModelInput.Format + (BatchInput_Kind)(0), // 4: inference.v1.BatchInput.Kind + (BatchOutput_Kind)(0), // 5: inference.v1.BatchOutput.Kind + (ModelOptimizationPolicy_ModelPriority)(0), // 6: inference.v1.ModelOptimizationPolicy.ModelPriority + (ModelQueuePolicy_TimeoutAction)(0), // 7: inference.v1.ModelQueuePolicy.TimeoutAction + (ModelSequenceBatching_Control_Kind)(0), // 8: inference.v1.ModelSequenceBatching.Control.Kind + (*ModelRateLimiter)(nil), // 9: inference.v1.ModelRateLimiter + (*ModelInstanceGroup)(nil), 
// 10: inference.v1.ModelInstanceGroup + (*ModelTensorReshape)(nil), // 11: inference.v1.ModelTensorReshape + (*ModelInput)(nil), // 12: inference.v1.ModelInput + (*ModelOutput)(nil), // 13: inference.v1.ModelOutput + (*BatchInput)(nil), // 14: inference.v1.BatchInput + (*BatchOutput)(nil), // 15: inference.v1.BatchOutput + (*ModelVersionPolicy)(nil), // 16: inference.v1.ModelVersionPolicy + (*ModelOptimizationPolicy)(nil), // 17: inference.v1.ModelOptimizationPolicy + (*ModelQueuePolicy)(nil), // 18: inference.v1.ModelQueuePolicy + (*ModelDynamicBatching)(nil), // 19: inference.v1.ModelDynamicBatching + (*ModelSequenceBatching)(nil), // 20: inference.v1.ModelSequenceBatching + (*ModelEnsembling)(nil), // 21: inference.v1.ModelEnsembling + (*ModelParameter)(nil), // 22: inference.v1.ModelParameter + (*ModelWarmup)(nil), // 23: inference.v1.ModelWarmup + (*ModelOperations)(nil), // 24: inference.v1.ModelOperations + (*ModelTransactionPolicy)(nil), // 25: inference.v1.ModelTransactionPolicy + (*ModelRepositoryAgents)(nil), // 26: inference.v1.ModelRepositoryAgents + (*ModelResponseCache)(nil), // 27: inference.v1.ModelResponseCache + (*ModelConfig)(nil), // 28: inference.v1.ModelConfig + (*ModelRateLimiter_Resource)(nil), // 29: inference.v1.ModelRateLimiter.Resource + (*ModelInstanceGroup_SecondaryDevice)(nil), // 30: inference.v1.ModelInstanceGroup.SecondaryDevice + (*ModelVersionPolicy_Latest)(nil), // 31: inference.v1.ModelVersionPolicy.Latest + (*ModelVersionPolicy_All)(nil), // 32: inference.v1.ModelVersionPolicy.All + (*ModelVersionPolicy_Specific)(nil), // 33: inference.v1.ModelVersionPolicy.Specific + (*ModelOptimizationPolicy_Graph)(nil), // 34: inference.v1.ModelOptimizationPolicy.Graph + (*ModelOptimizationPolicy_Cuda)(nil), // 35: inference.v1.ModelOptimizationPolicy.Cuda + (*ModelOptimizationPolicy_ExecutionAccelerators)(nil), // 36: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators + (*ModelOptimizationPolicy_PinnedMemoryBuffer)(nil), // 37: 
inference.v1.ModelOptimizationPolicy.PinnedMemoryBuffer + (*ModelOptimizationPolicy_Cuda_GraphSpec)(nil), // 38: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec + (*ModelOptimizationPolicy_Cuda_GraphSpec_Shape)(nil), // 39: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.Shape + (*ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound)(nil), // 40: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound + nil, // 41: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.InputEntry + nil, // 42: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound.InputEntry + (*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator)(nil), // 43: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator + nil, // 44: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator.ParametersEntry + nil, // 45: inference.v1.ModelDynamicBatching.PriorityQueuePolicyEntry + (*ModelSequenceBatching_Control)(nil), // 46: inference.v1.ModelSequenceBatching.Control + (*ModelSequenceBatching_ControlInput)(nil), // 47: inference.v1.ModelSequenceBatching.ControlInput + (*ModelSequenceBatching_InitialState)(nil), // 48: inference.v1.ModelSequenceBatching.InitialState + (*ModelSequenceBatching_State)(nil), // 49: inference.v1.ModelSequenceBatching.State + (*ModelSequenceBatching_StrategyDirect)(nil), // 50: inference.v1.ModelSequenceBatching.StrategyDirect + (*ModelSequenceBatching_StrategyOldest)(nil), // 51: inference.v1.ModelSequenceBatching.StrategyOldest + (*ModelEnsembling_Step)(nil), // 52: inference.v1.ModelEnsembling.Step + nil, // 53: inference.v1.ModelEnsembling.Step.InputMapEntry + nil, // 54: inference.v1.ModelEnsembling.Step.OutputMapEntry + (*ModelWarmup_Input)(nil), // 55: inference.v1.ModelWarmup.Input + nil, // 56: inference.v1.ModelWarmup.InputsEntry + (*ModelRepositoryAgents_Agent)(nil), // 57: inference.v1.ModelRepositoryAgents.Agent + nil, // 58: inference.v1.ModelRepositoryAgents.Agent.ParametersEntry + nil, // 59: 
inference.v1.ModelConfig.CcModelFilenamesEntry + nil, // 60: inference.v1.ModelConfig.MetricTagsEntry + nil, // 61: inference.v1.ModelConfig.ParametersEntry +} +var file_pkg_apis_inference_v1_model_config_proto_depIdxs = []int32{ + 29, // 0: inference.v1.ModelRateLimiter.resources:type_name -> inference.v1.ModelRateLimiter.Resource + 1, // 1: inference.v1.ModelInstanceGroup.kind:type_name -> inference.v1.ModelInstanceGroup.Kind + 9, // 2: inference.v1.ModelInstanceGroup.rate_limiter:type_name -> inference.v1.ModelRateLimiter + 30, // 3: inference.v1.ModelInstanceGroup.secondary_devices:type_name -> inference.v1.ModelInstanceGroup.SecondaryDevice + 0, // 4: inference.v1.ModelInput.data_type:type_name -> inference.v1.DataType + 3, // 5: inference.v1.ModelInput.format:type_name -> inference.v1.ModelInput.Format + 11, // 6: inference.v1.ModelInput.reshape:type_name -> inference.v1.ModelTensorReshape + 0, // 7: inference.v1.ModelOutput.data_type:type_name -> inference.v1.DataType + 11, // 8: inference.v1.ModelOutput.reshape:type_name -> inference.v1.ModelTensorReshape + 4, // 9: inference.v1.BatchInput.kind:type_name -> inference.v1.BatchInput.Kind + 0, // 10: inference.v1.BatchInput.data_type:type_name -> inference.v1.DataType + 5, // 11: inference.v1.BatchOutput.kind:type_name -> inference.v1.BatchOutput.Kind + 31, // 12: inference.v1.ModelVersionPolicy.latest:type_name -> inference.v1.ModelVersionPolicy.Latest + 32, // 13: inference.v1.ModelVersionPolicy.all:type_name -> inference.v1.ModelVersionPolicy.All + 33, // 14: inference.v1.ModelVersionPolicy.specific:type_name -> inference.v1.ModelVersionPolicy.Specific + 34, // 15: inference.v1.ModelOptimizationPolicy.graph:type_name -> inference.v1.ModelOptimizationPolicy.Graph + 6, // 16: inference.v1.ModelOptimizationPolicy.priority:type_name -> inference.v1.ModelOptimizationPolicy.ModelPriority + 35, // 17: inference.v1.ModelOptimizationPolicy.cuda:type_name -> inference.v1.ModelOptimizationPolicy.Cuda + 36, // 18: 
inference.v1.ModelOptimizationPolicy.execution_accelerators:type_name -> inference.v1.ModelOptimizationPolicy.ExecutionAccelerators + 37, // 19: inference.v1.ModelOptimizationPolicy.input_pinned_memory:type_name -> inference.v1.ModelOptimizationPolicy.PinnedMemoryBuffer + 37, // 20: inference.v1.ModelOptimizationPolicy.output_pinned_memory:type_name -> inference.v1.ModelOptimizationPolicy.PinnedMemoryBuffer + 7, // 21: inference.v1.ModelQueuePolicy.timeout_action:type_name -> inference.v1.ModelQueuePolicy.TimeoutAction + 18, // 22: inference.v1.ModelDynamicBatching.default_queue_policy:type_name -> inference.v1.ModelQueuePolicy + 45, // 23: inference.v1.ModelDynamicBatching.priority_queue_policy:type_name -> inference.v1.ModelDynamicBatching.PriorityQueuePolicyEntry + 50, // 24: inference.v1.ModelSequenceBatching.direct:type_name -> inference.v1.ModelSequenceBatching.StrategyDirect + 51, // 25: inference.v1.ModelSequenceBatching.oldest:type_name -> inference.v1.ModelSequenceBatching.StrategyOldest + 47, // 26: inference.v1.ModelSequenceBatching.control_input:type_name -> inference.v1.ModelSequenceBatching.ControlInput + 49, // 27: inference.v1.ModelSequenceBatching.state:type_name -> inference.v1.ModelSequenceBatching.State + 52, // 28: inference.v1.ModelEnsembling.step:type_name -> inference.v1.ModelEnsembling.Step + 56, // 29: inference.v1.ModelWarmup.inputs:type_name -> inference.v1.ModelWarmup.InputsEntry + 57, // 30: inference.v1.ModelRepositoryAgents.agents:type_name -> inference.v1.ModelRepositoryAgents.Agent + 16, // 31: inference.v1.ModelConfig.version_policy:type_name -> inference.v1.ModelVersionPolicy + 12, // 32: inference.v1.ModelConfig.input:type_name -> inference.v1.ModelInput + 13, // 33: inference.v1.ModelConfig.output:type_name -> inference.v1.ModelOutput + 14, // 34: inference.v1.ModelConfig.batch_input:type_name -> inference.v1.BatchInput + 15, // 35: inference.v1.ModelConfig.batch_output:type_name -> inference.v1.BatchOutput + 17, // 36: 
inference.v1.ModelConfig.optimization:type_name -> inference.v1.ModelOptimizationPolicy + 19, // 37: inference.v1.ModelConfig.dynamic_batching:type_name -> inference.v1.ModelDynamicBatching + 20, // 38: inference.v1.ModelConfig.sequence_batching:type_name -> inference.v1.ModelSequenceBatching + 21, // 39: inference.v1.ModelConfig.ensemble_scheduling:type_name -> inference.v1.ModelEnsembling + 10, // 40: inference.v1.ModelConfig.instance_group:type_name -> inference.v1.ModelInstanceGroup + 59, // 41: inference.v1.ModelConfig.cc_model_filenames:type_name -> inference.v1.ModelConfig.CcModelFilenamesEntry + 60, // 42: inference.v1.ModelConfig.metric_tags:type_name -> inference.v1.ModelConfig.MetricTagsEntry + 61, // 43: inference.v1.ModelConfig.parameters:type_name -> inference.v1.ModelConfig.ParametersEntry + 23, // 44: inference.v1.ModelConfig.model_warmup:type_name -> inference.v1.ModelWarmup + 24, // 45: inference.v1.ModelConfig.model_operations:type_name -> inference.v1.ModelOperations + 25, // 46: inference.v1.ModelConfig.model_transaction_policy:type_name -> inference.v1.ModelTransactionPolicy + 26, // 47: inference.v1.ModelConfig.model_repository_agents:type_name -> inference.v1.ModelRepositoryAgents + 27, // 48: inference.v1.ModelConfig.response_cache:type_name -> inference.v1.ModelResponseCache + 2, // 49: inference.v1.ModelInstanceGroup.SecondaryDevice.kind:type_name -> inference.v1.ModelInstanceGroup.SecondaryDevice.SecondaryDeviceKind + 38, // 50: inference.v1.ModelOptimizationPolicy.Cuda.graph_spec:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec + 43, // 51: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.gpu_execution_accelerator:type_name -> inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator + 43, // 52: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.cpu_execution_accelerator:type_name -> inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator + 41, // 53: 
inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.input:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.InputEntry + 40, // 54: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.graph_lower_bound:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound + 42, // 55: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound.input:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound.InputEntry + 39, // 56: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.InputEntry.value:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.Shape + 39, // 57: inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.LowerBound.InputEntry.value:type_name -> inference.v1.ModelOptimizationPolicy.Cuda.GraphSpec.Shape + 44, // 58: inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator.parameters:type_name -> inference.v1.ModelOptimizationPolicy.ExecutionAccelerators.Accelerator.ParametersEntry + 18, // 59: inference.v1.ModelDynamicBatching.PriorityQueuePolicyEntry.value:type_name -> inference.v1.ModelQueuePolicy + 8, // 60: inference.v1.ModelSequenceBatching.Control.kind:type_name -> inference.v1.ModelSequenceBatching.Control.Kind + 0, // 61: inference.v1.ModelSequenceBatching.Control.data_type:type_name -> inference.v1.DataType + 46, // 62: inference.v1.ModelSequenceBatching.ControlInput.control:type_name -> inference.v1.ModelSequenceBatching.Control + 0, // 63: inference.v1.ModelSequenceBatching.InitialState.data_type:type_name -> inference.v1.DataType + 0, // 64: inference.v1.ModelSequenceBatching.State.data_type:type_name -> inference.v1.DataType + 48, // 65: inference.v1.ModelSequenceBatching.State.initial_state:type_name -> inference.v1.ModelSequenceBatching.InitialState + 53, // 66: inference.v1.ModelEnsembling.Step.input_map:type_name -> inference.v1.ModelEnsembling.Step.InputMapEntry + 54, // 67: inference.v1.ModelEnsembling.Step.output_map:type_name -> 
inference.v1.ModelEnsembling.Step.OutputMapEntry + 0, // 68: inference.v1.ModelWarmup.Input.data_type:type_name -> inference.v1.DataType + 55, // 69: inference.v1.ModelWarmup.InputsEntry.value:type_name -> inference.v1.ModelWarmup.Input + 58, // 70: inference.v1.ModelRepositoryAgents.Agent.parameters:type_name -> inference.v1.ModelRepositoryAgents.Agent.ParametersEntry + 22, // 71: inference.v1.ModelConfig.ParametersEntry.value:type_name -> inference.v1.ModelParameter + 72, // [72:72] is the sub-list for method output_type + 72, // [72:72] is the sub-list for method input_type + 72, // [72:72] is the sub-list for extension type_name + 72, // [72:72] is the sub-list for extension extendee + 0, // [0:72] is the sub-list for field type_name +} + +func init() { file_pkg_apis_inference_v1_model_config_proto_init() } +func file_pkg_apis_inference_v1_model_config_proto_init() { + if File_pkg_apis_inference_v1_model_config_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_pkg_apis_inference_v1_model_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelRateLimiter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInstanceGroup); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelTensorReshape); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInput); i { + case 0: + 
return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOutput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BatchInput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BatchOutput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelVersionPolicy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelQueuePolicy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelDynamicBatching); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelEnsembling); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelWarmup); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOperations); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelTransactionPolicy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelRepositoryAgents); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + 
case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelResponseCache); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelRateLimiter_Resource); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelInstanceGroup_SecondaryDevice); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelVersionPolicy_Latest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelVersionPolicy_All); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelVersionPolicy_Specific); i { + case 0: + return &v.state + case 1: 
+ return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_Graph); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_Cuda); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_ExecutionAccelerators); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_PinnedMemoryBuffer); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_Cuda_GraphSpec); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_Cuda_GraphSpec_Shape); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[31].Exporter = func(v 
interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelOptimizationPolicy_ExecutionAccelerators_Accelerator); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_Control); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_ControlInput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_InitialState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_State); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_StrategyDirect); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return 
&v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelSequenceBatching_StrategyOldest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelEnsembling_Step); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelWarmup_Input); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ModelRepositoryAgents_Agent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[7].OneofWrappers = []interface{}{ + (*ModelVersionPolicy_Latest_)(nil), + (*ModelVersionPolicy_All_)(nil), + (*ModelVersionPolicy_Specific_)(nil), + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[11].OneofWrappers = []interface{}{ + (*ModelSequenceBatching_Direct)(nil), + (*ModelSequenceBatching_Oldest)(nil), + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[19].OneofWrappers = []interface{}{ + (*ModelConfig_DynamicBatching)(nil), + (*ModelConfig_SequenceBatching)(nil), + (*ModelConfig_EnsembleScheduling)(nil), + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[39].OneofWrappers = []interface{}{ + (*ModelSequenceBatching_InitialState_ZeroData)(nil), + 
(*ModelSequenceBatching_InitialState_DataFile)(nil), + } + file_pkg_apis_inference_v1_model_config_proto_msgTypes[46].OneofWrappers = []interface{}{ + (*ModelWarmup_Input_ZeroData)(nil), + (*ModelWarmup_Input_RandomData)(nil), + (*ModelWarmup_Input_InputDataFile)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_pkg_apis_inference_v1_model_config_proto_rawDesc, + NumEnums: 9, + NumMessages: 53, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_pkg_apis_inference_v1_model_config_proto_goTypes, + DependencyIndexes: file_pkg_apis_inference_v1_model_config_proto_depIdxs, + EnumInfos: file_pkg_apis_inference_v1_model_config_proto_enumTypes, + MessageInfos: file_pkg_apis_inference_v1_model_config_proto_msgTypes, + }.Build() + File_pkg_apis_inference_v1_model_config_proto = out.File + file_pkg_apis_inference_v1_model_config_proto_rawDesc = nil + file_pkg_apis_inference_v1_model_config_proto_goTypes = nil + file_pkg_apis_inference_v1_model_config_proto_depIdxs = nil +} diff --git a/pkg/apis/inference/v1/model_config.pb.validate.go b/pkg/apis/inference/v1/model_config.pb.validate.go new file mode 100644 index 0000000..d88c55d --- /dev/null +++ b/pkg/apis/inference/v1/model_config.pb.validate.go @@ -0,0 +1,6203 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. 
+// source: pkg/apis/inference/v1/model_config.proto + +package inference + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "sort" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} + _ = sort.Sort +) + +// Validate checks the field values on ModelRateLimiter with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelRateLimiter) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelRateLimiter with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelRateLimiterMultiError, or nil if none found. 
+func (m *ModelRateLimiter) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelRateLimiter) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetResources() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelRateLimiterValidationError{ + field: fmt.Sprintf("Resources[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelRateLimiterValidationError{ + field: fmt.Sprintf("Resources[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelRateLimiterValidationError{ + field: fmt.Sprintf("Resources[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + // no validation rules for Priority + + if len(errors) > 0 { + return ModelRateLimiterMultiError(errors) + } + + return nil +} + +// ModelRateLimiterMultiError is an error wrapping multiple validation errors +// returned by ModelRateLimiter.ValidateAll() if the designated constraints +// aren't met. +type ModelRateLimiterMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelRateLimiterMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelRateLimiterMultiError) AllErrors() []error { return m } + +// ModelRateLimiterValidationError is the validation error returned by +// ModelRateLimiter.Validate if the designated constraints aren't met. 
+type ModelRateLimiterValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelRateLimiterValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelRateLimiterValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelRateLimiterValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelRateLimiterValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelRateLimiterValidationError) ErrorName() string { return "ModelRateLimiterValidationError" } + +// Error satisfies the builtin error interface +func (e ModelRateLimiterValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelRateLimiter.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelRateLimiterValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelRateLimiterValidationError{} + +// Validate checks the field values on ModelInstanceGroup with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelInstanceGroup) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelInstanceGroup with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelInstanceGroupMultiError, or nil if none found. 
+func (m *ModelInstanceGroup) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelInstanceGroup) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Kind + + // no validation rules for Count + + if all { + switch v := interface{}(m.GetRateLimiter()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInstanceGroupValidationError{ + field: "RateLimiter", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelInstanceGroupValidationError{ + field: "RateLimiter", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetRateLimiter()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInstanceGroupValidationError{ + field: "RateLimiter", + reason: "embedded message failed validation", + cause: err, + } + } + } + + for idx, item := range m.GetSecondaryDevices() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInstanceGroupValidationError{ + field: fmt.Sprintf("SecondaryDevices[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelInstanceGroupValidationError{ + field: fmt.Sprintf("SecondaryDevices[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInstanceGroupValidationError{ + field: fmt.Sprintf("SecondaryDevices[%v]", idx), + reason: 
"embedded message failed validation", + cause: err, + } + } + } + + } + + // no validation rules for Passive + + // no validation rules for HostPolicy + + if len(errors) > 0 { + return ModelInstanceGroupMultiError(errors) + } + + return nil +} + +// ModelInstanceGroupMultiError is an error wrapping multiple validation errors +// returned by ModelInstanceGroup.ValidateAll() if the designated constraints +// aren't met. +type ModelInstanceGroupMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelInstanceGroupMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelInstanceGroupMultiError) AllErrors() []error { return m } + +// ModelInstanceGroupValidationError is the validation error returned by +// ModelInstanceGroup.Validate if the designated constraints aren't met. +type ModelInstanceGroupValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelInstanceGroupValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelInstanceGroupValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelInstanceGroupValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelInstanceGroupValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelInstanceGroupValidationError) ErrorName() string { + return "ModelInstanceGroupValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInstanceGroupValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInstanceGroup.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInstanceGroupValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInstanceGroupValidationError{} + +// Validate checks the field values on ModelTensorReshape with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelTensorReshape) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelTensorReshape with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelTensorReshapeMultiError, or nil if none found. +func (m *ModelTensorReshape) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelTensorReshape) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ModelTensorReshapeMultiError(errors) + } + + return nil +} + +// ModelTensorReshapeMultiError is an error wrapping multiple validation errors +// returned by ModelTensorReshape.ValidateAll() if the designated constraints +// aren't met. +type ModelTensorReshapeMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelTensorReshapeMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelTensorReshapeMultiError) AllErrors() []error { return m } + +// ModelTensorReshapeValidationError is the validation error returned by +// ModelTensorReshape.Validate if the designated constraints aren't met. +type ModelTensorReshapeValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelTensorReshapeValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelTensorReshapeValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelTensorReshapeValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelTensorReshapeValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelTensorReshapeValidationError) ErrorName() string { + return "ModelTensorReshapeValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelTensorReshapeValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelTensorReshape.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelTensorReshapeValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelTensorReshapeValidationError{} + +// Validate checks the field values on ModelInput with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. 
+func (m *ModelInput) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelInput with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ModelInputMultiError, or +// nil if none found. +func (m *ModelInput) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelInput) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for DataType + + // no validation rules for Format + + if all { + switch v := interface{}(m.GetReshape()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelInputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelInputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetReshape()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelInputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for IsShapeTensor + + // no validation rules for AllowRaggedBatch + + // no validation rules for Optional + + if len(errors) > 0 { + return ModelInputMultiError(errors) + } + + return nil +} + +// ModelInputMultiError is an error wrapping multiple validation errors +// returned by ModelInput.ValidateAll() if the designated constraints aren't met. +type ModelInputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelInputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelInputMultiError) AllErrors() []error { return m } + +// ModelInputValidationError is the validation error returned by +// ModelInput.Validate if the designated constraints aren't met. +type ModelInputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelInputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelInputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelInputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelInputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelInputValidationError) ErrorName() string { return "ModelInputValidationError" } + +// Error satisfies the builtin error interface +func (e ModelInputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInput.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInputValidationError{} + +// Validate checks the field values on ModelOutput with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. 
+func (m *ModelOutput) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelOutput with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ModelOutputMultiError, or +// nil if none found. +func (m *ModelOutput) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOutput) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for DataType + + if all { + switch v := interface{}(m.GetReshape()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOutputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOutputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetReshape()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOutputValidationError{ + field: "Reshape", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for LabelFilename + + // no validation rules for IsShapeTensor + + if len(errors) > 0 { + return ModelOutputMultiError(errors) + } + + return nil +} + +// ModelOutputMultiError is an error wrapping multiple validation errors +// returned by ModelOutput.ValidateAll() if the designated constraints aren't met. +type ModelOutputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelOutputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOutputMultiError) AllErrors() []error { return m } + +// ModelOutputValidationError is the validation error returned by +// ModelOutput.Validate if the designated constraints aren't met. +type ModelOutputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOutputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOutputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOutputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOutputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOutputValidationError) ErrorName() string { return "ModelOutputValidationError" } + +// Error satisfies the builtin error interface +func (e ModelOutputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOutput.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOutputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOutputValidationError{} + +// Validate checks the field values on BatchInput with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. 
+func (m *BatchInput) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on BatchInput with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in BatchInputMultiError, or +// nil if none found. +func (m *BatchInput) ValidateAll() error { + return m.validate(true) +} + +func (m *BatchInput) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Kind + + // no validation rules for DataType + + if len(errors) > 0 { + return BatchInputMultiError(errors) + } + + return nil +} + +// BatchInputMultiError is an error wrapping multiple validation errors +// returned by BatchInput.ValidateAll() if the designated constraints aren't met. +type BatchInputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m BatchInputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m BatchInputMultiError) AllErrors() []error { return m } + +// BatchInputValidationError is the validation error returned by +// BatchInput.Validate if the designated constraints aren't met. +type BatchInputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e BatchInputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e BatchInputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e BatchInputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e BatchInputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e BatchInputValidationError) ErrorName() string { return "BatchInputValidationError" } + +// Error satisfies the builtin error interface +func (e BatchInputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sBatchInput.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = BatchInputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = BatchInputValidationError{} + +// Validate checks the field values on BatchOutput with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *BatchOutput) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on BatchOutput with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in BatchOutputMultiError, or +// nil if none found. +func (m *BatchOutput) ValidateAll() error { + return m.validate(true) +} + +func (m *BatchOutput) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Kind + + if len(errors) > 0 { + return BatchOutputMultiError(errors) + } + + return nil +} + +// BatchOutputMultiError is an error wrapping multiple validation errors +// returned by BatchOutput.ValidateAll() if the designated constraints aren't met. +type BatchOutputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m BatchOutputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m BatchOutputMultiError) AllErrors() []error { return m } + +// BatchOutputValidationError is the validation error returned by +// BatchOutput.Validate if the designated constraints aren't met. +type BatchOutputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e BatchOutputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e BatchOutputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e BatchOutputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e BatchOutputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e BatchOutputValidationError) ErrorName() string { return "BatchOutputValidationError" } + +// Error satisfies the builtin error interface +func (e BatchOutputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sBatchOutput.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = BatchOutputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = BatchOutputValidationError{} + +// Validate checks the field values on ModelVersionPolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelVersionPolicy) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelVersionPolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelVersionPolicyMultiError, or nil if none found. 
+func (m *ModelVersionPolicy) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelVersionPolicy) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + switch v := m.PolicyChoice.(type) { + case *ModelVersionPolicy_Latest_: + if v == nil { + err := ModelVersionPolicyValidationError{ + field: "PolicyChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetLatest()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "Latest", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "Latest", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetLatest()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelVersionPolicyValidationError{ + field: "Latest", + reason: "embedded message failed validation", + cause: err, + } + } + } + + case *ModelVersionPolicy_All_: + if v == nil { + err := ModelVersionPolicyValidationError{ + field: "PolicyChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetAll()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "All", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "All", + reason: "embedded message failed validation", + cause: 
err, + }) + } + } + } else if v, ok := interface{}(m.GetAll()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelVersionPolicyValidationError{ + field: "All", + reason: "embedded message failed validation", + cause: err, + } + } + } + + case *ModelVersionPolicy_Specific_: + if v == nil { + err := ModelVersionPolicyValidationError{ + field: "PolicyChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetSpecific()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "Specific", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelVersionPolicyValidationError{ + field: "Specific", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetSpecific()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelVersionPolicyValidationError{ + field: "Specific", + reason: "embedded message failed validation", + cause: err, + } + } + } + + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelVersionPolicyMultiError(errors) + } + + return nil +} + +// ModelVersionPolicyMultiError is an error wrapping multiple validation errors +// returned by ModelVersionPolicy.ValidateAll() if the designated constraints +// aren't met. +type ModelVersionPolicyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelVersionPolicyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelVersionPolicyMultiError) AllErrors() []error { return m } + +// ModelVersionPolicyValidationError is the validation error returned by +// ModelVersionPolicy.Validate if the designated constraints aren't met. +type ModelVersionPolicyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelVersionPolicyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelVersionPolicyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelVersionPolicyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelVersionPolicyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelVersionPolicyValidationError) ErrorName() string { + return "ModelVersionPolicyValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelVersionPolicyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelVersionPolicy.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelVersionPolicyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelVersionPolicyValidationError{} + +// Validate checks the field values on ModelOptimizationPolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelOptimizationPolicy with the +// rules defined in the proto definition for this message. 
If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelOptimizationPolicyMultiError, or nil if none found. +func (m *ModelOptimizationPolicy) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if all { + switch v := interface{}(m.GetGraph()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "Graph", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "Graph", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetGraph()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicyValidationError{ + field: "Graph", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for Priority + + if all { + switch v := interface{}(m.GetCuda()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "Cuda", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "Cuda", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetCuda()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicyValidationError{ + field: "Cuda", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { 
+ switch v := interface{}(m.GetExecutionAccelerators()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "ExecutionAccelerators", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "ExecutionAccelerators", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetExecutionAccelerators()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicyValidationError{ + field: "ExecutionAccelerators", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetInputPinnedMemory()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "InputPinnedMemory", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "InputPinnedMemory", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetInputPinnedMemory()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicyValidationError{ + field: "InputPinnedMemory", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetOutputPinnedMemory()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "OutputPinnedMemory", + reason: 
"embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicyValidationError{ + field: "OutputPinnedMemory", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetOutputPinnedMemory()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicyValidationError{ + field: "OutputPinnedMemory", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for GatherKernelBufferThreshold + + // no validation rules for EagerBatching + + if len(errors) > 0 { + return ModelOptimizationPolicyMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicyMultiError is an error wrapping multiple validation +// errors returned by ModelOptimizationPolicy.ValidateAll() if the designated +// constraints aren't met. +type ModelOptimizationPolicyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicyMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicyValidationError is the validation error returned by +// ModelOptimizationPolicy.Validate if the designated constraints aren't met. +type ModelOptimizationPolicyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicyValidationError) Field() string { return e.field } + +// Reason function returns reason value. 
+func (e ModelOptimizationPolicyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicyValidationError) ErrorName() string { + return "ModelOptimizationPolicyValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicyValidationError{} + +// Validate checks the field values on ModelQueuePolicy with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelQueuePolicy) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelQueuePolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelQueuePolicyMultiError, or nil if none found. 
+func (m *ModelQueuePolicy) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelQueuePolicy) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for TimeoutAction + + // no validation rules for DefaultTimeoutMicroseconds + + // no validation rules for AllowTimeoutOverride + + // no validation rules for MaxQueueSize + + if len(errors) > 0 { + return ModelQueuePolicyMultiError(errors) + } + + return nil +} + +// ModelQueuePolicyMultiError is an error wrapping multiple validation errors +// returned by ModelQueuePolicy.ValidateAll() if the designated constraints +// aren't met. +type ModelQueuePolicyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelQueuePolicyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelQueuePolicyMultiError) AllErrors() []error { return m } + +// ModelQueuePolicyValidationError is the validation error returned by +// ModelQueuePolicy.Validate if the designated constraints aren't met. +type ModelQueuePolicyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelQueuePolicyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelQueuePolicyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelQueuePolicyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelQueuePolicyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelQueuePolicyValidationError) ErrorName() string { return "ModelQueuePolicyValidationError" } + +// Error satisfies the builtin error interface +func (e ModelQueuePolicyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelQueuePolicy.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelQueuePolicyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelQueuePolicyValidationError{} + +// Validate checks the field values on ModelDynamicBatching with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelDynamicBatching) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelDynamicBatching with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelDynamicBatchingMultiError, or nil if none found. 
+func (m *ModelDynamicBatching) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelDynamicBatching) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for MaxQueueDelayMicroseconds + + // no validation rules for PreserveOrdering + + // no validation rules for PriorityLevels + + // no validation rules for DefaultPriorityLevel + + if all { + switch v := interface{}(m.GetDefaultQueuePolicy()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelDynamicBatchingValidationError{ + field: "DefaultQueuePolicy", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelDynamicBatchingValidationError{ + field: "DefaultQueuePolicy", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetDefaultQueuePolicy()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelDynamicBatchingValidationError{ + field: "DefaultQueuePolicy", + reason: "embedded message failed validation", + cause: err, + } + } + } + + { + sorted_keys := make([]uint64, len(m.GetPriorityQueuePolicy())) + i := 0 + for key := range m.GetPriorityQueuePolicy() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetPriorityQueuePolicy()[key] + _ = val + + // no validation rules for PriorityQueuePolicy[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelDynamicBatchingValidationError{ + field: fmt.Sprintf("PriorityQueuePolicy[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ 
Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelDynamicBatchingValidationError{ + field: fmt.Sprintf("PriorityQueuePolicy[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelDynamicBatchingValidationError{ + field: fmt.Sprintf("PriorityQueuePolicy[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return ModelDynamicBatchingMultiError(errors) + } + + return nil +} + +// ModelDynamicBatchingMultiError is an error wrapping multiple validation +// errors returned by ModelDynamicBatching.ValidateAll() if the designated +// constraints aren't met. +type ModelDynamicBatchingMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelDynamicBatchingMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelDynamicBatchingMultiError) AllErrors() []error { return m } + +// ModelDynamicBatchingValidationError is the validation error returned by +// ModelDynamicBatching.Validate if the designated constraints aren't met. +type ModelDynamicBatchingValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelDynamicBatchingValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelDynamicBatchingValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelDynamicBatchingValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ModelDynamicBatchingValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelDynamicBatchingValidationError) ErrorName() string { + return "ModelDynamicBatchingValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelDynamicBatchingValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelDynamicBatching.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelDynamicBatchingValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelDynamicBatchingValidationError{} + +// Validate checks the field values on ModelSequenceBatching with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelSequenceBatching) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelSequenceBatchingMultiError, or nil if none found. 
+func (m *ModelSequenceBatching) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for MaxSequenceIdleMicroseconds + + for idx, item := range m.GetControlInput() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: fmt.Sprintf("ControlInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: fmt.Sprintf("ControlInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatchingValidationError{ + field: fmt.Sprintf("ControlInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + for idx, item := range m.GetState() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: fmt.Sprintf("State[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: fmt.Sprintf("State[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatchingValidationError{ + 
field: fmt.Sprintf("State[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + switch v := m.StrategyChoice.(type) { + case *ModelSequenceBatching_Direct: + if v == nil { + err := ModelSequenceBatchingValidationError{ + field: "StrategyChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetDirect()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: "Direct", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: "Direct", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetDirect()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatchingValidationError{ + field: "Direct", + reason: "embedded message failed validation", + cause: err, + } + } + } + + case *ModelSequenceBatching_Oldest: + if v == nil { + err := ModelSequenceBatchingValidationError{ + field: "StrategyChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetOldest()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: "Oldest", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatchingValidationError{ + field: "Oldest", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, 
ok := interface{}(m.GetOldest()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatchingValidationError{ + field: "Oldest", + reason: "embedded message failed validation", + cause: err, + } + } + } + + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelSequenceBatchingMultiError(errors) + } + + return nil +} + +// ModelSequenceBatchingMultiError is an error wrapping multiple validation +// errors returned by ModelSequenceBatching.ValidateAll() if the designated +// constraints aren't met. +type ModelSequenceBatchingMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatchingMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelSequenceBatchingMultiError) AllErrors() []error { return m } + +// ModelSequenceBatchingValidationError is the validation error returned by +// ModelSequenceBatching.Validate if the designated constraints aren't met. +type ModelSequenceBatchingValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatchingValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatchingValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatchingValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelSequenceBatchingValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelSequenceBatchingValidationError) ErrorName() string { + return "ModelSequenceBatchingValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatchingValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatchingValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatchingValidationError{} + +// Validate checks the field values on ModelEnsembling with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelEnsembling) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelEnsembling with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelEnsemblingMultiError, or nil if none found. 
+func (m *ModelEnsembling) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelEnsembling) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetStep() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelEnsemblingValidationError{ + field: fmt.Sprintf("Step[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelEnsemblingValidationError{ + field: fmt.Sprintf("Step[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelEnsemblingValidationError{ + field: fmt.Sprintf("Step[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelEnsemblingMultiError(errors) + } + + return nil +} + +// ModelEnsemblingMultiError is an error wrapping multiple validation errors +// returned by ModelEnsembling.ValidateAll() if the designated constraints +// aren't met. +type ModelEnsemblingMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelEnsemblingMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelEnsemblingMultiError) AllErrors() []error { return m } + +// ModelEnsemblingValidationError is the validation error returned by +// ModelEnsembling.Validate if the designated constraints aren't met. 
+type ModelEnsemblingValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelEnsemblingValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelEnsemblingValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelEnsemblingValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelEnsemblingValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelEnsemblingValidationError) ErrorName() string { return "ModelEnsemblingValidationError" } + +// Error satisfies the builtin error interface +func (e ModelEnsemblingValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelEnsembling.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelEnsemblingValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelEnsemblingValidationError{} + +// Validate checks the field values on ModelParameter with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelParameter) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelParameter with the rules defined +// in the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ModelParameterMultiError, +// or nil if none found. 
+func (m *ModelParameter) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelParameter) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for StringValue + + if len(errors) > 0 { + return ModelParameterMultiError(errors) + } + + return nil +} + +// ModelParameterMultiError is an error wrapping multiple validation errors +// returned by ModelParameter.ValidateAll() if the designated constraints +// aren't met. +type ModelParameterMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelParameterMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelParameterMultiError) AllErrors() []error { return m } + +// ModelParameterValidationError is the validation error returned by +// ModelParameter.Validate if the designated constraints aren't met. +type ModelParameterValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelParameterValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelParameterValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelParameterValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelParameterValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelParameterValidationError) ErrorName() string { return "ModelParameterValidationError" } + +// Error satisfies the builtin error interface +func (e ModelParameterValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelParameter.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelParameterValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelParameterValidationError{} + +// Validate checks the field values on ModelWarmup with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelWarmup) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelWarmup with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ModelWarmupMultiError, or +// nil if none found. 
+func (m *ModelWarmup) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelWarmup) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for BatchSize + + { + sorted_keys := make([]string, len(m.GetInputs())) + i := 0 + for key := range m.GetInputs() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetInputs()[key] + _ = val + + // no validation rules for Inputs[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelWarmupValidationError{ + field: fmt.Sprintf("Inputs[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelWarmupValidationError{ + field: fmt.Sprintf("Inputs[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelWarmupValidationError{ + field: fmt.Sprintf("Inputs[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + // no validation rules for Count + + if len(errors) > 0 { + return ModelWarmupMultiError(errors) + } + + return nil +} + +// ModelWarmupMultiError is an error wrapping multiple validation errors +// returned by ModelWarmup.ValidateAll() if the designated constraints aren't met. +type ModelWarmupMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelWarmupMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelWarmupMultiError) AllErrors() []error { return m } + +// ModelWarmupValidationError is the validation error returned by +// ModelWarmup.Validate if the designated constraints aren't met. +type ModelWarmupValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelWarmupValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelWarmupValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelWarmupValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelWarmupValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelWarmupValidationError) ErrorName() string { return "ModelWarmupValidationError" } + +// Error satisfies the builtin error interface +func (e ModelWarmupValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelWarmup.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelWarmupValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelWarmupValidationError{} + +// Validate checks the field values on ModelOperations with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. 
+func (m *ModelOperations) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelOperations with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelOperationsMultiError, or nil if none found. +func (m *ModelOperations) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOperations) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ModelOperationsMultiError(errors) + } + + return nil +} + +// ModelOperationsMultiError is an error wrapping multiple validation errors +// returned by ModelOperations.ValidateAll() if the designated constraints +// aren't met. +type ModelOperationsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOperationsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOperationsMultiError) AllErrors() []error { return m } + +// ModelOperationsValidationError is the validation error returned by +// ModelOperations.Validate if the designated constraints aren't met. +type ModelOperationsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOperationsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOperationsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOperationsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOperationsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelOperationsValidationError) ErrorName() string { return "ModelOperationsValidationError" } + +// Error satisfies the builtin error interface +func (e ModelOperationsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOperations.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOperationsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOperationsValidationError{} + +// Validate checks the field values on ModelTransactionPolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelTransactionPolicy) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelTransactionPolicy with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelTransactionPolicyMultiError, or nil if none found. +func (m *ModelTransactionPolicy) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelTransactionPolicy) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Decoupled + + if len(errors) > 0 { + return ModelTransactionPolicyMultiError(errors) + } + + return nil +} + +// ModelTransactionPolicyMultiError is an error wrapping multiple validation +// errors returned by ModelTransactionPolicy.ValidateAll() if the designated +// constraints aren't met. +type ModelTransactionPolicyMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelTransactionPolicyMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelTransactionPolicyMultiError) AllErrors() []error { return m } + +// ModelTransactionPolicyValidationError is the validation error returned by +// ModelTransactionPolicy.Validate if the designated constraints aren't met. +type ModelTransactionPolicyValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelTransactionPolicyValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelTransactionPolicyValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelTransactionPolicyValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelTransactionPolicyValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelTransactionPolicyValidationError) ErrorName() string { + return "ModelTransactionPolicyValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelTransactionPolicyValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelTransactionPolicy.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelTransactionPolicyValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelTransactionPolicyValidationError{} + +// Validate checks the field values on ModelRepositoryAgents with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelRepositoryAgents) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelRepositoryAgents with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelRepositoryAgentsMultiError, or nil if none found. +func (m *ModelRepositoryAgents) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelRepositoryAgents) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetAgents() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelRepositoryAgentsValidationError{ + field: fmt.Sprintf("Agents[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelRepositoryAgentsValidationError{ + field: fmt.Sprintf("Agents[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelRepositoryAgentsValidationError{ + field: fmt.Sprintf("Agents[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelRepositoryAgentsMultiError(errors) + } + + return nil +} + +// ModelRepositoryAgentsMultiError is an error wrapping multiple validation +// errors returned by ModelRepositoryAgents.ValidateAll() if the designated +// constraints aren't met. +type ModelRepositoryAgentsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelRepositoryAgentsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelRepositoryAgentsMultiError) AllErrors() []error { return m } + +// ModelRepositoryAgentsValidationError is the validation error returned by +// ModelRepositoryAgents.Validate if the designated constraints aren't met. +type ModelRepositoryAgentsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelRepositoryAgentsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelRepositoryAgentsValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelRepositoryAgentsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelRepositoryAgentsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelRepositoryAgentsValidationError) ErrorName() string { + return "ModelRepositoryAgentsValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelRepositoryAgentsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelRepositoryAgents.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelRepositoryAgentsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelRepositoryAgentsValidationError{} + +// Validate checks the field values on ModelResponseCache with the rules +// defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelResponseCache) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelResponseCache with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelResponseCacheMultiError, or nil if none found. +func (m *ModelResponseCache) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelResponseCache) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Enable + + if len(errors) > 0 { + return ModelResponseCacheMultiError(errors) + } + + return nil +} + +// ModelResponseCacheMultiError is an error wrapping multiple validation errors +// returned by ModelResponseCache.ValidateAll() if the designated constraints +// aren't met. +type ModelResponseCacheMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelResponseCacheMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelResponseCacheMultiError) AllErrors() []error { return m } + +// ModelResponseCacheValidationError is the validation error returned by +// ModelResponseCache.Validate if the designated constraints aren't met. +type ModelResponseCacheValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelResponseCacheValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelResponseCacheValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. 
+func (e ModelResponseCacheValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelResponseCacheValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelResponseCacheValidationError) ErrorName() string { + return "ModelResponseCacheValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelResponseCacheValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelResponseCache.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelResponseCacheValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelResponseCacheValidationError{} + +// Validate checks the field values on ModelConfig with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelConfig) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelConfig with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in ModelConfigMultiError, or +// nil if none found. 
+func (m *ModelConfig) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelConfig) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Platform + + // no validation rules for Backend + + if all { + switch v := interface{}(m.GetVersionPolicy()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "VersionPolicy", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "VersionPolicy", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetVersionPolicy()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "VersionPolicy", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for MaxBatchSize + + for idx, item := range m.GetInput() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Input[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Input[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("Input[%v]", idx), + reason: "embedded message failed validation", + 
cause: err, + } + } + } + + } + + for idx, item := range m.GetOutput() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Output[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Output[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("Output[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + for idx, item := range m.GetBatchInput() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("BatchInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("BatchInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("BatchInput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + for idx, item := range m.GetBatchOutput() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := 
v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("BatchOutput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("BatchOutput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("BatchOutput[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if all { + switch v := interface{}(m.GetOptimization()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "Optimization", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "Optimization", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetOptimization()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "Optimization", + reason: "embedded message failed validation", + cause: err, + } + } + } + + for idx, item := range m.GetInstanceGroup() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("InstanceGroup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = 
append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("InstanceGroup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("InstanceGroup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + // no validation rules for DefaultModelFilename + + // no validation rules for CcModelFilenames + + // no validation rules for MetricTags + + { + sorted_keys := make([]string, len(m.GetParameters())) + i := 0 + for key := range m.GetParameters() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetParameters()[key] + _ = val + + // no validation rules for Parameters[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("Parameters[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + for idx, item := range m.GetModelWarmup() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = 
append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("ModelWarmup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: fmt.Sprintf("ModelWarmup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: fmt.Sprintf("ModelWarmup[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if all { + switch v := interface{}(m.GetModelOperations()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelOperations", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelOperations", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetModelOperations()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "ModelOperations", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetModelTransactionPolicy()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelTransactionPolicy", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelTransactionPolicy", + 
reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetModelTransactionPolicy()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "ModelTransactionPolicy", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetModelRepositoryAgents()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelRepositoryAgents", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ModelRepositoryAgents", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetModelRepositoryAgents()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "ModelRepositoryAgents", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if all { + switch v := interface{}(m.GetResponseCache()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ResponseCache", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "ResponseCache", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetResponseCache()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "ResponseCache", + reason: "embedded message failed validation", + cause: err, + 
} + } + } + + switch v := m.SchedulingChoice.(type) { + case *ModelConfig_DynamicBatching: + if v == nil { + err := ModelConfigValidationError{ + field: "SchedulingChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetDynamicBatching()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "DynamicBatching", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "DynamicBatching", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetDynamicBatching()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "DynamicBatching", + reason: "embedded message failed validation", + cause: err, + } + } + } + + case *ModelConfig_SequenceBatching: + if v == nil { + err := ModelConfigValidationError{ + field: "SchedulingChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetSequenceBatching()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "SequenceBatching", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "SequenceBatching", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetSequenceBatching()).(interface{ Validate() error }); ok { + if err := 
v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "SequenceBatching", + reason: "embedded message failed validation", + cause: err, + } + } + } + + case *ModelConfig_EnsembleScheduling: + if v == nil { + err := ModelConfigValidationError{ + field: "SchedulingChoice", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + + if all { + switch v := interface{}(m.GetEnsembleScheduling()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "EnsembleScheduling", + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelConfigValidationError{ + field: "EnsembleScheduling", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetEnsembleScheduling()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelConfigValidationError{ + field: "EnsembleScheduling", + reason: "embedded message failed validation", + cause: err, + } + } + } + + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelConfigMultiError(errors) + } + + return nil +} + +// ModelConfigMultiError is an error wrapping multiple validation errors +// returned by ModelConfig.ValidateAll() if the designated constraints aren't met. +type ModelConfigMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelConfigMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelConfigMultiError) AllErrors() []error { return m } + +// ModelConfigValidationError is the validation error returned by +// ModelConfig.Validate if the designated constraints aren't met. +type ModelConfigValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelConfigValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelConfigValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelConfigValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelConfigValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelConfigValidationError) ErrorName() string { return "ModelConfigValidationError" } + +// Error satisfies the builtin error interface +func (e ModelConfigValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelConfig.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelConfigValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelConfigValidationError{} + +// Validate checks the field values on ModelRateLimiter_Resource with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelRateLimiter_Resource) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelRateLimiter_Resource with the +// rules defined in the proto definition for this message. 
If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelRateLimiter_ResourceMultiError, or nil if none found. +func (m *ModelRateLimiter_Resource) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelRateLimiter_Resource) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Global + + // no validation rules for Count + + if len(errors) > 0 { + return ModelRateLimiter_ResourceMultiError(errors) + } + + return nil +} + +// ModelRateLimiter_ResourceMultiError is an error wrapping multiple validation +// errors returned by ModelRateLimiter_Resource.ValidateAll() if the +// designated constraints aren't met. +type ModelRateLimiter_ResourceMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelRateLimiter_ResourceMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelRateLimiter_ResourceMultiError) AllErrors() []error { return m } + +// ModelRateLimiter_ResourceValidationError is the validation error returned by +// ModelRateLimiter_Resource.Validate if the designated constraints aren't met. +type ModelRateLimiter_ResourceValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelRateLimiter_ResourceValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelRateLimiter_ResourceValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelRateLimiter_ResourceValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ModelRateLimiter_ResourceValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelRateLimiter_ResourceValidationError) ErrorName() string { + return "ModelRateLimiter_ResourceValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelRateLimiter_ResourceValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelRateLimiter_Resource.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelRateLimiter_ResourceValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelRateLimiter_ResourceValidationError{} + +// Validate checks the field values on ModelInstanceGroup_SecondaryDevice with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *ModelInstanceGroup_SecondaryDevice) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelInstanceGroup_SecondaryDevice +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelInstanceGroup_SecondaryDeviceMultiError, or nil if none found. 
+func (m *ModelInstanceGroup_SecondaryDevice) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelInstanceGroup_SecondaryDevice) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Kind + + // no validation rules for DeviceId + + if len(errors) > 0 { + return ModelInstanceGroup_SecondaryDeviceMultiError(errors) + } + + return nil +} + +// ModelInstanceGroup_SecondaryDeviceMultiError is an error wrapping multiple +// validation errors returned by +// ModelInstanceGroup_SecondaryDevice.ValidateAll() if the designated +// constraints aren't met. +type ModelInstanceGroup_SecondaryDeviceMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelInstanceGroup_SecondaryDeviceMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelInstanceGroup_SecondaryDeviceMultiError) AllErrors() []error { return m } + +// ModelInstanceGroup_SecondaryDeviceValidationError is the validation error +// returned by ModelInstanceGroup_SecondaryDevice.Validate if the designated +// constraints aren't met. +type ModelInstanceGroup_SecondaryDeviceValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelInstanceGroup_SecondaryDeviceValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelInstanceGroup_SecondaryDeviceValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelInstanceGroup_SecondaryDeviceValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelInstanceGroup_SecondaryDeviceValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelInstanceGroup_SecondaryDeviceValidationError) ErrorName() string { + return "ModelInstanceGroup_SecondaryDeviceValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelInstanceGroup_SecondaryDeviceValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelInstanceGroup_SecondaryDevice.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelInstanceGroup_SecondaryDeviceValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelInstanceGroup_SecondaryDeviceValidationError{} + +// Validate checks the field values on ModelVersionPolicy_Latest with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelVersionPolicy_Latest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelVersionPolicy_Latest with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelVersionPolicy_LatestMultiError, or nil if none found. +func (m *ModelVersionPolicy_Latest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelVersionPolicy_Latest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for NumVersions + + if len(errors) > 0 { + return ModelVersionPolicy_LatestMultiError(errors) + } + + return nil +} + +// ModelVersionPolicy_LatestMultiError is an error wrapping multiple validation +// errors returned by ModelVersionPolicy_Latest.ValidateAll() if the +// designated constraints aren't met. 
+type ModelVersionPolicy_LatestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelVersionPolicy_LatestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelVersionPolicy_LatestMultiError) AllErrors() []error { return m } + +// ModelVersionPolicy_LatestValidationError is the validation error returned by +// ModelVersionPolicy_Latest.Validate if the designated constraints aren't met. +type ModelVersionPolicy_LatestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelVersionPolicy_LatestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelVersionPolicy_LatestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelVersionPolicy_LatestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelVersionPolicy_LatestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelVersionPolicy_LatestValidationError) ErrorName() string { + return "ModelVersionPolicy_LatestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelVersionPolicy_LatestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelVersionPolicy_Latest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelVersionPolicy_LatestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelVersionPolicy_LatestValidationError{} + +// Validate checks the field values on ModelVersionPolicy_All with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelVersionPolicy_All) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelVersionPolicy_All with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelVersionPolicy_AllMultiError, or nil if none found. +func (m *ModelVersionPolicy_All) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelVersionPolicy_All) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ModelVersionPolicy_AllMultiError(errors) + } + + return nil +} + +// ModelVersionPolicy_AllMultiError is an error wrapping multiple validation +// errors returned by ModelVersionPolicy_All.ValidateAll() if the designated +// constraints aren't met. +type ModelVersionPolicy_AllMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelVersionPolicy_AllMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelVersionPolicy_AllMultiError) AllErrors() []error { return m } + +// ModelVersionPolicy_AllValidationError is the validation error returned by +// ModelVersionPolicy_All.Validate if the designated constraints aren't met. +type ModelVersionPolicy_AllValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelVersionPolicy_AllValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelVersionPolicy_AllValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelVersionPolicy_AllValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelVersionPolicy_AllValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelVersionPolicy_AllValidationError) ErrorName() string { + return "ModelVersionPolicy_AllValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelVersionPolicy_AllValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelVersionPolicy_All.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelVersionPolicy_AllValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelVersionPolicy_AllValidationError{} + +// Validate checks the field values on ModelVersionPolicy_Specific with the +// rules defined in the proto definition for this message. 
If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelVersionPolicy_Specific) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelVersionPolicy_Specific with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelVersionPolicy_SpecificMultiError, or nil if none found. +func (m *ModelVersionPolicy_Specific) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelVersionPolicy_Specific) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ModelVersionPolicy_SpecificMultiError(errors) + } + + return nil +} + +// ModelVersionPolicy_SpecificMultiError is an error wrapping multiple +// validation errors returned by ModelVersionPolicy_Specific.ValidateAll() if +// the designated constraints aren't met. +type ModelVersionPolicy_SpecificMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelVersionPolicy_SpecificMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelVersionPolicy_SpecificMultiError) AllErrors() []error { return m } + +// ModelVersionPolicy_SpecificValidationError is the validation error returned +// by ModelVersionPolicy_Specific.Validate if the designated constraints +// aren't met. +type ModelVersionPolicy_SpecificValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelVersionPolicy_SpecificValidationError) Field() string { return e.field } + +// Reason function returns reason value. 
+func (e ModelVersionPolicy_SpecificValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelVersionPolicy_SpecificValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelVersionPolicy_SpecificValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelVersionPolicy_SpecificValidationError) ErrorName() string { + return "ModelVersionPolicy_SpecificValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelVersionPolicy_SpecificValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelVersionPolicy_Specific.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelVersionPolicy_SpecificValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelVersionPolicy_SpecificValidationError{} + +// Validate checks the field values on ModelOptimizationPolicy_Graph with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy_Graph) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelOptimizationPolicy_Graph with +// the rules defined in the proto definition for this message. If any rules +// are violated, the result is a list of violation errors wrapped in +// ModelOptimizationPolicy_GraphMultiError, or nil if none found. 
+func (m *ModelOptimizationPolicy_Graph) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_Graph) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Level + + if len(errors) > 0 { + return ModelOptimizationPolicy_GraphMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_GraphMultiError is an error wrapping multiple +// validation errors returned by ModelOptimizationPolicy_Graph.ValidateAll() +// if the designated constraints aren't met. +type ModelOptimizationPolicy_GraphMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_GraphMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_GraphMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_GraphValidationError is the validation error +// returned by ModelOptimizationPolicy_Graph.Validate if the designated +// constraints aren't met. +type ModelOptimizationPolicy_GraphValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_GraphValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_GraphValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_GraphValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicy_GraphValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelOptimizationPolicy_GraphValidationError) ErrorName() string { + return "ModelOptimizationPolicy_GraphValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_GraphValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_Graph.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_GraphValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_GraphValidationError{} + +// Validate checks the field values on ModelOptimizationPolicy_Cuda with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy_Cuda) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelOptimizationPolicy_Cuda with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelOptimizationPolicy_CudaMultiError, or nil if none found. 
+func (m *ModelOptimizationPolicy_Cuda) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_Cuda) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Graphs + + // no validation rules for BusyWaitEvents + + for idx, item := range m.GetGraphSpec() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicy_CudaValidationError{ + field: fmt.Sprintf("GraphSpec[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_CudaValidationError{ + field: fmt.Sprintf("GraphSpec[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_CudaValidationError{ + field: fmt.Sprintf("GraphSpec[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + // no validation rules for OutputCopyStream + + if len(errors) > 0 { + return ModelOptimizationPolicy_CudaMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_CudaMultiError is an error wrapping multiple +// validation errors returned by ModelOptimizationPolicy_Cuda.ValidateAll() if +// the designated constraints aren't met. +type ModelOptimizationPolicy_CudaMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_CudaMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelOptimizationPolicy_CudaMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_CudaValidationError is the validation error returned +// by ModelOptimizationPolicy_Cuda.Validate if the designated constraints +// aren't met. +type ModelOptimizationPolicy_CudaValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_CudaValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_CudaValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_CudaValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicy_CudaValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_CudaValidationError) ErrorName() string { + return "ModelOptimizationPolicy_CudaValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_CudaValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_Cuda.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_CudaValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_CudaValidationError{} + +// Validate checks the field values on +// ModelOptimizationPolicy_ExecutionAccelerators with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. 
+func (m *ModelOptimizationPolicy_ExecutionAccelerators) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_ExecutionAccelerators with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// ModelOptimizationPolicy_ExecutionAcceleratorsMultiError, or nil if none found. +func (m *ModelOptimizationPolicy_ExecutionAccelerators) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_ExecutionAccelerators) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + for idx, item := range m.GetGpuExecutionAccelerator() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("GpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("GpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("GpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + for idx, item := range m.GetCpuExecutionAccelerator() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, 
ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("CpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("CpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{ + field: fmt.Sprintf("CpuExecutionAccelerator[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelOptimizationPolicy_ExecutionAcceleratorsMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_ExecutionAcceleratorsMultiError is an error wrapping +// multiple validation errors returned by +// ModelOptimizationPolicy_ExecutionAccelerators.ValidateAll() if the +// designated constraints aren't met. +type ModelOptimizationPolicy_ExecutionAcceleratorsMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_ExecutionAcceleratorsMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_ExecutionAcceleratorsMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_ExecutionAcceleratorsValidationError is the +// validation error returned by +// ModelOptimizationPolicy_ExecutionAccelerators.Validate if the designated +// constraints aren't met. 
+type ModelOptimizationPolicy_ExecutionAcceleratorsValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) Reason() string { + return e.reason +} + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) ErrorName() string { + return "ModelOptimizationPolicy_ExecutionAcceleratorsValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_ExecutionAcceleratorsValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_ExecutionAccelerators.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_ExecutionAcceleratorsValidationError{} + +// Validate checks the field values on +// ModelOptimizationPolicy_PinnedMemoryBuffer with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. 
+func (m *ModelOptimizationPolicy_PinnedMemoryBuffer) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_PinnedMemoryBuffer with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// ModelOptimizationPolicy_PinnedMemoryBufferMultiError, or nil if none found. +func (m *ModelOptimizationPolicy_PinnedMemoryBuffer) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_PinnedMemoryBuffer) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Enable + + if len(errors) > 0 { + return ModelOptimizationPolicy_PinnedMemoryBufferMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_PinnedMemoryBufferMultiError is an error wrapping +// multiple validation errors returned by +// ModelOptimizationPolicy_PinnedMemoryBuffer.ValidateAll() if the designated +// constraints aren't met. +type ModelOptimizationPolicy_PinnedMemoryBufferMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_PinnedMemoryBufferMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_PinnedMemoryBufferMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_PinnedMemoryBufferValidationError is the validation +// error returned by ModelOptimizationPolicy_PinnedMemoryBuffer.Validate if +// the designated constraints aren't met. +type ModelOptimizationPolicy_PinnedMemoryBufferValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) ErrorName() string { + return "ModelOptimizationPolicy_PinnedMemoryBufferValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_PinnedMemoryBufferValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_PinnedMemoryBuffer.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_PinnedMemoryBufferValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_PinnedMemoryBufferValidationError{} + +// Validate checks the field values on ModelOptimizationPolicy_Cuda_GraphSpec +// with the rules defined in the proto definition for this message. If any +// rules are violated, the first error encountered is returned, or nil if +// there are no violations. +func (m *ModelOptimizationPolicy_Cuda_GraphSpec) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_Cuda_GraphSpec with the rules defined in the proto +// definition for this message. 
If any rules are violated, the result is a +// list of violation errors wrapped in +// ModelOptimizationPolicy_Cuda_GraphSpecMultiError, or nil if none found. +func (m *ModelOptimizationPolicy_Cuda_GraphSpec) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_Cuda_GraphSpec) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for BatchSize + + { + sorted_keys := make([]string, len(m.GetInput())) + i := 0 + for key := range m.GetInput() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetInput()[key] + _ = val + + // no validation rules for Input[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if all { + switch v := interface{}(m.GetGraphLowerBound()).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: "GraphLowerBound", + reason: "embedded message failed validation", + cause: 
err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: "GraphLowerBound", + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(m.GetGraphLowerBound()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_Cuda_GraphSpecValidationError{ + field: "GraphLowerBound", + reason: "embedded message failed validation", + cause: err, + } + } + } + + if len(errors) > 0 { + return ModelOptimizationPolicy_Cuda_GraphSpecMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_Cuda_GraphSpecMultiError is an error wrapping +// multiple validation errors returned by +// ModelOptimizationPolicy_Cuda_GraphSpec.ValidateAll() if the designated +// constraints aren't met. +type ModelOptimizationPolicy_Cuda_GraphSpecMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_Cuda_GraphSpecMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_Cuda_GraphSpecMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_Cuda_GraphSpecValidationError is the validation +// error returned by ModelOptimizationPolicy_Cuda_GraphSpec.Validate if the +// designated constraints aren't met. +type ModelOptimizationPolicy_Cuda_GraphSpecValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) Field() string { return e.field } + +// Reason function returns reason value. 
+func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) ErrorName() string { + return "ModelOptimizationPolicy_Cuda_GraphSpecValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_Cuda_GraphSpecValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_Cuda_GraphSpec.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_Cuda_GraphSpecValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_Cuda_GraphSpecValidationError{} + +// Validate checks the field values on +// ModelOptimizationPolicy_Cuda_GraphSpec_Shape with the rules defined in the +// proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_Cuda_GraphSpec_Shape with the rules defined in the +// proto definition for this message. If any rules are violated, the result is +// a list of violation errors wrapped in +// ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError, or nil if none found. 
+func (m *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_Cuda_GraphSpec_Shape) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + if len(errors) > 0 { + return ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError is an error wrapping +// multiple validation errors returned by +// ModelOptimizationPolicy_Cuda_GraphSpec_Shape.ValidateAll() if the +// designated constraints aren't met. +type ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_Cuda_GraphSpec_ShapeMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError is the +// validation error returned by +// ModelOptimizationPolicy_Cuda_GraphSpec_Shape.Validate if the designated +// constraints aren't met. +type ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) ErrorName() string { + return "ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_Cuda_GraphSpec_Shape.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_Cuda_GraphSpec_ShapeValidationError{} + +// Validate checks the field values on +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound with the rules defined in +// the proto definition for this message. If any rules are violated, the first +// error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound with the rules defined in +// the proto definition for this message. If any rules are violated, the +// result is a list of violation errors wrapped in +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError, or nil if none found. 
+func (m *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for BatchSize + + { + sorted_keys := make([]string, len(m.GetInput())) + i := 0 + for key := range m.GetInput() { + sorted_keys[i] = key + i++ + } + sort.Slice(sorted_keys, func(i, j int) bool { return sorted_keys[i] < sorted_keys[j] }) + for _, key := range sorted_keys { + val := m.GetInput()[key] + _ = val + + // no validation rules for Input[key] + + if all { + switch v := interface{}(val).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(val).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError{ + field: fmt.Sprintf("Input[%v]", key), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + } + + if len(errors) > 0 { + return ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError is an error +// wrapping multiple validation errors returned by +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound.ValidateAll() if the +// designated constraints aren't met. 
+type ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundMultiError) AllErrors() []error { return m } + +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError is the +// validation error returned by +// ModelOptimizationPolicy_Cuda_GraphSpec_LowerBound.Validate if the +// designated constraints aren't met. +type ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) Field() string { + return e.field +} + +// Reason function returns reason value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) Reason() string { + return e.reason +} + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) Cause() error { + return e.cause +} + +// Key function returns key value. +func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) ErrorName() string { + return "ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_Cuda_GraphSpec_LowerBound.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_Cuda_GraphSpec_LowerBoundValidationError{} + +// Validate checks the field values on +// ModelOptimizationPolicy_ExecutionAccelerators_Accelerator with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on +// ModelOptimizationPolicy_ExecutionAccelerators_Accelerator with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError, or nil +// if none found. 
+func (m *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelOptimizationPolicy_ExecutionAccelerators_Accelerator) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Parameters + + if len(errors) > 0 { + return ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError(errors) + } + + return nil +} + +// ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError is an +// error wrapping multiple validation errors returned by +// ModelOptimizationPolicy_ExecutionAccelerators_Accelerator.ValidateAll() if +// the designated constraints aren't met. +type ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorMultiError) AllErrors() []error { + return m +} + +// ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError is +// the validation error returned by +// ModelOptimizationPolicy_ExecutionAccelerators_Accelerator.Validate if the +// designated constraints aren't met. +type ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) Field() string { + return e.field +} + +// Reason function returns reason value. 
+func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) Reason() string { + return e.reason +} + +// Cause function returns cause value. +func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) Cause() error { + return e.cause +} + +// Key function returns key value. +func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) Key() bool { + return e.key +} + +// ErrorName returns error name. +func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) ErrorName() string { + return "ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelOptimizationPolicy_ExecutionAccelerators_Accelerator.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelOptimizationPolicy_ExecutionAccelerators_AcceleratorValidationError{} + +// Validate checks the field values on ModelSequenceBatching_Control with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelSequenceBatching_Control) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_Control with +// the rules defined in the proto definition for this message. 
If any rules +// are violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_ControlMultiError, or nil if none found. +func (m *ModelSequenceBatching_Control) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_Control) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Kind + + // no validation rules for DataType + + if len(errors) > 0 { + return ModelSequenceBatching_ControlMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_ControlMultiError is an error wrapping multiple +// validation errors returned by ModelSequenceBatching_Control.ValidateAll() +// if the designated constraints aren't met. +type ModelSequenceBatching_ControlMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatching_ControlMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelSequenceBatching_ControlMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_ControlValidationError is the validation error +// returned by ModelSequenceBatching_Control.Validate if the designated +// constraints aren't met. +type ModelSequenceBatching_ControlValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatching_ControlValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_ControlValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_ControlValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ModelSequenceBatching_ControlValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelSequenceBatching_ControlValidationError) ErrorName() string { + return "ModelSequenceBatching_ControlValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_ControlValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_Control.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_ControlValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_ControlValidationError{} + +// Validate checks the field values on ModelSequenceBatching_ControlInput with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. +func (m *ModelSequenceBatching_ControlInput) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_ControlInput +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_ControlInputMultiError, or nil if none found. 
+func (m *ModelSequenceBatching_ControlInput) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_ControlInput) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + for idx, item := range m.GetControl() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatching_ControlInputValidationError{ + field: fmt.Sprintf("Control[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatching_ControlInputValidationError{ + field: fmt.Sprintf("Control[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatching_ControlInputValidationError{ + field: fmt.Sprintf("Control[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelSequenceBatching_ControlInputMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_ControlInputMultiError is an error wrapping multiple +// validation errors returned by +// ModelSequenceBatching_ControlInput.ValidateAll() if the designated +// constraints aren't met. +type ModelSequenceBatching_ControlInputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatching_ControlInputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelSequenceBatching_ControlInputMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_ControlInputValidationError is the validation error +// returned by ModelSequenceBatching_ControlInput.Validate if the designated +// constraints aren't met. +type ModelSequenceBatching_ControlInputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatching_ControlInputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_ControlInputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_ControlInputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelSequenceBatching_ControlInputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelSequenceBatching_ControlInputValidationError) ErrorName() string { + return "ModelSequenceBatching_ControlInputValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_ControlInputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_ControlInput.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_ControlInputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_ControlInputValidationError{} + +// Validate checks the field values on ModelSequenceBatching_InitialState with +// the rules defined in the proto definition for this message. If any rules +// are violated, the first error encountered is returned, or nil if there are +// no violations. 
+func (m *ModelSequenceBatching_InitialState) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_InitialState +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_InitialStateMultiError, or nil if none found. +func (m *ModelSequenceBatching_InitialState) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_InitialState) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for DataType + + // no validation rules for Name + + switch v := m.StateData.(type) { + case *ModelSequenceBatching_InitialState_ZeroData: + if v == nil { + err := ModelSequenceBatching_InitialStateValidationError{ + field: "StateData", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for ZeroData + case *ModelSequenceBatching_InitialState_DataFile: + if v == nil { + err := ModelSequenceBatching_InitialStateValidationError{ + field: "StateData", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for DataFile + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelSequenceBatching_InitialStateMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_InitialStateMultiError is an error wrapping multiple +// validation errors returned by +// ModelSequenceBatching_InitialState.ValidateAll() if the designated +// constraints aren't met. +type ModelSequenceBatching_InitialStateMultiError []error + +// Error returns a concatenation of all the error messages it wraps. 
+func (m ModelSequenceBatching_InitialStateMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelSequenceBatching_InitialStateMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_InitialStateValidationError is the validation error +// returned by ModelSequenceBatching_InitialState.Validate if the designated +// constraints aren't met. +type ModelSequenceBatching_InitialStateValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatching_InitialStateValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_InitialStateValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_InitialStateValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelSequenceBatching_InitialStateValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelSequenceBatching_InitialStateValidationError) ErrorName() string { + return "ModelSequenceBatching_InitialStateValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_InitialStateValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_InitialState.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_InitialStateValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_InitialStateValidationError{} + +// Validate checks the field values on ModelSequenceBatching_State with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelSequenceBatching_State) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_State with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_StateMultiError, or nil if none found. 
+func (m *ModelSequenceBatching_State) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_State) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for InputName + + // no validation rules for OutputName + + // no validation rules for DataType + + for idx, item := range m.GetInitialState() { + _, _ = idx, item + + if all { + switch v := interface{}(item).(type) { + case interface{ ValidateAll() error }: + if err := v.ValidateAll(); err != nil { + errors = append(errors, ModelSequenceBatching_StateValidationError{ + field: fmt.Sprintf("InitialState[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + case interface{ Validate() error }: + if err := v.Validate(); err != nil { + errors = append(errors, ModelSequenceBatching_StateValidationError{ + field: fmt.Sprintf("InitialState[%v]", idx), + reason: "embedded message failed validation", + cause: err, + }) + } + } + } else if v, ok := interface{}(item).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return ModelSequenceBatching_StateValidationError{ + field: fmt.Sprintf("InitialState[%v]", idx), + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + if len(errors) > 0 { + return ModelSequenceBatching_StateMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_StateMultiError is an error wrapping multiple +// validation errors returned by ModelSequenceBatching_State.ValidateAll() if +// the designated constraints aren't met. +type ModelSequenceBatching_StateMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatching_StateMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. 
+func (m ModelSequenceBatching_StateMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_StateValidationError is the validation error returned +// by ModelSequenceBatching_State.Validate if the designated constraints +// aren't met. +type ModelSequenceBatching_StateValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatching_StateValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_StateValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_StateValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelSequenceBatching_StateValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelSequenceBatching_StateValidationError) ErrorName() string { + return "ModelSequenceBatching_StateValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_StateValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_State.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_StateValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_StateValidationError{} + +// Validate checks the field values on ModelSequenceBatching_StrategyDirect +// with the rules defined in the proto definition for this message. If any +// rules are violated, the first error encountered is returned, or nil if +// there are no violations. 
+func (m *ModelSequenceBatching_StrategyDirect) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_StrategyDirect +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_StrategyDirectMultiError, or nil if none found. +func (m *ModelSequenceBatching_StrategyDirect) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_StrategyDirect) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for MaxQueueDelayMicroseconds + + // no validation rules for MinimumSlotUtilization + + if len(errors) > 0 { + return ModelSequenceBatching_StrategyDirectMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_StrategyDirectMultiError is an error wrapping multiple +// validation errors returned by +// ModelSequenceBatching_StrategyDirect.ValidateAll() if the designated +// constraints aren't met. +type ModelSequenceBatching_StrategyDirectMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatching_StrategyDirectMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelSequenceBatching_StrategyDirectMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_StrategyDirectValidationError is the validation error +// returned by ModelSequenceBatching_StrategyDirect.Validate if the designated +// constraints aren't met. +type ModelSequenceBatching_StrategyDirectValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. 
+func (e ModelSequenceBatching_StrategyDirectValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_StrategyDirectValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_StrategyDirectValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelSequenceBatching_StrategyDirectValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelSequenceBatching_StrategyDirectValidationError) ErrorName() string { + return "ModelSequenceBatching_StrategyDirectValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_StrategyDirectValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_StrategyDirect.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_StrategyDirectValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_StrategyDirectValidationError{} + +// Validate checks the field values on ModelSequenceBatching_StrategyOldest +// with the rules defined in the proto definition for this message. If any +// rules are violated, the first error encountered is returned, or nil if +// there are no violations. +func (m *ModelSequenceBatching_StrategyOldest) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelSequenceBatching_StrategyOldest +// with the rules defined in the proto definition for this message. If any +// rules are violated, the result is a list of violation errors wrapped in +// ModelSequenceBatching_StrategyOldestMultiError, or nil if none found. 
+func (m *ModelSequenceBatching_StrategyOldest) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelSequenceBatching_StrategyOldest) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for MaxCandidateSequences + + // no validation rules for MaxQueueDelayMicroseconds + + if len(errors) > 0 { + return ModelSequenceBatching_StrategyOldestMultiError(errors) + } + + return nil +} + +// ModelSequenceBatching_StrategyOldestMultiError is an error wrapping multiple +// validation errors returned by +// ModelSequenceBatching_StrategyOldest.ValidateAll() if the designated +// constraints aren't met. +type ModelSequenceBatching_StrategyOldestMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelSequenceBatching_StrategyOldestMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelSequenceBatching_StrategyOldestMultiError) AllErrors() []error { return m } + +// ModelSequenceBatching_StrategyOldestValidationError is the validation error +// returned by ModelSequenceBatching_StrategyOldest.Validate if the designated +// constraints aren't met. +type ModelSequenceBatching_StrategyOldestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelSequenceBatching_StrategyOldestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelSequenceBatching_StrategyOldestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelSequenceBatching_StrategyOldestValidationError) Cause() error { return e.cause } + +// Key function returns key value. 
+func (e ModelSequenceBatching_StrategyOldestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelSequenceBatching_StrategyOldestValidationError) ErrorName() string { + return "ModelSequenceBatching_StrategyOldestValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelSequenceBatching_StrategyOldestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelSequenceBatching_StrategyOldest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelSequenceBatching_StrategyOldestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelSequenceBatching_StrategyOldestValidationError{} + +// Validate checks the field values on ModelEnsembling_Step with the rules +// defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelEnsembling_Step) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelEnsembling_Step with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelEnsembling_StepMultiError, or nil if none found. 
+func (m *ModelEnsembling_Step) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelEnsembling_Step) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for ModelName + + // no validation rules for ModelVersion + + // no validation rules for InputMap + + // no validation rules for OutputMap + + // no validation rules for ModelNamespace + + if len(errors) > 0 { + return ModelEnsembling_StepMultiError(errors) + } + + return nil +} + +// ModelEnsembling_StepMultiError is an error wrapping multiple validation +// errors returned by ModelEnsembling_Step.ValidateAll() if the designated +// constraints aren't met. +type ModelEnsembling_StepMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelEnsembling_StepMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelEnsembling_StepMultiError) AllErrors() []error { return m } + +// ModelEnsembling_StepValidationError is the validation error returned by +// ModelEnsembling_Step.Validate if the designated constraints aren't met. +type ModelEnsembling_StepValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelEnsembling_StepValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelEnsembling_StepValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelEnsembling_StepValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelEnsembling_StepValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelEnsembling_StepValidationError) ErrorName() string { + return "ModelEnsembling_StepValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelEnsembling_StepValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelEnsembling_Step.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelEnsembling_StepValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelEnsembling_StepValidationError{} + +// Validate checks the field values on ModelWarmup_Input with the rules defined +// in the proto definition for this message. If any rules are violated, the +// first error encountered is returned, or nil if there are no violations. +func (m *ModelWarmup_Input) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelWarmup_Input with the rules +// defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelWarmup_InputMultiError, or nil if none found. 
+func (m *ModelWarmup_Input) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelWarmup_Input) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for DataType + + switch v := m.InputDataType.(type) { + case *ModelWarmup_Input_ZeroData: + if v == nil { + err := ModelWarmup_InputValidationError{ + field: "InputDataType", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for ZeroData + case *ModelWarmup_Input_RandomData: + if v == nil { + err := ModelWarmup_InputValidationError{ + field: "InputDataType", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for RandomData + case *ModelWarmup_Input_InputDataFile: + if v == nil { + err := ModelWarmup_InputValidationError{ + field: "InputDataType", + reason: "oneof value cannot be a typed-nil", + } + if !all { + return err + } + errors = append(errors, err) + } + // no validation rules for InputDataFile + default: + _ = v // ensures v is used + } + + if len(errors) > 0 { + return ModelWarmup_InputMultiError(errors) + } + + return nil +} + +// ModelWarmup_InputMultiError is an error wrapping multiple validation errors +// returned by ModelWarmup_Input.ValidateAll() if the designated constraints +// aren't met. +type ModelWarmup_InputMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelWarmup_InputMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelWarmup_InputMultiError) AllErrors() []error { return m } + +// ModelWarmup_InputValidationError is the validation error returned by +// ModelWarmup_Input.Validate if the designated constraints aren't met. 
+type ModelWarmup_InputValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelWarmup_InputValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelWarmup_InputValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelWarmup_InputValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelWarmup_InputValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e ModelWarmup_InputValidationError) ErrorName() string { + return "ModelWarmup_InputValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelWarmup_InputValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelWarmup_Input.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelWarmup_InputValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelWarmup_InputValidationError{} + +// Validate checks the field values on ModelRepositoryAgents_Agent with the +// rules defined in the proto definition for this message. If any rules are +// violated, the first error encountered is returned, or nil if there are no violations. +func (m *ModelRepositoryAgents_Agent) Validate() error { + return m.validate(false) +} + +// ValidateAll checks the field values on ModelRepositoryAgents_Agent with the +// rules defined in the proto definition for this message. If any rules are +// violated, the result is a list of violation errors wrapped in +// ModelRepositoryAgents_AgentMultiError, or nil if none found. 
+func (m *ModelRepositoryAgents_Agent) ValidateAll() error { + return m.validate(true) +} + +func (m *ModelRepositoryAgents_Agent) validate(all bool) error { + if m == nil { + return nil + } + + var errors []error + + // no validation rules for Name + + // no validation rules for Parameters + + if len(errors) > 0 { + return ModelRepositoryAgents_AgentMultiError(errors) + } + + return nil +} + +// ModelRepositoryAgents_AgentMultiError is an error wrapping multiple +// validation errors returned by ModelRepositoryAgents_Agent.ValidateAll() if +// the designated constraints aren't met. +type ModelRepositoryAgents_AgentMultiError []error + +// Error returns a concatenation of all the error messages it wraps. +func (m ModelRepositoryAgents_AgentMultiError) Error() string { + var msgs []string + for _, err := range m { + msgs = append(msgs, err.Error()) + } + return strings.Join(msgs, "; ") +} + +// AllErrors returns a list of validation violation errors. +func (m ModelRepositoryAgents_AgentMultiError) AllErrors() []error { return m } + +// ModelRepositoryAgents_AgentValidationError is the validation error returned +// by ModelRepositoryAgents_Agent.Validate if the designated constraints +// aren't met. +type ModelRepositoryAgents_AgentValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e ModelRepositoryAgents_AgentValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e ModelRepositoryAgents_AgentValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e ModelRepositoryAgents_AgentValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e ModelRepositoryAgents_AgentValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e ModelRepositoryAgents_AgentValidationError) ErrorName() string { + return "ModelRepositoryAgents_AgentValidationError" +} + +// Error satisfies the builtin error interface +func (e ModelRepositoryAgents_AgentValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sModelRepositoryAgents_Agent.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = ModelRepositoryAgents_AgentValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = ModelRepositoryAgents_AgentValidationError{} diff --git a/pkg/apis/inference/v1/model_config.proto b/pkg/apis/inference/v1/model_config.proto new file mode 100644 index 0000000..696db41 --- /dev/null +++ b/pkg/apis/inference/v1/model_config.proto @@ -0,0 +1,1990 @@ +// Copyright 2018-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of NVIDIA CORPORATION nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +// PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Copyright (c) 2018, TensorFlow Authors. All rights reserved. + +syntax = "proto3"; + +package inference.v1; + +option go_package = "d7y.io/api/pkg/apis/inference/v1;inference"; + +//@@.. cpp:namespace:: inference + +//@@ +//@@.. cpp:enum:: DataType +//@@ +//@@ Data types supported for input and output tensors. +//@@ +enum DataType { + //@@ .. cpp:enumerator:: DataType::INVALID = 0 + TYPE_INVALID = 0; + + //@@ .. cpp:enumerator:: DataType::BOOL = 1 + TYPE_BOOL = 1; + + //@@ .. cpp:enumerator:: DataType::UINT8 = 2 + TYPE_UINT8 = 2; + //@@ .. cpp:enumerator:: DataType::UINT16 = 3 + TYPE_UINT16 = 3; + //@@ .. cpp:enumerator:: DataType::UINT32 = 4 + TYPE_UINT32 = 4; + //@@ .. cpp:enumerator:: DataType::UINT64 = 5 + TYPE_UINT64 = 5; + + //@@ .. cpp:enumerator:: DataType::INT8 = 6 + TYPE_INT8 = 6; + //@@ .. cpp:enumerator:: DataType::INT16 = 7 + TYPE_INT16 = 7; + //@@ .. cpp:enumerator:: DataType::INT32 = 8 + TYPE_INT32 = 8; + //@@ .. cpp:enumerator:: DataType::INT64 = 9 + TYPE_INT64 = 9; + + //@@ .. cpp:enumerator:: DataType::FP16 = 10 + TYPE_FP16 = 10; + //@@ .. cpp:enumerator:: DataType::FP32 = 11 + TYPE_FP32 = 11; + //@@ .. cpp:enumerator:: DataType::FP64 = 12 + TYPE_FP64 = 12; + + //@@ .. cpp:enumerator:: DataType::STRING = 13 + TYPE_STRING = 13; + + //@@ .. cpp:enumerator:: DataType::BF16 = 14 + TYPE_BF16 = 14; +} + +//@@ +//@@ .. 
cpp:var:: message ModelRateLimiter +//@@ +//@@ The specifications required by the rate limiter to properly +//@@ schedule the inference requests across the different models +//@@ and their instances. +//@@ +message ModelRateLimiter +{ + //@@ .. cpp:var:: message Resource + //@@ + //@@ The resource property. + //@@ + message Resource + { + //@@ .. cpp:var:: string name + //@@ + //@@ The name associated with the resource. + //@@ + string name = 1; + + //@@ .. cpp:var:: bool global + //@@ + //@@ Whether or not the resource is global. If true then the resource + //@@ is assumed to be shared among the devices otherwise specified + //@@ count of the resource is assumed for each device associated + //@@ with the instance. + //@@ + bool global = 2; + + //@@ .. cpp:var:: uint32 count + //@@ + //@@ The number of resources required for the execution of the model + //@@ instance. + //@@ + uint32 count = 3; + } + + //@@ .. cpp:var:: Resource resources (repeated) + //@@ + //@@ The resources required to execute the request on a model instance. + //@@ Resources are just names with a corresponding count. The execution + //@@ of the instance will be blocked until the specificied resources are + //@@ available. By default an instance uses no rate-limiter resources. + //@@ + repeated Resource resources = 1; + + //@@ .. cpp:var:: uint32 priority + //@@ + //@@ The optional weighting value to be used for prioritizing across + //@@ instances. An instance with priority 2 will be given 1/2 the + //@@ number of scheduling chances as an instance_group with priority + //@@ 1. The default priority is 1. The priority of value 0 will be + //@@ treated as priority 1. + //@@ + uint32 priority = 2; +} + +//@@ +//@@.. cpp:var:: message ModelInstanceGroup +//@@ +//@@ A group of one or more instances of a model and resources made +//@@ available for those instances. +//@@ +message ModelInstanceGroup +{ + //@@ + //@@ .. cpp:enum:: Kind + //@@ + //@@ Kind of this instance group. 
+ //@@ + enum Kind { + //@@ .. cpp:enumerator:: Kind::KIND_AUTO = 0 + //@@ + //@@ This instance group represents instances that can run on either + //@@ CPU or GPU. If all GPUs listed in 'gpus' are available then + //@@ instances will be created on GPU(s), otherwise instances will + //@@ be created on CPU. + //@@ + KIND_AUTO = 0; + + //@@ .. cpp:enumerator:: Kind::KIND_GPU = 1 + //@@ + //@@ This instance group represents instances that must run on the + //@@ GPU. + //@@ + KIND_GPU = 1; + + //@@ .. cpp:enumerator:: Kind::KIND_CPU = 2 + //@@ + //@@ This instance group represents instances that must run on the + //@@ CPU. + //@@ + KIND_CPU = 2; + + //@@ .. cpp:enumerator:: Kind::KIND_MODEL = 3 + //@@ + //@@ This instance group represents instances that should run on the + //@@ CPU and/or GPU(s) as specified by the model or backend itself. + //@@ The inference server will not override the model/backend + //@@ settings. + //@@ + KIND_MODEL = 3; + } + + //@@ + //@@ .. cpp:var:: message SecondaryDevice + //@@ + //@@ A secondary device required for a model instance. + //@@ + message SecondaryDevice + { + //@@ + //@@ .. cpp:enum:: SecondaryDeviceKind + //@@ + //@@ The kind of the secondary device. + //@@ + enum SecondaryDeviceKind { + //@@ .. cpp:enumerator:: SecondaryDeviceKind::KIND_NVDLA = 0 + //@@ + //@@ An NVDLA core. http://nvdla.org + //@@ Currently KIND_NVDLA is only supported by the TensorRT backend. + //@@ + KIND_NVDLA = 0; + } + + //@@ .. cpp:var:: SecondaryDeviceKind kind + //@@ + //@@ The secondary device kind. + //@@ + SecondaryDeviceKind kind = 1; + + //@@ .. cpp:var:: int64 device_id + //@@ + //@@ Identifier for the secondary device. + //@@ + int64 device_id = 2; + } + + //@@ .. cpp:var:: string name + //@@ + //@@ Optional name of this group of instances. If not specified the + //@@ name will be formed as _. The name of + //@@ individual instances will be further formed by a unique instance + //@@ number and GPU index: + //@@ + string name = 1; + + //@@ .. 
cpp:var:: Kind kind + //@@ + //@@ The kind of this instance group. Default is KIND_AUTO. If + //@@ KIND_AUTO or KIND_GPU then both 'count' and 'gpu' are valid and + //@@ may be specified. If KIND_CPU or KIND_MODEL only 'count' is valid + //@@ and 'gpu' cannot be specified. + //@@ + Kind kind = 4; + + //@@ .. cpp:var:: int32 count + //@@ + //@@ For a group assigned to GPU, the number of instances created for + //@@ each GPU listed in 'gpus'. For a group assigned to CPU the number + //@@ of instances created. Default is 1. + int32 count = 2; + + //@@ .. cpp:var:: ModelRateLimiter rate_limiter + //@@ + //@@ The rate limiter specific settings to be associated with this + //@@ instance group. Optional, if not specified no rate limiting + //@@ will be applied to this instance group. + //@@ + ModelRateLimiter rate_limiter = 6; + + //@@ .. cpp:var:: int32 gpus (repeated) + //@@ + //@@ GPU(s) where instances should be available. For each GPU listed, + //@@ 'count' instances of the model will be available. Setting 'gpus' + //@@ to empty (or not specifying at all) is eqivalent to listing all + //@@ available GPUs. + //@@ + repeated int32 gpus = 3; + + //@@ .. cpp:var:: SecondaryDevice secondary_devices (repeated) + //@@ + //@@ Secondary devices that are required by instances specified by this + //@@ instance group. Optional. + //@@ + repeated SecondaryDevice secondary_devices = 8; + + //@@ .. cpp:var:: string profile (repeated) + //@@ + //@@ For TensorRT models containing multiple optimization profile, this + //@@ parameter specifies a set of optimization profiles available to this + //@@ instance group. The inference server will choose the optimal profile + //@@ based on the shapes of the input tensors. This field should lie + //@@ between 0 and - 1 + //@@ and be specified only for TensorRT backend, otherwise an error will + //@@ be generated. If not specified, the server will select the first + //@@ optimization profile by default. 
+ //@@ + repeated string profile = 5; + + //@@ .. cpp:var:: bool passive + //@@ + //@@ Whether the instances within this instance group will be accepting + //@@ inference requests from the scheduler. If true, the instances will + //@@ not be added to the scheduler. Default value is false. + //@@ + bool passive = 7; + + //@@ .. cpp:var:: string host_policy + //@@ + //@@ The host policy name that the instance to be associated with. + //@@ The default value is set to reflect the device kind of the instance, + //@@ for instance, KIND_CPU is "cpu", KIND_MODEL is "model" and + //@@ KIND_GPU is "gpu_". + //@@ + string host_policy = 9; +} + +//@@ +//@@.. cpp:var:: message ModelTensorReshape +//@@ +//@@ Reshape specification for input and output tensors. +//@@ +message ModelTensorReshape +{ + //@@ .. cpp:var:: int64 shape (repeated) + //@@ + //@@ The shape to use for reshaping. + //@@ + repeated int64 shape = 1; +} + +//@@ +//@@.. cpp:var:: message ModelInput +//@@ +//@@ An input required by the model. +//@@ +message ModelInput +{ + //@@ + //@@ .. cpp:enum:: Format + //@@ + //@@ The format for the input. + //@@ + enum Format { + //@@ .. cpp:enumerator:: Format::FORMAT_NONE = 0 + //@@ + //@@ The input has no specific format. This is the default. + //@@ + FORMAT_NONE = 0; + + //@@ .. cpp:enumerator:: Format::FORMAT_NHWC = 1 + //@@ + //@@ HWC image format. Tensors with this format require 3 dimensions + //@@ if the model does not support batching (max_batch_size = 0) or 4 + //@@ dimensions if the model does support batching (max_batch_size + //@@ >= 1). In either case the 'dims' below should only specify the + //@@ 3 non-batch dimensions (i.e. HWC or CHW). + //@@ + FORMAT_NHWC = 1; + + //@@ .. cpp:enumerator:: Format::FORMAT_NCHW = 2 + //@@ + //@@ CHW image format. Tensors with this format require 3 dimensions + //@@ if the model does not support batching (max_batch_size = 0) or 4 + //@@ dimensions if the model does support batching (max_batch_size + //@@ >= 1). 
In either case the 'dims' below should only specify the + //@@ 3 non-batch dimensions (i.e. HWC or CHW). + //@@ + FORMAT_NCHW = 2; + } + + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the input. + //@@ + string name = 1; + + //@@ .. cpp:var:: DataType data_type + //@@ + //@@ The data-type of the input. + //@@ + DataType data_type = 2; + + //@@ .. cpp:var:: Format format + //@@ + //@@ The format of the input. Optional. + //@@ + Format format = 3; + + //@@ .. cpp:var:: int64 dims (repeated) + //@@ + //@@ The dimensions/shape of the input tensor that must be provided + //@@ when invoking the inference API for this model. + //@@ + repeated int64 dims = 4; + + //@@ .. cpp:var:: ModelTensorReshape reshape + //@@ + //@@ The shape expected for this input by the backend. The input will + //@@ be reshaped to this before being presented to the backend. The + //@@ reshape must have the same number of elements as the input shape + //@@ specified by 'dims'. Optional. + //@@ + ModelTensorReshape reshape = 5; + + //@@ .. cpp:var:: bool is_shape_tensor + //@@ + //@@ Whether or not the input is a shape tensor to the model. This field + //@@ is currently supported only for the TensorRT model. An error will be + //@@ generated if this specification does not comply with underlying + //@@ model. + //@@ + bool is_shape_tensor = 6; + + //@@ .. cpp:var:: bool allow_ragged_batch + //@@ + //@@ Whether or not the input is allowed to be "ragged" in a dynamically + //@@ created batch. Default is false indicating that two requests will + //@@ only be batched if this tensor has the same shape in both requests. + //@@ True indicates that two requests can be batched even if this tensor + //@@ has a different shape in each request. + //@@ + bool allow_ragged_batch = 7; + + //@@ .. cpp:var:: bool optional + //@@ + //@@ Whether or not the input is optional for the model execution. + //@@ If true, the input is not required in the inference request. + //@@ Default value is false. 
+ //@@ + bool optional = 8; +} + +//@@ +//@@.. cpp:var:: message ModelOutput +//@@ +//@@ An output produced by the model. +//@@ +message ModelOutput +{ + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the output. + //@@ + string name = 1; + + //@@ .. cpp:var:: DataType data_type + //@@ + //@@ The data-type of the output. + //@@ + DataType data_type = 2; + + //@@ .. cpp:var:: int64 dims (repeated) + //@@ + //@@ The dimensions/shape of the output tensor. + //@@ + repeated int64 dims = 3; + + //@@ .. cpp:var:: ModelTensorReshape reshape + //@@ + //@@ The shape produced for this output by the backend. The output will + //@@ be reshaped from this to the shape specifed in 'dims' before being + //@@ returned in the inference response. The reshape must have the same + //@@ number of elements as the output shape specified by 'dims'. Optional. + //@@ + ModelTensorReshape reshape = 5; + + //@@ .. cpp:var:: string label_filename + //@@ + //@@ The label file associated with this output. Should be specified only + //@@ for outputs that represent classifications. Optional. + //@@ + string label_filename = 4; + + + //@@ .. cpp:var:: bool is_shape_tensor + //@@ + //@@ Whether or not the output is a shape tensor to the model. This field + //@@ is currently supported only for the TensorRT model. An error will be + //@@ generated if this specification does not comply with underlying + //@@ model. + //@@ + bool is_shape_tensor = 6; +} + +//@@ .. cpp:var:: message BatchInput +//@@ +//@@ A batch input is an additional input that must be added by +//@@ the backend based on all the requests in a batch. +//@@ +message BatchInput +{ + //@@ + //@@ .. cpp:enum:: Kind + //@@ + //@@ The kind of the batch input. + //@@ + enum Kind { + //@@ .. cpp:enumerator:: Kind::BATCH_ELEMENT_COUNT = 0 + //@@ + //@@ The element count of the 'source_input' will be added as + //@@ input with shape [1]. + //@@ + BATCH_ELEMENT_COUNT = 0; + + //@@ .. 
cpp:enumerator:: Kind::BATCH_ACCUMULATED_ELEMENT_COUNT = 1 + //@@ + //@@ The accumulated element count of the 'source_input' will be + //@@ added as input with shape [1]. For example, if there is a + //@@ batch of two request, each with 2 elements, an input of value + //@@ 2 will be added to the first request, and an input of value + //@@ 4 will be added to the second request. + //@@ + BATCH_ACCUMULATED_ELEMENT_COUNT = 1; + + //@@ .. cpp:enumerator:: + //@@ Kind::BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO = 2 + //@@ + //@@ The accumulated element count of the 'source_input' will be + //@@ added as input with shape [1], except for the first request + //@@ in the batch. For the first request in the batch, the input + //@@ will have shape [2] where the first element is value 0. + //@@ + BATCH_ACCUMULATED_ELEMENT_COUNT_WITH_ZERO = 2; + + //@@ .. cpp:enumerator:: Kind::BATCH_MAX_ELEMENT_COUNT_AS_SHAPE = 3 + //@@ + //@@ Among the requests in the batch, the max element count of the + //@@ 'source_input' will be added as input with shape + //@@ [max_element_count] for the first request in the batch. + //@@ For other requests, such input will be with shape [0]. + //@@ The data of the tensor will be uninitialized. + //@@ + BATCH_MAX_ELEMENT_COUNT_AS_SHAPE = 3; + + //@@ .. cpp:enumerator:: Kind::BATCH_ITEM_SHAPE = 4 + //@@ + //@@ Among the requests in the batch, the shape of the + //@@ 'source_input' will be added as input with shape + //@@ [batch_size, len(input_dim)]. For example, if one + //@@ batch-2 input with shape [3, 1] and batch-1 input + //@@ with shape [2, 2] are batched, the batch input will + //@@ have shape [3, 2] and value [ [3, 1], [3, 1], [2, 2]]. + //@@ + BATCH_ITEM_SHAPE = 4; + + //@@ .. cpp:enumerator:: Kind::BATCH_ITEM_SHAPE_FLATTEN = 5 + //@@ + //@@ Among the requests in the batch, the shape of the + //@@ 'source_input' will be added as input with single dimensional + //@@ shape [batch_size * len(input_dim)]. 
For example, if one + //@@ batch-2 input with shape [3, 1] and batch-1 input + //@@ with shape [2, 2] are batched, the batch input will + //@@ have shape [6] and value [3, 1, 3, 1, 2, 2]. + //@@ + BATCH_ITEM_SHAPE_FLATTEN = 5; + } + + //@@ .. cpp:var:: Kind kind + //@@ + //@@ The kind of this batch input. + //@@ + Kind kind = 1; + + //@@ .. cpp:var:: string target_name (repeated) + //@@ + //@@ The name of the model inputs that the backend will create + //@@ for this batch input. + //@@ + repeated string target_name = 2; + + //@@ .. cpp:var:: DataType data_type + //@@ + //@@ The input's datatype. The data type can be TYPE_INT32 or + //@@ TYPE_FP32. + //@@ + DataType data_type = 3; + + //@@ .. cpp:var:: string source_input (repeated) + //@@ + //@@ The backend derives the value for each batch input from one or + //@@ more other inputs. 'source_input' gives the names of those + //@@ inputs. + //@@ + repeated string source_input = 4; +} + +//@@.. cpp:var:: message BatchOutput +//@@ +//@@ A batch output is an output produced by the model that must be handled +//@@ differently by the backend based on all the requests in a batch. +//@@ +message BatchOutput +{ + //@@ + //@@ .. cpp:enum:: Kind + //@@ + //@@ The kind of the batch output. + //@@ + enum Kind { + //@@ .. cpp:enumerator:: Kind::BATCH_SCATTER_WITH_INPUT_SHAPE = 0 + //@@ + //@@ The output should be scattered according to the shape of + //@@ 'source_input'. The dynamic dimension of the output will + //@@ be set to the value of the same dimension in the input. + //@@ + BATCH_SCATTER_WITH_INPUT_SHAPE = 0; + } + + //@@ .. cpp:var:: string target_name (repeated) + //@@ + //@@ The name of the outputs to be produced by this batch output + //@@ specification. + //@@ + repeated string target_name = 1; + + //@@ .. cpp:var:: Kind kind + //@@ + //@@ The kind of this batch output. + //@@ + Kind kind = 2; + + //@@ .. 
cpp:var:: string source_input (repeated) + //@@ + //@@ The backend derives each batch output from one or more inputs. + //@@ 'source_input' gives the names of those inputs. + //@@ + repeated string source_input = 3; +} + +//@@ +//@@.. cpp:var:: message ModelVersionPolicy +//@@ +//@@ Policy indicating which versions of a model should be made +//@@ available by the inference server. +//@@ +message ModelVersionPolicy +{ + //@@ .. cpp:var:: message Latest + //@@ + //@@ Serve only the latest version(s) of a model. This is + //@@ the default policy. + //@@ + message Latest + { + //@@ .. cpp:var:: uint32 num_versions + //@@ + //@@ Serve only the 'num_versions' highest-numbered versions. T + //@@ The default value of 'num_versions' is 1, indicating that by + //@@ default only the single highest-number version of a + //@@ model will be served. + //@@ + uint32 num_versions = 1; + } + + //@@ .. cpp:var:: message All + //@@ + //@@ Serve all versions of the model. + //@@ + message All {} + + //@@ .. cpp:var:: message Specific + //@@ + //@@ Serve only specific versions of the model. + //@@ + message Specific + { + //@@ .. cpp:var:: int64 versions (repeated) + //@@ + //@@ The specific versions of the model that will be served. + //@@ + repeated int64 versions = 1; + } + + //@@ .. cpp:var:: oneof policy_choice + //@@ + //@@ Each model must implement only a single version policy. The + //@@ default policy is 'Latest'. + //@@ + oneof policy_choice + { + //@@ .. cpp:var:: Latest latest + //@@ + //@@ Serve only latest version(s) of the model. + //@@ + Latest latest = 1; + + //@@ .. cpp:var:: All all + //@@ + //@@ Serve all versions of the model. + //@@ + All all = 2; + + //@@ .. cpp:var:: Specific specific + //@@ + //@@ Serve only specific version(s) of the model. + //@@ + Specific specific = 3; + } +} + +//@@ +//@@.. cpp:var:: message ModelOptimizationPolicy +//@@ +//@@ Optimization settings for a model. 
These settings control if/how a +//@@ model is optimized and prioritized by the backend framework when +//@@ it is loaded. +//@@ +message ModelOptimizationPolicy +{ + //@@ + //@@ .. cpp:var:: message Graph + //@@ + //@@ Enable generic graph optimization of the model. If not specified + //@@ the framework's default level of optimization is used. Supports + //@@ TensorFlow graphdef and savedmodel and Onnx models. For TensorFlow + //@@ causes XLA to be enabled/disabled for the model. For Onnx defaults + //@@ to enabling all optimizations, -1 enables only basic optimizations, + //@@ +1 enables only basic and extended optimizations. + //@@ + message Graph + { + //@@ .. cpp:var:: int32 level + //@@ + //@@ The optimization level. Defaults to 0 (zero) if not specified. + //@@ + //@@ - -1: Disabled + //@@ - 0: Framework default + //@@ - 1+: Enable optimization level (greater values indicate + //@@ higher optimization levels) + //@@ + int32 level = 1; + } + + //@@ + //@@ .. cpp:enum:: ModelPriority + //@@ + //@@ Model priorities. A model will be given scheduling and execution + //@@ preference over models at lower priorities. Current model + //@@ priorities only work for TensorRT models. + //@@ + enum ModelPriority { + //@@ .. cpp:enumerator:: ModelPriority::PRIORITY_DEFAULT = 0 + //@@ + //@@ The default model priority. + //@@ + PRIORITY_DEFAULT = 0; + + //@@ .. cpp:enumerator:: ModelPriority::PRIORITY_MAX = 1 + //@@ + //@@ The maximum model priority. + //@@ + PRIORITY_MAX = 1; + + //@@ .. cpp:enumerator:: ModelPriority::PRIORITY_MIN = 2 + //@@ + //@@ The minimum model priority. + //@@ + PRIORITY_MIN = 2; + } + + //@@ + //@@ .. cpp:var:: message Cuda + //@@ + //@@ CUDA-specific optimization settings. + //@@ + message Cuda + { + //@@ .. cpp:var:: message GraphSpec + //@@ + //@@ Specification of the CUDA graph to be captured. + //@@ + message GraphSpec + { + //@@ .. cpp:var:: message Dims + //@@ + //@@ Specification of tensor dimension. + //@@ + message Shape + { + //@@ .. 
cpp:var:: int64 dim (repeated) + //@@ + //@@ The dimension. + //@@ + repeated int64 dim = 1; + } + + message LowerBound + { + //@@ .. cpp:var:: int32 batch_size + //@@ + //@@ The batch size of the CUDA graph. If 'max_batch_size' is 0, + //@@ 'batch_size' must be set to 0. Otherwise, 'batch_size' must + //@@ be set to value between 1 and 'max_batch_size'. + //@@ + int32 batch_size = 1; + + //@@ .. cpp:var:: map input + //@@ + //@@ The specification of the inputs. 'Shape' is the shape of + //@@ the input without batching dimension. + //@@ + map input = 2; + } + + //@@ .. cpp:var:: int32 batch_size + //@@ + //@@ The batch size of the CUDA graph. If 'max_batch_size' is 0, + //@@ 'batch_size' must be set to 0. Otherwise, 'batch_size' must + //@@ be set to value between 1 and 'max_batch_size'. + //@@ + int32 batch_size = 1; + + //@@ .. cpp:var:: map input + //@@ + //@@ The specification of the inputs. 'Shape' is the shape of the + //@@ input without batching dimension. + //@@ + map input = 2; + + //@@ .. cpp:var:: LowerBound graph_lower_bound + //@@ + //@@ Specify the lower bound of the CUDA graph. Optional. + //@@ If specified, the graph can be used for input shapes and + //@@ batch sizes that are in closed interval between the lower + //@@ bound specification and graph specification. For dynamic + //@@ shape model, this allows CUDA graphs to be launched + //@@ frequently without capturing all possible shape combinations. + //@@ However, using graph for shape combinations different from + //@@ the one used for capturing introduces uninitialized data for + //@@ execution and it may distort the inference result if + //@@ the model is sensitive to uninitialized data. + //@@ + LowerBound graph_lower_bound = 3; + } + + //@@ .. cpp:var:: bool graphs + //@@ + //@@ Use CUDA graphs API to capture model operations and execute + //@@ them more efficiently. Default value is false. + //@@ Currently only recognized by TensorRT backend. + //@@ + bool graphs = 1; + + //@@ .. 
cpp:var:: bool busy_wait_events + //@@ + //@@ Use busy-waiting to synchronize CUDA events to achieve minimum + //@@ latency from event complete to host thread to be notified, with + //@@ the cost of high CPU load. Default value is false. + //@@ Currently only recognized by TensorRT backend. + //@@ + bool busy_wait_events = 2; + + //@@ .. cpp:var:: GraphSpec graph_spec (repeated) + //@@ + //@@ Specification of the CUDA graph to be captured. If not specified + //@@ and 'graphs' is true, the default CUDA graphs will be captured + //@@ based on model settings. + //@@ Currently only recognized by TensorRT backend. + //@@ + repeated GraphSpec graph_spec = 3; + + //@@ .. cpp:var:: bool output_copy_stream + //@@ + //@@ Uses a CUDA stream separate from the inference stream to copy the + //@@ output to host. However, be aware that setting this option to + //@@ true will lead to an increase in the memory consumption of the + //@@ model as Triton will allocate twice as much GPU memory for its + //@@ I/O tensor buffers. Default value is false. + //@@ Currently only recognized by TensorRT backend. + //@@ + bool output_copy_stream = 4; + } + + //@@ + //@@ .. cpp:var:: message ExecutionAccelerators + //@@ + //@@ Specify the preferred execution accelerators to be used to execute + //@@ the model. Currently only recognized by ONNX Runtime backend and + //@@ TensorFlow backend. + //@@ + //@@ For ONNX Runtime backend, it will deploy the model with the execution + //@@ accelerators by priority, the priority is determined based on the + //@@ order that they are set, i.e. the provider at the front has highest + //@@ priority. Overall, the priority will be in the following order: + //@@ (if instance is on GPU) + //@@ CUDA Execution Provider (if instance is on GPU) + //@@ + //@@ Default CPU Execution Provider + //@@ + message ExecutionAccelerators + { + //@@ + //@@ .. cpp:var:: message Accelerator + //@@ + //@@ Specify the accelerator to be used to execute the model. 
+ //@@ Accelerator with the same name may accept different parameters + //@@ depending on the backends. + //@@ + message Accelerator + { + //@@ .. cpp:var:: string name + //@@ + //@@ The name of the execution accelerator. + //@@ + string name = 1; + + //@@ .. cpp:var:: map parameters + //@@ + //@@ Additional paremeters used to configure the accelerator. + //@@ + map parameters = 2; + } + + //@@ .. cpp:var:: Accelerator gpu_execution_accelerator (repeated) + //@@ + //@@ The preferred execution provider to be used if the model instance + //@@ is deployed on GPU. + //@@ + //@@ For ONNX Runtime backend, possible value is "tensorrt" as name, + //@@ and no parameters are required. + //@@ + //@@ For TensorFlow backend, possible values are "tensorrt", + //@@ "auto_mixed_precision", "gpu_io". + //@@ + //@@ For "tensorrt", the following parameters can be specified: + //@@ "precision_mode": The precision used for optimization. + //@@ Allowed values are "FP32" and "FP16". Default value is "FP32". + //@@ + //@@ "max_cached_engines": The maximum number of cached TensorRT + //@@ engines in dynamic TensorRT ops. Default value is 100. + //@@ + //@@ "minimum_segment_size": The smallest model subgraph that will + //@@ be considered for optimization by TensorRT. Default value is 3. + //@@ + //@@ "max_workspace_size_bytes": The maximum GPU memory the model + //@@ can use temporarily during execution. Default value is 1GB. + //@@ + //@@ For "auto_mixed_precision", no parameters are required. If set, + //@@ the model will try to use FP16 for better performance. + //@@ This optimization can not be set with "tensorrt". + //@@ + //@@ For "gpu_io", no parameters are required. If set, the model will + //@@ be executed using TensorFlow Callable API to set input and output + //@@ tensors in GPU memory if possible, which can reduce data transfer + //@@ overhead if the model is used in ensemble. 
However, the Callable + //@@ object will be created on model creation and it will request all + //@@ outputs for every model execution, which may impact the + //@@ performance if a request does not require all outputs. This + //@@ optimization will only take affect if the model instance is + //@@ created with KIND_GPU. + //@@ + repeated Accelerator gpu_execution_accelerator = 1; + + //@@ .. cpp:var:: Accelerator cpu_execution_accelerator (repeated) + //@@ + //@@ The preferred execution provider to be used if the model instance + //@@ is deployed on CPU. + //@@ + //@@ For ONNX Runtime backend, possible value is "openvino" as name, + //@@ and no parameters are required. + //@@ + repeated Accelerator cpu_execution_accelerator = 2; + } + + //@@ + //@@ .. cpp:var:: message PinnedMemoryBuffer + //@@ + //@@ Specify whether to use a pinned memory buffer when transferring data + //@@ between non-pinned system memory and GPU memory. Using a pinned + //@@ memory buffer for system from/to GPU transfers will typically provide + //@@ increased performance. For example, in the common use case where the + //@@ request provides inputs and delivers outputs via non-pinned system + //@@ memory, if the model instance accepts GPU IOs, the inputs will be + //@@ processed by two copies: from non-pinned system memory to pinned + //@@ memory, and from pinned memory to GPU memory. Similarly, pinned + //@@ memory will be used for delivering the outputs. + //@@ + message PinnedMemoryBuffer + { + //@@ .. cpp:var:: bool enable + //@@ + //@@ Use pinned memory buffer. Default is true. + //@@ + bool enable = 1; + } + + //@@ .. cpp:var:: Graph graph + //@@ + //@@ The graph optimization setting for the model. Optional. + //@@ + Graph graph = 1; + + //@@ .. cpp:var:: ModelPriority priority + //@@ + //@@ The priority setting for the model. Optional. + //@@ + ModelPriority priority = 2; + + //@@ .. cpp:var:: Cuda cuda + //@@ + //@@ CUDA-specific optimization settings. Optional. 
+ //@@ + Cuda cuda = 3; + + //@@ .. cpp:var:: ExecutionAccelerators execution_accelerators + //@@ + //@@ The accelerators used for the model. Optional. + //@@ + ExecutionAccelerators execution_accelerators = 4; + + //@@ .. cpp:var:: PinnedMemoryBuffer input_pinned_memory + //@@ + //@@ Use pinned memory buffer when the data transfer for inputs + //@@ is between GPU memory and non-pinned system memory. + //@@ Default is true. + //@@ + PinnedMemoryBuffer input_pinned_memory = 5; + + //@@ .. cpp:var:: PinnedMemoryBuffer output_pinned_memory + //@@ + //@@ Use pinned memory buffer when the data transfer for outputs + //@@ is between GPU memory and non-pinned system memory. + //@@ Default is true. + //@@ + PinnedMemoryBuffer output_pinned_memory = 6; + + //@@ .. cpp:var:: uint32 gather_kernel_buffer_threshold + //@@ + //@@ The backend may use a gather kernel to gather input data if the + //@@ device has direct access to the source buffer and the destination + //@@ buffer. In such case, the gather kernel will be used only if the + //@@ number of buffers to be gathered is greater or equal to + //@@ the specifed value. If 0, the gather kernel will be disabled. + //@@ Default value is 0. + //@@ Currently only recognized by TensorRT backend. + //@@ + uint32 gather_kernel_buffer_threshold = 7; + + //@@ .. cpp:var:: bool eager_batching + //@@ + //@@ Start preparing the next batch before the model instance is ready + //@@ for the next inference. This option can be used to overlap the + //@@ batch preparation with model execution, with the trade-off that + //@@ the next batch might be smaller than what it could have been. + //@@ Default value is false. + //@@ Currently only recognized by TensorRT backend. + //@@ + bool eager_batching = 8; +} + +//@@ +//@@.. cpp:var:: message ModelQueuePolicy +//@@ +//@@ Queue policy for inference requests. +//@@ +message ModelQueuePolicy +{ + //@@ + //@@ .. cpp:enum:: TimeoutAction + //@@ + //@@ The action applied to timed-out requests. 
+ //@@ + enum TimeoutAction { + //@@ .. cpp:enumerator:: Action::REJECT = 0 + //@@ + //@@ Reject the request and return error message accordingly. + //@@ + REJECT = 0; + + //@@ .. cpp:enumerator:: Action::DELAY = 1 + //@@ + //@@ Delay the request until all other requests at the same + //@@ (or higher) priority levels that have not reached their timeouts + //@@ are processed. A delayed request will eventually be processed, + //@@ but may be delayed indefinitely due to newly arriving requests. + //@@ + DELAY = 1; + } + + //@@ + //@@ .. cpp:var:: TimeoutAction timeout_action + //@@ + //@@ The action applied to timed-out request. + //@@ The default action is REJECT. + //@@ + TimeoutAction timeout_action = 1; + + //@@ + //@@ .. cpp:var:: uint64 default_timeout_microseconds + //@@ + //@@ The default timeout for every request, in microseconds. + //@@ The default value is 0 which indicates that no timeout is set. + //@@ + uint64 default_timeout_microseconds = 2; + + //@@ + //@@ .. cpp:var:: bool allow_timeout_override + //@@ + //@@ Whether individual request can override the default timeout value. + //@@ When true, individual requests can set a timeout that is less than + //@@ the default timeout value but may not increase the timeout. + //@@ The default value is false. + //@@ + bool allow_timeout_override = 3; + + //@@ + //@@ .. cpp:var:: uint32 max_queue_size + //@@ + //@@ The maximum queue size for holding requests. A request will be + //@@ rejected immediately if it can't be enqueued because the queue is + //@@ full. The default value is 0 which indicates that no maximum + //@@ queue size is enforced. + //@@ + uint32 max_queue_size = 4; +} + +//@@ +//@@.. cpp:var:: message ModelDynamicBatching +//@@ +//@@ Dynamic batching configuration. These settings control how dynamic +//@@ batching operates for the model. +//@@ +message ModelDynamicBatching +{ + //@@ .. cpp:var:: int32 preferred_batch_size (repeated) + //@@ + //@@ Preferred batch sizes for dynamic batching. 
If a batch of one of + //@@ these sizes can be formed it will be executed immediately. If + //@@ not specified a preferred batch size will be chosen automatically + //@@ based on model and GPU characteristics. + //@@ + repeated int32 preferred_batch_size = 1; + + //@@ .. cpp:var:: uint64 max_queue_delay_microseconds + //@@ + //@@ The maximum time, in microseconds, a request will be delayed in + //@@ the scheduling queue to wait for additional requests for + //@@ batching. Default is 0. + //@@ + uint64 max_queue_delay_microseconds = 2; + + //@@ .. cpp:var:: bool preserve_ordering + //@@ + //@@ Should the dynamic batcher preserve the ordering of responses to + //@@ match the order of requests received by the scheduler. Default is + //@@ false. If true, the responses will be returned in the same order as + //@@ the order of requests sent to the scheduler. If false, the responses + //@@ may be returned in arbitrary order. This option is specifically + //@@ needed when a sequence of related inference requests (i.e. inference + //@@ requests with the same correlation ID) are sent to the dynamic + //@@ batcher to ensure that the sequence responses are in the correct + //@@ order. + //@@ + bool preserve_ordering = 3; + + //@@ .. cpp:var:: uint64 priority_levels + //@@ + //@@ The number of priority levels to be enabled for the model, + //@@ the priority level starts from 1 and 1 is the highest priority. + //@@ Requests are handled in priority order with all priority 1 requests + //@@ processed before priority 2, all priority 2 requests processed before + //@@ priority 3, etc. Requests with the same priority level will be + //@@ handled in the order that they are received. + //@@ + uint64 priority_levels = 4; + + //@@ .. cpp:var:: uint64 default_priority_level + //@@ + //@@ The priority level used for requests that don't specify their + //@@ priority. The value must be in the range [ 1, 'priority_levels' ]. + //@@ + uint64 default_priority_level = 5; + + //@@ .. 
cpp:var:: ModelQueuePolicy default_queue_policy + //@@ + //@@ The default queue policy used for requests that don't require + //@@ priority handling and requests that specify priority levels where + //@@ there is no specific policy given. If not specified, a policy with + //@@ default field values will be used. + //@@ + ModelQueuePolicy default_queue_policy = 6; + + //@@ .. cpp:var:: map priority_queue_policy + //@@ + //@@ Specify the queue policy for the priority level. The default queue + //@@ policy will be used if a priority level doesn't specify a queue + //@@ policy. + //@@ + map priority_queue_policy = 7; +} + +//@@ +//@@.. cpp:var:: message ModelSequenceBatching +//@@ +//@@ Sequence batching configuration. These settings control how sequence +//@@ batching operates for the model. +//@@ +message ModelSequenceBatching +{ + //@@ .. cpp:var:: message Control + //@@ + //@@ A control is a signal that the sequence batcher uses to + //@@ communicate with a backend. + //@@ + message Control + { + //@@ + //@@ .. cpp:enum:: Kind + //@@ + //@@ The kind of the control. + //@@ + enum Kind { + //@@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_START = 0 + //@@ + //@@ A new sequence is/is-not starting. If true a sequence is + //@@ starting, if false a sequence is continuing. Must + //@@ specify either int32_false_true, fp32_false_true or + //@@ bool_false_true for this control. This control is optional. + //@@ + CONTROL_SEQUENCE_START = 0; + + //@@ .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_READY = 1 + //@@ + //@@ A sequence is/is-not ready for inference. If true the + //@@ input tensor data is valid and should be used. If false + //@@ the input tensor data is invalid and inferencing should + //@@ be "skipped". Must specify either int32_false_true, + //@@ fp32_false_true or bool_false_true for this control. This + //@@ control is optional. + //@@ + CONTROL_SEQUENCE_READY = 1; + + //@@ .. 
//@@    .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_END = 2
      //@@
      //@@       A sequence is/is-not ending. If true a sequence is
      //@@       ending, if false a sequence is continuing. Must specify
      //@@       either int32_false_true, fp32_false_true or bool_false_true
      //@@       for this control. This control is optional.
      //@@
      CONTROL_SEQUENCE_END = 2;

      //@@    .. cpp:enumerator:: Kind::CONTROL_SEQUENCE_CORRID = 3
      //@@
      //@@       The correlation ID of the sequence. The correlation ID
      //@@       is an uint64_t value that is communicated in whole or
      //@@       in part by the tensor. The tensor's datatype must be
      //@@       specified by data_type and must be TYPE_UINT64, TYPE_INT64,
      //@@       TYPE_UINT32 or TYPE_INT32. If a 32-bit datatype is specified
      //@@       the correlation ID will be truncated to the low-order 32
      //@@       bits. This control is optional.
      //@@
      CONTROL_SEQUENCE_CORRID = 3;
    }

    //@@  .. cpp:var:: Kind kind
    //@@
    //@@     The kind of this control.
    //@@
    Kind kind = 1;

    //@@  .. cpp:var:: int32 int32_false_true (repeated)
    //@@
    //@@     The control's true and false setting is indicated by setting
    //@@     a value in an int32 tensor. The tensor must be a
    //@@     1-dimensional tensor with size equal to the batch size of
    //@@     the request. 'int32_false_true' must have two entries: the
    //@@     first the false value and the second the true value.
    //@@
    repeated int32 int32_false_true = 2;

    //@@  .. cpp:var:: float fp32_false_true (repeated)
    //@@
    //@@     The control's true and false setting is indicated by setting
    //@@     a value in a fp32 tensor. The tensor must be a
    //@@     1-dimensional tensor with size equal to the batch size of
    //@@     the request. 'fp32_false_true' must have two entries: the
    //@@     first the false value and the second the true value.
    //@@
    repeated float fp32_false_true = 3;

    //@@  .. cpp:var:: bool bool_false_true (repeated)
    //@@
    //@@     The control's true and false setting is indicated by setting
    //@@     a value in a bool tensor. The tensor must be a
    //@@     1-dimensional tensor with size equal to the batch size of
    //@@     the request. 'bool_false_true' must have two entries: the
    //@@     first the false value and the second the true value.
    //@@
    repeated bool bool_false_true = 5;

    //@@  .. cpp:var:: DataType data_type
    //@@
    //@@     The control's datatype.
    //@@
    DataType data_type = 4;
  }

  //@@  .. cpp:var:: message ControlInput
  //@@
  //@@     The sequence control values to communicate by a model input.
  //@@
  message ControlInput
  {
    //@@  .. cpp:var:: string name
    //@@
    //@@     The name of the model input.
    //@@
    string name = 1;

    //@@  .. cpp:var:: Control control (repeated)
    //@@
    //@@     The control value(s) that should be communicated to the
    //@@     model using this model input.
    //@@
    repeated Control control = 2;
  }

  //@@
  //@@  .. cpp:var:: message InitialState
  //@@
  //@@     Settings used to initialize data for implicit state.
  //@@
  message InitialState
  {
    //@@  .. cpp:var:: DataType data_type
    //@@
    //@@     The data-type of the state.
    //@@
    DataType data_type = 1;

    //@@  .. cpp:var:: int64 dims (repeated)
    //@@
    //@@     The shape of the state tensor, not including the batch
    //@@     dimension.
    //@@
    repeated int64 dims = 2;

    //@@  .. cpp:var:: oneof state_data
    //@@
    //@@     Specify how the initial state data is generated.
    //@@
    oneof state_data
    {
      //@@
      //@@  .. cpp:var:: bool zero_data
      //@@
      //@@     The identifier for using zeros as initial state data.
      //@@     Note that the value of 'zero_data' will not be checked,
      //@@     instead, zero data will be used as long as the field is set.
      //@@
      bool zero_data = 3;

      //@@  .. cpp:var:: string data_file
      //@@
      //@@     The file whose content will be used as the initial data for
      //@@     the state in row-major order. The file must be provided in
      //@@     sub-directory 'initial_state' under the model directory.
      //@@
      string data_file = 4;
    }

    //@@  .. cpp:var:: string name
    //@@
    //@@     The name of the state initialization.
    //@@
    string name = 5;
  }

  //@@  .. cpp:var:: message State
  //@@
  //@@     An input / output pair of tensors that carry state for the sequence.
  //@@
  message State
  {
    //@@  .. cpp:var:: string input_name
    //@@
    //@@     The name of the model state input.
    //@@
    string input_name = 1;

    //@@  .. cpp:var:: string output_name
    //@@
    //@@     The name of the model state output.
    //@@
    string output_name = 2;

    //@@  .. cpp:var:: DataType data_type
    //@@
    //@@     The data-type of the state.
    //@@
    DataType data_type = 3;

    //@@  .. cpp:var:: int64 dims (repeated)
    //@@
    //@@     The dimension.
    //@@
    repeated int64 dims = 4;

    //@@  .. cpp:var:: InitialState initial_state (repeated)
    //@@
    //@@     The optional field to specify the initial state for the model.
    //@@
    repeated InitialState initial_state = 5;
  }

  //@@  .. cpp:var:: message StrategyDirect
  //@@
  //@@     The sequence batcher uses a specific, unique batch
  //@@     slot for each sequence. All inference requests in a
  //@@     sequence are directed to the same batch slot in the same
  //@@     model instance over the lifetime of the sequence. This
  //@@     is the default strategy.
  //@@
  message StrategyDirect
  {
    //@@  .. cpp:var:: uint64 max_queue_delay_microseconds
    //@@
    //@@     The maximum time, in microseconds, a candidate request
    //@@     will be delayed in the sequence batch scheduling queue to
    //@@     wait for additional requests for batching. Default is 0.
    //@@
    uint64 max_queue_delay_microseconds = 1;

    //@@  .. cpp:var:: float minimum_slot_utilization
    //@@
    //@@     The minimum slot utilization that must be satisfied to
    //@@     execute the batch before 'max_queue_delay_microseconds' expires.
    //@@     For example, a value of 0.5 indicates that the batch should be
    //@@     executed as soon as 50% or more of the slots are ready even if
    //@@     the 'max_queue_delay_microseconds' timeout has not expired.
    //@@     The default is 0.0, indicating that a batch will be executed
    //@@     before 'max_queue_delay_microseconds' timeout expires if at least
    //@@     one batch slot is ready. 'max_queue_delay_microseconds' will be
    //@@     ignored unless minimum_slot_utilization is set to a non-zero
    //@@     value.
    //@@
    float minimum_slot_utilization = 2;
  }

  //@@  .. cpp:var:: message StrategyOldest
  //@@
  //@@     The sequence batcher maintains up to 'max_candidate_sequences'
  //@@     candidate sequences. 'max_candidate_sequences' can be greater
  //@@     than the model's 'max_batch_size'. For inferencing the batcher
  //@@     chooses from the candidate sequences up to 'max_batch_size'
  //@@     inference requests. Requests are chosen in an oldest-first
  //@@     manner across all candidate sequences. A given sequence is
  //@@     not guaranteed to be assigned to the same batch slot for
  //@@     all inference requests of that sequence.
  //@@
  message StrategyOldest
  {
    //@@  .. cpp:var:: int32 max_candidate_sequences
    //@@
    //@@     Maximum number of candidate sequences that the batcher
    //@@     maintains. Excess sequences are kept in an ordered backlog
    //@@     and become candidates when existing candidate sequences
    //@@     complete.
    //@@
    int32 max_candidate_sequences = 1;

    //@@  .. cpp:var:: int32 preferred_batch_size (repeated)
    //@@
    //@@     Preferred batch sizes for dynamic batching of candidate
    //@@     sequences. If a batch of one of these sizes can be formed
    //@@     it will be executed immediately. If not specified a
    //@@     preferred batch size will be chosen automatically
    //@@     based on model and GPU characteristics.
    //@@
    repeated int32 preferred_batch_size = 2;

    //@@  .. cpp:var:: uint64 max_queue_delay_microseconds
    //@@
    //@@     The maximum time, in microseconds, a candidate request
    //@@     will be delayed in the dynamic batch scheduling queue to
    //@@     wait for additional requests for batching. Default is 0.
    //@@
    uint64 max_queue_delay_microseconds = 3;
  }

  //@@  .. cpp:var:: oneof strategy_choice
  //@@
  //@@     The strategy used by the sequence batcher. Default strategy
  //@@     is 'direct'.
  //@@
  oneof strategy_choice
  {
    //@@  .. cpp:var:: StrategyDirect direct
    //@@
    //@@     StrategyDirect scheduling strategy.
    //@@
    StrategyDirect direct = 3;

    //@@  .. cpp:var:: StrategyOldest oldest
    //@@
    //@@     StrategyOldest scheduling strategy.
    //@@
    StrategyOldest oldest = 4;
  }

  //@@  .. cpp:var:: uint64 max_sequence_idle_microseconds
  //@@
  //@@     The maximum time, in microseconds, that a sequence is allowed to
  //@@     be idle before it is aborted. The inference server considers a
  //@@     sequence idle when it does not have any inference request queued
  //@@     for the sequence. If this limit is exceeded, the inference server
  //@@     will free the sequence slot allocated by the sequence and make it
  //@@     available for another sequence. If not specified (or specified as
  //@@     zero) a default value of 1000000 (1 second) is used.
  //@@
  uint64 max_sequence_idle_microseconds = 1;

  //@@  .. cpp:var:: ControlInput control_input (repeated)
  //@@
  //@@     The model input(s) that the server should use to communicate
  //@@     sequence start, stop, ready and similar control values to the
  //@@     model.
  //@@
  repeated ControlInput control_input = 2;

  //@@  .. cpp:var:: State state (repeated)
  //@@
  //@@     The optional state that can be stored in Triton for performing
  //@@     inference requests on a sequence. Each sequence holds an implicit
  //@@     state local to itself. The output state tensor provided by the
  //@@     model in 'output_name' field of the current inference request will
  //@@     be transferred as an input tensor named 'input_name' in the next
  //@@     request of the same sequence. The input state of the first request
  //@@     in the sequence contains garbage data.
  //@@
  repeated State state = 5;
}

//@@
//@@.. cpp:var:: message ModelEnsembling
//@@
//@@   Model ensembling configuration.
//@@   These settings specify the models that
//@@   compose the ensemble and how data flows between the models.
//@@
message ModelEnsembling
{
  //@@  .. cpp:var:: message Step
  //@@
  //@@     Each step specifies a model included in the ensemble,
  //@@     maps ensemble tensor names to the model input tensors,
  //@@     and maps model output tensors to ensemble tensor names
  //@@
  message Step
  {
    //@@  .. cpp:var:: string model_name
    //@@
    //@@     The name of the model to execute for this step of the ensemble.
    //@@
    string model_name = 1;

    //@@  .. cpp:var:: int64 model_version
    //@@
    //@@     The version of the model to use for inference. If -1
    //@@     the latest/most-recent version of the model is used.
    //@@
    int64 model_version = 2;

    //@@  .. cpp:var:: map<string,string> input_map
    //@@
    //@@     Map from name of an input tensor on this step's model to ensemble
    //@@     tensor name. The ensemble tensor must have the same data type and
    //@@     shape as the model input. Each model input must be assigned to
    //@@     one ensemble tensor, but the same ensemble tensor can be assigned
    //@@     to multiple model inputs.
    //@@
    map<string, string> input_map = 3;

    //@@  .. cpp:var:: map<string,string> output_map
    //@@
    //@@     Map from name of an output tensor on this step's model to ensemble
    //@@     tensor name. The data type and shape of the ensemble tensor will
    //@@     be inferred from the model output. It is optional to assign all
    //@@     model outputs to ensemble tensors. One ensemble tensor name
    //@@     can appear in an output map only once.
    //@@
    map<string, string> output_map = 4;

    //@@  .. cpp:var:: string model_namespace
    //@@
    //@@     [RESERVED] currently this field is reserved for internal use, users
    //@@     must not set any value to this field to avoid unexpected behavior.
    //@@
    string model_namespace = 5;
  }

  //@@  .. cpp:var:: Step step (repeated)
  //@@
  //@@     The models and the input / output mappings used within the ensemble.
  //@@
  repeated Step step = 1;
}

//@@
//@@..
//@@.. cpp:var:: message ModelParameter
//@@
//@@   A model parameter.
//@@
message ModelParameter
{
  //@@  .. cpp:var:: string string_value
  //@@
  //@@     The string value of the parameter.
  //@@
  string string_value = 1;
}

//@@
//@@.. cpp:var:: message ModelWarmup
//@@
//@@   Settings used to construct the request sample for model warmup.
//@@
message ModelWarmup
{
  //@@
  //@@  .. cpp:var:: message Input
  //@@
  //@@     Meta data associated with an input.
  //@@
  message Input
  {
    //@@  .. cpp:var:: DataType data_type
    //@@
    //@@     The data-type of the input.
    //@@
    DataType data_type = 1;

    //@@  .. cpp:var:: int64 dims (repeated)
    //@@
    //@@     The shape of the input tensor, not including the batch dimension.
    //@@
    repeated int64 dims = 2;

    //@@  .. cpp:var:: oneof input_data_type
    //@@
    //@@     Specify how the input data is generated. If the input has STRING
    //@@     data type and 'random_data' is set, the data generation will fall
    //@@     back to 'zero_data'.
    //@@
    oneof input_data_type
    {
      //@@
      //@@  .. cpp:var:: bool zero_data
      //@@
      //@@     The identifier for using zeros as input data. Note that the
      //@@     value of 'zero_data' will not be checked, instead, zero data
      //@@     will be used as long as the field is set.
      //@@
      bool zero_data = 3;

      //@@
      //@@  .. cpp:var:: bool random_data
      //@@
      //@@     The identifier for using random data as input data. Note that
      //@@     the value of 'random_data' will not be checked, instead,
      //@@     random data will be used as long as the field is set.
      //@@
      bool random_data = 4;

      //@@  .. cpp:var:: string input_data_file
      //@@
      //@@     The file whose content will be used as raw input data in
      //@@     row-major order. The file must be provided in a sub-directory
      //@@     'warmup' under the model directory. The file contents should be
      //@@     in binary format. For TYPE_STRING data-type, an element is
      //@@     represented by a 4-byte unsigned integer giving the length
      //@@     followed by the actual bytes.
      //@@
      string input_data_file = 5;
    }
  }

  //@@  .. cpp:var:: string name
  //@@
  //@@     The name of the request sample.
  //@@
  string name = 1;

  //@@  .. cpp:var:: uint32 batch_size
  //@@
  //@@     The batch size of the inference request. This must be >= 1. For
  //@@     models that don't support batching, batch_size must be 1. If
  //@@     batch_size > 1, the 'inputs' specified below will be duplicated to
  //@@     match the batch size requested.
  //@@
  uint32 batch_size = 2;

  //@@  .. cpp:var:: map<string, Input> inputs
  //@@
  //@@     The warmup meta data associated with every model input, including
  //@@     control tensors.
  //@@
  map<string, Input> inputs = 3;

  //@@  .. cpp:var:: uint32 count
  //@@
  //@@     The number of iterations that this warmup sample will be executed.
  //@@     For example, if this field is set to 2, 2 model executions using this
  //@@     sample will be scheduled for warmup. Default value is 0 which
  //@@     indicates that this sample will be used only once.
  //@@     Note that for sequence model, 'count' may not work well
  //@@     because the model often expect a valid sequence of requests which
  //@@     should be represented by a series of warmup samples. 'count > 1'
  //@@     essentially "resends" one of the sample, which may invalidate the
  //@@     sequence and result in unexpected warmup failure.
  //@@
  uint32 count = 4;
}

//@@
//@@ .. cpp:var:: message ModelOperations
//@@
//@@    The metadata of libraries providing custom operations for this model.
//@@
message ModelOperations
{
  //@@  .. cpp:var:: string op_library_filename (repeated)
  //@@
  //@@     Optional paths of the libraries providing custom operations for
  //@@     this model. Valid only for ONNX models.
  //@@
  repeated string op_library_filename = 1;
}

//@@
//@@ .. cpp:var:: message ModelTransactionPolicy
//@@
//@@    The specification that describes the nature of transactions
//@@    to be expected from the model.
//@@
message ModelTransactionPolicy
{
  //@@  ..
//@@  .. cpp:var:: bool decoupled
  //@@
  //@@     Indicates whether responses generated by the model are decoupled with
  //@@     the requests issued to it, which means the number of responses
  //@@     generated by model may differ from number of requests issued, and
  //@@     that the responses may be out of order relative to the order of
  //@@     requests. The default is false, which means the model will generate
  //@@     exactly one response for each request.
  //@@
  bool decoupled = 1;
}

//@@
//@@.. cpp:var:: message ModelRepositoryAgents
//@@
//@@   The repository agents for the model.
//@@
message ModelRepositoryAgents
{
  //@@
  //@@  .. cpp:var:: message Agent
  //@@
  //@@     A repository agent that should be invoked for the specified
  //@@     repository actions for this model.
  //@@
  message Agent
  {
    //@@  .. cpp:var:: string name
    //@@
    //@@     The name of the agent.
    //@@
    string name = 1;

    //@@  .. cpp:var:: map<string,string> parameters
    //@@
    //@@     The parameters for the agent.
    //@@
    map<string, string> parameters = 2;
  }

  //@@
  //@@  .. cpp:var:: Agent agents (repeated)
  //@@
  //@@     The ordered list of agents for the model. These agents will be
  //@@     invoked in order to respond to repository actions occurring for the
  //@@     model.
  //@@
  repeated Agent agents = 1;
}

//@@
//@@.. cpp:var:: message ModelResponseCache
//@@
//@@   The response cache setting for the model.
//@@
message ModelResponseCache
{
  //@@
  //@@  .. cpp:var:: bool enable
  //@@
  //@@     Whether or not to use response cache for the model. If True, the
  //@@     responses from the model are cached and when identical request
  //@@     is encountered, instead of going through the model execution,
  //@@     the response from the cache is utilized. By default, response
  //@@     cache is disabled for the models.
  //@@
  bool enable = 1;
}

//@@
//@@.. cpp:var:: message ModelConfig
//@@
//@@   A model configuration.
//@@
message ModelConfig
{
  //@@  .. cpp:var:: string name
  //@@
  //@@     The name of the model.
//@@
  string name = 1;

  //@@  .. cpp:var:: string platform
  //@@
  //@@     Additional backend-specific configuration for the model.
  //@@     Please refer to the backend documentation on whether this field
  //@@     should be specified.
  //@@
  string platform = 2;

  //@@  .. cpp:var:: string backend
  //@@
  //@@     The backend used by the model.
  //@@
  string backend = 17;

  //@@  .. cpp:var:: ModelVersionPolicy version_policy
  //@@
  //@@     Policy indicating which version(s) of the model will be served.
  //@@
  ModelVersionPolicy version_policy = 3;

  //@@  .. cpp:var:: int32 max_batch_size
  //@@
  //@@     Maximum batch size allowed for inference. This can only decrease
  //@@     what is allowed by the model itself. A max_batch_size value of 0
  //@@     indicates that batching is not allowed for the model and the
  //@@     dimension/shape of the input and output tensors must exactly
  //@@     match what is specified in the input and output configuration. A
  //@@     max_batch_size value > 0 indicates that batching is allowed and
  //@@     so the model expects the input tensors to have an additional
  //@@     initial dimension for the batching that is not specified in the
  //@@     input (for example, if the model supports batched inputs of
  //@@     2-dimensional tensors then the model configuration will specify
  //@@     the input shape as [ X, Y ] but the model will expect the actual
  //@@     input tensors to have shape [ N, X, Y ]). For max_batch_size > 0
  //@@     returned outputs will also have an additional initial dimension
  //@@     for the batch.
  //@@
  int32 max_batch_size = 4;

  //@@  .. cpp:var:: ModelInput input (repeated)
  //@@
  //@@     The inputs request by the model.
  //@@
  repeated ModelInput input = 5;

  //@@  .. cpp:var:: ModelOutput output (repeated)
  //@@
  //@@     The outputs produced by the model.
  //@@
  repeated ModelOutput output = 6;

  //@@  .. cpp:var:: BatchInput batch_input (repeated)
  //@@
  //@@     The model input(s) that the server should use to communicate
  //@@     batch related values to the model.
  //@@
  repeated BatchInput batch_input = 20;

  //@@  .. cpp:var:: BatchOutput batch_output (repeated)
  //@@
  //@@     The outputs produced by the model that requires special handling
  //@@     by the model backend.
  //@@
  repeated BatchOutput batch_output = 21;

  //@@  .. cpp:var:: ModelOptimizationPolicy optimization
  //@@
  //@@     Optimization configuration for the model. If not specified
  //@@     then default optimization policy is used.
  //@@
  ModelOptimizationPolicy optimization = 12;

  //@@  .. cpp:var:: oneof scheduling_choice
  //@@
  //@@     The scheduling policy for the model. If not specified the
  //@@     default scheduling policy is used for the model. The default
  //@@     policy is to execute each inference request independently.
  //@@
  oneof scheduling_choice
  {
    //@@  .. cpp:var:: ModelDynamicBatching dynamic_batching
    //@@
    //@@     If specified, enables the dynamic-batching scheduling
    //@@     policy. With dynamic-batching the scheduler may group
    //@@     together independent requests into a single batch to
    //@@     improve inference throughput.
    //@@
    ModelDynamicBatching dynamic_batching = 11;

    //@@  .. cpp:var:: ModelSequenceBatching sequence_batching
    //@@
    //@@     If specified, enables the sequence-batching scheduling
    //@@     policy. With sequence-batching, inference requests
    //@@     with the same correlation ID are routed to the same
    //@@     model instance. Multiple sequences of inference requests
    //@@     may be batched together into a single batch to
    //@@     improve inference throughput.
    //@@
    ModelSequenceBatching sequence_batching = 13;

    //@@  .. cpp:var:: ModelEnsembling ensemble_scheduling
    //@@
    //@@     If specified, enables the model-ensembling scheduling
    //@@     policy. With model-ensembling, inference requests
    //@@     will be processed according to the specification, such as an
    //@@     execution sequence of models. The input specified in this model
    //@@     config will be the input for the ensemble, and the output
    //@@     specified will be the output of the ensemble.
    //@@
    ModelEnsembling ensemble_scheduling = 15;
  }

  //@@  .. cpp:var:: ModelInstanceGroup instance_group (repeated)
  //@@
  //@@     Instances of this model. If not specified, one instance
  //@@     of the model will be instantiated on each available GPU.
  //@@
  repeated ModelInstanceGroup instance_group = 7;

  //@@  .. cpp:var:: string default_model_filename
  //@@
  //@@     Optional filename of the model file to use if a
  //@@     compute-capability specific model is not specified in
  //@@     :cpp:var:`cc_model_filenames`. If not specified the default name
  //@@     is 'model.graphdef', 'model.savedmodel', 'model.plan' or
  //@@     'model.pt' depending on the model type.
  //@@
  string default_model_filename = 8;

  //@@  .. cpp:var:: map<string,string> cc_model_filenames
  //@@
  //@@     Optional map from CUDA compute capability to the filename of
  //@@     the model that supports that compute capability. The filename
  //@@     refers to a file within the model version directory.
  //@@
  map<string, string> cc_model_filenames = 9;

  //@@  .. cpp:var:: map<string,string> metric_tags
  //@@
  //@@     Optional metric tags. User-specific key-value pairs for metrics
  //@@     reported for this model. These tags are applied to the metrics
  //@@     reported on the HTTP metrics port.
  //@@
  map<string, string> metric_tags = 10;

  //@@  .. cpp:var:: map<string,ModelParameter> parameters
  //@@
  //@@     Optional model parameters. User-specified parameter values.
  //@@
  map<string, ModelParameter> parameters = 14;

  //@@  .. cpp:var:: ModelWarmup model_warmup (repeated)
  //@@
  //@@     Warmup setting of this model. If specified, all instances
  //@@     will be run with the request samples in sequence before
  //@@     serving the model.
  //@@     This field can only be specified if the model is not an ensemble
  //@@     model.
  //@@
  repeated ModelWarmup model_warmup = 16;

  //@@  .. cpp:var:: ModelOperations model_operations
  //@@
  //@@     Optional metadata of the libraries providing custom operations for
  //@@     this model.
  //@@
  ModelOperations model_operations = 18;

  //@@  .. cpp:var:: ModelTransactionPolicy model_transaction_policy
  //@@
  //@@     Optional specification that describes the nature of transactions
  //@@     to be expected from the model.
  //@@
  ModelTransactionPolicy model_transaction_policy = 19;

  //@@  .. cpp:var:: ModelRepositoryAgents model_repository_agents
  //@@
  //@@     Optional specification of the agent(s) that should be invoked
  //@@     with repository actions are performed for this model.
  //@@
  ModelRepositoryAgents model_repository_agents = 23;

  //@@  .. cpp:var:: ModelResponseCache response_cache
  //@@
  //@@     Optional setting for utilizing the response cache for this
  //@@     model.
  //@@
  ModelResponseCache response_cache = 24;
}