// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/protobuf/remote_tensor_handle.proto

#include "tensorflow/core/protobuf/remote_tensor_handle.pb.h"

#include <algorithm>
#include <cstdint>

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/extension_set.h>
#include <google/protobuf/wire_format_lite.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>

PROTOBUF_PRAGMA_INIT_SEG

namespace _pb = ::PROTOBUF_NAMESPACE_ID;
namespace _pbi = _pb::internal;

namespace tensorflow {
namespace eager {
PROTOBUF_CONSTEXPR ResourceDtypeAndShape::ResourceDtypeAndShape(
    ::_pbi::ConstantInitialized): _impl_{
    /*decltype(_impl_.shape_)*/nullptr
  , /*decltype(_impl_.dtype_)*/0
  , /*decltype(_impl_._cached_size_)*/{}} {}
struct ResourceDtypeAndShapeDefaultTypeInternal {
  PROTOBUF_CONSTEXPR ResourceDtypeAndShapeDefaultTypeInternal()
      : _instance(::_pbi::ConstantInitialized{}) {}
  ~ResourceDtypeAndShapeDefaultTypeInternal() {}
  union {  // NOLINT(misc-non-private-member-variables-in-classes)
    ResourceDtypeAndShape _instance;
  };
};
PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 ResourceDtypeAndShapeDefaultTypeInternal _ResourceDtypeAndShape_default_instance_;
PROTOBUF_CONSTEXPR RemoteTensorHandle::RemoteTensorHandle(
    ::_pbi::ConstantInitialized): _impl_{
    /*decltype(_impl_.resource_dtypes_and_shapes_)*/{}
  , /*decltype(_impl_.device_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
  , /*decltype(_impl_.op_device_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
  , /*decltype(_impl_.op_id_)*/::int64_t{0}
  , /*decltype(_impl_.output_num_)*/0
  , /*decltype(_impl_.dtype_)*/0
  , /*decltype(_impl_._cached_size_)*/{}} {}
struct RemoteTensorHandleDefaultTypeInternal {
  PROTOBUF_CONSTEXPR RemoteTensorHandleDefaultTypeInternal()
      : _instance(::_pbi::ConstantInitialized{}) {}
  ~RemoteTensorHandleDefaultTypeInternal() {}
  union {  // NOLINT(misc-non-private-member-variables-in-classes)
    RemoteTensorHandle _instance;
  };
};
PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 RemoteTensorHandleDefaultTypeInternal _RemoteTensorHandle_default_instance_;
}  // namespace eager
}  // namespace tensorflow
namespace tensorflow {
namespace eager {

// ===================================================================

class ResourceDtypeAndShape::_Internal {
 public:
  static const ::tensorflow::TensorShapeProto& shape(const ResourceDtypeAndShape* msg);
};

const ::tensorflow::TensorShapeProto&
ResourceDtypeAndShape::_Internal::shape(const ResourceDtypeAndShape* msg) {
  return *msg->_impl_.shape_;
}
void ResourceDtypeAndShape::clear_shape() {
  if (GetArenaForAllocation() == nullptr && _impl_.shape_ != nullptr) {
    delete _impl_.shape_;
  }
  _impl_.shape_ = nullptr;
}
ResourceDtypeAndShape::ResourceDtypeAndShape(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                         bool is_message_owned)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
  SharedCtor(arena, is_message_owned);
  // @@protoc_insertion_point(arena_constructor:tensorflow.eager.ResourceDtypeAndShape)
}
ResourceDtypeAndShape::ResourceDtypeAndShape(const ResourceDtypeAndShape& from)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
  ResourceDtypeAndShape* const _this = this; (void)_this;
  new (&_impl_) Impl_{
      decltype(_impl_.shape_){nullptr}
    , decltype(_impl_.dtype_){}
    , /*decltype(_impl_._cached_size_)*/{}};

  _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
  if (from._internal_has_shape()) {
    _this->_impl_.shape_ = new ::tensorflow::TensorShapeProto(*from._impl_.shape_);
  }
  _this->_impl_.dtype_ = from._impl_.dtype_;
  // @@protoc_insertion_point(copy_constructor:tensorflow.eager.ResourceDtypeAndShape)
}

inline void ResourceDtypeAndShape::SharedCtor(
    ::_pb::Arena* arena, bool is_message_owned) {
  (void)arena;
  (void)is_message_owned;
  new (&_impl_) Impl_{
      decltype(_impl_.shape_){nullptr}
    , decltype(_impl_.dtype_){0}
    , /*decltype(_impl_._cached_size_)*/{}
  };
}

ResourceDtypeAndShape::~ResourceDtypeAndShape() {
  // @@protoc_insertion_point(destructor:tensorflow.eager.ResourceDtypeAndShape)
  if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
  (void)arena;
    return;
  }
  SharedDtor();
}

inline void ResourceDtypeAndShape::SharedDtor() {
  GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
  if (this != internal_default_instance()) delete _impl_.shape_;
}

void ResourceDtypeAndShape::SetCachedSize(int size) const {
  _impl_._cached_size_.Set(size);
}

void ResourceDtypeAndShape::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.eager.ResourceDtypeAndShape)
  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  if (GetArenaForAllocation() == nullptr && _impl_.shape_ != nullptr) {
    delete _impl_.shape_;
  }
  _impl_.shape_ = nullptr;
  _impl_.dtype_ = 0;
  _internal_metadata_.Clear<std::string>();
}

const char* ResourceDtypeAndShape::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
  while (!ctx->Done(&ptr)) {
    ::uint32_t tag;
    ptr = ::_pbi::ReadTag(ptr, &tag);
    switch (tag >> 3) {
      // .tensorflow.DataType dtype = 1;
      case 1:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
          ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
          _internal_set_dtype(static_cast<::tensorflow::DataType>(val));
        } else {
          goto handle_unusual;
        }
        continue;
      // .tensorflow.TensorShapeProto shape = 2;
      case 2:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 18)) {
          ptr = ctx->ParseMessage(_internal_mutable_shape(), ptr);
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      default:
        goto handle_unusual;
    }  // switch
  handle_unusual:
    if ((tag == 0) || ((tag & 7) == 4)) {
      CHK_(ptr);
      ctx->SetLastTag(tag);
      goto message_done;
    }
    ptr = UnknownFieldParse(
        tag,
        _internal_metadata_.mutable_unknown_fields<std::string>(),
        ptr, ctx);
    CHK_(ptr != nullptr);
  }  // while
message_done:
  return ptr;
failure:
  ptr = nullptr;
  goto message_done;
#undef CHK_
}

::uint8_t* ResourceDtypeAndShape::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:tensorflow.eager.ResourceDtypeAndShape)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // .tensorflow.DataType dtype = 1;
  if (this->_internal_dtype() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteEnumToArray(
      1, this->_internal_dtype(), target);
  }

  // .tensorflow.TensorShapeProto shape = 2;
  if (this->_internal_has_shape()) {
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
      InternalWriteMessage(2, _Internal::shape(this),
        _Internal::shape(this).GetCachedSize(), target, stream);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:tensorflow.eager.ResourceDtypeAndShape)
  return target;
}

size_t ResourceDtypeAndShape::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.eager.ResourceDtypeAndShape)
  size_t total_size = 0;

  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  // .tensorflow.TensorShapeProto shape = 2;
  if (this->_internal_has_shape()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(
        *_impl_.shape_);
  }

  // .tensorflow.DataType dtype = 1;
  if (this->_internal_dtype() != 0) {
    total_size += 1 +
      ::_pbi::WireFormatLite::EnumSize(this->_internal_dtype());
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}

void ResourceDtypeAndShape::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const ResourceDtypeAndShape*>(
      &from));
}

void ResourceDtypeAndShape::MergeFrom(const ResourceDtypeAndShape& from) {
  ResourceDtypeAndShape* const _this = this;
  // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.eager.ResourceDtypeAndShape)
  GOOGLE_DCHECK_NE(&from, _this);
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  if (from._internal_has_shape()) {
    _this->_internal_mutable_shape()->::tensorflow::TensorShapeProto::MergeFrom(
        from._internal_shape());
  }
  if (from._internal_dtype() != 0) {
    _this->_internal_set_dtype(from._internal_dtype());
  }
  _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
}

void ResourceDtypeAndShape::CopyFrom(const ResourceDtypeAndShape& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.eager.ResourceDtypeAndShape)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

bool ResourceDtypeAndShape::IsInitialized() const {
  return true;
}

void ResourceDtypeAndShape::InternalSwap(ResourceDtypeAndShape* other) {
  using std::swap;
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  ::PROTOBUF_NAMESPACE_ID::internal::memswap<
      PROTOBUF_FIELD_OFFSET(ResourceDtypeAndShape, _impl_.dtype_)
      + sizeof(ResourceDtypeAndShape::_impl_.dtype_)  // NOLINT
      - PROTOBUF_FIELD_OFFSET(ResourceDtypeAndShape, _impl_.shape_)>(
          reinterpret_cast<char*>(&_impl_.shape_),
          reinterpret_cast<char*>(&other->_impl_.shape_));
}

std::string ResourceDtypeAndShape::GetTypeName() const {
  return "tensorflow.eager.ResourceDtypeAndShape";
}


// ===================================================================

class RemoteTensorHandle::_Internal {
 public:
};

RemoteTensorHandle::RemoteTensorHandle(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                         bool is_message_owned)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
  SharedCtor(arena, is_message_owned);
  // @@protoc_insertion_point(arena_constructor:tensorflow.eager.RemoteTensorHandle)
}
RemoteTensorHandle::RemoteTensorHandle(const RemoteTensorHandle& from)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
  RemoteTensorHandle* const _this = this; (void)_this;
  new (&_impl_) Impl_{
      decltype(_impl_.resource_dtypes_and_shapes_){from._impl_.resource_dtypes_and_shapes_}
    , decltype(_impl_.device_){}
    , decltype(_impl_.op_device_){}
    , decltype(_impl_.op_id_){}
    , decltype(_impl_.output_num_){}
    , decltype(_impl_.dtype_){}
    , /*decltype(_impl_._cached_size_)*/{}};

  _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
  _impl_.device_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.device_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (!from._internal_device().empty()) {
    _this->_impl_.device_.Set(from._internal_device(),
      _this->GetArenaForAllocation());
  }
  _impl_.op_device_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.op_device_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (!from._internal_op_device().empty()) {
    _this->_impl_.op_device_.Set(from._internal_op_device(),
      _this->GetArenaForAllocation());
  }
  ::memcpy(&_impl_.op_id_, &from._impl_.op_id_,
    static_cast<size_t>(reinterpret_cast<char*>(&_impl_.dtype_) -
    reinterpret_cast<char*>(&_impl_.op_id_)) + sizeof(_impl_.dtype_));
  // @@protoc_insertion_point(copy_constructor:tensorflow.eager.RemoteTensorHandle)
}

inline void RemoteTensorHandle::SharedCtor(
    ::_pb::Arena* arena, bool is_message_owned) {
  (void)arena;
  (void)is_message_owned;
  new (&_impl_) Impl_{
      decltype(_impl_.resource_dtypes_and_shapes_){arena}
    , decltype(_impl_.device_){}
    , decltype(_impl_.op_device_){}
    , decltype(_impl_.op_id_){::int64_t{0}}
    , decltype(_impl_.output_num_){0}
    , decltype(_impl_.dtype_){0}
    , /*decltype(_impl_._cached_size_)*/{}
  };
  _impl_.device_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.device_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  _impl_.op_device_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.op_device_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
}

RemoteTensorHandle::~RemoteTensorHandle() {
  // @@protoc_insertion_point(destructor:tensorflow.eager.RemoteTensorHandle)
  if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
  (void)arena;
    return;
  }
  SharedDtor();
}

inline void RemoteTensorHandle::SharedDtor() {
  GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
  _impl_.resource_dtypes_and_shapes_.~RepeatedPtrField();
  _impl_.device_.Destroy();
  _impl_.op_device_.Destroy();
}

void RemoteTensorHandle::SetCachedSize(int size) const {
  _impl_._cached_size_.Set(size);
}

void RemoteTensorHandle::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.eager.RemoteTensorHandle)
  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  _impl_.resource_dtypes_and_shapes_.Clear();
  _impl_.device_.ClearToEmpty();
  _impl_.op_device_.ClearToEmpty();
  ::memset(&_impl_.op_id_, 0, static_cast<size_t>(
      reinterpret_cast<char*>(&_impl_.dtype_) -
      reinterpret_cast<char*>(&_impl_.op_id_)) + sizeof(_impl_.dtype_));
  _internal_metadata_.Clear<std::string>();
}

const char* RemoteTensorHandle::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
  while (!ctx->Done(&ptr)) {
    ::uint32_t tag;
    ptr = ::_pbi::ReadTag(ptr, &tag);
    switch (tag >> 3) {
      // int64 op_id = 1;
      case 1:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
          _impl_.op_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // int32 output_num = 2;
      case 2:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
          _impl_.output_num_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint32(&ptr);
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // string device = 3;
      case 3:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 26)) {
          auto str = _internal_mutable_device();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // string op_device = 4;
      case 4:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 34)) {
          auto str = _internal_mutable_op_device();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // .tensorflow.DataType dtype = 5;
      case 5:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 40)) {
          ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
          _internal_set_dtype(static_cast<::tensorflow::DataType>(val));
        } else {
          goto handle_unusual;
        }
        continue;
      // repeated .tensorflow.eager.ResourceDtypeAndShape resource_dtypes_and_shapes = 6;
      case 6:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 50)) {
          ptr -= 1;
          do {
            ptr += 1;
            ptr = ctx->ParseMessage(_internal_add_resource_dtypes_and_shapes(), ptr);
            CHK_(ptr);
            if (!ctx->DataAvailable(ptr)) break;
          } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<50>(ptr));
        } else {
          goto handle_unusual;
        }
        continue;
      default:
        goto handle_unusual;
    }  // switch
  handle_unusual:
    if ((tag == 0) || ((tag & 7) == 4)) {
      CHK_(ptr);
      ctx->SetLastTag(tag);
      goto message_done;
    }
    ptr = UnknownFieldParse(
        tag,
        _internal_metadata_.mutable_unknown_fields<std::string>(),
        ptr, ctx);
    CHK_(ptr != nullptr);
  }  // while
message_done:
  return ptr;
failure:
  ptr = nullptr;
  goto message_done;
#undef CHK_
}

::uint8_t* RemoteTensorHandle::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:tensorflow.eager.RemoteTensorHandle)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // int64 op_id = 1;
  if (this->_internal_op_id() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteInt64ToArray(1, this->_internal_op_id(), target);
  }

  // int32 output_num = 2;
  if (this->_internal_output_num() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteInt32ToArray(2, this->_internal_output_num(), target);
  }

  // string device = 3;
  if (!this->_internal_device().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      this->_internal_device().data(), static_cast<int>(this->_internal_device().length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.eager.RemoteTensorHandle.device");
    target = stream->WriteStringMaybeAliased(
        3, this->_internal_device(), target);
  }

  // string op_device = 4;
  if (!this->_internal_op_device().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      this->_internal_op_device().data(), static_cast<int>(this->_internal_op_device().length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.eager.RemoteTensorHandle.op_device");
    target = stream->WriteStringMaybeAliased(
        4, this->_internal_op_device(), target);
  }

  // .tensorflow.DataType dtype = 5;
  if (this->_internal_dtype() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteEnumToArray(
      5, this->_internal_dtype(), target);
  }

  // repeated .tensorflow.eager.ResourceDtypeAndShape resource_dtypes_and_shapes = 6;
  for (unsigned i = 0,
      n = static_cast<unsigned>(this->_internal_resource_dtypes_and_shapes_size()); i < n; i++) {
    const auto& repfield = this->_internal_resource_dtypes_and_shapes(i);
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
        InternalWriteMessage(6, repfield, repfield.GetCachedSize(), target, stream);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:tensorflow.eager.RemoteTensorHandle)
  return target;
}

size_t RemoteTensorHandle::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.eager.RemoteTensorHandle)
  size_t total_size = 0;

  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  // repeated .tensorflow.eager.ResourceDtypeAndShape resource_dtypes_and_shapes = 6;
  total_size += 1UL * this->_internal_resource_dtypes_and_shapes_size();
  for (const auto& msg : this->_impl_.resource_dtypes_and_shapes_) {
    total_size +=
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(msg);
  }

  // string device = 3;
  if (!this->_internal_device().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_device());
  }

  // string op_device = 4;
  if (!this->_internal_op_device().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_op_device());
  }

  // int64 op_id = 1;
  if (this->_internal_op_id() != 0) {
    total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_op_id());
  }

  // int32 output_num = 2;
  if (this->_internal_output_num() != 0) {
    total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(this->_internal_output_num());
  }

  // .tensorflow.DataType dtype = 5;
  if (this->_internal_dtype() != 0) {
    total_size += 1 +
      ::_pbi::WireFormatLite::EnumSize(this->_internal_dtype());
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}

void RemoteTensorHandle::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const RemoteTensorHandle*>(
      &from));
}

void RemoteTensorHandle::MergeFrom(const RemoteTensorHandle& from) {
  RemoteTensorHandle* const _this = this;
  // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.eager.RemoteTensorHandle)
  GOOGLE_DCHECK_NE(&from, _this);
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  _this->_impl_.resource_dtypes_and_shapes_.MergeFrom(from._impl_.resource_dtypes_and_shapes_);
  if (!from._internal_device().empty()) {
    _this->_internal_set_device(from._internal_device());
  }
  if (!from._internal_op_device().empty()) {
    _this->_internal_set_op_device(from._internal_op_device());
  }
  if (from._internal_op_id() != 0) {
    _this->_internal_set_op_id(from._internal_op_id());
  }
  if (from._internal_output_num() != 0) {
    _this->_internal_set_output_num(from._internal_output_num());
  }
  if (from._internal_dtype() != 0) {
    _this->_internal_set_dtype(from._internal_dtype());
  }
  _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
}

void RemoteTensorHandle::CopyFrom(const RemoteTensorHandle& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.eager.RemoteTensorHandle)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

bool RemoteTensorHandle::IsInitialized() const {
  return true;
}

void RemoteTensorHandle::InternalSwap(RemoteTensorHandle* other) {
  using std::swap;
  auto* lhs_arena = GetArenaForAllocation();
  auto* rhs_arena = other->GetArenaForAllocation();
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  _impl_.resource_dtypes_and_shapes_.InternalSwap(&other->_impl_.resource_dtypes_and_shapes_);
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.device_, lhs_arena,
      &other->_impl_.device_, rhs_arena
  );
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.op_device_, lhs_arena,
      &other->_impl_.op_device_, rhs_arena
  );
  ::PROTOBUF_NAMESPACE_ID::internal::memswap<
      PROTOBUF_FIELD_OFFSET(RemoteTensorHandle, _impl_.dtype_)
      + sizeof(RemoteTensorHandle::_impl_.dtype_)  // NOLINT
      - PROTOBUF_FIELD_OFFSET(RemoteTensorHandle, _impl_.op_id_)>(
          reinterpret_cast<char*>(&_impl_.op_id_),
          reinterpret_cast<char*>(&other->_impl_.op_id_));
}

std::string RemoteTensorHandle::GetTypeName() const {
  return "tensorflow.eager.RemoteTensorHandle";
}


// @@protoc_insertion_point(namespace_scope)
}  // namespace eager
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> PROTOBUF_NOINLINE ::tensorflow::eager::ResourceDtypeAndShape*
Arena::CreateMaybeMessage< ::tensorflow::eager::ResourceDtypeAndShape >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::eager::ResourceDtypeAndShape >(arena);
}
template<> PROTOBUF_NOINLINE ::tensorflow::eager::RemoteTensorHandle*
Arena::CreateMaybeMessage< ::tensorflow::eager::RemoteTensorHandle >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::eager::RemoteTensorHandle >(arena);
}
PROTOBUF_NAMESPACE_CLOSE

// @@protoc_insertion_point(global_scope)
#include <google/protobuf/port_undef.inc>
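
// ---------------------------------------------------------------------------
// Editorial usage sketch (not emitted by protoc; kept in comments so the
// generated translation unit is unchanged). It shows how a RemoteTensorHandle
// built on this lite runtime might be populated and round-tripped. The
// accessors referenced below (set_op_id, set_device, set_dtype,
// add_resource_dtypes_and_shapes, mutable_shape, SerializeToString,
// ParseFromString) follow the conventional protoc-generated API for the fields
// in remote_tensor_handle.proto; treat the exact spellings as assumptions to be
// checked against the generated header, and the device string as a placeholder.
//
//   #include "tensorflow/core/protobuf/remote_tensor_handle.pb.h"
//
//   tensorflow::eager::RemoteTensorHandle handle;
//   handle.set_op_id(42);        // int64 op_id = 1
//   handle.set_output_num(0);    // int32 output_num = 2
//   handle.set_device("/job:worker/replica:0/task:1/device:CPU:0");
//   handle.set_dtype(tensorflow::DT_RESOURCE);
//
//   // Describe the dtype/shape of the resource the handle refers to.
//   auto* entry = handle.add_resource_dtypes_and_shapes();
//   entry->set_dtype(tensorflow::DT_FLOAT);
//   entry->mutable_shape()->add_dim()->set_size(128);
//
//   std::string wire;
//   handle.SerializeToString(&wire);               // uses _InternalSerialize
//
//   tensorflow::eager::RemoteTensorHandle parsed;
//   const bool ok = parsed.ParseFromString(wire);  // uses _InternalParse
//   (void)ok;
// ---------------------------------------------------------------------------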