1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: tensorflow/core/protobuf/queue_runner.proto
3 
4 #include "tensorflow/core/protobuf/queue_runner.pb.h"
5 
6 #include <algorithm>
7 #include <cstdint>
8 
9 #include <google/protobuf/io/coded_stream.h>
10 #include <google/protobuf/extension_set.h>
11 #include <google/protobuf/wire_format_lite.h>
12 #include <google/protobuf/io/zero_copy_stream_impl_lite.h>
13 // @@protoc_insertion_point(includes)
14 #include <google/protobuf/port_def.inc>
15 
16 PROTOBUF_PRAGMA_INIT_SEG
17 
18 namespace _pb = ::PROTOBUF_NAMESPACE_ID;
19 namespace _pbi = _pb::internal;
20 
21 namespace tensorflow {
// Constexpr constructor used only to build the process-wide default instance
// at compile time (constant initialization — no dynamic initializer runs).
// String fields point at the shared fixed-address empty string.
PROTOBUF_CONSTEXPR QueueRunnerDef::QueueRunnerDef(
    ::_pbi::ConstantInitialized): _impl_{
    /*decltype(_impl_.enqueue_op_name_)*/{}
  , /*decltype(_impl_.queue_closed_exception_types_)*/{}
  , /*decltype(_impl_._queue_closed_exception_types_cached_byte_size_)*/{0}
  , /*decltype(_impl_.queue_name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
  , /*decltype(_impl_.close_op_name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
  , /*decltype(_impl_.cancel_op_name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
  , /*decltype(_impl_._cached_size_)*/{}} {}
// Holds the default QueueRunnerDef instance inside a union so that its
// destructor is never implicitly invoked: the default instance must remain
// valid for the entire process lifetime (it is deliberately leaked).
struct QueueRunnerDefDefaultTypeInternal {
  PROTOBUF_CONSTEXPR QueueRunnerDefDefaultTypeInternal()
      : _instance(::_pbi::ConstantInitialized{}) {}
  // Intentionally empty: _instance is a union member and is never destroyed.
  ~QueueRunnerDefDefaultTypeInternal() {}
  union {  // NOLINT(misc-non-private-member-variables-in-classes)
    QueueRunnerDef _instance;
  };
};
// The shared, constant-initialized default instance returned by
// default_instance() accessors in the generated header.
PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 QueueRunnerDefDefaultTypeInternal _QueueRunnerDef_default_instance_;
40 }  // namespace tensorflow
41 namespace tensorflow {
42 
43 // ===================================================================
44 
// Generated helper class for private accessors (e.g. has-bit helpers).
// Empty here because QueueRunnerDef has no fields that require them.
class QueueRunnerDef::_Internal {
 public:
};
48 
// Arena constructor: the message (and its string/repeated storage) is
// allocated on |arena| when non-null; |is_message_owned| marks the
// message-owned-arena optimization.
QueueRunnerDef::QueueRunnerDef(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                         bool is_message_owned)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
  SharedCtor(arena, is_message_owned);
  // @@protoc_insertion_point(arena_constructor:tensorflow.QueueRunnerDef)
}
// Copy constructor: deep-copies every field of |from| into a new
// heap-allocated (non-arena) message.
QueueRunnerDef::QueueRunnerDef(const QueueRunnerDef& from)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
  QueueRunnerDef* const _this = this; (void)_this;
  // Placement-new the Impl_ aggregate: the repeated fields copy-construct
  // from the source; the singular strings start empty and are set below.
  new (&_impl_) Impl_{
      decltype(_impl_.enqueue_op_name_){from._impl_.enqueue_op_name_}
    , decltype(_impl_.queue_closed_exception_types_){from._impl_.queue_closed_exception_types_}
    , /*decltype(_impl_._queue_closed_exception_types_cached_byte_size_)*/{0}
    , decltype(_impl_.queue_name_){}
    , decltype(_impl_.close_op_name_){}
    , decltype(_impl_.cancel_op_name_){}
    , /*decltype(_impl_._cached_size_)*/{}};

  // Carry over unknown fields (stored as a raw string in lite messages).
  _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
  _impl_.queue_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.queue_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Each singular string is copied only when non-empty; empty means the
  // field keeps its shared default representation.
  if (!from._internal_queue_name().empty()) {
    _this->_impl_.queue_name_.Set(from._internal_queue_name(),
      _this->GetArenaForAllocation());
  }
  _impl_.close_op_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.close_op_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (!from._internal_close_op_name().empty()) {
    _this->_impl_.close_op_name_.Set(from._internal_close_op_name(),
      _this->GetArenaForAllocation());
  }
  _impl_.cancel_op_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.cancel_op_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (!from._internal_cancel_op_name().empty()) {
    _this->_impl_.cancel_op_name_.Set(from._internal_cancel_op_name(),
      _this->GetArenaForAllocation());
  }
  // @@protoc_insertion_point(copy_constructor:tensorflow.QueueRunnerDef)
}
94 
// Shared construction logic for the arena and default constructors:
// placement-constructs Impl_ with all fields in their default state.
inline void QueueRunnerDef::SharedCtor(
    ::_pb::Arena* arena, bool is_message_owned) {
  (void)arena;
  (void)is_message_owned;
  new (&_impl_) Impl_{
      decltype(_impl_.enqueue_op_name_){arena}
    , decltype(_impl_.queue_closed_exception_types_){arena}
    , /*decltype(_impl_._queue_closed_exception_types_cached_byte_size_)*/{0}
    , decltype(_impl_.queue_name_){}
    , decltype(_impl_.close_op_name_){}
    , decltype(_impl_.cancel_op_name_){}
    , /*decltype(_impl_._cached_size_)*/{}
  };
  // Point each string at its default; under PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // force a private empty copy instead of the shared global default.
  _impl_.queue_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.queue_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  _impl_.close_op_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.close_op_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  _impl_.cancel_op_name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.cancel_op_name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
}
121 
// Destructor: if the message lives on an arena the arena owns all storage,
// so only the metadata is released and field destruction is skipped.
QueueRunnerDef::~QueueRunnerDef() {
  // @@protoc_insertion_point(destructor:tensorflow.QueueRunnerDef)
  if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
  (void)arena;
    return;
  }
  SharedDtor();
}
130 
// Destroys field storage. Only reached for heap-allocated messages; arena
// messages must never get here (hence the DCHECK).
inline void QueueRunnerDef::SharedDtor() {
  GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
  _impl_.enqueue_op_name_.~RepeatedPtrField();
  _impl_.queue_closed_exception_types_.~RepeatedField();
  _impl_.queue_name_.Destroy();
  _impl_.close_op_name_.Destroy();
  _impl_.cancel_op_name_.Destroy();
}
139 
// Records the serialized size computed by ByteSizeLong(); const because the
// cached-size slot is designed for mutation through const access.
void QueueRunnerDef::SetCachedSize(int size) const {
  _impl_._cached_size_.Set(size);
}
143 
Clear()144 void QueueRunnerDef::Clear() {
145 // @@protoc_insertion_point(message_clear_start:tensorflow.QueueRunnerDef)
146   ::uint32_t cached_has_bits = 0;
147   // Prevent compiler warnings about cached_has_bits being unused
148   (void) cached_has_bits;
149 
150   _impl_.enqueue_op_name_.Clear();
151   _impl_.queue_closed_exception_types_.Clear();
152   _impl_.queue_name_.ClearToEmpty();
153   _impl_.close_op_name_.ClearToEmpty();
154   _impl_.cancel_op_name_.ClearToEmpty();
155   _internal_metadata_.Clear<std::string>();
156 }
157 
// Wire-format parser (lite runtime, tag-dispatch, no reflection). Consumes
// bytes from |ptr| under |ctx| and returns the position after the parsed
// region, or nullptr on malformed input.
const char* QueueRunnerDef::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
  while (!ctx->Done(&ptr)) {
    ::uint32_t tag;
    ptr = ::_pbi::ReadTag(ptr, &tag);
    switch (tag >> 3) {
      // string queue_name = 1;
      case 1:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 10)) {
          auto str = _internal_mutable_queue_name();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          // proto3 string fields must contain valid UTF-8.
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // repeated string enqueue_op_name = 2;
      case 2:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 18)) {
          // Greedily consume consecutive elements with the same tag to avoid
          // re-entering the dispatch loop per element.
          ptr -= 1;
          do {
            ptr += 1;
            auto str = _internal_add_enqueue_op_name();
            ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
            CHK_(ptr);
            CHK_(::_pbi::VerifyUTF8(str, nullptr));
            if (!ctx->DataAvailable(ptr)) break;
          } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<18>(ptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // string close_op_name = 3;
      case 3:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 26)) {
          auto str = _internal_mutable_close_op_name();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // string cancel_op_name = 4;
      case 4:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 34)) {
          auto str = _internal_mutable_cancel_op_name();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      // repeated .tensorflow.error.Code queue_closed_exception_types = 5;
      case 5:
        // Accept both the packed (tag 42) and unpacked varint (tag 40)
        // encodings, as the wire format requires for repeated enums.
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 42)) {
          ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedEnumParser(_internal_mutable_queue_closed_exception_types(), ptr, ctx);
          CHK_(ptr);
        } else if (static_cast<::uint8_t>(tag) == 40) {
          ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
          _internal_add_queue_closed_exception_types(static_cast<::tensorflow::error::Code>(val));
        } else {
          goto handle_unusual;
        }
        continue;
      default:
        goto handle_unusual;
    }  // switch
  handle_unusual:
    // End-of-stream or matching end-group tag terminates this message;
    // anything else is preserved verbatim as an unknown field.
    if ((tag == 0) || ((tag & 7) == 4)) {
      CHK_(ptr);
      ctx->SetLastTag(tag);
      goto message_done;
    }
    ptr = UnknownFieldParse(
        tag,
        _internal_metadata_.mutable_unknown_fields<std::string>(),
        ptr, ctx);
    CHK_(ptr != nullptr);
  }  // while
message_done:
  return ptr;
failure:
  ptr = nullptr;
  goto message_done;
#undef CHK_
}
248 
// Serializes this message to the wire format, writing into |target| via the
// EpsCopyOutputStream. Fields are emitted in field-number order; relies on
// ByteSizeLong() having been called first to populate the packed-enum cached
// byte size.
::uint8_t* QueueRunnerDef::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:tensorflow.QueueRunnerDef)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // string queue_name = 1;
  if (!this->_internal_queue_name().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      this->_internal_queue_name().data(), static_cast<int>(this->_internal_queue_name().length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.QueueRunnerDef.queue_name");
    target = stream->WriteStringMaybeAliased(
        1, this->_internal_queue_name(), target);
  }

  // repeated string enqueue_op_name = 2;
  for (int i = 0, n = this->_internal_enqueue_op_name_size(); i < n; i++) {
    const auto& s = this->_internal_enqueue_op_name(i);
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      s.data(), static_cast<int>(s.length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.QueueRunnerDef.enqueue_op_name");
    target = stream->WriteString(2, s, target);
  }

  // string close_op_name = 3;
  if (!this->_internal_close_op_name().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      this->_internal_close_op_name().data(), static_cast<int>(this->_internal_close_op_name().length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.QueueRunnerDef.close_op_name");
    target = stream->WriteStringMaybeAliased(
        3, this->_internal_close_op_name(), target);
  }

  // string cancel_op_name = 4;
  if (!this->_internal_cancel_op_name().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
      this->_internal_cancel_op_name().data(), static_cast<int>(this->_internal_cancel_op_name().length()),
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
      "tensorflow.QueueRunnerDef.cancel_op_name");
    target = stream->WriteStringMaybeAliased(
        4, this->_internal_cancel_op_name(), target);
  }

  // repeated .tensorflow.error.Code queue_closed_exception_types = 5;
  {
    // Packed encoding; the payload size was cached by ByteSizeLong().
    int byte_size = _impl_._queue_closed_exception_types_cached_byte_size_.load(std::memory_order_relaxed);
    if (byte_size > 0) {
      target = stream->WriteEnumPacked(
          5, _impl_.queue_closed_exception_types_, byte_size, target);
    }
  }

  // Unknown fields were stored verbatim during parsing; append them as-is.
  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:tensorflow.QueueRunnerDef)
  return target;
}
311 
// Computes the exact serialized size in bytes. Side effects: caches the
// packed-enum payload size (read later by _InternalSerialize) and the total
// size (via SetCachedSize, read by GetCachedSize).
size_t QueueRunnerDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.QueueRunnerDef)
  size_t total_size = 0;

  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  // repeated string enqueue_op_name = 2;
  // One tag byte per element, plus each element's length-delimited size.
  total_size += 1 *
      ::PROTOBUF_NAMESPACE_ID::internal::FromIntSize(_impl_.enqueue_op_name_.size());
  for (int i = 0, n = _impl_.enqueue_op_name_.size(); i < n; i++) {
    total_size += ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
      _impl_.enqueue_op_name_.Get(i));
  }

  // repeated .tensorflow.error.Code queue_closed_exception_types = 5;
  {
    size_t data_size = 0;
    unsigned int count = static_cast<unsigned int>(this->_internal_queue_closed_exception_types_size());for (unsigned int i = 0; i < count; i++) {
      data_size += ::_pbi::WireFormatLite::EnumSize(
        this->_internal_queue_closed_exception_types(static_cast<int>(i)));
    }
    // Packed field: one tag plus a varint length prefix when non-empty.
    if (data_size > 0) {
      total_size += 1 +
        ::_pbi::WireFormatLite::Int32Size(static_cast<::int32_t>(data_size));
    }
    // Cache the payload size for _InternalSerialize's WriteEnumPacked call.
    int cached_size = ::_pbi::ToCachedSize(data_size);
    _impl_._queue_closed_exception_types_cached_byte_size_.store(cached_size,
                                    std::memory_order_relaxed);
    total_size += data_size;
  }

  // string queue_name = 1;
  if (!this->_internal_queue_name().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_queue_name());
  }

  // string close_op_name = 3;
  if (!this->_internal_close_op_name().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_close_op_name());
  }

  // string cancel_op_name = 4;
  if (!this->_internal_cancel_op_name().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_cancel_op_name());
  }

  // Unknown fields are appended verbatim, so their size is their length.
  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}
373 
// Lite-runtime virtual merge entry point: no reflection is available, so the
// incoming MessageLite is down-cast to QueueRunnerDef and dispatched to the
// typed MergeFrom.
void QueueRunnerDef::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const QueueRunnerDef*>(
      &from));
}
379 
MergeFrom(const QueueRunnerDef & from)380 void QueueRunnerDef::MergeFrom(const QueueRunnerDef& from) {
381   QueueRunnerDef* const _this = this;
382   // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.QueueRunnerDef)
383   GOOGLE_DCHECK_NE(&from, _this);
384   ::uint32_t cached_has_bits = 0;
385   (void) cached_has_bits;
386 
387   _this->_impl_.enqueue_op_name_.MergeFrom(from._impl_.enqueue_op_name_);
388   _this->_impl_.queue_closed_exception_types_.MergeFrom(from._impl_.queue_closed_exception_types_);
389   if (!from._internal_queue_name().empty()) {
390     _this->_internal_set_queue_name(from._internal_queue_name());
391   }
392   if (!from._internal_close_op_name().empty()) {
393     _this->_internal_set_close_op_name(from._internal_close_op_name());
394   }
395   if (!from._internal_cancel_op_name().empty()) {
396     _this->_internal_set_cancel_op_name(from._internal_cancel_op_name());
397   }
398   _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
399 }
400 
CopyFrom(const QueueRunnerDef & from)401 void QueueRunnerDef::CopyFrom(const QueueRunnerDef& from) {
402 // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.QueueRunnerDef)
403   if (&from == this) return;
404   Clear();
405   MergeFrom(from);
406 }
407 
// Always true: this proto3 message has no required fields to validate.
bool QueueRunnerDef::IsInitialized() const {
  return true;
}
411 
// Exchanges the contents of two messages field by field. ArenaStringPtr
// swaps receive both arenas so cross-arena swaps can be handled safely.
void QueueRunnerDef::InternalSwap(QueueRunnerDef* other) {
  using std::swap;
  auto* lhs_arena = GetArenaForAllocation();
  auto* rhs_arena = other->GetArenaForAllocation();
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  _impl_.enqueue_op_name_.InternalSwap(&other->_impl_.enqueue_op_name_);
  _impl_.queue_closed_exception_types_.InternalSwap(&other->_impl_.queue_closed_exception_types_);
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.queue_name_, lhs_arena,
      &other->_impl_.queue_name_, rhs_arena
  );
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.close_op_name_, lhs_arena,
      &other->_impl_.close_op_name_, rhs_arena
  );
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.cancel_op_name_, lhs_arena,
      &other->_impl_.cancel_op_name_, rhs_arena
  );
}
432 
// Returns the fully-qualified proto type name; the lite runtime's
// replacement for descriptor-based name lookup.
std::string QueueRunnerDef::GetTypeName() const {
  return "tensorflow.QueueRunnerDef";
}
436 
437 
438 // @@protoc_insertion_point(namespace_scope)
439 }  // namespace tensorflow
440 PROTOBUF_NAMESPACE_OPEN
// Explicit specialization so the protobuf runtime can construct a
// QueueRunnerDef on an arena (or on the heap when |arena| is null).
template<> PROTOBUF_NOINLINE ::tensorflow::QueueRunnerDef*
Arena::CreateMaybeMessage< ::tensorflow::QueueRunnerDef >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::QueueRunnerDef >(arena);
}
445 PROTOBUF_NAMESPACE_CLOSE
446 
447 // @@protoc_insertion_point(global_scope)
448 #include <google/protobuf/port_undef.inc>
449