// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/protobuf/queue_runner.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto

#include <cstdint>
#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3021000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include "tensorflow/core/protobuf/error_codes.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto {
  static const ::uint32_t offsets[];
};
namespace tensorflow {
class QueueRunnerDef;
struct QueueRunnerDefDefaultTypeInternal;
extern QueueRunnerDefDefaultTypeInternal _QueueRunnerDef_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::QueueRunnerDef* Arena::CreateMaybeMessage<::tensorflow::QueueRunnerDef>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
namespace tensorflow {

// ===================================================================

class QueueRunnerDef final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.QueueRunnerDef) */ {
 public:
  inline QueueRunnerDef() : QueueRunnerDef(nullptr) {}
  ~QueueRunnerDef() override;
  explicit PROTOBUF_CONSTEXPR QueueRunnerDef(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  QueueRunnerDef(const QueueRunnerDef& from);
  QueueRunnerDef(QueueRunnerDef&& from) noexcept
    : QueueRunnerDef() {
    *this = ::std::move(from);
  }

  inline QueueRunnerDef& operator=(const QueueRunnerDef& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline QueueRunnerDef& operator=(QueueRunnerDef&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const QueueRunnerDef& default_instance() {
    return *internal_default_instance();
  }
  static inline const QueueRunnerDef* internal_default_instance() {
    return reinterpret_cast<const QueueRunnerDef*>(
               &_QueueRunnerDef_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(QueueRunnerDef& a, QueueRunnerDef& b) {
    a.Swap(&b);
  }
  inline void Swap(QueueRunnerDef* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(QueueRunnerDef* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }
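  // Not generated: a minimal sketch of arena-aware swapping, assuming both
  // messages were created on the same ::google::protobuf::Arena. Swap() above
  // falls back to GenericSwap() (a copy) when the owning arenas differ, while
  // UnsafeArenaSwap() requires them to match.
  //
  //   ::google::protobuf::Arena arena;
  //   auto* a = ::google::protobuf::Arena::CreateMessage<tensorflow::QueueRunnerDef>(&arena);
  //   auto* b = ::google::protobuf::Arena::CreateMessage<tensorflow::QueueRunnerDef>(&arena);
  //   a->set_queue_name("input_queue");  // hypothetical name
  //   a->Swap(b);  // same arena: cheap InternalSwap(), no copying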

  // implements Message ----------------------------------------------

  QueueRunnerDef* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<QueueRunnerDef>(arena);
  }
  QueueRunnerDef* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const QueueRunnerDef& from);
  void MergeFrom(const QueueRunnerDef& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(QueueRunnerDef* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.QueueRunnerDef";
  }
  protected:
  explicit QueueRunnerDef(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kEnqueueOpNameFieldNumber = 2,
    kQueueClosedExceptionTypesFieldNumber = 5,
    kQueueNameFieldNumber = 1,
    kCloseOpNameFieldNumber = 3,
    kCancelOpNameFieldNumber = 4,
  };
  // repeated string enqueue_op_name = 2;
  int enqueue_op_name_size() const;
  private:
  int _internal_enqueue_op_name_size() const;
  public:
  void clear_enqueue_op_name();
  const std::string& enqueue_op_name(int index) const;
  std::string* mutable_enqueue_op_name(int index);
  void set_enqueue_op_name(int index, const std::string& value);
  void set_enqueue_op_name(int index, std::string&& value);
  void set_enqueue_op_name(int index, const char* value);
  void set_enqueue_op_name(int index, const char* value, size_t size);
  std::string* add_enqueue_op_name();
  void add_enqueue_op_name(const std::string& value);
  void add_enqueue_op_name(std::string&& value);
  void add_enqueue_op_name(const char* value);
  void add_enqueue_op_name(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& enqueue_op_name() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_enqueue_op_name();
  private:
  const std::string& _internal_enqueue_op_name(int index) const;
  std::string* _internal_add_enqueue_op_name();
  public:

  // repeated .tensorflow.error.Code queue_closed_exception_types = 5;
  int queue_closed_exception_types_size() const;
  private:
  int _internal_queue_closed_exception_types_size() const;
  public:
  void clear_queue_closed_exception_types();
  private:
  ::tensorflow::error::Code _internal_queue_closed_exception_types(int index) const;
  void _internal_add_queue_closed_exception_types(::tensorflow::error::Code value);
  ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>* _internal_mutable_queue_closed_exception_types();
  public:
  ::tensorflow::error::Code queue_closed_exception_types(int index) const;
  void set_queue_closed_exception_types(int index, ::tensorflow::error::Code value);
  void add_queue_closed_exception_types(::tensorflow::error::Code value);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>& queue_closed_exception_types() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>* mutable_queue_closed_exception_types();

  // string queue_name = 1;
  void clear_queue_name();
  const std::string& queue_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_queue_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_queue_name();
  PROTOBUF_NODISCARD std::string* release_queue_name();
  void set_allocated_queue_name(std::string* queue_name);
  private:
  const std::string& _internal_queue_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_queue_name(const std::string& value);
  std::string* _internal_mutable_queue_name();
  public:

  // string close_op_name = 3;
  void clear_close_op_name();
  const std::string& close_op_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_close_op_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_close_op_name();
  PROTOBUF_NODISCARD std::string* release_close_op_name();
  void set_allocated_close_op_name(std::string* close_op_name);
  private:
  const std::string& _internal_close_op_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_close_op_name(const std::string& value);
  std::string* _internal_mutable_close_op_name();
  public:

  // string cancel_op_name = 4;
  void clear_cancel_op_name();
  const std::string& cancel_op_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_cancel_op_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_cancel_op_name();
  PROTOBUF_NODISCARD std::string* release_cancel_op_name();
  void set_allocated_cancel_op_name(std::string* cancel_op_name);
  private:
  const std::string& _internal_cancel_op_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_cancel_op_name(const std::string& value);
  std::string* _internal_mutable_cancel_op_name();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.QueueRunnerDef)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> enqueue_op_name_;
    ::PROTOBUF_NAMESPACE_ID::RepeatedField<int> queue_closed_exception_types_;
    mutable std::atomic<int> _queue_closed_exception_types_cached_byte_size_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr queue_name_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr close_op_name_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr cancel_op_name_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto;
};
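// Not generated: a minimal end-to-end usage sketch for this lite message,
// assuming the generated queue_runner.pb.cc and libprotobuf-lite are linked
// in. Because QueueRunnerDef derives from MessageLite, it can be round-tripped
// with SerializeToString()/ParseFromString() without the full reflection
// runtime. The op names below are hypothetical.
//
//   tensorflow::QueueRunnerDef def;
//   def.set_queue_name("input_queue");
//   def.add_enqueue_op_name("input_queue/enqueue");
//   def.set_close_op_name("input_queue/close");
//   def.set_cancel_op_name("input_queue/cancel");
//   def.add_queue_closed_exception_types(tensorflow::error::OUT_OF_RANGE);
//
//   std::string wire;
//   def.SerializeToString(&wire);
//
//   tensorflow::QueueRunnerDef parsed;
//   parsed.ParseFromString(wire);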
// ===================================================================


// ===================================================================

#ifdef __GNUC__
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif  // __GNUC__
// QueueRunnerDef

// string queue_name = 1;
inline void QueueRunnerDef::clear_queue_name() {
  _impl_.queue_name_.ClearToEmpty();
}
inline const std::string& QueueRunnerDef::queue_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.QueueRunnerDef.queue_name)
  return _internal_queue_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void QueueRunnerDef::set_queue_name(ArgT0&& arg0, ArgT... args) {

 _impl_.queue_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.queue_name)
}
inline std::string* QueueRunnerDef::mutable_queue_name() {
  std::string* _s = _internal_mutable_queue_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.QueueRunnerDef.queue_name)
  return _s;
}
inline const std::string& QueueRunnerDef::_internal_queue_name() const {
  return _impl_.queue_name_.Get();
}
inline void QueueRunnerDef::_internal_set_queue_name(const std::string& value) {

  _impl_.queue_name_.Set(value, GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::_internal_mutable_queue_name() {

  return _impl_.queue_name_.Mutable(GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::release_queue_name() {
  // @@protoc_insertion_point(field_release:tensorflow.QueueRunnerDef.queue_name)
  return _impl_.queue_name_.Release();
}
inline void QueueRunnerDef::set_allocated_queue_name(std::string* queue_name) {
  _impl_.queue_name_.SetAllocated(queue_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.queue_name_.IsDefault()) {
    _impl_.queue_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.QueueRunnerDef.queue_name)
}
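
// Not generated: a brief sketch of the ownership contract for the string
// accessors above, assuming a heap-allocated (non-arena) message.
// release_queue_name() transfers the field's string to the caller, who must
// free it, and set_allocated_queue_name() takes ownership of the pointer
// passed in.
//
//   tensorflow::QueueRunnerDef def;
//   def.set_allocated_queue_name(new std::string("input_queue"));
//   std::unique_ptr<std::string> name(def.release_queue_name());
//   // def.queue_name() is empty again; *name == "input_queue".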

// repeated string enqueue_op_name = 2;
inline int QueueRunnerDef::_internal_enqueue_op_name_size() const {
  return _impl_.enqueue_op_name_.size();
}
inline int QueueRunnerDef::enqueue_op_name_size() const {
  return _internal_enqueue_op_name_size();
}
inline void QueueRunnerDef::clear_enqueue_op_name() {
  _impl_.enqueue_op_name_.Clear();
}
inline std::string* QueueRunnerDef::add_enqueue_op_name() {
  std::string* _s = _internal_add_enqueue_op_name();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.QueueRunnerDef.enqueue_op_name)
  return _s;
}
inline const std::string& QueueRunnerDef::_internal_enqueue_op_name(int index) const {
  return _impl_.enqueue_op_name_.Get(index);
}
inline const std::string& QueueRunnerDef::enqueue_op_name(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.QueueRunnerDef.enqueue_op_name)
  return _internal_enqueue_op_name(index);
}
inline std::string* QueueRunnerDef::mutable_enqueue_op_name(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.QueueRunnerDef.enqueue_op_name)
  return _impl_.enqueue_op_name_.Mutable(index);
}
inline void QueueRunnerDef::set_enqueue_op_name(int index, const std::string& value) {
  _impl_.enqueue_op_name_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::set_enqueue_op_name(int index, std::string&& value) {
  _impl_.enqueue_op_name_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::set_enqueue_op_name(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.enqueue_op_name_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::set_enqueue_op_name(int index, const char* value, size_t size) {
  _impl_.enqueue_op_name_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline std::string* QueueRunnerDef::_internal_add_enqueue_op_name() {
  return _impl_.enqueue_op_name_.Add();
}
inline void QueueRunnerDef::add_enqueue_op_name(const std::string& value) {
  _impl_.enqueue_op_name_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::add_enqueue_op_name(std::string&& value) {
  _impl_.enqueue_op_name_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::add_enqueue_op_name(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.enqueue_op_name_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline void QueueRunnerDef::add_enqueue_op_name(const char* value, size_t size) {
  _impl_.enqueue_op_name_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.QueueRunnerDef.enqueue_op_name)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
QueueRunnerDef::enqueue_op_name() const {
  // @@protoc_insertion_point(field_list:tensorflow.QueueRunnerDef.enqueue_op_name)
  return _impl_.enqueue_op_name_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
QueueRunnerDef::mutable_enqueue_op_name() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.QueueRunnerDef.enqueue_op_name)
  return &_impl_.enqueue_op_name_;
}
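
// Not generated: a minimal sketch of working with the repeated string field
// above. enqueue_op_name() returns a RepeatedPtrField<std::string>, which
// supports range-based for loops; mutable_enqueue_op_name() allows bulk edits.
// The op names are hypothetical.
//
//   tensorflow::QueueRunnerDef def;
//   def.add_enqueue_op_name("input_queue/enqueue_0");
//   def.add_enqueue_op_name("input_queue/enqueue_1");
//   for (const std::string& op : def.enqueue_op_name()) {
//     // launch a runner thread for `op` ...
//   }
//   def.mutable_enqueue_op_name()->Clear();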

// string close_op_name = 3;
inline void QueueRunnerDef::clear_close_op_name() {
  _impl_.close_op_name_.ClearToEmpty();
}
inline const std::string& QueueRunnerDef::close_op_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.QueueRunnerDef.close_op_name)
  return _internal_close_op_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void QueueRunnerDef::set_close_op_name(ArgT0&& arg0, ArgT... args) {

 _impl_.close_op_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.close_op_name)
}
inline std::string* QueueRunnerDef::mutable_close_op_name() {
  std::string* _s = _internal_mutable_close_op_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.QueueRunnerDef.close_op_name)
  return _s;
}
inline const std::string& QueueRunnerDef::_internal_close_op_name() const {
  return _impl_.close_op_name_.Get();
}
inline void QueueRunnerDef::_internal_set_close_op_name(const std::string& value) {

  _impl_.close_op_name_.Set(value, GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::_internal_mutable_close_op_name() {

  return _impl_.close_op_name_.Mutable(GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::release_close_op_name() {
  // @@protoc_insertion_point(field_release:tensorflow.QueueRunnerDef.close_op_name)
  return _impl_.close_op_name_.Release();
}
inline void QueueRunnerDef::set_allocated_close_op_name(std::string* close_op_name) {
  _impl_.close_op_name_.SetAllocated(close_op_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.close_op_name_.IsDefault()) {
    _impl_.close_op_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.QueueRunnerDef.close_op_name)
}

// string cancel_op_name = 4;
inline void QueueRunnerDef::clear_cancel_op_name() {
  _impl_.cancel_op_name_.ClearToEmpty();
}
inline const std::string& QueueRunnerDef::cancel_op_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.QueueRunnerDef.cancel_op_name)
  return _internal_cancel_op_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void QueueRunnerDef::set_cancel_op_name(ArgT0&& arg0, ArgT... args) {

 _impl_.cancel_op_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.cancel_op_name)
}
inline std::string* QueueRunnerDef::mutable_cancel_op_name() {
  std::string* _s = _internal_mutable_cancel_op_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.QueueRunnerDef.cancel_op_name)
  return _s;
}
inline const std::string& QueueRunnerDef::_internal_cancel_op_name() const {
  return _impl_.cancel_op_name_.Get();
}
inline void QueueRunnerDef::_internal_set_cancel_op_name(const std::string& value) {

  _impl_.cancel_op_name_.Set(value, GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::_internal_mutable_cancel_op_name() {

  return _impl_.cancel_op_name_.Mutable(GetArenaForAllocation());
}
inline std::string* QueueRunnerDef::release_cancel_op_name() {
  // @@protoc_insertion_point(field_release:tensorflow.QueueRunnerDef.cancel_op_name)
  return _impl_.cancel_op_name_.Release();
}
inline void QueueRunnerDef::set_allocated_cancel_op_name(std::string* cancel_op_name) {
  _impl_.cancel_op_name_.SetAllocated(cancel_op_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.cancel_op_name_.IsDefault()) {
    _impl_.cancel_op_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.QueueRunnerDef.cancel_op_name)
}

// repeated .tensorflow.error.Code queue_closed_exception_types = 5;
inline int QueueRunnerDef::_internal_queue_closed_exception_types_size() const {
  return _impl_.queue_closed_exception_types_.size();
}
inline int QueueRunnerDef::queue_closed_exception_types_size() const {
  return _internal_queue_closed_exception_types_size();
}
inline void QueueRunnerDef::clear_queue_closed_exception_types() {
  _impl_.queue_closed_exception_types_.Clear();
}
inline ::tensorflow::error::Code QueueRunnerDef::_internal_queue_closed_exception_types(int index) const {
  return static_cast< ::tensorflow::error::Code >(_impl_.queue_closed_exception_types_.Get(index));
}
inline ::tensorflow::error::Code QueueRunnerDef::queue_closed_exception_types(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.QueueRunnerDef.queue_closed_exception_types)
  return _internal_queue_closed_exception_types(index);
}
inline void QueueRunnerDef::set_queue_closed_exception_types(int index, ::tensorflow::error::Code value) {
  _impl_.queue_closed_exception_types_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.QueueRunnerDef.queue_closed_exception_types)
}
inline void QueueRunnerDef::_internal_add_queue_closed_exception_types(::tensorflow::error::Code value) {
  _impl_.queue_closed_exception_types_.Add(value);
}
inline void QueueRunnerDef::add_queue_closed_exception_types(::tensorflow::error::Code value) {
  _internal_add_queue_closed_exception_types(value);
  // @@protoc_insertion_point(field_add:tensorflow.QueueRunnerDef.queue_closed_exception_types)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>&
QueueRunnerDef::queue_closed_exception_types() const {
  // @@protoc_insertion_point(field_list:tensorflow.QueueRunnerDef.queue_closed_exception_types)
  return _impl_.queue_closed_exception_types_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>*
QueueRunnerDef::_internal_mutable_queue_closed_exception_types() {
  return &_impl_.queue_closed_exception_types_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField<int>*
QueueRunnerDef::mutable_queue_closed_exception_types() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.QueueRunnerDef.queue_closed_exception_types)
  return _internal_mutable_queue_closed_exception_types();
}
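
// Not generated: a sketch of the repeated enum field above. Values are stored
// in a RepeatedField<int> and cast back to ::tensorflow::error::Code on read,
// so the typed accessors are the preferred interface.
//
//   tensorflow::QueueRunnerDef def;
//   def.add_queue_closed_exception_types(tensorflow::error::OUT_OF_RANGE);
//   def.add_queue_closed_exception_types(tensorflow::error::CANCELLED);
//   for (int i = 0; i < def.queue_closed_exception_types_size(); ++i) {
//     ::tensorflow::error::Code code = def.queue_closed_exception_types(i);
//     // treat `code` as a benign queue-closed signal ...
//   }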

#ifdef __GNUC__
  #pragma GCC diagnostic pop
#endif  // __GNUC__

// @@protoc_insertion_point(namespace_scope)

}  // namespace tensorflow

// @@protoc_insertion_point(global_scope)

#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fqueue_5frunner_2eproto