// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/protobuf/tensor_bundle.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto

#include <cstdint>
#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3021000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include <google/protobuf/generated_enum_util.h>
#include "tensorflow/core/framework/tensor_shape.pb.h"
#include "tensorflow/core/framework/tensor_slice.pb.h"
#include "tensorflow/core/framework/types.pb.h"
#include "tensorflow/core/framework/versions.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto {
  static const ::uint32_t offsets[];
};
namespace tensorflow {
class BundleEntryProto;
struct BundleEntryProtoDefaultTypeInternal;
extern BundleEntryProtoDefaultTypeInternal _BundleEntryProto_default_instance_;
class BundleHeaderProto;
struct BundleHeaderProtoDefaultTypeInternal;
extern BundleHeaderProtoDefaultTypeInternal _BundleHeaderProto_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::BundleEntryProto* Arena::CreateMaybeMessage<::tensorflow::BundleEntryProto>(Arena*);
template<> ::tensorflow::BundleHeaderProto* Arena::CreateMaybeMessage<::tensorflow::BundleHeaderProto>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
namespace tensorflow {

enum BundleHeaderProto_Endianness : int {
  BundleHeaderProto_Endianness_LITTLE = 0,
  BundleHeaderProto_Endianness_BIG = 1,
  BundleHeaderProto_Endianness_BundleHeaderProto_Endianness_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::min(),
  BundleHeaderProto_Endianness_BundleHeaderProto_Endianness_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::max()
};
bool BundleHeaderProto_Endianness_IsValid(int value);
constexpr BundleHeaderProto_Endianness BundleHeaderProto_Endianness_Endianness_MIN = BundleHeaderProto_Endianness_LITTLE;
constexpr BundleHeaderProto_Endianness BundleHeaderProto_Endianness_Endianness_MAX = BundleHeaderProto_Endianness_BIG;
constexpr int BundleHeaderProto_Endianness_Endianness_ARRAYSIZE = BundleHeaderProto_Endianness_Endianness_MAX + 1;

const std::string& BundleHeaderProto_Endianness_Name(BundleHeaderProto_Endianness value);
template<typename T>
inline const std::string& BundleHeaderProto_Endianness_Name(T enum_t_value) {
  static_assert(::std::is_same<T, BundleHeaderProto_Endianness>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function BundleHeaderProto_Endianness_Name.");
  return BundleHeaderProto_Endianness_Name(static_cast<BundleHeaderProto_Endianness>(enum_t_value));
}
bool BundleHeaderProto_Endianness_Parse(
    ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, BundleHeaderProto_Endianness* value);
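
// Usage sketch (illustrative comment, not part of the generated code): the
// free helpers above round-trip an Endianness value through its name.
//
//   BundleHeaderProto_Endianness e;
//   if (BundleHeaderProto_Endianness_Parse("BIG", &e)) {
//     // BundleHeaderProto_Endianness_Name(e) returns "BIG" and
//     // BundleHeaderProto_Endianness_IsValid(e) is true.
//   }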
// ===================================================================

class BundleHeaderProto final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.BundleHeaderProto) */ {
 public:
  inline BundleHeaderProto() : BundleHeaderProto(nullptr) {}
  ~BundleHeaderProto() override;
  explicit PROTOBUF_CONSTEXPR BundleHeaderProto(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  BundleHeaderProto(const BundleHeaderProto& from);
  BundleHeaderProto(BundleHeaderProto&& from) noexcept
    : BundleHeaderProto() {
    *this = ::std::move(from);
  }

  inline BundleHeaderProto& operator=(const BundleHeaderProto& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline BundleHeaderProto& operator=(BundleHeaderProto&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const BundleHeaderProto& default_instance() {
    return *internal_default_instance();
  }
  static inline const BundleHeaderProto* internal_default_instance() {
    return reinterpret_cast<const BundleHeaderProto*>(
               &_BundleHeaderProto_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(BundleHeaderProto& a, BundleHeaderProto& b) {
    a.Swap(&b);
  }
  inline void Swap(BundleHeaderProto* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(BundleHeaderProto* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  BundleHeaderProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BundleHeaderProto>(arena);
  }
  BundleHeaderProto* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const BundleHeaderProto& from);
  void MergeFrom(const BundleHeaderProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(BundleHeaderProto* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BundleHeaderProto";
  }
  protected:
  explicit BundleHeaderProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  typedef BundleHeaderProto_Endianness Endianness;
  static constexpr Endianness LITTLE =
    BundleHeaderProto_Endianness_LITTLE;
  static constexpr Endianness BIG =
    BundleHeaderProto_Endianness_BIG;
  static inline bool Endianness_IsValid(int value) {
    return BundleHeaderProto_Endianness_IsValid(value);
  }
  static constexpr Endianness Endianness_MIN =
    BundleHeaderProto_Endianness_Endianness_MIN;
  static constexpr Endianness Endianness_MAX =
    BundleHeaderProto_Endianness_Endianness_MAX;
  static constexpr int Endianness_ARRAYSIZE =
    BundleHeaderProto_Endianness_Endianness_ARRAYSIZE;
  template<typename T>
  static inline const std::string& Endianness_Name(T enum_t_value) {
    static_assert(::std::is_same<T, Endianness>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function Endianness_Name.");
    return BundleHeaderProto_Endianness_Name(enum_t_value);
  }
  static inline bool Endianness_Parse(::PROTOBUF_NAMESPACE_ID::ConstStringParam name,
      Endianness* value) {
    return BundleHeaderProto_Endianness_Parse(name, value);
  }

  // accessors -------------------------------------------------------

  enum : int {
    kVersionFieldNumber = 3,
    kNumShardsFieldNumber = 1,
    kEndiannessFieldNumber = 2,
  };
  // .tensorflow.VersionDef version = 3;
  bool has_version() const;
  private:
  bool _internal_has_version() const;
  public:
  void clear_version();
  const ::tensorflow::VersionDef& version() const;
  PROTOBUF_NODISCARD ::tensorflow::VersionDef* release_version();
  ::tensorflow::VersionDef* mutable_version();
  void set_allocated_version(::tensorflow::VersionDef* version);
  private:
  const ::tensorflow::VersionDef& _internal_version() const;
  ::tensorflow::VersionDef* _internal_mutable_version();
  public:
  void unsafe_arena_set_allocated_version(
      ::tensorflow::VersionDef* version);
  ::tensorflow::VersionDef* unsafe_arena_release_version();

  // int32 num_shards = 1;
  void clear_num_shards();
  ::int32_t num_shards() const;
  void set_num_shards(::int32_t value);
  private:
  ::int32_t _internal_num_shards() const;
  void _internal_set_num_shards(::int32_t value);
  public:

  // .tensorflow.BundleHeaderProto.Endianness endianness = 2;
  void clear_endianness();
  ::tensorflow::BundleHeaderProto_Endianness endianness() const;
  void set_endianness(::tensorflow::BundleHeaderProto_Endianness value);
  private:
  ::tensorflow::BundleHeaderProto_Endianness _internal_endianness() const;
  void _internal_set_endianness(::tensorflow::BundleHeaderProto_Endianness value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.BundleHeaderProto)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::tensorflow::VersionDef* version_;
    ::int32_t num_shards_;
    int endianness_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto;
};
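
// Usage sketch (illustrative, not part of the generated code): building and
// serializing a header via the MessageLite base API; a single-shard,
// little-endian bundle is assumed for the example.
//
//   BundleHeaderProto header;
//   header.set_num_shards(1);
//   header.set_endianness(BundleHeaderProto::LITTLE);
//   header.mutable_version()->set_producer(1);  // VersionDef field
//   std::string wire;
//   header.SerializeToString(&wire);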
// -------------------------------------------------------------------

class BundleEntryProto final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.BundleEntryProto) */ {
 public:
  inline BundleEntryProto() : BundleEntryProto(nullptr) {}
  ~BundleEntryProto() override;
  explicit PROTOBUF_CONSTEXPR BundleEntryProto(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  BundleEntryProto(const BundleEntryProto& from);
  BundleEntryProto(BundleEntryProto&& from) noexcept
    : BundleEntryProto() {
    *this = ::std::move(from);
  }

  inline BundleEntryProto& operator=(const BundleEntryProto& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline BundleEntryProto& operator=(BundleEntryProto&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const BundleEntryProto& default_instance() {
    return *internal_default_instance();
  }
  static inline const BundleEntryProto* internal_default_instance() {
    return reinterpret_cast<const BundleEntryProto*>(
               &_BundleEntryProto_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(BundleEntryProto& a, BundleEntryProto& b) {
    a.Swap(&b);
  }
  inline void Swap(BundleEntryProto* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(BundleEntryProto* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  BundleEntryProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BundleEntryProto>(arena);
  }
  BundleEntryProto* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const BundleEntryProto& from);
  void MergeFrom(const BundleEntryProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(BundleEntryProto* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BundleEntryProto";
  }
  protected:
  explicit BundleEntryProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSlicesFieldNumber = 7,
    kShapeFieldNumber = 2,
    kDtypeFieldNumber = 1,
    kShardIdFieldNumber = 3,
    kOffsetFieldNumber = 4,
    kSizeFieldNumber = 5,
    kCrc32CFieldNumber = 6,
  };
  // repeated .tensorflow.TensorSliceProto slices = 7;
  int slices_size() const;
  private:
  int _internal_slices_size() const;
  public:
  void clear_slices();
  ::tensorflow::TensorSliceProto* mutable_slices(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >*
      mutable_slices();
  private:
  const ::tensorflow::TensorSliceProto& _internal_slices(int index) const;
  ::tensorflow::TensorSliceProto* _internal_add_slices();
  public:
  const ::tensorflow::TensorSliceProto& slices(int index) const;
  ::tensorflow::TensorSliceProto* add_slices();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >&
      slices() const;

  // .tensorflow.TensorShapeProto shape = 2;
  bool has_shape() const;
  private:
  bool _internal_has_shape() const;
  public:
  void clear_shape();
  const ::tensorflow::TensorShapeProto& shape() const;
  PROTOBUF_NODISCARD ::tensorflow::TensorShapeProto* release_shape();
  ::tensorflow::TensorShapeProto* mutable_shape();
  void set_allocated_shape(::tensorflow::TensorShapeProto* shape);
  private:
  const ::tensorflow::TensorShapeProto& _internal_shape() const;
  ::tensorflow::TensorShapeProto* _internal_mutable_shape();
  public:
  void unsafe_arena_set_allocated_shape(
      ::tensorflow::TensorShapeProto* shape);
  ::tensorflow::TensorShapeProto* unsafe_arena_release_shape();

  // .tensorflow.DataType dtype = 1;
  void clear_dtype();
  ::tensorflow::DataType dtype() const;
  void set_dtype(::tensorflow::DataType value);
  private:
  ::tensorflow::DataType _internal_dtype() const;
  void _internal_set_dtype(::tensorflow::DataType value);
  public:

  // int32 shard_id = 3;
  void clear_shard_id();
  ::int32_t shard_id() const;
  void set_shard_id(::int32_t value);
  private:
  ::int32_t _internal_shard_id() const;
  void _internal_set_shard_id(::int32_t value);
  public:

  // int64 offset = 4;
  void clear_offset();
  ::int64_t offset() const;
  void set_offset(::int64_t value);
  private:
  ::int64_t _internal_offset() const;
  void _internal_set_offset(::int64_t value);
  public:

  // int64 size = 5;
  void clear_size();
  ::int64_t size() const;
  void set_size(::int64_t value);
  private:
  ::int64_t _internal_size() const;
  void _internal_set_size(::int64_t value);
  public:

  // fixed32 crc32c = 6;
  void clear_crc32c();
  ::uint32_t crc32c() const;
  void set_crc32c(::uint32_t value);
  private:
  ::uint32_t _internal_crc32c() const;
  void _internal_set_crc32c(::uint32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.BundleEntryProto)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto > slices_;
    ::tensorflow::TensorShapeProto* shape_;
    int dtype_;
    ::int32_t shard_id_;
    ::int64_t offset_;
    ::int64_t size_;
    ::uint32_t crc32c_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto;
};
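
// Usage sketch (illustrative, not part of the generated code): describing
// one stored tensor; the field values below are invented for the example.
//
//   BundleEntryProto entry;
//   entry.set_dtype(DT_FLOAT);
//   entry.mutable_shape()->add_dim()->set_size(128);
//   entry.set_shard_id(0);
//   entry.set_offset(0);
//   entry.set_size(128 * sizeof(float));
//   entry.set_crc32c(0u);  // checksum of the stored tensor bytes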
// ===================================================================


// ===================================================================

#ifdef __GNUC__
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif  // __GNUC__
// BundleHeaderProto

// int32 num_shards = 1;
inline void BundleHeaderProto::clear_num_shards() {
  _impl_.num_shards_ = 0;
}
inline ::int32_t BundleHeaderProto::_internal_num_shards() const {
  return _impl_.num_shards_;
}
inline ::int32_t BundleHeaderProto::num_shards() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.num_shards)
  return _internal_num_shards();
}
inline void BundleHeaderProto::_internal_set_num_shards(::int32_t value) {

  _impl_.num_shards_ = value;
}
inline void BundleHeaderProto::set_num_shards(::int32_t value) {
  _internal_set_num_shards(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleHeaderProto.num_shards)
}

// .tensorflow.BundleHeaderProto.Endianness endianness = 2;
inline void BundleHeaderProto::clear_endianness() {
  _impl_.endianness_ = 0;
}
inline ::tensorflow::BundleHeaderProto_Endianness BundleHeaderProto::_internal_endianness() const {
  return static_cast< ::tensorflow::BundleHeaderProto_Endianness >(_impl_.endianness_);
}
inline ::tensorflow::BundleHeaderProto_Endianness BundleHeaderProto::endianness() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.endianness)
  return _internal_endianness();
}
inline void BundleHeaderProto::_internal_set_endianness(::tensorflow::BundleHeaderProto_Endianness value) {

  _impl_.endianness_ = value;
}
inline void BundleHeaderProto::set_endianness(::tensorflow::BundleHeaderProto_Endianness value) {
  _internal_set_endianness(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleHeaderProto.endianness)
}

// .tensorflow.VersionDef version = 3;
inline bool BundleHeaderProto::_internal_has_version() const {
  return this != internal_default_instance() && _impl_.version_ != nullptr;
}
inline bool BundleHeaderProto::has_version() const {
  return _internal_has_version();
}
inline const ::tensorflow::VersionDef& BundleHeaderProto::_internal_version() const {
  const ::tensorflow::VersionDef* p = _impl_.version_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::VersionDef&>(
      ::tensorflow::_VersionDef_default_instance_);
}
inline const ::tensorflow::VersionDef& BundleHeaderProto::version() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.version)
  return _internal_version();
}
inline void BundleHeaderProto::unsafe_arena_set_allocated_version(
    ::tensorflow::VersionDef* version) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.version_);
  }
  _impl_.version_ = version;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.BundleHeaderProto.version)
}
inline ::tensorflow::VersionDef* BundleHeaderProto::release_version() {

  ::tensorflow::VersionDef* temp = _impl_.version_;
  _impl_.version_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::VersionDef* BundleHeaderProto::unsafe_arena_release_version() {
  // @@protoc_insertion_point(field_release:tensorflow.BundleHeaderProto.version)

  ::tensorflow::VersionDef* temp = _impl_.version_;
  _impl_.version_ = nullptr;
  return temp;
}
inline ::tensorflow::VersionDef* BundleHeaderProto::_internal_mutable_version() {

  if (_impl_.version_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::VersionDef>(GetArenaForAllocation());
    _impl_.version_ = p;
  }
  return _impl_.version_;
}
inline ::tensorflow::VersionDef* BundleHeaderProto::mutable_version() {
  ::tensorflow::VersionDef* _msg = _internal_mutable_version();
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleHeaderProto.version)
  return _msg;
}
inline void BundleHeaderProto::set_allocated_version(::tensorflow::VersionDef* version) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.version_);
  }
  if (version) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(version));
    if (message_arena != submessage_arena) {
      version = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, version, submessage_arena);
    }

  } else {

  }
  _impl_.version_ = version;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BundleHeaderProto.version)
}
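
// Ownership sketch (illustrative, assuming a heap-allocated message that is
// not on an arena): set_allocated_version() takes ownership of the pointer,
// and release_version() hands it back to the caller.
//
//   BundleHeaderProto header;                      // not on an arena
//   header.set_allocated_version(new VersionDef);  // header owns it now
//   std::unique_ptr<VersionDef> v(header.release_version());
//   // header.has_version() is false again; v owns the submessage.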

// -------------------------------------------------------------------

// BundleEntryProto

// .tensorflow.DataType dtype = 1;
inline void BundleEntryProto::clear_dtype() {
  _impl_.dtype_ = 0;
}
inline ::tensorflow::DataType BundleEntryProto::_internal_dtype() const {
  return static_cast< ::tensorflow::DataType >(_impl_.dtype_);
}
inline ::tensorflow::DataType BundleEntryProto::dtype() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.dtype)
  return _internal_dtype();
}
inline void BundleEntryProto::_internal_set_dtype(::tensorflow::DataType value) {

  _impl_.dtype_ = value;
}
inline void BundleEntryProto::set_dtype(::tensorflow::DataType value) {
  _internal_set_dtype(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.dtype)
}

// .tensorflow.TensorShapeProto shape = 2;
inline bool BundleEntryProto::_internal_has_shape() const {
  return this != internal_default_instance() && _impl_.shape_ != nullptr;
}
inline bool BundleEntryProto::has_shape() const {
  return _internal_has_shape();
}
inline const ::tensorflow::TensorShapeProto& BundleEntryProto::_internal_shape() const {
  const ::tensorflow::TensorShapeProto* p = _impl_.shape_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorShapeProto&>(
      ::tensorflow::_TensorShapeProto_default_instance_);
}
inline const ::tensorflow::TensorShapeProto& BundleEntryProto::shape() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.shape)
  return _internal_shape();
}
inline void BundleEntryProto::unsafe_arena_set_allocated_shape(
    ::tensorflow::TensorShapeProto* shape) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.BundleEntryProto.shape)
}
inline ::tensorflow::TensorShapeProto* BundleEntryProto::release_shape() {

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorShapeProto* BundleEntryProto::unsafe_arena_release_shape() {
  // @@protoc_insertion_point(field_release:tensorflow.BundleEntryProto.shape)

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorShapeProto* BundleEntryProto::_internal_mutable_shape() {

  if (_impl_.shape_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorShapeProto>(GetArenaForAllocation());
    _impl_.shape_ = p;
  }
  return _impl_.shape_;
}
inline ::tensorflow::TensorShapeProto* BundleEntryProto::mutable_shape() {
  ::tensorflow::TensorShapeProto* _msg = _internal_mutable_shape();
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleEntryProto.shape)
  return _msg;
}
inline void BundleEntryProto::set_allocated_shape(::tensorflow::TensorShapeProto* shape) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  if (shape) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape));
    if (message_arena != submessage_arena) {
      shape = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, shape, submessage_arena);
    }

  } else {

  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BundleEntryProto.shape)
}

// int32 shard_id = 3;
inline void BundleEntryProto::clear_shard_id() {
  _impl_.shard_id_ = 0;
}
inline ::int32_t BundleEntryProto::_internal_shard_id() const {
  return _impl_.shard_id_;
}
inline ::int32_t BundleEntryProto::shard_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.shard_id)
  return _internal_shard_id();
}
inline void BundleEntryProto::_internal_set_shard_id(::int32_t value) {

  _impl_.shard_id_ = value;
}
inline void BundleEntryProto::set_shard_id(::int32_t value) {
  _internal_set_shard_id(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.shard_id)
}

// int64 offset = 4;
inline void BundleEntryProto::clear_offset() {
  _impl_.offset_ = ::int64_t{0};
}
inline ::int64_t BundleEntryProto::_internal_offset() const {
  return _impl_.offset_;
}
inline ::int64_t BundleEntryProto::offset() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.offset)
  return _internal_offset();
}
inline void BundleEntryProto::_internal_set_offset(::int64_t value) {

  _impl_.offset_ = value;
}
inline void BundleEntryProto::set_offset(::int64_t value) {
  _internal_set_offset(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.offset)
}

// int64 size = 5;
inline void BundleEntryProto::clear_size() {
  _impl_.size_ = ::int64_t{0};
}
inline ::int64_t BundleEntryProto::_internal_size() const {
  return _impl_.size_;
}
inline ::int64_t BundleEntryProto::size() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.size)
  return _internal_size();
}
inline void BundleEntryProto::_internal_set_size(::int64_t value) {

  _impl_.size_ = value;
}
inline void BundleEntryProto::set_size(::int64_t value) {
  _internal_set_size(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.size)
}

// fixed32 crc32c = 6;
inline void BundleEntryProto::clear_crc32c() {
  _impl_.crc32c_ = 0u;
}
inline ::uint32_t BundleEntryProto::_internal_crc32c() const {
  return _impl_.crc32c_;
}
inline ::uint32_t BundleEntryProto::crc32c() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.crc32c)
  return _internal_crc32c();
}
inline void BundleEntryProto::_internal_set_crc32c(::uint32_t value) {

  _impl_.crc32c_ = value;
}
inline void BundleEntryProto::set_crc32c(::uint32_t value) {
  _internal_set_crc32c(value);
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.crc32c)
}

// repeated .tensorflow.TensorSliceProto slices = 7;
inline int BundleEntryProto::_internal_slices_size() const {
  return _impl_.slices_.size();
}
inline int BundleEntryProto::slices_size() const {
  return _internal_slices_size();
}
inline ::tensorflow::TensorSliceProto* BundleEntryProto::mutable_slices(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleEntryProto.slices)
  return _impl_.slices_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >*
BundleEntryProto::mutable_slices() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.BundleEntryProto.slices)
  return &_impl_.slices_;
}
inline const ::tensorflow::TensorSliceProto& BundleEntryProto::_internal_slices(int index) const {
  return _impl_.slices_.Get(index);
}
inline const ::tensorflow::TensorSliceProto& BundleEntryProto::slices(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.slices)
  return _internal_slices(index);
}
inline ::tensorflow::TensorSliceProto* BundleEntryProto::_internal_add_slices() {
  return _impl_.slices_.Add();
}
inline ::tensorflow::TensorSliceProto* BundleEntryProto::add_slices() {
  ::tensorflow::TensorSliceProto* _add = _internal_add_slices();
  // @@protoc_insertion_point(field_add:tensorflow.BundleEntryProto.slices)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >&
BundleEntryProto::slices() const {
  // @@protoc_insertion_point(field_list:tensorflow.BundleEntryProto.slices)
  return _impl_.slices_;
}
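
// Usage sketch (illustrative): iterating and extending the repeated field.
//
//   for (const TensorSliceProto& s : entry.slices()) {
//     // each element describes one saved partition of the tensor
//   }
//   TensorSliceProto* s = entry.add_slices();  // appends a new empty slice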

#ifdef __GNUC__
  #pragma GCC diagnostic pop
#endif  // __GNUC__
// -------------------------------------------------------------------


// @@protoc_insertion_point(namespace_scope)

}  // namespace tensorflow

PROTOBUF_NAMESPACE_OPEN

template <> struct is_proto_enum< ::tensorflow::BundleHeaderProto_Endianness> : ::std::true_type {};

PROTOBUF_NAMESPACE_CLOSE

// @@protoc_insertion_point(global_scope)

#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto