// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/framework/tensor_description.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto

#include <cstdint>
#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3021000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include "tensorflow/core/framework/allocation_description.pb.h"
#include "tensorflow/core/framework/tensor_shape.pb.h"
#include "tensorflow/core/framework/types.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto {
  static const ::uint32_t offsets[];
};
namespace tensorflow {
class TensorDescription;
struct TensorDescriptionDefaultTypeInternal;
extern TensorDescriptionDefaultTypeInternal _TensorDescription_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::TensorDescription* Arena::CreateMaybeMessage<::tensorflow::TensorDescription>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
namespace tensorflow {

// ===================================================================

class TensorDescription final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TensorDescription) */ {
 public:
  inline TensorDescription() : TensorDescription(nullptr) {}
  ~TensorDescription() override;
  explicit PROTOBUF_CONSTEXPR TensorDescription(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  TensorDescription(const TensorDescription& from);
  TensorDescription(TensorDescription&& from) noexcept
    : TensorDescription() {
    *this = ::std::move(from);
  }

  inline TensorDescription& operator=(const TensorDescription& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline TensorDescription& operator=(TensorDescription&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const TensorDescription& default_instance() {
    return *internal_default_instance();
  }
  static inline const TensorDescription* internal_default_instance() {
    return reinterpret_cast<const TensorDescription*>(
               &_TensorDescription_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(TensorDescription& a, TensorDescription& b) {
    a.Swap(&b);
  }
  inline void Swap(TensorDescription* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(TensorDescription* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  TensorDescription* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<TensorDescription>(arena);
  }
  TensorDescription* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const TensorDescription& from);
  void MergeFrom(const TensorDescription& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(TensorDescription* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.TensorDescription";
  }
  protected:
  explicit TensorDescription(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kShapeFieldNumber = 2,
    kAllocationDescriptionFieldNumber = 4,
    kDtypeFieldNumber = 1,
  };
  // .tensorflow.TensorShapeProto shape = 2;
  bool has_shape() const;
  private:
  bool _internal_has_shape() const;
  public:
  void clear_shape();
  const ::tensorflow::TensorShapeProto& shape() const;
  PROTOBUF_NODISCARD ::tensorflow::TensorShapeProto* release_shape();
  ::tensorflow::TensorShapeProto* mutable_shape();
  void set_allocated_shape(::tensorflow::TensorShapeProto* shape);
  private:
  const ::tensorflow::TensorShapeProto& _internal_shape() const;
  ::tensorflow::TensorShapeProto* _internal_mutable_shape();
  public:
  void unsafe_arena_set_allocated_shape(
      ::tensorflow::TensorShapeProto* shape);
  ::tensorflow::TensorShapeProto* unsafe_arena_release_shape();

  // .tensorflow.AllocationDescription allocation_description = 4;
  bool has_allocation_description() const;
  private:
  bool _internal_has_allocation_description() const;
  public:
  void clear_allocation_description();
  const ::tensorflow::AllocationDescription& allocation_description() const;
  PROTOBUF_NODISCARD ::tensorflow::AllocationDescription* release_allocation_description();
  ::tensorflow::AllocationDescription* mutable_allocation_description();
  void set_allocated_allocation_description(::tensorflow::AllocationDescription* allocation_description);
  private:
  const ::tensorflow::AllocationDescription& _internal_allocation_description() const;
  ::tensorflow::AllocationDescription* _internal_mutable_allocation_description();
  public:
  void unsafe_arena_set_allocated_allocation_description(
      ::tensorflow::AllocationDescription* allocation_description);
  ::tensorflow::AllocationDescription* unsafe_arena_release_allocation_description();

  // .tensorflow.DataType dtype = 1;
  void clear_dtype();
  ::tensorflow::DataType dtype() const;
  void set_dtype(::tensorflow::DataType value);
  private:
  ::tensorflow::DataType _internal_dtype() const;
  void _internal_set_dtype(::tensorflow::DataType value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.TensorDescription)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::tensorflow::TensorShapeProto* shape_;
    ::tensorflow::AllocationDescription* allocation_description_;
    int dtype_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto;
};
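
// Usage sketch (illustrative, not part of the generated output; assumes the
// TensorShapeProto `dim` repeated field from tensor_shape.proto and the
// DT_FLOAT enumerator from types.proto):
//
//   tensorflow::TensorDescription desc;
//   desc.set_dtype(tensorflow::DT_FLOAT);
//   desc.mutable_shape()->add_dim()->set_size(128);
//   std::string wire;
//   desc.SerializeToString(&wire);   // serialization inherited from MessageLite
//
//   tensorflow::TensorDescription copy;
//   copy.ParseFromString(wire);      // copy.dtype() == tensorflow::DT_FLOAT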
// ===================================================================


// ===================================================================

#ifdef __GNUC__
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif  // __GNUC__
// TensorDescription

// .tensorflow.DataType dtype = 1;
inline void TensorDescription::clear_dtype() {
  _impl_.dtype_ = 0;
}
inline ::tensorflow::DataType TensorDescription::_internal_dtype() const {
  return static_cast< ::tensorflow::DataType >(_impl_.dtype_);
}
inline ::tensorflow::DataType TensorDescription::dtype() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorDescription.dtype)
  return _internal_dtype();
}
inline void TensorDescription::_internal_set_dtype(::tensorflow::DataType value) {
  _impl_.dtype_ = value;
}
inline void TensorDescription::set_dtype(::tensorflow::DataType value) {
  _internal_set_dtype(value);
  // @@protoc_insertion_point(field_set:tensorflow.TensorDescription.dtype)
}
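
// Note (illustrative sketch): dtype is stored as a raw int in Impl_, so
// set_dtype() accepts any DataType enumerator without range-checking and
// dtype() simply casts the stored value back:
//
//   desc.set_dtype(tensorflow::DT_INT32);
//   tensorflow::DataType dt = desc.dtype();  // DT_INT32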

// .tensorflow.TensorShapeProto shape = 2;
inline bool TensorDescription::_internal_has_shape() const {
  return this != internal_default_instance() && _impl_.shape_ != nullptr;
}
inline bool TensorDescription::has_shape() const {
  return _internal_has_shape();
}
inline const ::tensorflow::TensorShapeProto& TensorDescription::_internal_shape() const {
  const ::tensorflow::TensorShapeProto* p = _impl_.shape_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorShapeProto&>(
      ::tensorflow::_TensorShapeProto_default_instance_);
}
inline const ::tensorflow::TensorShapeProto& TensorDescription::shape() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorDescription.shape)
  return _internal_shape();
}
inline void TensorDescription::unsafe_arena_set_allocated_shape(
    ::tensorflow::TensorShapeProto* shape) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorDescription.shape)
}
inline ::tensorflow::TensorShapeProto* TensorDescription::release_shape() {
  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorShapeProto* TensorDescription::unsafe_arena_release_shape() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorDescription.shape)
  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorShapeProto* TensorDescription::_internal_mutable_shape() {
  if (_impl_.shape_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorShapeProto>(GetArenaForAllocation());
    _impl_.shape_ = p;
  }
  return _impl_.shape_;
}
inline ::tensorflow::TensorShapeProto* TensorDescription::mutable_shape() {
  ::tensorflow::TensorShapeProto* _msg = _internal_mutable_shape();
  // @@protoc_insertion_point(field_mutable:tensorflow.TensorDescription.shape)
  return _msg;
}
inline void TensorDescription::set_allocated_shape(::tensorflow::TensorShapeProto* shape) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  if (shape) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape));
    if (message_arena != submessage_arena) {
      shape = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, shape, submessage_arena);
    }
  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorDescription.shape)
}
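
// Ownership sketch for the shape accessors above (illustrative; assumes the
// message lives on the heap rather than an arena): release_shape() transfers
// ownership of the submessage to the caller, set_allocated_shape() transfers
// it back, and the unsafe_arena_* variants skip the ownership bookkeeping and
// are only safe when the caller manages arena lifetimes itself.
//
//   std::unique_ptr<tensorflow::TensorShapeProto> owned(desc.release_shape());
//   // desc.has_shape() is now false.
//   desc.set_allocated_shape(owned.release());  // desc owns the shape again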

// .tensorflow.AllocationDescription allocation_description = 4;
inline bool TensorDescription::_internal_has_allocation_description() const {
  return this != internal_default_instance() && _impl_.allocation_description_ != nullptr;
}
inline bool TensorDescription::has_allocation_description() const {
  return _internal_has_allocation_description();
}
inline const ::tensorflow::AllocationDescription& TensorDescription::_internal_allocation_description() const {
  const ::tensorflow::AllocationDescription* p = _impl_.allocation_description_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::AllocationDescription&>(
      ::tensorflow::_AllocationDescription_default_instance_);
}
inline const ::tensorflow::AllocationDescription& TensorDescription::allocation_description() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorDescription.allocation_description)
  return _internal_allocation_description();
}
inline void TensorDescription::unsafe_arena_set_allocated_allocation_description(
    ::tensorflow::AllocationDescription* allocation_description) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.allocation_description_);
  }
  _impl_.allocation_description_ = allocation_description;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorDescription.allocation_description)
}
inline ::tensorflow::AllocationDescription* TensorDescription::release_allocation_description() {
  ::tensorflow::AllocationDescription* temp = _impl_.allocation_description_;
  _impl_.allocation_description_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::AllocationDescription* TensorDescription::unsafe_arena_release_allocation_description() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorDescription.allocation_description)
  ::tensorflow::AllocationDescription* temp = _impl_.allocation_description_;
  _impl_.allocation_description_ = nullptr;
  return temp;
}
inline ::tensorflow::AllocationDescription* TensorDescription::_internal_mutable_allocation_description() {
  if (_impl_.allocation_description_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::AllocationDescription>(GetArenaForAllocation());
    _impl_.allocation_description_ = p;
  }
  return _impl_.allocation_description_;
}
inline ::tensorflow::AllocationDescription* TensorDescription::mutable_allocation_description() {
  ::tensorflow::AllocationDescription* _msg = _internal_mutable_allocation_description();
  // @@protoc_insertion_point(field_mutable:tensorflow.TensorDescription.allocation_description)
  return _msg;
}
inline void TensorDescription::set_allocated_allocation_description(::tensorflow::AllocationDescription* allocation_description) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.allocation_description_);
  }
  if (allocation_description) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(allocation_description));
    if (message_arena != submessage_arena) {
      allocation_description = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, allocation_description, submessage_arena);
    }
  }
  _impl_.allocation_description_ = allocation_description;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorDescription.allocation_description)
}
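
// The allocation_description accessors mirror the shape accessors above, with
// the same arena-aware ownership rules. Illustrative sketch (assumes the
// `requested_bytes` field from allocation_description.proto):
//
//   auto* ad = desc.mutable_allocation_description();  // lazily created
//   ad->set_requested_bytes(1024);
//   desc.clear_allocation_description();  // deletes it when heap-owned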

#ifdef __GNUC__
  #pragma GCC diagnostic pop
#endif  // __GNUC__

// @@protoc_insertion_point(namespace_scope)

}  // namespace tensorflow

// @@protoc_insertion_point(global_scope)

#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2ftensor_5fdescription_2eproto