xref: /aosp_15_r20/external/flatbuffers/src/reflection.cpp (revision 890232f25432b36107d06881e0a25aaa6b473652)
1 /*
2  * Copyright 2015 Google Inc. All rights reserved.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "flatbuffers/reflection.h"
18 
19 #include "flatbuffers/util.h"
20 
21 // Helper functionality for reflection.
22 
23 namespace flatbuffers {
24 
25 namespace {
26 
CopyInline(FlatBufferBuilder & fbb,const reflection::Field & fielddef,const Table & table,size_t align,size_t size)27 static void CopyInline(FlatBufferBuilder &fbb, const reflection::Field &fielddef,
28                 const Table &table, size_t align, size_t size) {
29   fbb.Align(align);
30   fbb.PushBytes(table.GetStruct<const uint8_t *>(fielddef.offset()), size);
31   fbb.TrackField(fielddef.offset(), fbb.GetSize());
32 }
33 
VerifyStruct(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)34 static bool VerifyStruct(flatbuffers::Verifier &v,
35                   const flatbuffers::Table &parent_table,
36                   voffset_t field_offset, const reflection::Object &obj,
37                   bool required) {
38   auto offset = parent_table.GetOptionalFieldOffset(field_offset);
39   if (required && !offset) { return false; }
40 
41   return !offset ||
42          v.VerifyFieldStruct(reinterpret_cast<const uint8_t *>(&parent_table),
43                              offset, obj.bytesize(), obj.minalign());
44 }
45 
VerifyVectorOfStructs(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)46 static bool VerifyVectorOfStructs(flatbuffers::Verifier &v,
47                            const flatbuffers::Table &parent_table,
48                            voffset_t field_offset,
49                            const reflection::Object &obj, bool required) {
50   auto p = parent_table.GetPointer<const uint8_t *>(field_offset);
51   if (required && !p) { return false; }
52 
53   return !p || v.VerifyVectorOrString(p, obj.bytesize());
54 }
55 
56 // forward declare to resolve cyclic deps between VerifyObject and VerifyVector
57 static bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
58                   const reflection::Object &obj,
59                   const flatbuffers::Table *table, bool required);
60 
VerifyUnion(flatbuffers::Verifier & v,const reflection::Schema & schema,uint8_t utype,const uint8_t * elem,const reflection::Field & union_field)61 static bool VerifyUnion(flatbuffers::Verifier &v, const reflection::Schema &schema,
62                  uint8_t utype, const uint8_t *elem,
63                  const reflection::Field &union_field) {
64   if (!utype) return true;  // Not present.
65   auto fb_enum = schema.enums()->Get(union_field.type()->index());
66   if (utype >= fb_enum->values()->size()) return false;
67   auto elem_type = fb_enum->values()->Get(utype)->union_type();
68   switch (elem_type->base_type()) {
69     case reflection::Obj: {
70       auto elem_obj = schema.objects()->Get(elem_type->index());
71       if (elem_obj->is_struct()) {
72         return v.VerifyFromPointer(elem, elem_obj->bytesize());
73       } else {
74         return VerifyObject(v, schema, *elem_obj,
75                             reinterpret_cast<const flatbuffers::Table *>(elem),
76                             true);
77       }
78     }
79     case reflection::String:
80       return v.VerifyString(
81           reinterpret_cast<const flatbuffers::String *>(elem));
82     default: return false;
83   }
84 }
85 
VerifyVector(flatbuffers::Verifier & v,const reflection::Schema & schema,const flatbuffers::Table & table,const reflection::Field & vec_field)86 static bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
87                   const flatbuffers::Table &table,
88                   const reflection::Field &vec_field) {
89   FLATBUFFERS_ASSERT(vec_field.type()->base_type() == reflection::Vector);
90   if (!table.VerifyField<uoffset_t>(v, vec_field.offset(), sizeof(uoffset_t)))
91     return false;
92 
93   switch (vec_field.type()->element()) {
94     case reflection::UType:
95       return v.VerifyVector(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
96     case reflection::Bool:
97     case reflection::Byte:
98     case reflection::UByte:
99       return v.VerifyVector(flatbuffers::GetFieldV<int8_t>(table, vec_field));
100     case reflection::Short:
101     case reflection::UShort:
102       return v.VerifyVector(flatbuffers::GetFieldV<int16_t>(table, vec_field));
103     case reflection::Int:
104     case reflection::UInt:
105       return v.VerifyVector(flatbuffers::GetFieldV<int32_t>(table, vec_field));
106     case reflection::Long:
107     case reflection::ULong:
108       return v.VerifyVector(flatbuffers::GetFieldV<int64_t>(table, vec_field));
109     case reflection::Float:
110       return v.VerifyVector(flatbuffers::GetFieldV<float>(table, vec_field));
111     case reflection::Double:
112       return v.VerifyVector(flatbuffers::GetFieldV<double>(table, vec_field));
113     case reflection::String: {
114       auto vec_string =
115           flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::String>>(
116               table, vec_field);
117       if (v.VerifyVector(vec_string) && v.VerifyVectorOfStrings(vec_string)) {
118         return true;
119       } else {
120         return false;
121       }
122     }
123     case reflection::Obj: {
124       auto obj = schema.objects()->Get(vec_field.type()->index());
125       if (obj->is_struct()) {
126         return VerifyVectorOfStructs(v, table, vec_field.offset(), *obj,
127                                      vec_field.required());
128       } else {
129         auto vec =
130             flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::Table>>(
131                 table, vec_field);
132         if (!v.VerifyVector(vec)) return false;
133         if (!vec) return true;
134         for (uoffset_t j = 0; j < vec->size(); j++) {
135           if (!VerifyObject(v, schema, *obj, vec->Get(j), true)) {
136             return false;
137           }
138         }
139         return true;
140       }
141     }
142     case reflection::Union: {
143       auto vec = flatbuffers::GetFieldV<flatbuffers::Offset<uint8_t>>(
144           table, vec_field);
145       if (!v.VerifyVector(vec)) return false;
146       if (!vec) return true;
147       auto type_vec = table.GetPointer<Vector<uint8_t> *>(vec_field.offset() -
148                                                           sizeof(voffset_t));
149       if (!v.VerifyVector(type_vec)) return false;
150       for (uoffset_t j = 0; j < vec->size(); j++) {
151         //  get union type from the prev field
152         auto utype = type_vec->Get(j);
153         auto elem = vec->Get(j);
154         if (!VerifyUnion(v, schema, utype, elem, vec_field)) return false;
155       }
156       return true;
157     }
158     case reflection::Vector:
159     case reflection::None:
160     default: FLATBUFFERS_ASSERT(false); return false;
161   }
162 }
163 
// Verifies a single table against its schema definition, field by field.
// A null `table` is acceptable unless `required` is set. Scalars are
// bounds-checked in place; strings, vectors, sub-tables, structs, and
// unions are recursed into. Returns false on any malformed data.
static bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
                  const reflection::Object &obj,
                  const flatbuffers::Table *table, bool required) {
  if (!table) return !required;
  // Verify the vtable first; no field may be read before this succeeds.
  if (!table->VerifyTableStart(v)) return false;
  for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
    auto field_def = obj.fields()->Get(i);
    switch (field_def->type()->base_type()) {
      case reflection::None: FLATBUFFERS_ASSERT(false); break;
      case reflection::UType:
        if (!table->VerifyField<uint8_t>(v, field_def->offset(),
                                         sizeof(uint8_t)))
          return false;
        break;
      case reflection::Bool:
      case reflection::Byte:
      case reflection::UByte:
        if (!table->VerifyField<int8_t>(v, field_def->offset(), sizeof(int8_t)))
          return false;
        break;
      case reflection::Short:
      case reflection::UShort:
        if (!table->VerifyField<int16_t>(v, field_def->offset(),
                                         sizeof(int16_t)))
          return false;
        break;
      case reflection::Int:
      case reflection::UInt:
        if (!table->VerifyField<int32_t>(v, field_def->offset(),
                                         sizeof(int32_t)))
          return false;
        break;
      case reflection::Long:
      case reflection::ULong:
        if (!table->VerifyField<int64_t>(v, field_def->offset(),
                                         sizeof(int64_t)))
          return false;
        break;
      case reflection::Float:
        if (!table->VerifyField<float>(v, field_def->offset(), sizeof(float)))
          return false;
        break;
      case reflection::Double:
        if (!table->VerifyField<double>(v, field_def->offset(), sizeof(double)))
          return false;
        break;
      case reflection::String:
        // The uoffset slot must verify before the string it points at.
        if (!table->VerifyField<uoffset_t>(v, field_def->offset(),
                                           sizeof(uoffset_t)) ||
            !v.VerifyString(flatbuffers::GetFieldS(*table, *field_def))) {
          return false;
        }
        break;
      case reflection::Vector:
        if (!VerifyVector(v, schema, *table, *field_def)) return false;
        break;
      case reflection::Obj: {
        auto child_obj = schema.objects()->Get(field_def->type()->index());
        if (child_obj->is_struct()) {
          // Structs are stored inline in the table.
          if (!VerifyStruct(v, *table, field_def->offset(), *child_obj,
                            field_def->required())) {
            return false;
          }
        } else {
          // Tables are stored via offset; recurse into the sub-table.
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      case reflection::Union: {
        //  get union type from the prev field
        voffset_t utype_offset = field_def->offset() - sizeof(voffset_t);
        auto utype = table->GetField<uint8_t>(utype_offset, 0);
        auto uval = reinterpret_cast<const uint8_t *>(
            flatbuffers::GetFieldT(*table, *field_def));
        if (!VerifyUnion(v, schema, utype, uval, *field_def)) { return false; }
        break;
      }
      default: FLATBUFFERS_ASSERT(false); break;
    }
  }

  // Closes the table scope opened by VerifyTableStart (depth/size checks).
  if (!v.EndTable()) return false;

  return true;
}
253 
254 
255 } // namespace
256 
GetAnyValueI(reflection::BaseType type,const uint8_t * data)257 int64_t GetAnyValueI(reflection::BaseType type, const uint8_t *data) {
258   // clang-format off
259   #define FLATBUFFERS_GET(T) static_cast<int64_t>(ReadScalar<T>(data))
260   switch (type) {
261     case reflection::UType:
262     case reflection::Bool:
263     case reflection::UByte:  return FLATBUFFERS_GET(uint8_t);
264     case reflection::Byte:   return FLATBUFFERS_GET(int8_t);
265     case reflection::Short:  return FLATBUFFERS_GET(int16_t);
266     case reflection::UShort: return FLATBUFFERS_GET(uint16_t);
267     case reflection::Int:    return FLATBUFFERS_GET(int32_t);
268     case reflection::UInt:   return FLATBUFFERS_GET(uint32_t);
269     case reflection::Long:   return FLATBUFFERS_GET(int64_t);
270     case reflection::ULong:  return FLATBUFFERS_GET(uint64_t);
271     case reflection::Float:  return FLATBUFFERS_GET(float);
272     case reflection::Double: return FLATBUFFERS_GET(double);
273     case reflection::String: {
274       auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
275                                                 data);
276       return s ? StringToInt(s->c_str()) : 0;
277     }
278     default: return 0;  // Tables & vectors do not make sense.
279   }
280   #undef FLATBUFFERS_GET
281   // clang-format on
282 }
283 
GetAnyValueF(reflection::BaseType type,const uint8_t * data)284 double GetAnyValueF(reflection::BaseType type, const uint8_t *data) {
285   switch (type) {
286     case reflection::Float: return static_cast<double>(ReadScalar<float>(data));
287     case reflection::Double: return ReadScalar<double>(data);
288     case reflection::String: {
289       auto s =
290           reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
291       if (s) {
292         double d;
293         StringToNumber(s->c_str(), &d);
294         return d;
295       } else {
296         return 0.0;
297       }
298     }
299     default: return static_cast<double>(GetAnyValueI(type, data));
300   }
301 }
302 
// Stringifies the value at `data` interpreted as `type`. For tables
// (reflection::Obj) a human-readable, non-JSON dump is produced when a
// schema is supplied; `type_index` selects the object definition within it.
// Vectors and unions return placeholder strings (not yet implemented).
std::string GetAnyValueS(reflection::BaseType type, const uint8_t *data,
                         const reflection::Schema *schema, int type_index) {
  switch (type) {
    case reflection::Float:
    case reflection::Double: return NumToString(GetAnyValueF(type, data));
    case reflection::String: {
      // `data` holds a relative uoffset to the string payload.
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      return s ? s->c_str() : "";
    }
    case reflection::Obj:
      if (schema) {
        // Convert the table to a string. This is mostly for debugging purposes,
        // and does NOT promise to be JSON compliant.
        // Also prefixes the type.
        auto &objectdef = *schema->objects()->Get(type_index);
        auto s = objectdef.name()->str();
        if (objectdef.is_struct()) {
          s += "(struct)";  // TODO: implement this as well.
        } else {
          // Follow the relative offset to the sub-table.
          auto table_field = reinterpret_cast<const Table *>(
              ReadScalar<uoffset_t>(data) + data);
          s += " { ";
          auto fielddefs = objectdef.fields();
          for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
            auto &fielddef = **it;
            // Skip fields absent from this particular table instance.
            if (!table_field->CheckField(fielddef.offset())) continue;
            auto val = GetAnyFieldS(*table_field, fielddef, schema);
            if (fielddef.type()->base_type() == reflection::String) {
              // Escape string values so the dump stays readable.
              std::string esc;
              flatbuffers::EscapeString(val.c_str(), val.length(), &esc, true,
                                        false);
              val = esc;
            }
            s += fielddef.name()->str();
            s += ": ";
            s += val;
            s += ", ";
          }
          s += "}";
        }
        return s;
      } else {
        return "(table)";
      }
    case reflection::Vector:
      return "[(elements)]";                   // TODO: implement this as well.
    case reflection::Union: return "(union)";  // TODO: implement this as well.
    default: return NumToString(GetAnyValueI(type, data));
  }
}
354 
ForAllFields(const reflection::Object * object,bool reverse,std::function<void (const reflection::Field *)> func)355 void ForAllFields(const reflection::Object *object, bool reverse,
356                   std::function<void(const reflection::Field *)> func) {
357   std::vector<uint32_t> field_to_id_map;
358   field_to_id_map.resize(object->fields()->size());
359 
360   // Create the mapping of field ID to the index into the vector.
361   for (uint32_t i = 0; i < object->fields()->size(); ++i) {
362     auto field = object->fields()->Get(i);
363     field_to_id_map[field->id()] = i;
364   }
365 
366   for (size_t i = 0; i < field_to_id_map.size(); ++i) {
367     func(object->fields()->Get(
368         field_to_id_map[reverse ? field_to_id_map.size() - i + 1 : i]));
369   }
370 }
371 
SetAnyValueI(reflection::BaseType type,uint8_t * data,int64_t val)372 void SetAnyValueI(reflection::BaseType type, uint8_t *data, int64_t val) {
373   // clang-format off
374   #define FLATBUFFERS_SET(T) WriteScalar(data, static_cast<T>(val))
375   switch (type) {
376     case reflection::UType:
377     case reflection::Bool:
378     case reflection::UByte:  FLATBUFFERS_SET(uint8_t ); break;
379     case reflection::Byte:   FLATBUFFERS_SET(int8_t  ); break;
380     case reflection::Short:  FLATBUFFERS_SET(int16_t ); break;
381     case reflection::UShort: FLATBUFFERS_SET(uint16_t); break;
382     case reflection::Int:    FLATBUFFERS_SET(int32_t ); break;
383     case reflection::UInt:   FLATBUFFERS_SET(uint32_t); break;
384     case reflection::Long:   FLATBUFFERS_SET(int64_t ); break;
385     case reflection::ULong:  FLATBUFFERS_SET(uint64_t); break;
386     case reflection::Float:  FLATBUFFERS_SET(float   ); break;
387     case reflection::Double: FLATBUFFERS_SET(double  ); break;
388     // TODO: support strings
389     default: break;
390   }
391   #undef FLATBUFFERS_SET
392   // clang-format on
393 }
394 
SetAnyValueF(reflection::BaseType type,uint8_t * data,double val)395 void SetAnyValueF(reflection::BaseType type, uint8_t *data, double val) {
396   switch (type) {
397     case reflection::Float: WriteScalar(data, static_cast<float>(val)); break;
398     case reflection::Double: WriteScalar(data, val); break;
399     // TODO: support strings.
400     default: SetAnyValueI(type, data, static_cast<int64_t>(val)); break;
401   }
402 }
403 
SetAnyValueS(reflection::BaseType type,uint8_t * data,const char * val)404 void SetAnyValueS(reflection::BaseType type, uint8_t *data, const char *val) {
405   switch (type) {
406     case reflection::Float:
407     case reflection::Double: {
408       double d;
409       StringToNumber(val, &d);
410       SetAnyValueF(type, data, d);
411       break;
412     }
413     // TODO: support strings.
414     default: SetAnyValueI(type, data, StringToInt(val)); break;
415   }
416 }
417 
418 // Resize a FlatBuffer in-place by iterating through all offsets in the buffer
419 // and adjusting them by "delta" if they straddle the start offset.
420 // Once that is done, bytes can now be inserted/deleted safely.
421 // "delta" may be negative (shrinking).
422 // Unless "delta" is a multiple of the largest alignment, you'll create a small
423 // amount of garbage space in the buffer (usually 0..7 bytes).
424 // If your FlatBuffer's root table is not the schema's root table, you should
425 // pass in your root_table type as well.
class ResizeContext {
 public:
  // Rounds `delta` up to largest_scalar_t alignment, patches every offset
  // in the buffer that straddles `start`, then inserts/erases the bytes.
  // All the offset rewriting happens in the constructor; the object itself
  // is not used afterwards.
  ResizeContext(const reflection::Schema &schema, uoffset_t start, int delta,
                std::vector<uint8_t> *flatbuf,
                const reflection::Object *root_table = nullptr)
      : schema_(schema),
        startptr_(flatbuf->data() + start),
        delta_(delta),
        buf_(*flatbuf),
        dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
    auto mask = static_cast<int>(sizeof(largest_scalar_t) - 1);
    delta_ = (delta_ + mask) & ~mask;
    if (!delta_) return;  // We can't shrink by less than largest_scalar_t.
    // Now change all the offsets by delta_.
    auto root = GetAnyRoot(buf_.data());
    Straddle<uoffset_t, 1>(buf_.data(), root, buf_.data());
    ResizeTable(root_table ? *root_table : *schema.root_table(), root);
    // We can now add or remove bytes at start.
    if (delta_ > 0)
      buf_.insert(buf_.begin() + start, delta_, 0);
    else
      buf_.erase(buf_.begin() + start + delta_, buf_.begin() + start);
  }

  // Check if the range between first (lower address) and second straddles
  // the insertion point. If it does, change the offset at offsetloc (of
  // type T, with direction D).
  template<typename T, int D>
  void Straddle(const void *first, const void *second, void *offsetloc) {
    if (first <= startptr_ && second >= startptr_) {
      WriteScalar<T>(offsetloc, ReadScalar<T>(offsetloc) + delta_ * D);
      DagCheck(offsetloc) = true;
    }
  }

  // This returns a boolean that records if the corresponding offset location
  // has been modified already. If so, we can't even read the corresponding
  // offset, since it is pointing to a location that is illegal until the
  // resize actually happens.
  // This must be checked for every offset, since we can't know which offsets
  // will straddle and which won't.
  uint8_t &DagCheck(const void *offsetloc) {
    // Index is the uoffset-granular position of `offsetloc` in the buffer.
    auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
                   reinterpret_cast<const uoffset_t *>(buf_.data());
    return dag_check_[dag_idx];
  }

  // Recursively walks `table` per its schema definition, patching every
  // offset (vtable offsets, field offsets, vector elements) that straddles
  // the insertion point.
  void ResizeTable(const reflection::Object &objectdef, Table *table) {
    if (DagCheck(table)) return;  // Table already visited.
    auto vtable = table->GetVTable();
    // Early out: since all fields inside the table must point forwards in
    // memory, if the insertion point is before the table we can stop here.
    auto tableloc = reinterpret_cast<uint8_t *>(table);
    if (startptr_ <= tableloc) {
      // Check if insertion point is between the table and a vtable that
      // precedes it. This can't happen in current construction code, but check
      // just in case we ever change the way flatbuffers are built.
      Straddle<soffset_t, -1>(vtable, table, table);
    } else {
      // Check each field.
      auto fielddefs = objectdef.fields();
      for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
        auto &fielddef = **it;
        auto base_type = fielddef.type()->base_type();
        // Ignore scalars.
        if (base_type <= reflection::Double) continue;
        // Ignore fields that are not stored.
        auto offset = table->GetOptionalFieldOffset(fielddef.offset());
        if (!offset) continue;
        // Ignore structs.
        auto subobjectdef =
            base_type == reflection::Obj
                ? schema_.objects()->Get(fielddef.type()->index())
                : nullptr;
        if (subobjectdef && subobjectdef->is_struct()) continue;
        // Get this fields' offset, and read it if safe.
        auto offsetloc = tableloc + offset;
        if (DagCheck(offsetloc)) continue;  // This offset already visited.
        auto ref = offsetloc + ReadScalar<uoffset_t>(offsetloc);
        Straddle<uoffset_t, 1>(offsetloc, ref, offsetloc);
        // Recurse.
        switch (base_type) {
          case reflection::Obj: {
            ResizeTable(*subobjectdef, reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::Vector: {
            auto elem_type = fielddef.type()->element();
            // Only vectors of tables/strings contain offsets to patch.
            if (elem_type != reflection::Obj && elem_type != reflection::String)
              break;
            auto vec = reinterpret_cast<Vector<uoffset_t> *>(ref);
            auto elemobjectdef =
                elem_type == reflection::Obj
                    ? schema_.objects()->Get(fielddef.type()->index())
                    : nullptr;
            if (elemobjectdef && elemobjectdef->is_struct()) break;
            for (uoffset_t i = 0; i < vec->size(); i++) {
              auto loc = vec->Data() + i * sizeof(uoffset_t);
              if (DagCheck(loc)) continue;  // This offset already visited.
              auto dest = loc + vec->Get(i);
              Straddle<uoffset_t, 1>(loc, dest, loc);
              if (elemobjectdef)
                ResizeTable(*elemobjectdef, reinterpret_cast<Table *>(dest));
            }
            break;
          }
          case reflection::Union: {
            ResizeTable(GetUnionType(schema_, objectdef, fielddef, *table),
                        reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::String: break;
          default: FLATBUFFERS_ASSERT(false);
        }
      }
      // Check if the vtable offset points beyond the insertion point.
      // Must do this last, since GetOptionalFieldOffset above still reads
      // this value.
      Straddle<soffset_t, -1>(table, vtable, table);
    }
  }

 private:
  const reflection::Schema &schema_;  // Schema the buffer conforms to.
  uint8_t *startptr_;                 // Insertion point inside buf_.
  int delta_;                         // Rounded-up resize amount in bytes.
  std::vector<uint8_t> &buf_;         // The buffer being resized, in place.
  std::vector<uint8_t> dag_check_;    // Per-uoffset-slot "already patched" flags.
};
555 
SetString(const reflection::Schema & schema,const std::string & val,const String * str,std::vector<uint8_t> * flatbuf,const reflection::Object * root_table)556 void SetString(const reflection::Schema &schema, const std::string &val,
557                const String *str, std::vector<uint8_t> *flatbuf,
558                const reflection::Object *root_table) {
559   auto delta = static_cast<int>(val.size()) - static_cast<int>(str->size());
560   auto str_start = static_cast<uoffset_t>(
561       reinterpret_cast<const uint8_t *>(str) - flatbuf->data());
562   auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
563   if (delta) {
564     // Clear the old string, since we don't want parts of it remaining.
565     memset(flatbuf->data() + start, 0, str->size());
566     // Different size, we must expand (or contract).
567     ResizeContext(schema, start, delta, flatbuf, root_table);
568     // Set the new length.
569     WriteScalar(flatbuf->data() + str_start,
570                 static_cast<uoffset_t>(val.size()));
571   }
572   // Copy new data. Safe because we created the right amount of space.
573   memcpy(flatbuf->data() + start, val.c_str(), val.size() + 1);
574 }
575 
ResizeAnyVector(const reflection::Schema & schema,uoffset_t newsize,const VectorOfAny * vec,uoffset_t num_elems,uoffset_t elem_size,std::vector<uint8_t> * flatbuf,const reflection::Object * root_table)576 uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
577                          const VectorOfAny *vec, uoffset_t num_elems,
578                          uoffset_t elem_size, std::vector<uint8_t> *flatbuf,
579                          const reflection::Object *root_table) {
580   auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
581   auto delta_bytes = delta_elem * static_cast<int>(elem_size);
582   auto vec_start = reinterpret_cast<const uint8_t *>(vec) - flatbuf->data();
583   auto start = static_cast<uoffset_t>(vec_start) +
584                static_cast<uoffset_t>(sizeof(uoffset_t)) +
585                elem_size * num_elems;
586   if (delta_bytes) {
587     if (delta_elem < 0) {
588       // Clear elements we're throwing away, since some might remain in the
589       // buffer.
590       auto size_clear = -delta_elem * elem_size;
591       memset(flatbuf->data() + start - size_clear, 0, size_clear);
592     }
593     ResizeContext(schema, start, delta_bytes, flatbuf, root_table);
594     WriteScalar(flatbuf->data() + vec_start, newsize);  // Length field.
595     // Set new elements to 0.. this can be overwritten by the caller.
596     if (delta_elem > 0) {
597       memset(flatbuf->data() + start, 0,
598              static_cast<size_t>(delta_elem) * elem_size);
599     }
600   }
601   return flatbuf->data() + start;
602 }
603 
AddFlatBuffer(std::vector<uint8_t> & flatbuf,const uint8_t * newbuf,size_t newlen)604 const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
605                              const uint8_t *newbuf, size_t newlen) {
606   // Align to sizeof(uoffset_t) past sizeof(largest_scalar_t) since we're
607   // going to chop off the root offset.
608   while ((flatbuf.size() & (sizeof(uoffset_t) - 1)) ||
609          !(flatbuf.size() & (sizeof(largest_scalar_t) - 1))) {
610     flatbuf.push_back(0);
611   }
612   auto insertion_point = static_cast<uoffset_t>(flatbuf.size());
613   // Insert the entire FlatBuffer minus the root pointer.
614   flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
615   auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
616   return flatbuf.data() + insertion_point + root_offset;
617 }
618 
619 
620 
621 
CopyTable(FlatBufferBuilder & fbb,const reflection::Schema & schema,const reflection::Object & objectdef,const Table & table,bool use_string_pooling)622 Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
623                                 const reflection::Schema &schema,
624                                 const reflection::Object &objectdef,
625                                 const Table &table, bool use_string_pooling) {
626   // Before we can construct the table, we have to first generate any
627   // subobjects, and collect their offsets.
628   std::vector<uoffset_t> offsets;
629   auto fielddefs = objectdef.fields();
630   for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
631     auto &fielddef = **it;
632     // Skip if field is not present in the source.
633     if (!table.CheckField(fielddef.offset())) continue;
634     uoffset_t offset = 0;
635     switch (fielddef.type()->base_type()) {
636       case reflection::String: {
637         offset = use_string_pooling
638                      ? fbb.CreateSharedString(GetFieldS(table, fielddef)).o
639                      : fbb.CreateString(GetFieldS(table, fielddef)).o;
640         break;
641       }
642       case reflection::Obj: {
643         auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
644         if (!subobjectdef.is_struct()) {
645           offset = CopyTable(fbb, schema, subobjectdef,
646                              *GetFieldT(table, fielddef), use_string_pooling)
647                        .o;
648         }
649         break;
650       }
651       case reflection::Union: {
652         auto &subobjectdef = GetUnionType(schema, objectdef, fielddef, table);
653         offset = CopyTable(fbb, schema, subobjectdef,
654                            *GetFieldT(table, fielddef), use_string_pooling)
655                      .o;
656         break;
657       }
658       case reflection::Vector: {
659         auto vec =
660             table.GetPointer<const Vector<Offset<Table>> *>(fielddef.offset());
661         auto element_base_type = fielddef.type()->element();
662         auto elemobjectdef =
663             element_base_type == reflection::Obj
664                 ? schema.objects()->Get(fielddef.type()->index())
665                 : nullptr;
666         switch (element_base_type) {
667           case reflection::String: {
668             std::vector<Offset<const String *>> elements(vec->size());
669             auto vec_s = reinterpret_cast<const Vector<Offset<String>> *>(vec);
670             for (uoffset_t i = 0; i < vec_s->size(); i++) {
671               elements[i] = use_string_pooling
672                                 ? fbb.CreateSharedString(vec_s->Get(i)).o
673                                 : fbb.CreateString(vec_s->Get(i)).o;
674             }
675             offset = fbb.CreateVector(elements).o;
676             break;
677           }
678           case reflection::Obj: {
679             if (!elemobjectdef->is_struct()) {
680               std::vector<Offset<const Table *>> elements(vec->size());
681               for (uoffset_t i = 0; i < vec->size(); i++) {
682                 elements[i] = CopyTable(fbb, schema, *elemobjectdef,
683                                         *vec->Get(i), use_string_pooling);
684               }
685               offset = fbb.CreateVector(elements).o;
686               break;
687             }
688           }
689             FLATBUFFERS_FALLTHROUGH();  // fall thru
690           default: {                    // Scalars and structs.
691             auto element_size = GetTypeSize(element_base_type);
692             if (elemobjectdef && elemobjectdef->is_struct())
693               element_size = elemobjectdef->bytesize();
694             fbb.StartVector(vec->size(), element_size);
695             fbb.PushBytes(vec->Data(), element_size * vec->size());
696             offset = fbb.EndVector(vec->size());
697             break;
698           }
699         }
700         break;
701       }
702       default:  // Scalars.
703         break;
704     }
705     if (offset) { offsets.push_back(offset); }
706   }
707   // Now we can build the actual table from either offsets or scalar data.
708   auto start = objectdef.is_struct() ? fbb.StartStruct(objectdef.minalign())
709                                      : fbb.StartTable();
710   size_t offset_idx = 0;
711   for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
712     auto &fielddef = **it;
713     if (!table.CheckField(fielddef.offset())) continue;
714     auto base_type = fielddef.type()->base_type();
715     switch (base_type) {
716       case reflection::Obj: {
717         auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
718         if (subobjectdef.is_struct()) {
719           CopyInline(fbb, fielddef, table, subobjectdef.minalign(),
720                      subobjectdef.bytesize());
721           break;
722         }
723       }
724         FLATBUFFERS_FALLTHROUGH();  // fall thru
725       case reflection::Union:
726       case reflection::String:
727       case reflection::Vector:
728         fbb.AddOffset(fielddef.offset(), Offset<void>(offsets[offset_idx++]));
729         break;
730       default: {  // Scalars.
731         auto size = GetTypeSize(base_type);
732         CopyInline(fbb, fielddef, table, size, size);
733         break;
734       }
735     }
736   }
737   FLATBUFFERS_ASSERT(offset_idx == offsets.size());
738   if (objectdef.is_struct()) {
739     fbb.ClearOffsets();
740     return fbb.EndStruct();
741   } else {
742     return fbb.EndTable(start);
743   }
744 }
745 
746 
Verify(const reflection::Schema & schema,const reflection::Object & root,const uint8_t * const buf,const size_t length,const uoffset_t max_depth,const uoffset_t max_tables)747 bool Verify(const reflection::Schema &schema, const reflection::Object &root,
748             const uint8_t *const buf, const size_t length,
749             const uoffset_t max_depth, const uoffset_t max_tables) {
750   Verifier v(buf, length, max_depth, max_tables);
751   return VerifyObject(v, schema, root, flatbuffers::GetAnyRoot(buf),
752                       /*required=*/true);
753 }
754 
VerifySizePrefixed(const reflection::Schema & schema,const reflection::Object & root,const uint8_t * const buf,const size_t length,const uoffset_t max_depth,const uoffset_t max_tables)755 bool VerifySizePrefixed(const reflection::Schema &schema,
756                         const reflection::Object &root,
757                         const uint8_t *const buf, const size_t length,
758                         const uoffset_t max_depth, const uoffset_t max_tables) {
759   Verifier v(buf, length, max_depth, max_tables);
760   return VerifyObject(v, schema, root, flatbuffers::GetAnySizePrefixedRoot(buf),
761                       /*required=*/true);
762 }
763 
764 }  // namespace flatbuffers
765