From c855cd411c1672635c4a29792804f60f07836591 Mon Sep 17 00:00:00 2001 From: rw Date: Tue, 28 Aug 2018 20:06:51 -0700 Subject: [PATCH] port flatbuffers to rust --- CMakeLists.txt | 1 + docs/source/Compiler.md | 2 + docs/source/RustUsage.md | 162 ++ docs/source/doxyfile | 2 + include/flatbuffers/idl.h | 59 +- rust/flatbuffers/Cargo.lock | 4 + rust/flatbuffers/Cargo.toml | 7 + rust/flatbuffers/src/builder.rs | 601 ++++ rust/flatbuffers/src/endian_scalar.rs | 189 ++ rust/flatbuffers/src/follow.rs | 62 + rust/flatbuffers/src/lib.rs | 36 + rust/flatbuffers/src/primitives.rs | 297 ++ rust/flatbuffers/src/push.rs | 153 + rust/flatbuffers/src/table.rs | 77 + rust/flatbuffers/src/vector.rs | 133 + rust/flatbuffers/src/vtable.rs | 95 + rust/flatbuffers/src/vtable_writer.rs | 85 + src/code_generators.cpp | 2 + src/flatc_main.cpp | 4 + src/idl_gen_cpp.cpp | 3 +- src/idl_gen_general.cpp | 4 +- src/idl_gen_go.cpp | 2 +- src/idl_gen_lobster.cpp | 2 +- src/idl_gen_lua.cpp | 2 +- src/idl_gen_php.cpp | 2 +- src/idl_gen_python.cpp | 2 +- src/idl_gen_rust.cpp | 1782 ++++++++++++ src/idl_gen_text.cpp | 6 +- src/idl_parser.cpp | 12 +- tests/RustTest.sh | 28 + tests/generate_code.sh | 4 +- tests/monster_test_generated.rs | 1671 +++++++++++ .../namespace_test1_generated.rs | 231 ++ .../namespace_test2_generated.rs | 296 ++ tests/rust_usage_test/Cargo.lock | 285 ++ tests/rust_usage_test/Cargo.toml | 22 + .../benches/flatbuffers_benchmarks.rs | 218 ++ tests/rust_usage_test/src/lib.rs | 1 + tests/rust_usage_test/test_bench_output.txt | 184 ++ .../rust_usage_test/tests/integration_test.rs | 2473 +++++++++++++++++ 40 files changed, 9162 insertions(+), 39 deletions(-) create mode 100644 docs/source/RustUsage.md create mode 100644 rust/flatbuffers/Cargo.lock create mode 100644 rust/flatbuffers/Cargo.toml create mode 100644 rust/flatbuffers/src/builder.rs create mode 100644 rust/flatbuffers/src/endian_scalar.rs create mode 100644 rust/flatbuffers/src/follow.rs create mode 100644 
rust/flatbuffers/src/lib.rs create mode 100644 rust/flatbuffers/src/primitives.rs create mode 100644 rust/flatbuffers/src/push.rs create mode 100644 rust/flatbuffers/src/table.rs create mode 100644 rust/flatbuffers/src/vector.rs create mode 100644 rust/flatbuffers/src/vtable.rs create mode 100644 rust/flatbuffers/src/vtable_writer.rs create mode 100644 src/idl_gen_rust.cpp create mode 100755 tests/RustTest.sh create mode 100644 tests/monster_test_generated.rs create mode 100644 tests/namespace_test/namespace_test1_generated.rs create mode 100644 tests/namespace_test/namespace_test2_generated.rs create mode 100644 tests/rust_usage_test/Cargo.lock create mode 100644 tests/rust_usage_test/Cargo.toml create mode 100644 tests/rust_usage_test/benches/flatbuffers_benchmarks.rs create mode 100644 tests/rust_usage_test/src/lib.rs create mode 100644 tests/rust_usage_test/test_bench_output.txt create mode 100644 tests/rust_usage_test/tests/integration_test.rs diff --git a/CMakeLists.txt b/CMakeLists.txt index af1048dbecd1..37534b7c4056 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -54,6 +54,7 @@ set(FlatBuffers_Compiler_SRCS src/idl_gen_python.cpp src/idl_gen_lobster.cpp src/idl_gen_lua.cpp + src/idl_gen_rust.cpp src/idl_gen_fbs.cpp src/idl_gen_grpc.cpp src/idl_gen_json_schema.cpp diff --git a/docs/source/Compiler.md b/docs/source/Compiler.md index 60494049cae6..586087ad7377 100644 --- a/docs/source/Compiler.md +++ b/docs/source/Compiler.md @@ -43,6 +43,8 @@ For any schema input files, one or more generators can be specified: - `--lobster`: Generate Lobster code. +- `--rust`, `-r` : Generate Rust code. 
+ For any data input files: - `--binary`, `-b` : If data is contained in this file, generate a diff --git a/docs/source/RustUsage.md b/docs/source/RustUsage.md new file mode 100644 index 000000000000..98b0e9e78a81 --- /dev/null +++ b/docs/source/RustUsage.md @@ -0,0 +1,162 @@ +Use in Rust {#flatbuffers_guide_use_rust} +========== + +## Before you get started + +Before diving into the FlatBuffers usage in Rust, it should be noted that +the [Tutorial](@ref flatbuffers_guide_tutorial) page has a complete guide +to general FlatBuffers usage in all of the supported languages (including Rust). +This page is designed to cover the nuances of FlatBuffers usage, specific to +Rust. + +#### Prerequisites + +This page assumes you have written a FlatBuffers schema and compiled it +with the Schema Compiler. If you have not, please see +[Using the schema compiler](@ref flatbuffers_guide_using_schema_compiler) +and [Writing a schema](@ref flatbuffers_guide_writing_schema). + +Assuming you wrote a schema, say `mygame.fbs` (though the extension doesn't +matter), you've generated a Rust file called `mygame_generated.rs` using the +compiler (e.g. `flatc --rust mygame.fbs`), you can now start using this in +your program by including the file. As noted, this generated file relies on the crate +`flatbuffers`, which should be listed as a dependency in your `Cargo.toml`. + +## FlatBuffers Rust library code location + +The code for the FlatBuffers Rust library can be found at +`flatbuffers/rust`. You can browse the library code on the +[FlatBuffers GitHub page](https://github.com/google/flatbuffers/tree/master/rust). + +## Testing the FlatBuffers Rust library + +The code to test the Rust library can be found at `flatbuffers/tests/rust_usage_test`. +The test code itself is located in +[integration_test.rs](https://github.com/google/flatbuffers/blob/master/tests/rust_usage_test/tests/integration_test.rs) + +This test file requires `flatc` to be present. 
To review how to build the project, +please read the [Building](@ref flatbuffers_guide_building) documentation. + +To run the tests, execute `RustTest.sh` from the `flatbuffers/tests` directory. +For example, on [Linux](https://en.wikipedia.org/wiki/Linux), you would simply +run: `cd tests && ./RustTest.sh`. + +*Note: The shell script requires [Rust](https://www.rust-lang.org) to +be installed.* + +## Using the FlatBuffers Rust library + +*Note: See [Tutorial](@ref flatbuffers_guide_tutorial) for a more in-depth +example of how to use FlatBuffers in Rust.* + +FlatBuffers supports both reading and writing FlatBuffers in Rust. + +To use FlatBuffers in your code, first generate the Rust modules from your +schema with the `--rust` option to `flatc`. Then you can import both FlatBuffers +and the generated code to read or write FlatBuffers. + +For example, here is how you would read a FlatBuffer binary file in Rust: +First, include the library and generated code. Then read the file into +a `u8` vector, which you pass to `get_root_as_monster()`. + +This full example program is available in the Rust test suite: +[monster_example.rs](https://github.com/google/flatbuffers/blob/master/tests/rust_usage_test/bin/monster_example.rs) + +It can be run by `cd`ing to the `rust_usage_test` directory and executing: `cargo run monster_example`. 
+ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs} + extern crate flatbuffers; + + #[path = "../../monster_test_generated.rs"] + mod monster_test_generated; + pub use monster_test_generated::my_game; + + use std::io::Read; + + fn main() { + let mut f = std::fs::File::open("../monsterdata_test.mon").unwrap(); + let mut buf = Vec::new(); + f.read_to_end(&mut buf).expect("file reading failed"); + + let monster = my_game::example::get_root_as_monster(&buf[..]); +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +`monster` is of type `Monster`, and points to somewhere *inside* your +buffer (root object pointers are not the same as `buffer_pointer` !). +If you look in your generated code, you'll see it has +convenient accessors for all fields, e.g. `hp()`, `mana()`, etc: + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs} + println!("{}", monster.hp()); // `80` + println!("{}", monster.mana()); // default value of `150` + println!("{:?}", monster.name()); // Some("MyMonster") + } +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +*Note: That we never stored a `mana` value, so it will return the default.* + +## Direct memory access + +As you can see from the above examples, all elements in a buffer are +accessed through generated accessors. This is because everything is +stored in little endian format on all platforms (the accessor +performs a swap operation on big endian machines), and also because +the layout of things is generally not known to the user. + +For structs, layout is deterministic and guaranteed to be the same +across platforms (scalars are aligned to their +own size, and structs themselves to their largest member), and you +are allowed to access this memory directly by using `safe_slice` +on the reference to a struct, or even an array of structs. 
+ +To compute offsets to sub-elements of a struct, make sure they +are structs themselves, as then you can use the pointers to +figure out the offset without having to hardcode it. This is +handy for use of arrays of structs with calls like `glVertexAttribPointer` +in OpenGL or similar APIs. + +It is important to note that structs are still little endian on all +machines, so only use tricks like this if you can guarantee you're not +shipping on a big endian machine (using an `#[cfg(target_endian = "little")]` +attribute would be wise). + +The special function `safe_slice` is implemented on Vector objects that are +represented in memory the same way as they are represented on the wire. This +function is always available on vectors of struct, bool, u8, and i8. It is +conditionally-compiled on little-endian systems for all the remaining scalar +types. + +The FlatBufferBuilder function `create_vector_direct` is implemented for all +types that are endian-safe to write with a `memcpy`. It is the write-equivalent +of `safe_slice`. + +## Access of untrusted buffers + +The generated accessor functions access fields over offsets, which is +very quick. These offsets are not verified at run-time, so a malformed +buffer could cause a program to crash by accessing random memory. (We try to +prevent this in Rust by using safe slice accesses, instead of unsafe pointer +dereferencing, but you should not rely on its correctness.) + +When you're processing large amounts of data from a source you know (e.g. +your own generated data on disk), this is acceptable, but when reading +data from the network that can potentially have been modified by an +attacker, this is undesirable. + +The C++ port provides a buffer verifier, but, at this time, Rust does not. + +## Threading + +Reading a FlatBuffer does not touch any memory outside the original buffer, +and is entirely read-only (all immutable), so is safe to access from multiple +threads even without synchronisation primitives. 
+ +Creating a FlatBuffer is not thread safe. All state related to building +a FlatBuffer is contained in a FlatBufferBuilder instance, and no memory +outside of it is touched. To make this thread safe, either do not +share instances of FlatBufferBuilder between threads (recommended), or +manually wrap it in synchronisation primitives. There's no automatic way to +accomplish this, by design, as we feel multithreaded construction +of a single buffer will be rare, and synchronisation overhead would be costly. + +
diff --git a/docs/source/doxyfile b/docs/source/doxyfile index 19a2ec943d3f..6ba3c108cc90 100644 --- a/docs/source/doxyfile +++ b/docs/source/doxyfile @@ -760,6 +760,7 @@ INPUT = "FlatBuffers.md" \ "PythonUsage.md" \ "LuaUsage.md" \ "LobsterUsage.md" \ + "RustUsage.md" \ "Support.md" \ "Benchmarks.md" \ "WhitePaper.md" \ @@ -778,6 +779,7 @@ INPUT = "FlatBuffers.md" \ "../../net/FlatBuffers/FlatBufferBuilder.cs" \ "../../include/flatbuffers/flatbuffers.h" \ "../../go/builder.go" + "../../rust/flatbuffers/src/builder.rs" # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses diff --git a/include/flatbuffers/idl.h b/include/flatbuffers/idl.h index 54ebf764c6d8..75e34c52d13f 100644 --- a/include/flatbuffers/idl.h +++ b/include/flatbuffers/idl.h @@ -41,24 +41,24 @@ namespace flatbuffers { // of type tokens. // clang-format off #define FLATBUFFERS_GEN_TYPES_SCALAR(TD) \ - TD(NONE, "", uint8_t, byte, byte, byte, uint8) \ - TD(UTYPE, "", uint8_t, byte, byte, byte, uint8) /* begin scalar/int */ \ - TD(BOOL, "bool", uint8_t, boolean,byte, bool, bool) \ - TD(CHAR, "byte", int8_t, byte, int8, sbyte, int8) \ - TD(UCHAR, "ubyte", uint8_t, byte, byte, byte, uint8) \ - TD(SHORT, "short", int16_t, short, int16, short, int16) \ - TD(USHORT, "ushort", uint16_t, short, uint16, ushort, uint16) \ - TD(INT, "int", int32_t, int, int32, int, int32) \ - TD(UINT, "uint", uint32_t, int, uint32, uint, uint32) \ - TD(LONG, "long", int64_t, long, int64, long, int64) \ - TD(ULONG, "ulong", uint64_t, long, uint64, ulong, uint64) /* end int */ \ - TD(FLOAT, "float", float, float, float32, float, float32) /* begin float */ \ - TD(DOUBLE, "double", double, double, float64, double, float64) /* end float/scalar */ + TD(NONE, "", uint8_t, byte, byte, byte, uint8, u8) \ + TD(UTYPE, "", uint8_t, byte, byte, byte, uint8, u8) /* begin scalar/int */ \ + TD(BOOL, "bool", uint8_t, boolean,byte, bool, bool, 
bool) \ + TD(CHAR, "byte", int8_t, byte, int8, sbyte, int8, i8) \ + TD(UCHAR, "ubyte", uint8_t, byte, byte, byte, uint8, u8) \ + TD(SHORT, "short", int16_t, short, int16, short, int16, i16) \ + TD(USHORT, "ushort", uint16_t, short, uint16, ushort, uint16, u16) \ + TD(INT, "int", int32_t, int, int32, int, int32, i32) \ + TD(UINT, "uint", uint32_t, int, uint32, uint, uint32, u32) \ + TD(LONG, "long", int64_t, long, int64, long, int64, i64) \ + TD(ULONG, "ulong", uint64_t, long, uint64, ulong, uint64, u64) /* end int */ \ + TD(FLOAT, "float", float, float, float32, float, float32, f32) /* begin float */ \ + TD(DOUBLE, "double", double, double, float64, double, float64, f64) /* end float/scalar */ #define FLATBUFFERS_GEN_TYPES_POINTER(TD) \ - TD(STRING, "string", Offset, int, int, StringOffset, int) \ - TD(VECTOR, "", Offset, int, int, VectorOffset, int) \ - TD(STRUCT, "", Offset, int, int, int, int) \ - TD(UNION, "", Offset, int, int, int, int) + TD(STRING, "string", Offset, int, int, StringOffset, int, unused) \ + TD(VECTOR, "", Offset, int, int, VectorOffset, int, unused) \ + TD(STRUCT, "", Offset, int, int, int, int, unused) \ + TD(UNION, "", Offset, int, int, int, int, unused) // The fields are: // - enum @@ -68,12 +68,14 @@ namespace flatbuffers { // - Go type. // - C# / .Net type. // - Python type. +// - Rust type. // using these macros, we can now write code dealing with types just once, e.g. /* switch (type) { - #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ case BASE_TYPE_ ## ENUM: \ // do something specific to CTYPE here FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) @@ -90,13 +92,15 @@ switch (type) { __extension__ // Stop GCC complaining about trailing comma with -Wpendantic. 
#endif enum BaseType { - #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ BASE_TYPE_ ## ENUM, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD }; -#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ +#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ static_assert(sizeof(CTYPE) <= sizeof(largest_scalar_t), \ "define largest_scalar_t as " #CTYPE); FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) @@ -111,6 +115,8 @@ inline bool IsFloat (BaseType t) { return t == BASE_TYPE_FLOAT || inline bool IsLong (BaseType t) { return t == BASE_TYPE_LONG || t == BASE_TYPE_ULONG; } inline bool IsBool (BaseType t) { return t == BASE_TYPE_BOOL; } +inline bool IsOneByte(BaseType t) { return t >= BASE_TYPE_UTYPE && + t <= BASE_TYPE_UCHAR; } // clang-format on extern const char *const kTypeNames[]; @@ -410,6 +416,7 @@ struct IDLOptions { kDart = 1 << 11, kLua = 1 << 12, kLobster = 1 << 13, + kRust = 1 << 14, kMAX }; @@ -834,6 +841,12 @@ extern bool GenerateLua(const Parser &parser, const std::string &path, const std::string &file_name); +// Generate Rust files from the definitions in the Parser object. +// See idl_gen_rust.cpp. +extern bool GenerateRust(const Parser &parser, + const std::string &path, + const std::string &file_name); + // Generate Json schema file // See idl_gen_json_schema.cpp. extern bool GenerateJsonSchema(const Parser &parser, @@ -872,6 +885,12 @@ extern std::string DartMakeRule(const Parser &parser, const std::string &path, const std::string &file_name); +// Generate a make rule for the generated Rust code. +// See idl_gen_rust.cpp. +extern std::string RustMakeRule(const Parser &parser, + const std::string &path, + const std::string &file_name); + // Generate a make rule for the generated Java/C#/... files. // See idl_gen_general.cpp. 
extern std::string GeneralMakeRule(const Parser &parser, diff --git a/rust/flatbuffers/Cargo.lock b/rust/flatbuffers/Cargo.lock new file mode 100644 index 000000000000..dc2168d0834b --- /dev/null +++ b/rust/flatbuffers/Cargo.lock @@ -0,0 +1,4 @@ +[[package]] +name = "flatbuffers" +version = "0.1.0" + diff --git a/rust/flatbuffers/Cargo.toml b/rust/flatbuffers/Cargo.toml new file mode 100644 index 000000000000..f5914e991fd8 --- /dev/null +++ b/rust/flatbuffers/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "flatbuffers" +version = "0.1.0" +authors = ["Robert Winslow ", "FlatBuffers Maintainers"] + +[dependencies] +smallvec = "0.6" diff --git a/rust/flatbuffers/src/builder.rs b/rust/flatbuffers/src/builder.rs new file mode 100644 index 000000000000..cb8644363baa --- /dev/null +++ b/rust/flatbuffers/src/builder.rs @@ -0,0 +1,601 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +extern crate smallvec; + +use std::cmp::max; +use std::marker::PhantomData; +use std::mem::size_of; +use std::ptr::write_bytes; + +use endian_scalar::{read_scalar, emplace_scalar}; +use primitives::*; +use push::{Push, ZeroTerminatedByteSlice}; +use table::Table; +use vtable::{VTable, field_index_to_field_offset}; +use vtable_writer::VTableWriter; +use vector::{SafeSliceAccess, Vector}; + +#[derive(Clone, Copy, Debug)] +struct FieldLoc { + off: UOffsetT, + id: VOffsetT, +} + +/// FlatBufferBuilder builds a FlatBuffer through manipulating its internal +/// state. It has an owned `Vec` that grows as needed (up to the hardcoded +/// limit of 2GiB, which is set by the FlatBuffers format). +/// +/// Users are expected to create/reset instances of this type; however, its +/// functions are primarily intended for use by the generated code emitted by +/// the `flatc` compiler. +pub struct FlatBufferBuilder<'fbb> { + owned_buf: Vec, + head: usize, + + field_locs: Vec, + written_vtable_revpos: Vec, + + nested: bool, + finished: bool, + + min_align: usize, + + _phantom: PhantomData<&'fbb ()>, +} +impl<'fbb> FlatBufferBuilder<'fbb> { + /// Create a FlatBufferBuilder that is ready for writing. + pub fn new() -> Self { + Self::new_with_capacity(0) + } + + /// Create a FlatBufferBuilder that is ready for writing, with a + /// ready-to-use capacity of the provided size. + /// + /// To minimize memory allocations after initialization, call this function + /// with the constant `FLATBUFFERS_MAX_BUFFER_SIZE`. + pub fn new_with_capacity(size: usize) -> Self { + assert!(size <= FLATBUFFERS_MAX_BUFFER_SIZE, + "cannot initialize buffer bigger than 2 gigabytes"); + FlatBufferBuilder { + owned_buf: vec![0u8; size], + head: size, + + field_locs: Vec::new(), + written_vtable_revpos: Vec::new(), + + nested: false, + finished: false, + + min_align: 0, + + _phantom: PhantomData, + } + } + + /// Reset the FlatBufferBuilder in order to reduce heap allocations. 
+ /// If you are using a FlatBufferBuilder repeatedly, make sure to use this + /// function! + pub fn reset(&mut self) { + // memset only the part of the buffer that could be dirty: + { + let to_clear = self.owned_buf.len() - self.head; + let ptr = (&mut self.owned_buf[self.head..]).as_mut_ptr(); + unsafe { write_bytes(ptr, 0, to_clear); } + } + + self.head = self.owned_buf.len(); + self.written_vtable_revpos.clear(); + + self.nested = false; + self.finished = false; + + self.min_align = 0; + } + + /// Destroy the FlatBufferBuilder, returning its internal byte vector + /// and the index into it that represents the start of valid data. + pub fn collapse(self) -> (Vec, usize) { + (self.owned_buf, self.head) + } + + /// Push a Push'able value onto the front of the in-progress data. + /// + /// This function uses traits to provide a unified API for writing + /// scalars, tables, vectors, and WIPOffsets. + #[inline] + pub fn push(&mut self, x: X) -> WIPOffset { + self.align(x.size(), x.alignment()); + self.make_space(x.size()); + { + let (dst, rest) = (&mut self.owned_buf[self.head..]).split_at_mut(x.size()); + x.push(dst, rest); + } + WIPOffset::new(self.used_space() as UOffsetT) + } + + /// Push a Push'able value onto the front of the in-progress data, and + /// store a reference to it in the in-progress vtable. If the value matches + /// the default, then this is a no-op. + #[inline] + pub fn push_slot(&mut self, slotoff: VOffsetT, x: X, default: X) { + self.assert_nested("push_slot must be called after start_table"); + if x == default { + return; + } + self.push_slot_always(slotoff, x); + } + + /// Push a Push'able value onto the front of the in-progress data, and + /// store a reference to it in the in-progress vtable. 
+ #[inline] + pub fn push_slot_always(&mut self, slotoff: VOffsetT, x: X) { + self.assert_nested("push_slot_always must be called after start_table"); + let off = self.push(x); + self.track_field(slotoff, off.value()); + } + + /// Retrieve the number of vtables that have been serialized into the + /// FlatBuffer. This is primarily used to check vtable deduplication. + /// store a reference to it in the in-progress vtable. + #[inline] + pub fn num_written_vtables(&self) -> usize { + self.written_vtable_revpos.len() + } + + /// Start a Table write. + /// + /// Asserts that the builder is not in a nested state. + /// + /// Users probably want to use `push_slot` to add values after calling this. + #[inline] + pub fn start_table(&mut self) -> WIPOffset { + self.assert_not_nested("start_table can not be called when a table or vector is under construction"); + self.nested = true; + + WIPOffset::new(self.used_space() as UOffsetT) + } + + /// End a Table write. + /// + /// Asserts that the builder is in a nested state. + #[inline] + pub fn end_table(&mut self, off: WIPOffset) -> WIPOffset { + self.assert_nested("end_table must be called after a call to start_table"); + + let o = self.write_vtable(off); + + self.nested = false; + self.field_locs.clear(); + + WIPOffset::new(o.value()) + } + + /// Start a Vector write. + /// + /// Asserts that the builder is not in a nested state. + /// + /// Most users will prefer to call `create_vector`. + /// Speed optimizing users who choose to create vectors manually using this + /// function will want to use `push` to add values. + #[inline] + pub fn start_vector(&mut self, len: usize, elem_size: usize) { + self.assert_not_nested("start_vector can not be called when a table or vector is under construction"); + self.nested = true; + self.align(len * elem_size, SIZE_UOFFSET); + self.align(len * elem_size, elem_size); // Just in case elemsize > uoffset_t. + } + + /// End a Vector write. 
+ /// + /// Note that the `num_elems` parameter is the number of written items, not + /// the byte count. + /// + /// Asserts that the builder is in a nested state. + #[inline] + pub fn end_vector(&mut self, num_elems: usize) -> WIPOffset> { + self.assert_nested("end_vector must be called after a call to start_vector"); + self.nested = false; + let o = self.push::(num_elems as UOffsetT); + WIPOffset::new(o.value()) + } + + /// Create a utf8 string. + /// + /// The wire format represents this as a zero-terminated byte vector. + #[inline] + pub fn create_string(&mut self, s: &str) -> WIPOffset<&'fbb str> { + self.assert_not_nested("create_string can not be called when a table or vector is under construction"); + self.push(ZeroTerminatedByteSlice::new(s.as_bytes())); + WIPOffset::new(self.used_space() as UOffsetT) + } + + /// Create a zero-terminated byte vector. + #[inline] + pub fn create_byte_string(&mut self, data: &[u8]) -> WIPOffset<&'fbb [u8]> { + self.assert_not_nested("create_byte_string can not be called when a table or vector is under construction"); + self.push(ZeroTerminatedByteSlice::new(data)); + WIPOffset::new(self.used_space() as UOffsetT) + } + + /// Create a vector by memcpy'ing. This is much faster than calling + /// `create_vector`, but the underlying type must be represented as + /// little-endian on the host machine. This property is encoded in the + /// type system through the SafeSliceAccess trait. The following types are + /// always safe, on any platform: bool, u8, i8, and any + /// FlatBuffers-generated struct. + #[inline] + pub fn create_vector_direct(&mut self, data: &[T]) -> WIPOffset> { + self.assert_not_nested("create_vector_direct can not be called when a table or vector is under construction"); + self.push(data); + WIPOffset::new(self.used_space() as UOffsetT) + } + + /// Create a vector of strings. 
+ /// + /// Speed-sensitive users may wish to reduce memory usage by creating the + /// vector manually: use `create_vector`, `push`, and `end_vector`. + #[inline] + pub fn create_vector_of_strings<'a, 'b>(&'a mut self, xs: &'b [&'b str]) -> WIPOffset>> { + self.assert_not_nested("create_vector_of_strings can not be called when a table or vector is under construction"); + // internally, smallvec can be a stack-allocated or heap-allocated vector. + // we expect it to usually be stack-allocated. + let mut offsets: smallvec::SmallVec<[WIPOffset<&str>; 0]> = smallvec::SmallVec::with_capacity(xs.len()); + unsafe { offsets.set_len(xs.len()); } + for (i, &s) in xs.iter().enumerate().rev() { + let o = self.create_string(s); + offsets[i] = o; + } + self.create_vector(&offsets[..]) + } + + /// Create a vector of Push-able objects. + /// + /// Speed-sensitive users may wish to reduce memory usage by creating the + /// vector manually: use `create_vector`, `push`, and `end_vector`. + #[inline] + pub fn create_vector<'a, T: Push + Copy + 'fbb>(&'a mut self, items: &'a [T]) -> WIPOffset> { + let elemsize = size_of::(); + self.start_vector(elemsize, items.len()); + // TODO(rw): precompute the space needed and call `make_space` only once + for i in (0..items.len()).rev() { + self.push(items[i]); + } + WIPOffset::new(self.end_vector::(items.len()).value()) + } + + /// Get the byte slice for the data that has been written, regardless of + /// whether it has been finished. + #[inline] + pub fn unfinished_data(&self) -> &[u8] { + &self.owned_buf[self.head..] + } + /// Get the byte slice for the data that has been written after a call to + /// one of the `finish` functions. + #[inline] + pub fn finished_data(&self) -> &[u8] { + self.assert_finished("finished_bytes cannot be called when the buffer is not yet finished"); + &self.owned_buf[self.head..] + } + /// Assert that a field is present in the just-finished Table. 
+ /// + /// This is somewhat low-level and is mostly used by the generated code. + #[inline] + pub fn required(&self, + tab_revloc: WIPOffset, + slot_byte_loc: VOffsetT, + assert_msg_name: &'static str) { + let idx = self.used_space() - tab_revloc.value() as usize; + let tab = Table::new(&self.owned_buf[self.head..], idx); + let o = tab.vtable().get(slot_byte_loc) as usize; + assert!(o != 0, "missing required field {}", assert_msg_name); + } + + /// Finalize the FlatBuffer by aligning it, pushing an optional file + /// identifier on to it, pushing a size prefix on to it, and marking the + /// internal state of the FlatBufferBuilder as `finished`. Afterwards, + /// users can call `finished_data` to get the resulting data. + #[inline] + pub fn finish_size_prefixed(&mut self, root: WIPOffset, file_identifier: Option<&str>) { + self.finish_with_opts(root, file_identifier, true); + } + + /// Finalize the FlatBuffer by aligning it, pushing an optional file + /// identifier on to it, and marking the internal state of the + /// FlatBufferBuilder as `finished`. Afterwards, users can call + /// `finished_data` to get the resulting data. + #[inline] + pub fn finish(&mut self, root: WIPOffset, file_identifier: Option<&str>) { + self.finish_with_opts(root, file_identifier, false); + } + + /// Finalize the FlatBuffer by aligning it and marking the internal state + /// of the FlatBufferBuilder as `finished`. Afterwards, users can call + /// `finished_data` to get the resulting data. 
+ #[inline] + pub fn finish_minimal(&mut self, root: WIPOffset) { + self.finish_with_opts(root, None, false); + } + + #[inline] + fn used_space(&self) -> usize { + self.owned_buf.len() - self.head as usize + } + + #[inline] + fn track_field(&mut self, slot_off: VOffsetT, off: UOffsetT) { + let fl = FieldLoc { + id: slot_off, + off: off, + }; + self.field_locs.push(fl); + } + + #[inline] + fn fill(&mut self, zero_pad_bytes: usize) { + self.make_space(zero_pad_bytes); + } + + /// Write the VTable, if needed. + // TODO(rw): simplify this function + fn write_vtable(&mut self, table_tail_revloc: WIPOffset) -> WIPOffset { + self.assert_nested("write_vtable must be called after a call to start_table"); + + // Write the vtable offset, which is the start of any Table. + // We fill its value later. + let object_vtable_revloc: WIPOffset = + WIPOffset::new(self.push::(0xF0F0F0F0 as UOffsetT).value()); + + // Layout of the data this function will create when a new vtable is + // needed. + // -------------------------------------------------------------------- + // vtable starts here + // | x, x -- vtable len (bytes) [u16] + // | x, x -- object inline len (bytes) [u16] + // | x, x -- zero, or num bytes from start of object to field #0 [u16] + // | ... + // | x, x -- zero, or num bytes from start of object to field #n-1 [u16] + // vtable ends here + // table starts here + // | x, x, x, x -- offset (negative direction) to the vtable [i32] + // | aka "vtableoffset" + // | -- table inline data begins here, we don't touch it -- + // table ends here -- aka "table_start" + // -------------------------------------------------------------------- + // + // Layout of the data this function will create when we re-use an + // existing vtable. + // + // We always serialize this particular vtable, then compare it to the + // other vtables we know about to see if there is a duplicate. If there + // is, then we erase the serialized vtable we just made. 
+ // We serialize it first so that we are able to do byte-by-byte + // comparisons with already-serialized vtables. This 1) saves + // bookkeeping space (we only keep revlocs to existing vtables), 2) + // allows us to convert to little-endian once, then do + // fast memcmp comparisons, and 3) by ensuring we are comparing real + // serialized vtables, we can be more assured that we are doing the + // comparisons correctly. + // + // -------------------------------------------------------------------- + // table starts here + // | x, x, x, x -- offset (negative direction) to an existing vtable [i32] + // | aka "vtableoffset" + // | -- table inline data begins here, we don't touch it -- + // table starts here: aka "table_start" + // -------------------------------------------------------------------- + + // Include space for the last offset and ensure empty tables have a + // minimum size. + let max_voffset = self.field_locs.iter().map(|fl| fl.id).max(); + let vtable_len = match max_voffset { + None => { field_index_to_field_offset(0) as usize } + Some(mv) => { mv as usize + SIZE_VOFFSET } + }; + self.fill(vtable_len); + let table_object_size = object_vtable_revloc.value() - table_tail_revloc.value(); + debug_assert!(table_object_size < 0x10000); // Vtable use 16bit offsets. 
+ + let vt_start_pos = self.head; + let vt_end_pos = self.head + vtable_len; + { + let vtfw = &mut VTableWriter::init(&mut self.owned_buf[vt_start_pos..vt_end_pos]); + vtfw.write_vtable_byte_length(vtable_len as VOffsetT); + vtfw.write_object_inline_size(table_object_size as VOffsetT); + for &fl in self.field_locs.iter() { + let pos: VOffsetT = (object_vtable_revloc.value() - fl.off) as VOffsetT; + debug_assert_eq!(vtfw.get_field_offset(fl.id), + 0, + "tried to write a vtable field multiple times"); + vtfw.write_field_offset(fl.id, pos); + } + } + let vt_use = { + let mut ret: usize = self.used_space(); + + // LIFO order + for &vt_rev_pos in self.written_vtable_revpos.iter().rev() { + let eq = { + let this_vt = VTable::init(&self.owned_buf[..], self.head); + let other_vt = VTable::init(&self.owned_buf[..], self.head + self.used_space() - vt_rev_pos as usize); + other_vt == this_vt + }; + if eq { + VTableWriter::init(&mut self.owned_buf[vt_start_pos..vt_end_pos]).clear(); + self.head += vtable_len; + ret = vt_rev_pos as usize; + break; + } + } + ret + }; + + if vt_use == self.used_space() { + self.written_vtable_revpos.push(vt_use as UOffsetT); + } + + { + let n = self.head + self.used_space() - object_vtable_revloc.value() as usize; + let saw = read_scalar::(&self.owned_buf[n..n + SIZE_SOFFSET]); + debug_assert_eq!(saw, 0xF0F0F0F0); + emplace_scalar::( + &mut self.owned_buf[n..n + SIZE_SOFFSET], + vt_use as SOffsetT - object_vtable_revloc.value() as SOffsetT, + ); + } + + self.field_locs.clear(); + + object_vtable_revloc + } + fn grow_owned_buf(&mut self) { + let old_len = self.owned_buf.len(); + let new_len = max(1, old_len * 2); + + assert!(new_len <= FLATBUFFERS_MAX_BUFFER_SIZE, + "cannot grow buffer beyond 2 gigabytes"); + + let starting_active_size = self.used_space(); + + let diff = new_len - old_len; + self.owned_buf.resize(new_len, 0); + self.head += diff; + + let ending_active_size = self.used_space(); + debug_assert_eq!(starting_active_size, 
ending_active_size); + + if new_len == 1 { + return; + } + + // calculate the midpoint, and safely copy the old end data to the new + // end position: + let middle = new_len / 2; + { + let (left, right) = &mut self.owned_buf[..].split_at_mut(middle); + right.copy_from_slice(left); + } + // finally, zero out the old end data. + { + let ptr = (&mut self.owned_buf[..middle]).as_mut_ptr(); + unsafe { write_bytes(ptr, 0, middle); } + } + } + // with or without a size prefix changes how we load the data, so finish* + // functions are split along those lines. + fn finish_with_opts(&mut self, + root: WIPOffset, + file_identifier: Option<&str>, + size_prefixed: bool) { + self.assert_not_finished("buffer cannot be finished when it is already finished"); + self.assert_not_nested("buffer cannot be finished when a table or vector is under construction"); + self.written_vtable_revpos.clear(); + + let to_align = { + // for the root offset: + let a = SIZE_UOFFSET; + // for the size prefix: + let b = if size_prefixed { SIZE_UOFFSET } else { 0 }; + // for the file identifier (a string that is not zero-terminated): + let c = if file_identifier.is_some() { + FILE_IDENTIFIER_LENGTH + } else { + 0 + }; + a + b + c + }; + + { + let ma = self.min_align; + self.align(to_align, ma); + } + + if let Some(ident) = file_identifier { + debug_assert_eq!(ident.len(), FILE_IDENTIFIER_LENGTH); + self.push_bytes_unprefixed(ident.as_bytes()); + } + + self.push(root); + + if size_prefixed { + let sz = self.used_space() as UOffsetT; + self.push::(sz); + } + self.finished = true; + } + + fn align(&mut self, len: usize, alignment: usize) { + self.track_min_align(alignment); + let s = self.used_space() as usize; + self.fill(padding_bytes(s + len, alignment)); + } + fn track_min_align(&mut self, alignment: usize) { + self.min_align = max(self.min_align, alignment); + } + fn push_bytes_unprefixed(&mut self, x: &[u8]) -> UOffsetT { + let n = self.make_space(x.len()); + &mut self.owned_buf[n..n + 
x.len()].copy_from_slice(x); + + n as UOffsetT + } + fn make_space(&mut self, want: usize) -> usize { + self.ensure_capacity(want); + self.head -= want; + self.head + } + fn ensure_capacity(&mut self, want: usize) -> usize { + if self.unused_ready_space() >= want { + return want; + } + assert!( + want <= FLATBUFFERS_MAX_BUFFER_SIZE, + "cannot grow buffer beyond 2 gigabytes" + ); + while self.unused_ready_space() < want { + self.grow_owned_buf(); + } + want + } + #[inline] + fn unused_ready_space(&self) -> usize { + self.head + } + #[inline] + fn assert_nested(&self, msg: &'static str) { + // we don't assert that self.field_locs.len() >0 because the vtable + // could be empty (e.g. for empty tables, or for all-default values). + debug_assert!(self.nested, msg); + } + #[inline] + fn assert_not_nested(&self, msg: &'static str) { + debug_assert!(!self.nested, msg); + } + #[inline] + fn assert_finished(&self, msg: &'static str) { + debug_assert!(self.finished, msg); + } + #[inline] + fn assert_not_finished(&self, msg: &'static str) { + debug_assert!(!self.finished, msg); + } + +} + +#[inline] +fn padding_bytes(buf_size: usize, scalar_size: usize) -> usize { + // ((!buf_size) + 1) & (scalar_size - 1) + (!buf_size).wrapping_add(1) & (scalar_size.wrapping_sub(1)) +} diff --git a/rust/flatbuffers/src/endian_scalar.rs b/rust/flatbuffers/src/endian_scalar.rs new file mode 100644 index 000000000000..f29bd2573e66 --- /dev/null +++ b/rust/flatbuffers/src/endian_scalar.rs @@ -0,0 +1,189 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use std::mem::size_of; + +/// Trait for values that must be stored in little-endian byte order, but +/// might be represented in memory as big-endian. Every type that implements +/// EndianScalar is a valid FlatBuffers scalar value. +/// +/// The Rust stdlib does not provide a trait to represent scalars, so this trait +/// serves that purpose, too. +/// +/// Note that we do not use the num-traits crate for this, because it provides +/// "too much". For example, num-traits provides i128 support, but that is an +/// invalid FlatBuffers type. +pub trait EndianScalar: Sized + PartialEq + Copy + Clone { + fn to_little_endian(self) -> Self; + fn from_little_endian(self) -> Self; +} + +/// Macro for implementing a no-op endian conversion. This is used for types +/// that are one byte wide. +macro_rules! impl_endian_scalar_noop { + ($ty:ident) => ( + impl EndianScalar for $ty { + #[inline] + fn to_little_endian(self) -> Self { + self + } + #[inline] + fn from_little_endian(self) -> Self { + self + } + } + ) +} + +/// Macro for implementing a endian conversion using the stdlib to_le and from_le +/// functions. This is used for integer types. It is not used for floats, because +/// the to_le and from_le are not implemented for them in the stdlib. +macro_rules! 
impl_endian_scalar_stdlib_le_conversion { + ($ty:ident) => ( + impl EndianScalar for $ty { + #[inline] + fn to_little_endian(self) -> Self { + Self::to_le(self) + } + #[inline] + fn from_little_endian(self) -> Self { + Self::from_le(self) + } + } + ) +} + +impl_endian_scalar_noop!(bool); +impl_endian_scalar_noop!(u8); +impl_endian_scalar_noop!(i8); + +impl_endian_scalar_stdlib_le_conversion!(u16); +impl_endian_scalar_stdlib_le_conversion!(u32); +impl_endian_scalar_stdlib_le_conversion!(u64); +impl_endian_scalar_stdlib_le_conversion!(i16); +impl_endian_scalar_stdlib_le_conversion!(i32); +impl_endian_scalar_stdlib_le_conversion!(i64); + +impl EndianScalar for f32 { + /// Convert f32 from host endian-ness to little-endian. + #[inline] + fn to_little_endian(self) -> Self { + #[cfg(target_endian = "little")] + { + self + } + #[cfg(not(target_endian = "little"))] + { + byte_swap_f32(&self) + } + } + /// Convert f32 from little-endian to host endian-ness. + #[inline] + fn from_little_endian(self) -> Self { + #[cfg(target_endian = "little")] + { + self + } + #[cfg(not(target_endian = "little"))] + { + byte_swap_f32(&self) + } + } +} + +impl EndianScalar for f64 { + /// Convert f64 from host endian-ness to little-endian. + #[inline] + fn to_little_endian(self) -> Self { + #[cfg(target_endian = "little")] + { + self + } + #[cfg(not(target_endian = "little"))] + { + byte_swap_f64(&self) + } + } + /// Convert f64 from little-endian to host endian-ness. + #[inline] + fn from_little_endian(self) -> Self { + #[cfg(target_endian = "little")] + { + self + } + #[cfg(not(target_endian = "little"))] + { + byte_swap_f64(&self) + } + } +} + +/// Swaps the bytes of an f32. +#[allow(dead_code)] +#[inline] +pub fn byte_swap_f32(x: f32) -> f32 { + let mut ret = x; + + let ptr = &mut ret as *mut f32 as *mut u32; + unsafe { *ptr }.swap_bytes(); + + ret +} + +/// Swaps the bytes of an f64. 
+#[allow(dead_code)] +#[inline] +pub fn byte_swap_f64(x: f64) -> f64 { + let mut ret = x; + + let ptr = &mut ret as *mut f64 as *mut u64; + unsafe { *ptr }.swap_bytes(); + + ret +} + +/// Place an EndianScalar into the provided mutable byte slice. Performs +/// endian conversion, if necessary. +#[inline] +pub fn emplace_scalar(s: &mut [u8], x: T) { + let sz = size_of::(); + let mut_ptr = (&mut s[..sz]).as_mut_ptr() as *mut T; + let val = x.to_little_endian(); + unsafe { + *mut_ptr = val; + } +} + +/// Read an EndianScalar from the provided byte slice at the specified location. +/// Performs endian conversion, if necessary. +#[inline] +pub fn read_scalar_at(s: &[u8], loc: usize) -> T { + let buf = &s[loc..loc + size_of::()]; + read_scalar(buf) +} + +/// Read an EndianScalar from the provided byte slice. Performs endian +/// conversion, if necessary. +#[inline] +pub fn read_scalar(s: &[u8]) -> T { + let sz = size_of::(); + + let p = (&s[..sz]).as_ptr() as *const T; + let x = unsafe { *p }; + + x.from_little_endian() +} + diff --git a/rust/flatbuffers/src/follow.rs b/rust/flatbuffers/src/follow.rs new file mode 100644 index 000000000000..4d3eff776459 --- /dev/null +++ b/rust/flatbuffers/src/follow.rs @@ -0,0 +1,62 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +use std::marker::PhantomData; + +/// Follow is a trait that allows us to access FlatBuffers in a declarative, +/// type safe, and fast way. They compile down to almost no code (after +/// optimizations). Conceptually, Follow lifts the offset-based access +/// patterns of FlatBuffers data into the type system. This trait is used +/// pervasively at read time, to access tables, vtables, vectors, strings, and +/// all other data. At this time, Follow is not utilized much on the write +/// path. +/// +/// Writing a new Follow implementation primarily involves deciding whether +/// you want to return data (of the type Self::Inner) or do you want to +/// continue traversing the FlatBuffer. +pub trait Follow<'a> { + type Inner; + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner; +} + +/// Execute a follow as a top-level function. +#[allow(dead_code)] +#[inline] +pub fn lifted_follow<'a, T: Follow<'a>>(buf: &'a [u8], loc: usize) -> T::Inner { + T::follow(buf, loc) +} + +/// FollowStart wraps a Follow impl in a struct type. This can make certain +/// programming patterns more ergonomic. +#[derive(Debug)] +pub struct FollowStart(PhantomData); +impl<'a, T: Follow<'a> + 'a> FollowStart { + #[inline] + pub fn new() -> Self { + Self { 0: PhantomData } + } + #[inline] + pub fn self_follow(&'a self, buf: &'a [u8], loc: usize) -> T::Inner { + T::follow(buf, loc) + } +} +impl<'a, T: Follow<'a>> Follow<'a> for FollowStart { + type Inner = T::Inner; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + T::follow(buf, loc) + } +} diff --git a/rust/flatbuffers/src/lib.rs b/rust/flatbuffers/src/lib.rs new file mode 100644 index 000000000000..ecfb2da9aeb5 --- /dev/null +++ b/rust/flatbuffers/src/lib.rs @@ -0,0 +1,36 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +mod builder; +mod endian_scalar; +mod follow; +mod primitives; +mod push; +mod table; +mod vector; +mod vtable; +mod vtable_writer; + +pub use builder::FlatBufferBuilder; +pub use endian_scalar::{EndianScalar, emplace_scalar, read_scalar, read_scalar_at}; +pub use follow::{Follow, FollowStart}; +pub use primitives::*; +pub use push::Push; +pub use table::{Table, buffer_has_identifier, get_root, get_size_prefixed_root}; +pub use vector::{SafeSliceAccess, Vector, follow_cast_ref}; +pub use vtable::field_index_to_field_offset; + +// TODO(rw): Split fill ops in builder into fill_small, fill_big like in C++. diff --git a/rust/flatbuffers/src/primitives.rs b/rust/flatbuffers/src/primitives.rs new file mode 100644 index 000000000000..3a9e4f7e4af8 --- /dev/null +++ b/rust/flatbuffers/src/primitives.rs @@ -0,0 +1,297 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +use std::marker::PhantomData; +use std::ops::Deref; + +use endian_scalar::{emplace_scalar, read_scalar, read_scalar_at}; +use follow::Follow; +use push::Push; + +pub const FLATBUFFERS_MAX_BUFFER_SIZE: usize = (1u64 << 31) as usize; + +pub const FILE_IDENTIFIER_LENGTH: usize = 4; + +pub const VTABLE_METADATA_FIELDS: usize = 2; + +pub const SIZE_U8: usize = 1; +pub const SIZE_I8: usize = 1; + +pub const SIZE_U16: usize = 2; +pub const SIZE_I16: usize = 2; + +pub const SIZE_U32: usize = 4; +pub const SIZE_I32: usize = 4; + +pub const SIZE_U64: usize = 8; +pub const SIZE_I64: usize = 8; + +pub const SIZE_F32: usize = 4; +pub const SIZE_F64: usize = 8; + +pub const SIZE_SOFFSET: usize = SIZE_I32; +pub const SIZE_UOFFSET: usize = SIZE_U32; +pub const SIZE_VOFFSET: usize = SIZE_I16; + +pub const SIZE_SIZEPREFIX: usize = SIZE_U32; + +/// SOffsetT is an i32 that is used by tables to reference their vtables. +pub type SOffsetT = i32; + +/// UOffsetT is a u32 that is used by pervasively to represent both pointers +/// and lengths of vectors. +pub type UOffsetT = u32; + +/// VOffsetT is a i32 that is used by vtables to store field data. +pub type VOffsetT = i16; + +/// TableFinishedWIPOffset marks a WIPOffset as being for a finished table. +pub struct TableFinishedWIPOffset {} + +/// TableUnfinishedWIPOffset marks a WIPOffset as being for an unfinished table. +pub struct TableUnfinishedWIPOffset {} + +/// UnionWIPOffset marks a WIPOffset as being for a union value. +pub struct UnionWIPOffset {} + +/// VTableWIPOffset marks a WIPOffset as being for a vtable. +pub struct VTableWIPOffset {} + +/// WIPOffset contains an UOffsetT with a special meaning: it is the location of +/// data relative to the *end* of an in-progress FlatBuffer. The +/// FlatBufferBuilder uses this to track the location of objects in an absolute +/// way. The impl of Push converts a WIPOffset into a ForwardsUOffset. 
+#[derive(Debug)] +pub struct WIPOffset(UOffsetT, PhantomData); + +// TODO(rw): why do we need to reimplement (with a default impl) Copy to +// avoid ownership errors? +impl Copy for WIPOffset {} +impl Clone for WIPOffset { + #[inline] + fn clone(&self) -> WIPOffset { + WIPOffset::new(self.0.clone()) + } +} +impl PartialEq for WIPOffset { + fn eq(&self, o: &WIPOffset) -> bool { + self.value() == o.value() + } +} + +impl Deref for WIPOffset { + type Target = UOffsetT; + #[inline] + fn deref(&self) -> &UOffsetT { + &self.0 + } +} +impl<'a, T: 'a> WIPOffset { + /// Create a new WIPOffset. + #[inline] + pub fn new(o: UOffsetT) -> WIPOffset { + WIPOffset { + 0: o, + 1: PhantomData, + } + } + + /// Return a wrapped value that brings its meaning as a union WIPOffset + /// into the type system. + #[inline] + pub fn as_union_value(&self) -> WIPOffset { + WIPOffset::new(self.0) + } + /// Get the underlying value. + #[inline] + pub fn value(&self) -> UOffsetT { + self.0 + } +} + +impl Push for WIPOffset { + type Output = ForwardsUOffset; + + #[inline] + fn push(&self, dst: &mut [u8], rest: &[u8]) { + let n = (SIZE_UOFFSET + rest.len() - self.value() as usize) as UOffsetT; + emplace_scalar::(dst, n); + } +} + +impl Push for ForwardsUOffset { + type Output = Self; + + #[inline] + fn push(&self, dst: &mut [u8], rest: &[u8]) { + self.value().push(dst, rest); + } +} + +/// ForwardsUOffset is used by Follow to traverse a FlatBuffer: the pointer +/// is incremented by the value contained in this type. 
+#[derive(Debug)] +pub struct ForwardsUOffset(UOffsetT, PhantomData); +impl ForwardsUOffset { + #[inline(always)] + pub fn value(&self) -> UOffsetT { + self.0 + } +} + +impl<'a, T: Follow<'a>> Follow<'a> for ForwardsUOffset { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let slice = &buf[loc..loc + SIZE_UOFFSET]; + let off = read_scalar::(slice) as usize; + T::follow(buf, loc + off) + } +} + +/// ForwardsVOffset is used by Follow to traverse a FlatBuffer: the pointer +/// is incremented by the value contained in this type. +#[derive(Debug)] +pub struct ForwardsVOffset(VOffsetT, PhantomData); +impl ForwardsVOffset { + #[inline(always)] + pub fn value(&self) -> VOffsetT { + self.0 + } +} + +impl<'a, T: Follow<'a>> Follow<'a> for ForwardsVOffset { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let slice = &buf[loc..loc + SIZE_VOFFSET]; + let off = read_scalar::(slice) as usize; + T::follow(buf, loc + off) + } +} + +impl Push for ForwardsVOffset { + type Output = Self; + + #[inline] + fn push(&self, dst: &mut [u8], rest: &[u8]) { + self.value().push(dst, rest); + } +} + +/// ForwardsSOffset is used by Follow to traverse a FlatBuffer: the pointer +/// is incremented by the *negative* of the value contained in this type. 
+#[derive(Debug)] +pub struct BackwardsSOffset(SOffsetT, PhantomData); +impl BackwardsSOffset { + #[inline(always)] + pub fn value(&self) -> SOffsetT { + self.0 + } +} + +impl<'a, T: Follow<'a>> Follow<'a> for BackwardsSOffset { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let slice = &buf[loc..loc + SIZE_SOFFSET]; + let off = read_scalar::(slice); + T::follow(buf, (loc as SOffsetT - off) as usize) + } +} + +impl Push for BackwardsSOffset { + type Output = Self; + + #[inline] + fn push(&self, dst: &mut [u8], rest: &[u8]) { + self.value().push(dst, rest); + } +} + +/// SkipSizePrefix is used by Follow to traverse a FlatBuffer: the pointer is +/// incremented by a fixed constant in order to skip over the size prefix value. +pub struct SkipSizePrefix(PhantomData); +impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipSizePrefix { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + T::follow(buf, loc + SIZE_SIZEPREFIX) + } +} + +/// SkipRootOffset is used by Follow to traverse a FlatBuffer: the pointer is +/// incremented by a fixed constant in order to skip over the root offset value. +pub struct SkipRootOffset(PhantomData); +impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipRootOffset { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + T::follow(buf, loc + SIZE_UOFFSET) + } +} + +/// FileIdentifier is used by Follow to traverse a FlatBuffer: the pointer is +/// dereferenced into a byte slice, whose bytes are the file identifer value. 
+pub struct FileIdentifier; +impl<'a> Follow<'a> for FileIdentifier { + type Inner = &'a [u8]; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + &buf[loc..loc + FILE_IDENTIFIER_LENGTH] + } +} + +/// SkipFileIdentifier is used by Follow to traverse a FlatBuffer: the pointer +/// is incremented by a fixed constant in order to skip over the file +/// identifier value. +pub struct SkipFileIdentifier(PhantomData); +impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipFileIdentifier { + type Inner = T::Inner; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + T::follow(buf, loc + FILE_IDENTIFIER_LENGTH) + } +} + +/// Follow trait impls for primitive types. +/// +/// Ideally, these would be implemented as a single impl using trait bounds on +/// EndianScalar, but implementing Follow that way causes a conflict with +/// other impls. +macro_rules! impl_follow_for_endian_scalar { + ($ty:ident) => ( + impl<'a> Follow<'a> for $ty { + type Inner = $ty; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + read_scalar_at::<$ty>(buf, loc) + } + } + ) +} + +impl_follow_for_endian_scalar!(bool); +impl_follow_for_endian_scalar!(u8); +impl_follow_for_endian_scalar!(u16); +impl_follow_for_endian_scalar!(u32); +impl_follow_for_endian_scalar!(u64); +impl_follow_for_endian_scalar!(i8); +impl_follow_for_endian_scalar!(i16); +impl_follow_for_endian_scalar!(i32); +impl_follow_for_endian_scalar!(i64); +impl_follow_for_endian_scalar!(f32); +impl_follow_for_endian_scalar!(f64); diff --git a/rust/flatbuffers/src/push.rs b/rust/flatbuffers/src/push.rs new file mode 100644 index 000000000000..b78f2b1e0581 --- /dev/null +++ b/rust/flatbuffers/src/push.rs @@ -0,0 +1,153 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use std::cmp::max; +use std::mem::size_of; +use std::slice::from_raw_parts; + +use endian_scalar::emplace_scalar; +use primitives::*; +use vector::{SafeSliceAccess, Vector}; + +/// Trait to abstract over functionality needed to write values. Used in +/// FlatBufferBuilder and implemented for generated types. +pub trait Push: Sized { + type Output; + fn push(&self, dst: &mut [u8], _rest: &[u8]); + + #[inline] + fn size(&self) -> usize { + size_of::() + } + + #[inline] + fn alignment(&self) -> usize { + self.size() + } +} + +impl<'b> Push for &'b str { + type Output = Vector<'b, u8>; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let l = self.len(); + emplace_scalar::(&mut dst[..SIZE_UOFFSET], l as UOffsetT); + dst[SIZE_UOFFSET..SIZE_UOFFSET+l].copy_from_slice(self.as_bytes()); + } + + #[inline] + fn size(&self) -> usize { + SIZE_UOFFSET + self.len() + 1 + } + + #[inline] + fn alignment(&self) -> usize { + SIZE_UOFFSET + } +} + +/// Push-able wrapper for slices of types that implement SafeSliceAccess. 
+impl<'a, T: SafeSliceAccess + Sized> Push for &'a [T] { + type Output = Vector<'a, u8>; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let elem_sz = size_of::(); + let data = { + let ptr = self.as_ptr() as *const T as *const u8; + unsafe { + from_raw_parts(ptr, self.len() * elem_sz) + } + }; + emplace_scalar::(&mut dst[..SIZE_UOFFSET], self.len() as UOffsetT); + dst[SIZE_UOFFSET..SIZE_UOFFSET+data.len()].copy_from_slice(data); + } + + #[inline] + fn size(&self) -> usize { + SIZE_UOFFSET + self.len() * size_of::() + } + + #[inline] + fn alignment(&self) -> usize { + max(SIZE_UOFFSET, size_of::()) + } +} + + +/// Push-able wrapper for byte slices that need a zero-terminator written +/// after them. +pub struct ZeroTerminatedByteSlice<'a>(&'a [u8]); + +impl<'a> ZeroTerminatedByteSlice<'a> { + #[inline] + pub fn new(buf: &'a [u8]) -> Self { + ZeroTerminatedByteSlice { 0: buf } + } + + #[inline] + pub fn data(&'a self) -> &'a [u8] { + self.0 + } +} + +impl<'a> Push for ZeroTerminatedByteSlice<'a> { + type Output = Vector<'a, u8>; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let l = self.data().len(); + emplace_scalar::(&mut dst[..SIZE_UOFFSET], l as UOffsetT); + dst[SIZE_UOFFSET..SIZE_UOFFSET+l].copy_from_slice(self.data()); + } + + #[inline] + fn size(&self) -> usize { + SIZE_UOFFSET + self.0.len() + 1 + } + + #[inline] + fn alignment(&self) -> usize { + SIZE_UOFFSET + } +} + +/// Macro to implement Push for EndianScalar types. +macro_rules! 
impl_push_for_endian_scalar { + ($ty:ident) => ( + impl Push for $ty { + type Output = $ty; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + emplace_scalar::<$ty>(dst, *self); + } + } + ) +} + +impl_push_for_endian_scalar!(bool); +impl_push_for_endian_scalar!(u8); +impl_push_for_endian_scalar!(i8); +impl_push_for_endian_scalar!(u16); +impl_push_for_endian_scalar!(i16); +impl_push_for_endian_scalar!(u32); +impl_push_for_endian_scalar!(i32); +impl_push_for_endian_scalar!(u64); +impl_push_for_endian_scalar!(i64); +impl_push_for_endian_scalar!(f32); +impl_push_for_endian_scalar!(f64); diff --git a/rust/flatbuffers/src/table.rs b/rust/flatbuffers/src/table.rs new file mode 100644 index 000000000000..d9e952d01895 --- /dev/null +++ b/rust/flatbuffers/src/table.rs @@ -0,0 +1,77 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +use follow::Follow; +use primitives::*; +use vtable::VTable; + +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Table<'a> { + pub buf: &'a [u8], + pub loc: usize, +} + +impl<'a> Table<'a> { + #[inline] + pub fn new(buf: &'a [u8], loc: usize) -> Self { + Table { buf: buf, loc: loc } + } + #[inline] + pub fn vtable(&'a self) -> VTable<'a> { + >>::follow(self.buf, self.loc) + } + #[inline] + pub fn get + 'a>( + &'a self, + slot_byte_loc: VOffsetT, + default: Option, + ) -> Option { + let o = self.vtable().get(slot_byte_loc) as usize; + if o == 0 { + return default; + } + Some(::follow(self.buf, self.loc + o)) + } +} + +impl<'a> Follow<'a> for Table<'a> { + type Inner = Table<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Table { buf: buf, loc: loc } + } +} + +#[inline] +pub fn get_root<'a, T: Follow<'a> + 'a>(data: &'a [u8]) -> T::Inner { + >::follow(data, 0) +} +#[inline] +pub fn get_size_prefixed_root<'a, T: Follow<'a> + 'a>(data: &'a [u8]) -> T::Inner { + >>::follow(data, 0) +} +#[inline] +pub fn buffer_has_identifier(data: &[u8], ident: &str, size_prefixed: bool) -> bool { + assert_eq!(ident.len(), FILE_IDENTIFIER_LENGTH); + + let got = if size_prefixed { + >>::follow(data, 0) + } else { + >::follow(data, 0) + }; + + ident.as_bytes() == got +} diff --git a/rust/flatbuffers/src/vector.rs b/rust/flatbuffers/src/vector.rs new file mode 100644 index 000000000000..8c2d6d509160 --- /dev/null +++ b/rust/flatbuffers/src/vector.rs @@ -0,0 +1,133 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use std::marker::PhantomData; +use std::mem::size_of; +use std::slice::from_raw_parts; +use std::str::from_utf8_unchecked; + +use endian_scalar::{EndianScalar, read_scalar}; +use follow::Follow; +use primitives::*; + +#[derive(Debug)] +pub struct Vector<'a, T: 'a>(&'a [u8], usize, PhantomData); + +impl<'a, T: 'a> Vector<'a, T> { + #[inline(always)] + pub fn new(buf: &'a [u8], loc: usize) -> Self { + Vector { + 0: buf, + 1: loc, + 2: PhantomData, + } + } + + #[inline(always)] + pub fn len(&self) -> usize { + read_scalar::(&self.0[self.1 as usize..]) as usize + } +} + +impl<'a, T: Follow<'a> + 'a> Vector<'a, T> { + #[inline(always)] + pub fn get(&self, idx: usize) -> T::Inner { + debug_assert!(idx < read_scalar::(&self.0[self.1 as usize..]) as usize); + let sz = size_of::(); + debug_assert!(sz > 0); + T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx) + } +} + +pub trait SafeSliceAccess {} +impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> { + pub fn safe_slice(self) -> &'a [T] { + let buf = self.0; + let loc = self.1; + let sz = size_of::(); + debug_assert!(sz > 0); + let len = read_scalar::(&buf[loc..loc + SIZE_UOFFSET]) as usize; + let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; + let ptr = data_buf.as_ptr() as *const T; + let s: &'a [T] = unsafe { from_raw_parts(ptr, len) }; + s + } +} + +impl SafeSliceAccess for u8 {} +impl SafeSliceAccess for i8 {} +impl SafeSliceAccess for bool {} + +#[cfg(target_endian = "little")] +mod le_safe_slice_impls { + impl super::SafeSliceAccess for u16 {} + impl 
super::SafeSliceAccess for u32 {} + impl super::SafeSliceAccess for u64 {} + + impl super::SafeSliceAccess for i16 {} + impl super::SafeSliceAccess for i32 {} + impl super::SafeSliceAccess for i64 {} + + impl super::SafeSliceAccess for f32 {} + impl super::SafeSliceAccess for f64 {} +} + +pub use self::le_safe_slice_impls::*; + +pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T { + let sz = size_of::(); + let buf = &buf[loc..loc + sz]; + let ptr = buf.as_ptr() as *const T; + unsafe { &*ptr } +} + +impl<'a> Follow<'a> for &'a str { + type Inner = &'a str; + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let len = read_scalar::(&buf[loc..loc + SIZE_UOFFSET]) as usize; + let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len]; + let s = unsafe { from_utf8_unchecked(slice) }; + s + } +} + +fn follow_slice_helper(buf: &[u8], loc: usize) -> &[T] { + let sz = size_of::(); + debug_assert!(sz > 0); + let len = read_scalar::(&buf[loc..loc + SIZE_UOFFSET]) as usize; + let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; + let ptr = data_buf.as_ptr() as *const T; + let s: &[T] = unsafe { from_raw_parts(ptr, len) }; + s +} + +/// Implement direct slice access if the host is little-endian. +#[cfg(target_endian = "little")] +impl<'a, T: EndianScalar> Follow<'a> for &'a [T] { + type Inner = &'a [T]; + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + follow_slice_helper::(buf, loc) + } +} + +/// Implement Follow for all possible Vectors that have Follow-able elements. +impl<'a, T: Follow<'a> + 'a> Follow<'a> for Vector<'a, T> { + type Inner = Vector<'a, T>; + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Vector::new(buf, loc) + } +} + diff --git a/rust/flatbuffers/src/vtable.rs b/rust/flatbuffers/src/vtable.rs new file mode 100644 index 000000000000..cd7ede6e4463 --- /dev/null +++ b/rust/flatbuffers/src/vtable.rs @@ -0,0 +1,95 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use endian_scalar::read_scalar_at; +use follow::Follow; +use primitives::*; + +/// VTable encapsulates read-only usage of a vtable. It is only to be used +/// by generated code. +#[derive(Debug)] +pub struct VTable<'a> { + buf: &'a [u8], + loc: usize, +} + +impl<'a> PartialEq for VTable<'a> { + fn eq(&self, other: &VTable) -> bool { + self.as_bytes().eq(other.as_bytes()) + } +} + +impl<'a> VTable<'a> { + pub fn init(buf: &'a [u8], loc: usize) -> Self { + VTable { + buf: buf, + loc: loc, + } + } + pub fn num_fields(&self) -> usize { + (self.num_bytes() / SIZE_VOFFSET) - 2 + } + pub fn num_bytes(&self) -> usize { + read_scalar_at::(self.buf, self.loc) as usize + } + pub fn object_inline_num_bytes(&self) -> usize { + let n = read_scalar_at::(self.buf, self.loc + SIZE_VOFFSET); + n as usize + } + pub fn get_field(&self, idx: usize) -> VOffsetT { + // TODO(rw): distinguish between None and 0? + if idx > self.num_fields() { + return 0; + } + read_scalar_at::( + self.buf, + self.loc + SIZE_VOFFSET + SIZE_VOFFSET + SIZE_VOFFSET * idx, + ) + } + pub fn get(&self, byte_loc: VOffsetT) -> VOffsetT { + // TODO(rw): distinguish between None and 0? 
+ if byte_loc as usize >= self.num_bytes() { + return 0; + } + read_scalar_at::(self.buf, self.loc + byte_loc as usize) + } + pub fn as_bytes(&self) -> &[u8] { + let len = self.num_bytes(); + &self.buf[self.loc..self.loc + len] + } +} + + +#[allow(dead_code)] +pub fn field_index_to_field_offset(field_id: VOffsetT) -> VOffsetT { + // Should correspond to what end_table() below builds up. + let fixed_fields = 2; // Vtable size and Object Size. + ((field_id + fixed_fields) * (SIZE_VOFFSET as VOffsetT)) as VOffsetT +} + +#[allow(dead_code)] +pub fn field_offset_to_field_index(field_o: VOffsetT) -> VOffsetT { + debug_assert!(field_o >= 2); + let fixed_fields = 2; // VTable size and Object Size. + (field_o / (SIZE_VOFFSET as VOffsetT)) - fixed_fields +} + +impl<'a> Follow<'a> for VTable<'a> { + type Inner = VTable<'a>; + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + VTable::init(buf, loc) + } +} diff --git a/rust/flatbuffers/src/vtable_writer.rs b/rust/flatbuffers/src/vtable_writer.rs new file mode 100644 index 000000000000..119f794cd1c7 --- /dev/null +++ b/rust/flatbuffers/src/vtable_writer.rs @@ -0,0 +1,85 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use std::ptr::write_bytes; + +use endian_scalar::{emplace_scalar, read_scalar}; +use primitives::*; + +/// VTableWriter compartmentalizes actions needed to create a vtable. 
+#[derive(Debug)] +pub struct VTableWriter<'a> { + buf: &'a mut [u8], +} + +impl<'a> VTableWriter<'a> { + #[inline(always)] + pub fn init(buf: &'a mut [u8]) -> Self { + VTableWriter { buf: buf } + } + + /// Writes the vtable length (in bytes) into the vtable. + /// + /// Note that callers already need to have computed this to initialize + /// a VTableWriter. + /// + /// In debug mode, asserts that the length of the underlying data is equal + /// to the provided value. + #[inline(always)] + pub fn write_vtable_byte_length(&mut self, n: VOffsetT) { + emplace_scalar::(&mut self.buf[..SIZE_VOFFSET], n); + debug_assert_eq!(n as usize, self.buf.len()); + } + + /// Writes an object length (in bytes) into the vtable. + #[inline(always)] + pub fn write_object_inline_size(&mut self, n: VOffsetT) { + emplace_scalar::(&mut self.buf[SIZE_VOFFSET..2 * SIZE_VOFFSET], n); + } + + /// Gets an object field offset from the vtable. Only used for debugging. + /// + /// Note that this expects field offsets (which are like pointers), not + /// field ids (which are like array indices). + #[inline(always)] + pub fn get_field_offset(&self, vtable_offset: VOffsetT) -> VOffsetT { + let idx = vtable_offset as usize; + read_scalar::(&self.buf[idx..idx + SIZE_VOFFSET]) + } + + /// Writes an object field offset into the vtable. + /// + /// Note that this expects field offsets (which are like pointers), not + /// field ids (which are like array indices). + #[inline(always)] + pub fn write_field_offset(&mut self, vtable_offset: VOffsetT, object_data_offset: VOffsetT) { + let idx = vtable_offset as usize; + emplace_scalar::(&mut self.buf[idx..idx + SIZE_VOFFSET], object_data_offset); + } + + /// Clears all data in this VTableWriter. Used to cleanly undo a + /// vtable write. + #[inline(always)] + pub fn clear(&mut self) { + // This is the closest thing to memset in Rust right now. 
+ let len = self.buf.len(); + let p = self.buf.as_mut_ptr() as *mut u8; + unsafe { + write_bytes(p, 0, len); + } + } +} + diff --git a/src/code_generators.cpp b/src/code_generators.cpp index 2ecd5e3a7e03..a180b09df3ae 100644 --- a/src/code_generators.cpp +++ b/src/code_generators.cpp @@ -16,6 +16,7 @@ #include "flatbuffers/code_generators.h" #include +#include // cerr messages for logging warnings #include "flatbuffers/base.h" #include "flatbuffers/util.h" @@ -47,6 +48,7 @@ void CodeWriter::operator+=(std::string text) { const std::string &value = iter->second; stream_ << value; } else { + std::cerr << "MISSING " << key << std::endl; FLATBUFFERS_ASSERT(false && "could not find key"); stream_ << key; } diff --git a/src/flatc_main.cpp b/src/flatc_main.cpp index 2a3bc98083de..c632fe27670d 100644 --- a/src/flatc_main.cpp +++ b/src/flatc_main.cpp @@ -79,6 +79,10 @@ int main(int argc, const char *argv[]) { flatbuffers::IDLOptions::kLua, "Generate Lua files for tables/structs", flatbuffers::GeneralMakeRule }, + { flatbuffers::GenerateRust, "-r", "--rust", "Rust", true, nullptr, + flatbuffers::IDLOptions::kRust, + "Generate Rust files for tables/structs", + flatbuffers::RustMakeRule }, { flatbuffers::GeneratePhp, nullptr, "--php", "PHP", true, nullptr, flatbuffers::IDLOptions::kPhp, "Generate PHP files for tables/structs", flatbuffers::GeneralMakeRule }, diff --git a/src/idl_gen_cpp.cpp b/src/idl_gen_cpp.cpp index df4c0ad925dc..b9bfacb02668 100644 --- a/src/idl_gen_cpp.cpp +++ b/src/idl_gen_cpp.cpp @@ -477,7 +477,8 @@ class CppGenerator : public BaseGenerator { std::string GenTypeBasic(const Type &type, bool user_facing_type) const { static const char * const ctypename[] = { // clang-format off - #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ #CTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_general.cpp 
b/src/idl_gen_general.cpp index f3ed3e4fce44..3245c586f51c 100644 --- a/src/idl_gen_general.cpp +++ b/src/idl_gen_general.cpp @@ -247,7 +247,7 @@ class GeneralGenerator : public BaseGenerator { // clang-format off static const char * const java_typename[] = { #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #JTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD @@ -255,7 +255,7 @@ class GeneralGenerator : public BaseGenerator { static const char * const csharp_typename[] = { #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #NTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_go.cpp b/src/idl_gen_go.cpp index c5767b479c04..4f20719bfaf7 100644 --- a/src/idl_gen_go.cpp +++ b/src/idl_gen_go.cpp @@ -690,7 +690,7 @@ static std::string GenTypeBasic(const Type &type) { static const char *ctypename[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #GTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_lobster.cpp b/src/idl_gen_lobster.cpp index 0487d17a7aa8..5f199e3a1c9c 100644 --- a/src/idl_gen_lobster.cpp +++ b/src/idl_gen_lobster.cpp @@ -81,7 +81,7 @@ class LobsterGenerator : public BaseGenerator { static const char *ctypename[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #PTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_lua.cpp b/src/idl_gen_lua.cpp index 862649181669..34408d316bb2 100644 --- a/src/idl_gen_lua.cpp +++ b/src/idl_gen_lua.cpp @@ -604,7 +604,7 @@ namespace lua { static const char *ctypename[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, 
GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #PTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_php.cpp b/src/idl_gen_php.cpp index 7d98e00dee97..5563f4bb593f 100644 --- a/src/idl_gen_php.cpp +++ b/src/idl_gen_php.cpp @@ -864,7 +864,7 @@ class PhpGenerator : public BaseGenerator { static const char *ctypename[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #NTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_python.cpp b/src/idl_gen_python.cpp index e000ada9a29a..109e203d462a 100644 --- a/src/idl_gen_python.cpp +++ b/src/idl_gen_python.cpp @@ -615,7 +615,7 @@ class PythonGenerator : public BaseGenerator { static const char *ctypename[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ #PTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_gen_rust.cpp b/src/idl_gen_rust.cpp new file mode 100644 index 000000000000..5c7fd5c6e5f6 --- /dev/null +++ b/src/idl_gen_rust.cpp @@ -0,0 +1,1782 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// independent from idl_parser, since this code is not needed for most clients + +#include "flatbuffers/code_generators.h" +#include "flatbuffers/flatbuffers.h" +#include "flatbuffers/idl.h" +#include "flatbuffers/util.h" + +namespace flatbuffers { + +static std::string GeneratedFileName(const std::string &path, + const std::string &file_name) { + return path + file_name + "_generated.rs"; +} + +// Convert a camelCaseIdentifier or CamelCaseIdentifier to a +// snake_case_indentifier. +std::string MakeSnakeCase(const std::string &in) { + std::string s; + for (size_t i = 0; i < in.length(); i++) { + if (islower(in[i])) { + s += static_cast(in[i]); + } else { + if (i > 0) { + s += '_'; + } + s += static_cast(tolower(in[i])); + } + } + return s; +} + +// Convert a string to all uppercase. +std::string MakeUpper(const std::string &in) { + std::string s; + for (size_t i = 0; i < in.length(); i++) { + s += static_cast(toupper(in[i])); + } + return s; +} + +// Encapsulate all logical field types in this enum. This allows us to write +// field logic based on type switches, instead of branches on the properties +// set on the Type. +enum class FullType { + Integer, + Float, + Bool, + + Struct, + Table, + + EnumKey, + UnionKey, + + UnionValue, + + String, // TODO(rw): bytestring? + VectorOfInteger, VectorOfFloat, VectorOfBool, VectorOfEnumKey, + VectorOfStruct, VectorOfTable, VectorOfString, VectorOfUnionValue, +}; + +// Convert a Type to a FullType (exhaustive). +FullType GetFullType(const Type &type) { + // N.B. The order of these conditionals matters for some types. 
+ + if (type.base_type == BASE_TYPE_STRING) { + return FullType::String; + } else if (type.base_type == BASE_TYPE_STRUCT) { + if (type.struct_def->fixed) { + return FullType::Struct; + } else { + return FullType::Table; + } + } else if (type.base_type == BASE_TYPE_VECTOR) { + switch (GetFullType(type.VectorType())) { + case FullType::Integer: { + return FullType::VectorOfInteger; + } + case FullType::Float: { + return FullType::VectorOfFloat; + } + case FullType::Bool: { + return FullType::VectorOfBool; + } + case FullType::Struct: { + return FullType::VectorOfStruct; + } + case FullType::Table: { + return FullType::VectorOfTable; + } + case FullType::String: { + return FullType::VectorOfString; + } + case FullType::EnumKey: { + return FullType::VectorOfEnumKey; + } + case FullType::UnionKey: + case FullType::UnionValue: { + FLATBUFFERS_ASSERT(false && "vectors of unions are unsupported"); + } + default: { + FLATBUFFERS_ASSERT(false && "vector of vectors are unsupported"); + } + } + } else if (type.enum_def != nullptr) { + if (type.enum_def->is_union) { + if (type.base_type == BASE_TYPE_UNION) { + return FullType::UnionValue; + } else if (IsInteger(type.base_type)) { + return FullType::UnionKey; + } else { + FLATBUFFERS_ASSERT(false && "unknown union field type"); + } + } else { + return FullType::EnumKey; + } + } else if (IsScalar(type.base_type)) { + if (IsBool(type.base_type)) { + return FullType::Bool; + } else if (IsInteger(type.base_type)) { + return FullType::Integer; + } else if (IsFloat(type.base_type)) { + return FullType::Float; + } else { + FLATBUFFERS_ASSERT(false && "unknown number type"); + } + } + + FLATBUFFERS_ASSERT(false && "completely unknown type"); + + // this is only to satisfy the compiler's return analysis. 
+ return FullType::Bool; +} + +namespace rust { + +class RustGenerator : public BaseGenerator { + public: + RustGenerator(const Parser &parser, const std::string &path, + const std::string &file_name) + : BaseGenerator(parser, path, file_name, "", "::"), + cur_name_space_(nullptr) { + const char *keywords[] = { + // currently-used keywords + "as", + "break", + "const", + "continue", + "crate", + "else", + "enum", + "extern", + "false", + "fn", + "for", + "if", + "impl", + "in", + "let", + "loop", + "match", + "mod", + "move", + "mut", + "pub", + "ref", + "return", + "Self", + "self", + "static", + "struct", + "super", + "trait", + "true", + "type", + "unsafe", + "use", + "where", + "while", + + // future possible keywords + "abstract", + "alignof", + "become", + "box", + "do", + "final", + "macro", + "offsetof", + "override", + "priv", + "proc", + "pure", + "sizeof", + "typeof", + "unsized", + "virtual", + "yield", + + // other terms we should not use + "std", + "usize", + "isize", + "u8", + "i8", + "u16", + "i16", + "u32", + "i32", + "u64", + "i64", + "u128", + "i128", + "f32", + "f64", + nullptr }; + for (auto kw = keywords; *kw; kw++) keywords_.insert(*kw); + } + + // Iterate through all definitions we haven't generated code for (enums, + // structs, and tables) and output them to a single file. + bool generate() { + code_.Clear(); + + assert(!cur_name_space_); + + // Generate all code in their namespaces, once, because Rust does not + // permit re-opening modules. + // + // TODO(rw): Use a set data structure to reduce namespace evaluations from + // O(n**2) to O(n). + for (auto it = parser_.namespaces_.begin(); it != parser_.namespaces_.end(); + ++it) { + const auto &ns = *it; + + // Generate code for all the enum declarations. 
+ for (auto it = parser_.enums_.vec.begin(); it != parser_.enums_.vec.end(); + ++it) { + const auto &enum_def = **it; + if (enum_def.defined_namespace != ns) { continue; } + if (!enum_def.generated) { + SetNameSpace(enum_def.defined_namespace); + GenEnum(enum_def); + } + } + + // Generate code for all structs. + for (auto it = parser_.structs_.vec.begin(); + it != parser_.structs_.vec.end(); ++it) { + const auto &struct_def = **it; + if (struct_def.defined_namespace != ns) { continue; } + if (struct_def.fixed && !struct_def.generated) { + SetNameSpace(struct_def.defined_namespace); + GenStruct(struct_def); + } + } + + // Generate code for all tables. + for (auto it = parser_.structs_.vec.begin(); + it != parser_.structs_.vec.end(); ++it) { + const auto &struct_def = **it; + if (struct_def.defined_namespace != ns) { continue; } + if (!struct_def.fixed && !struct_def.generated) { + SetNameSpace(struct_def.defined_namespace); + GenTable(struct_def); + } + } + + // Generate global helper functions. + if (parser_.root_struct_def_) { + auto &struct_def = *parser_.root_struct_def_; + if (struct_def.defined_namespace != ns) { continue; } + SetNameSpace(struct_def.defined_namespace); + GenRootTableFuncs(struct_def); + } + } + if (cur_name_space_) SetNameSpace(nullptr); + + const auto file_path = GeneratedFileName(path_, file_name_); + const auto final_code = code_.ToString(); + return SaveFile(file_path.c_str(), final_code, false); + } + + private: + CodeWriter code_; + + std::set keywords_; + + // This tracks the current namespace so we can insert namespace declarations. + const Namespace *cur_name_space_; + + const Namespace *CurrentNameSpace() const { return cur_name_space_; } + + // Determine if a Type needs a lifetime template parameter when used in Rust. 
+ bool TypeNeedsLifetimeParameter(const Type &type) const { + switch (GetFullType(type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: + case FullType::Table: + case FullType::EnumKey: + case FullType::UnionKey: + case FullType::Struct: { return false; } + default: { return true; } + } + } + + // Determine if a Type needs to be copied (for endian safety) when used in a + // Struct. + bool StructMemberAccessNeedsCopy(const Type &type) const { + switch (GetFullType(type)) { + case FullType::Integer: // requires endian swap + case FullType::Float: // requires endian swap + case FullType::Bool: // no endian-swap, but do the copy for UX consistency + case FullType::EnumKey: { return true; } // requires endian swap + case FullType::Struct: { return false; } // no endian swap + default: { + // logic error: no other types can be struct members. + FLATBUFFERS_ASSERT(false && "invalid struct member type"); + return false; // only to satisfy compiler's return analysis + } + } + } + + std::string EscapeKeyword(const std::string &name) const { + return keywords_.find(name) == keywords_.end() ? name : name + "_"; + } + + std::string Name(const Definition &def) const { + return EscapeKeyword(def.name); + } + + std::string Name(const EnumVal &ev) const { return EscapeKeyword(ev.name); } + + std::string WrapInNameSpace(const Definition &def) const { + return WrapInNameSpace(def.defined_namespace, Name(def)); + } + std::string WrapInNameSpace(const Namespace *ns, + const std::string &name) const { + if (CurrentNameSpace() == ns) return name; + std::string prefix = GetRelativeNamespaceTraversal(CurrentNameSpace(), ns); + return prefix + name; + } + + // Determine the namespace traversal needed from the Rust crate root. + // This may be useful in the future for referring to included files, but is + // currently unused. 
+ std::string GetAbsoluteNamespaceTraversal(const Namespace *dst) const { + std::stringstream stream; + + stream << "::"; + for (auto d = dst->components.begin(); d != dst->components.end(); d++) { + stream << MakeSnakeCase(*d) + "::"; + } + return stream.str(); + } + + // Determine the relative namespace traversal needed to reference one + // namespace from another namespace. This is useful because it does not force + // the user to have a particular file layout. (If we madk users use absolute + // namespace paths, that may require users to organize their crates in a + // particular way.) + std::string GetRelativeNamespaceTraversal(const Namespace *src, + const Namespace *dst) const { + // calculate the path needed to reference dst from src. + // example: f(A::B::C, A::B::C) -> (none) + // example: f(A::B::C, A::B) -> super:: + // example: f(A::B::C, A::B::D) -> super::D + // example: f(A::B::C, A) -> super::super:: + // example: f(A::B::C, D) -> super::super::super::D + // example: f(A::B::C, D::E) -> super::super::super::D::E + // example: f(A, D::E) -> super::D::E + // does not include leaf object (typically a struct type). + + size_t i = 0; + std::stringstream stream; + + auto s = src->components.begin(); + auto d = dst->components.begin(); + while(true) { + if (s == src->components.end()) { break; } + if (d == dst->components.end()) { break; } + if (*s != *d) { break; } + s++; + d++; + i++; + } + + for (; s != src->components.end(); s++) { + stream << "super::"; + } + for (; d != dst->components.end(); d++) { + stream << MakeSnakeCase(*d) + "::"; + } + return stream.str(); + } + + // Generate a comment from the schema. + void GenComment(const std::vector &dc, const char *prefix = "") { + std::string text; + ::flatbuffers::GenComment(dc, &text, nullptr, prefix); + code_ += text + "\\"; + } + + // Return a Rust type from the table in idl.h. 
+ std::string GetTypeBasic(const Type &type) const { + switch (GetFullType(type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: + case FullType::EnumKey: + case FullType::UnionKey: { break; } + default: { FLATBUFFERS_ASSERT(false && "incorrect type given");} + } + + // clang-format off + static const char * const ctypename[] = { + #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ + #RTYPE, + FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) + #undef FLATBUFFERS_TD + // clang-format on + }; + + if (type.enum_def) { return WrapInNameSpace(*type.enum_def); } + return ctypename[type.base_type]; + } + + // Look up the native type for an enum. This will always be an integer like + // u8, i32, etc. + std::string GetEnumTypeForDecl(const Type &type) { + const auto ft = GetFullType(type); + if (!(ft == FullType::EnumKey || ft == FullType::UnionKey)) { + FLATBUFFERS_ASSERT(false && "precondition failed in GetEnumTypeForDecl"); + } + + static const char *ctypename[] = { + // clang-format off + #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \ + RTYPE) \ + #RTYPE, + FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) + #undef FLATBUFFERS_TD + // clang-format on + }; + + // Enums can be bools, but their Rust representation must be a u8, as used + // in the repr attribute (#[repr(bool)] is an invalid attribute). + if (type.base_type == BASE_TYPE_BOOL) return "u8"; + return ctypename[type.base_type]; + } + + // Return a Rust type for any type (scalar, table, struct) specifically for + // using a FlatBuffer. 
+ std::string GetTypeGet(const Type &type) const { + switch (GetFullType(type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: + case FullType::EnumKey: + case FullType::UnionKey: { + return GetTypeBasic(type); } + case FullType::Table: { + return WrapInNameSpace(type.struct_def->defined_namespace, + type.struct_def->name) + "<'a>"; } + default: { + return WrapInNameSpace(type.struct_def->defined_namespace, + type.struct_def->name); } + } + } + + std::string GetEnumValUse(const EnumDef &enum_def, + const EnumVal &enum_val) const { + return Name(enum_def) + "::" + Name(enum_val); + } + + // Generate an enum declaration, + // an enum string lookup table, + // an enum match function, + // and an enum array of values + void GenEnum(const EnumDef &enum_def) { + code_.SetValue("ENUM_NAME", Name(enum_def)); + code_.SetValue("BASE_TYPE", GetEnumTypeForDecl(enum_def.underlying_type)); + code_.SetValue("SEP", ""); + + GenComment(enum_def.doc_comment); + code_ += "#[allow(non_camel_case_types)]"; + code_ += "#[repr({{BASE_TYPE}})]"; + code_ += "#[derive(Clone, Copy, PartialEq, Debug)]"; + code_ += "pub enum " + Name(enum_def) + " {"; + + int64_t anyv = 0; + const EnumVal *minv = nullptr, *maxv = nullptr; + for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end(); + ++it) { + const auto &ev = **it; + + GenComment(ev.doc_comment, " "); + code_.SetValue("KEY", Name(ev)); + code_.SetValue("VALUE", NumToString(ev.value)); + code_ += "{{SEP}} {{KEY}} = {{VALUE}}\\"; + code_.SetValue("SEP", ",\n"); + + minv = !minv || minv->value > ev.value ? &ev : minv; + maxv = !maxv || maxv->value < ev.value ? 
&ev : maxv; + anyv |= ev.value; + } + + code_ += ""; + code_ += "}"; + code_ += ""; + + code_.SetValue("ENUM_NAME", Name(enum_def)); + code_.SetValue("ENUM_NAME_SNAKE", MakeSnakeCase(Name(enum_def))); + code_.SetValue("ENUM_NAME_CAPS", MakeUpper(MakeSnakeCase(Name(enum_def)))); + code_.SetValue("ENUM_MIN_BASE_VALUE", NumToString(minv->value)); + code_.SetValue("ENUM_MAX_BASE_VALUE", NumToString(maxv->value)); + + // Generate enum constants, and impls for Follow, EndianScalar, and Push. + code_ += "const ENUM_MIN_{{ENUM_NAME_CAPS}}: {{BASE_TYPE}} = \\"; + code_ += "{{ENUM_MIN_BASE_VALUE}};"; + code_ += "const ENUM_MAX_{{ENUM_NAME_CAPS}}: {{BASE_TYPE}} = \\"; + code_ += "{{ENUM_MAX_BASE_VALUE}};"; + code_ += ""; + code_ += "impl<'a> flatbuffers::Follow<'a> for {{ENUM_NAME}} {"; + code_ += " type Inner = Self;"; + code_ += " #[inline]"; + code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {"; + code_ += " flatbuffers::read_scalar_at::(buf, loc)"; + code_ += " }"; + code_ += "}"; + code_ += ""; + code_ += "impl flatbuffers::EndianScalar for {{ENUM_NAME}} {"; + code_ += " #[inline]"; + code_ += " fn to_little_endian(self) -> Self {"; + code_ += " let n = {{BASE_TYPE}}::to_le(self as {{BASE_TYPE}});"; + code_ += " let p = &n as *const {{BASE_TYPE}} as *const {{ENUM_NAME}};"; + code_ += " unsafe { *p }"; + code_ += " }"; + code_ += " #[inline]"; + code_ += " fn from_little_endian(self) -> Self {"; + code_ += " let n = {{BASE_TYPE}}::from_le(self as {{BASE_TYPE}});"; + code_ += " let p = &n as *const {{BASE_TYPE}} as *const {{ENUM_NAME}};"; + code_ += " unsafe { *p }"; + code_ += " }"; + code_ += "}"; + code_ += ""; + code_ += "impl flatbuffers::Push for {{ENUM_NAME}} {"; + code_ += " type Output = {{ENUM_NAME}};"; + code_ += " #[inline]"; + code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {"; + code_ += " flatbuffers::emplace_scalar::<{{ENUM_NAME}}>" + "(dst, *self);"; + code_ += " }"; + code_ += "}"; + code_ += ""; + + // Generate an array of all 
enumeration values. + auto num_fields = NumToString(enum_def.vals.vec.size()); + code_ += "#[allow(non_camel_case_types)]"; + code_ += "const ENUM_VALUES_{{ENUM_NAME_CAPS}}:[{{ENUM_NAME}}; " + + num_fields + "] = ["; + for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end(); + ++it) { + const auto &ev = **it; + auto value = GetEnumValUse(enum_def, ev); + auto suffix = *it != enum_def.vals.vec.back() ? "," : ""; + code_ += " " + value + suffix; + } + code_ += "];"; + code_ += ""; + + // Generate a string table for enum values. + // Problem is, if values are very sparse that could generate really big + // tables. Ideally in that case we generate a map lookup instead, but for + // the moment we simply don't output a table at all. + auto range = + enum_def.vals.vec.back()->value - enum_def.vals.vec.front()->value + 1; + // Average distance between values above which we consider a table + // "too sparse". Change at will. + static const int kMaxSparseness = 5; + if (range / static_cast(enum_def.vals.vec.size()) < + kMaxSparseness) { + code_ += "#[allow(non_camel_case_types)]"; + code_ += "const ENUM_NAMES_{{ENUM_NAME_CAPS}}:[&'static str; " + + NumToString(range) + "] = ["; + + auto val = enum_def.vals.vec.front()->value; + for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end(); + ++it) { + const auto &ev = **it; + while (val++ != ev.value) { code_ += " \"\","; } + auto suffix = *it != enum_def.vals.vec.back() ? 
"," : ""; + code_ += " \"" + Name(ev) + "\"" + suffix; + } + code_ += "];"; + code_ += ""; + + code_ += "pub fn enum_name_{{ENUM_NAME_SNAKE}}(e: {{ENUM_NAME}}) -> " + "&'static str {"; + + code_ += " let index: usize = e as usize\\"; + if (enum_def.vals.vec.front()->value) { + auto vals = GetEnumValUse(enum_def, *enum_def.vals.vec.front()); + code_ += " - " + vals + " as usize\\"; + } + code_ += ";"; + + code_ += " ENUM_NAMES_{{ENUM_NAME_CAPS}}[index]"; + code_ += "}"; + code_ += ""; + } + + if (enum_def.is_union) { + // Generate tyoesafe offset(s) for unions + code_.SetValue("NAME", Name(enum_def)); + code_.SetValue("UNION_OFFSET_NAME", Name(enum_def) + "UnionTableOffset"); + code_ += "pub struct {{UNION_OFFSET_NAME}} {}"; + } + } + + // Generates a value with optionally a cast applied if the field has a + // different underlying type from its interface type (currently only the + // case for enums. "from" specify the direction, true meaning from the + // underlying type to the interface type. + std::string GenUnderlyingCast(const FieldDef &field, bool from, + const std::string &val) { + switch (GetFullType(field.value.type)) { + case FullType::EnumKey: { return val; } + default: {} + } + if (from && field.value.type.base_type == BASE_TYPE_BOOL) { + return val + " != 0"; + } else if ((field.value.type.enum_def && + IsScalar(field.value.type.base_type)) || + field.value.type.base_type == BASE_TYPE_BOOL) { + // TODO(rw): handle enums in other namespaces + if (from) { + //return "unsafe { ::std::mem::transmute(" + val + ") }"; + return val; + } else { + return val + " as " + GetTypeBasic(field.value.type); + } + } else { + return val; + } + } + + std::string GetFieldOffsetName(const FieldDef &field) { + return "VT_" + MakeUpper(Name(field)); + } + + std::string GetDefaultConstant(const FieldDef &field) { + return field.value.type.base_type == BASE_TYPE_FLOAT + ? 
field.value.constant + "" + : field.value.constant; + } + + std::string GetDefaultScalarValue(const FieldDef &field) { + switch (GetFullType(field.value.type)) { + case FullType::Integer: { return GetDefaultConstant(field); } + case FullType::Float: { return GetDefaultConstant(field); } + case FullType::Bool: { + return field.value.constant == "0" ? "false" : "true"; + } + case FullType::UnionKey: + case FullType::EnumKey: { + auto ev = field.value.type.enum_def->ReverseLookup( + StringToInt(field.value.constant.c_str()), false); + assert(ev); + return WrapInNameSpace(field.value.type.enum_def->defined_namespace, + GetEnumValUse(*field.value.type.enum_def, *ev)); + } + + // All pointer-ish types have a default value of None, because they are + // wrapped in Option.. + default: { return "None"; } + } + } + + // Create the return type for fields in the *BuilderArgs structs that are + // used to create Tables. + // + // Note: we could make all inputs to the BuilderArgs be an Option, as well + // as all outputs. But, the UX of Flatbuffers is that the user doesn't get to + // know if the value is default or not, because there are three ways to + // return a default value: + // 1) return a stored value that happens to be the default, + // 2) return a hardcoded value because the relevant vtable field is not in + // the vtable, or + // 3) return a hardcoded value because the vtable field value is set to zero. 
+ std::string TableBuilderArgsDefnType(const FieldDef &field, + const std::string lifetime) { + const Type& type = field.value.type; + + switch (GetFullType(type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: { + const auto typname = GetTypeBasic(type); + return typname; + } + case FullType::Struct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option<&" + lifetime + " " + typname + ">"; + } + case FullType::Table: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option>>"; + } + case FullType::String: { + return "Option>"; + } + case FullType::EnumKey: + case FullType::UnionKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return typname; + } + case FullType::UnionValue: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "Option>"; + } + + case FullType::VectorOfInteger: + case FullType::VectorOfFloat: { + const auto typname = GetTypeBasic(type.VectorType()); + return "Option>>"; + } + case FullType::VectorOfBool: { + return "Option>>"; + } + case FullType::VectorOfEnumKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "Option>>"; + } + case FullType::VectorOfStruct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option>>"; + } + case FullType::VectorOfTable: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option>>>>"; + } + case FullType::VectorOfString: { + return "Option>>>"; + } + case FullType::VectorOfUnionValue: { + const auto typname = WrapInNameSpace(*type.enum_def) + \ + "UnionTableOffset"; + return "Option>>>"; + } + } + } + + std::string TableBuilderArgsDefaultValue(const FieldDef &field) { + return GetDefaultScalarValue(field); + } + std::string TableBuilderAddFuncDefaultValue(const FieldDef &field) { + switch (GetFullType(field.value.type)) { + case FullType::UnionKey: + case FullType::EnumKey: { + const std::string basetype = GetTypeBasic(field.value.type); + return 
GetDefaultScalarValue(field); + } + + default: { return GetDefaultScalarValue(field); } + } + } + + std::string TableBuilderArgsAddFuncType(const FieldDef &field, + const std::string lifetime) { + const Type& type = field.value.type; + + switch (GetFullType(field.value.type)) { + case FullType::VectorOfStruct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "flatbuffers::WIPOffset>"; + } + case FullType::VectorOfTable: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "flatbuffers::WIPOffset>>>"; + } + case FullType::VectorOfInteger: + case FullType::VectorOfFloat: { + const auto typname = GetTypeBasic(type.VectorType()); + return "flatbuffers::WIPOffset>"; + } + case FullType::VectorOfBool: { + return "flatbuffers::WIPOffset>"; + } + case FullType::VectorOfString: { + return "flatbuffers::WIPOffset>>"; + } + case FullType::VectorOfEnumKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "flatbuffers::WIPOffset>"; + } + case FullType::VectorOfUnionValue: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "flatbuffers::WIPOffset>>"; + } + case FullType::EnumKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return typname; + } + case FullType::Struct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "&" + lifetime + " " + typname + ""; + } + case FullType::Table: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "flatbuffers::WIPOffset<" + typname + "<" + lifetime + ">>"; + } + case FullType::Integer: + case FullType::Float: { + const auto typname = GetTypeBasic(type); + return typname; + } + case FullType::Bool: { + return "bool"; + } + case FullType::String: { + return "flatbuffers::WIPOffset<&" + lifetime + " str>"; + } + case FullType::UnionKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return typname; + } + case FullType::UnionValue: { + const auto typname = WrapInNameSpace(*type.enum_def); + return 
"flatbuffers::WIPOffset"; + } + } + } + + std::string TableBuilderArgsAddFuncBody(const FieldDef &field) { + const Type& type = field.value.type; + + switch (GetFullType(field.value.type)) { + case FullType::Integer: + case FullType::Float: { + const auto typname = GetTypeBasic(field.value.type); + return "self.fbb_.push_slot::<" + typname + ">"; + } + case FullType::Bool: { + return "self.fbb_.push_slot::"; + } + + case FullType::EnumKey: + case FullType::UnionKey: { + const auto underlying_typname = GetTypeBasic(type); + return "self.fbb_.push_slot::<" + underlying_typname + ">"; + } + + case FullType::Struct: { + const std::string typname = WrapInNameSpace(*type.struct_def); + return "self.fbb_.push_slot_always::<&" + typname + ">"; + } + case FullType::Table: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "self.fbb_.push_slot_always::>"; + } + + case FullType::UnionValue: + case FullType::String: + case FullType::VectorOfInteger: + case FullType::VectorOfFloat: + case FullType::VectorOfBool: + case FullType::VectorOfEnumKey: + case FullType::VectorOfStruct: + case FullType::VectorOfTable: + case FullType::VectorOfString: + case FullType::VectorOfUnionValue: { + return "self.fbb_.push_slot_always::>"; + } + } + } + + std::string GenTableAccessorFuncReturnType(const FieldDef &field, + const std::string lifetime) { + const Type& type = field.value.type; + + switch (GetFullType(field.value.type)) { + case FullType::Integer: + case FullType::Float: { + const auto typname = GetTypeBasic(type); + return typname; + } + case FullType::Bool: { + return "bool"; + } + case FullType::Struct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option<&" + lifetime + " " + typname + ">"; + } + case FullType::Table: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option<" + typname + "<" + lifetime + ">>"; + } + case FullType::EnumKey: + case FullType::UnionKey: { + const auto typname = 
WrapInNameSpace(*type.enum_def); + return typname; + } + + case FullType::UnionValue: { + return "Option>"; + } + case FullType::String: { + return "Option<&" + lifetime + " str>"; + } + case FullType::VectorOfInteger: + case FullType::VectorOfFloat: { + const auto typname = GetTypeBasic(type.VectorType()); + if (IsOneByte(type.VectorType().base_type)) { + return "Option<&" + lifetime + " [" + typname + "]>"; + } + return "Option>"; + } + case FullType::VectorOfBool: { + return "Option<&" + lifetime + " [bool]>"; + } + case FullType::VectorOfEnumKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "Option>"; + } + case FullType::VectorOfStruct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option<&" + lifetime + " [" + typname + "]>"; + } + case FullType::VectorOfTable: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "Option>>>"; + } + case FullType::VectorOfString: { + return "Option>>"; + } + case FullType::VectorOfUnionValue: { + FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported"); + // TODO(rw): when we do support these, we should consider using the + // Into trait to convert tables to typesafe union values. 
+ return "INVALID_CODE_GENERATION"; // for return analysis + } + } + } + + std::string GenTableAccessorFuncBody(const FieldDef &field, + const std::string lifetime, + const std::string offset_prefix) { + const std::string offset_name = offset_prefix + "::" + \ + GetFieldOffsetName(field); + const Type& type = field.value.type; + + switch (GetFullType(field.value.type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: { + const auto typname = GetTypeBasic(type); + const std::string default_value = GetDefaultScalarValue(field); + return "self._tab.get::<" + typname + ">(" + offset_name + ", Some(" + \ + default_value + ")).unwrap()"; + } + case FullType::Struct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "self._tab.get::<" + typname + ">(" + offset_name + ", None)"; + } + case FullType::Table: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "self._tab.get::>>(" + offset_name + ", None)"; + } + case FullType::UnionValue: { + return "self._tab.get::>>(" + offset_name + \ + ", None)"; + } + case FullType::UnionKey: + case FullType::EnumKey: { + const std::string underlying_typname = GetTypeBasic(type); + const std::string typname = WrapInNameSpace(*type.enum_def); + const std::string default_value = GetDefaultScalarValue(field); + return "self._tab.get::<" + typname + ">(" + offset_name + \ + ", Some(" + default_value + ")).unwrap()"; + } + case FullType::String: { + return "self._tab.get::>(" + \ + offset_name + ", None)"; + } + + case FullType::VectorOfInteger: + case FullType::VectorOfFloat: { + const auto typname = GetTypeBasic(type.VectorType()); + std::string s = "self._tab.get::>>(" + offset_name + ", None)"; + // single-byte values are safe to slice + if (IsOneByte(type.VectorType().base_type)) { + s += ".map(|v| v.safe_slice())"; + } + return s; + } + case FullType::VectorOfBool: { + return "self._tab.get::>>(" + \ + offset_name + ", None).map(|v| v.safe_slice())"; + } + case 
FullType::VectorOfEnumKey: { + const auto typname = WrapInNameSpace(*type.enum_def); + return "self._tab.get::>>(" + \ + offset_name + ", None)"; + } + case FullType::VectorOfStruct: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "self._tab.get::>>(" + \ + offset_name + ", None).map(|v| v.safe_slice() )"; + } + case FullType::VectorOfTable: { + const auto typname = WrapInNameSpace(*type.struct_def); + return "self._tab.get::>>>>(" + offset_name + ", None)"; + } + case FullType::VectorOfString: { + return "self._tab.get::>>>(" + offset_name + ", None)"; + } + case FullType::VectorOfUnionValue: { + FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported"); + return "INVALID_CODE_GENERATION"; // for return analysis + } + } + } + + bool TableFieldReturnsOption(const Type& type) { + switch (GetFullType(type)) { + case FullType::Integer: + case FullType::Float: + case FullType::Bool: + case FullType::EnumKey: + case FullType::UnionKey: + return false; + default: return true; + } + } + + // Generate an accessor struct, builder struct, and create function for a + // table. + void GenTable(const StructDef &struct_def) { + GenComment(struct_def.doc_comment); + + code_.SetValue("STRUCT_NAME", Name(struct_def)); + code_.SetValue("OFFSET_TYPELABEL", Name(struct_def) + "Offset"); + code_.SetValue("STRUCT_NAME_SNAKECASE", MakeSnakeCase(Name(struct_def))); + + // Generate an offset type, the base type, the Follow impl, and the + // init_from_table impl. 
+ code_ += "pub enum {{OFFSET_TYPELABEL}} {}"; + code_ += "#[derive(Copy, Clone, Debug, PartialEq)]"; + code_ += ""; + code_ += "pub struct {{STRUCT_NAME}}<'a> {"; + code_ += " pub _tab: flatbuffers::Table<'a>,"; + code_ += " _phantom: PhantomData<&'a ()>,"; + code_ += "}"; + code_ += ""; + code_ += "impl<'a> flatbuffers::Follow<'a> for {{STRUCT_NAME}}<'a> {"; + code_ += " type Inner = {{STRUCT_NAME}}<'a>;"; + code_ += " #[inline]"; + code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {"; + code_ += " Self {"; + code_ += " _tab: flatbuffers::Table { buf: buf, loc: loc },"; + code_ += " _phantom: PhantomData,"; + code_ += " }"; + code_ += " }"; + code_ += "}"; + code_ += ""; + code_ += "impl<'a> {{STRUCT_NAME}}<'a> {"; + code_ += " #[inline]"; + code_ += " pub fn init_from_table(table: flatbuffers::Table<'a>) -> " + "Self {"; + code_ += " {{STRUCT_NAME}} {"; + code_ += " _tab: table,"; + code_ += " _phantom: PhantomData,"; + code_ += " }"; + code_ += " }"; + + // Generate a convenient create* function that uses the above builder + // to create a table in one function call. + code_.SetValue("MAYBE_US", + struct_def.fields.vec.size() == 0 ? "_" : ""); + code_ += " #[allow(unused_mut)]"; + code_ += " pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>("; + code_ += " _fbb: " + "&'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,"; + code_ += " {{MAYBE_US}}args: &'args {{STRUCT_NAME}}Args<'args>) -> " + "flatbuffers::WIPOffset<{{STRUCT_NAME}}<'bldr>> {"; + + code_ += " let mut builder = {{STRUCT_NAME}}Builder::new(_fbb);"; + for (size_t size = struct_def.sortbysize ? 
sizeof(largest_scalar_t) : 1; + size; size /= 2) { + for (auto it = struct_def.fields.vec.rbegin(); + it != struct_def.fields.vec.rend(); ++it) { + const auto &field = **it; + // TODO(rw): fully understand this sortbysize usage + if (!field.deprecated && (!struct_def.sortbysize || + size == SizeOf(field.value.type.base_type))) { + code_.SetValue("FIELD_NAME", Name(field)); + if (TableFieldReturnsOption(field.value.type)) { + code_ += " if let Some(x) = args.{{FIELD_NAME}} " + "{ builder.add_{{FIELD_NAME}}(x); }"; + } else { + code_ += " builder.add_{{FIELD_NAME}}(args.{{FIELD_NAME}});"; + } + } + } + } + code_ += " builder.finish()"; + code_ += " }"; + code_ += ""; + + // Generate field id constants. + if (struct_def.fields.vec.size() > 0) { + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (field.deprecated) { + // Deprecated fields won't be accessible. + continue; + } + + code_.SetValue("OFFSET_NAME", GetFieldOffsetName(field)); + code_.SetValue("OFFSET_VALUE", NumToString(field.value.offset)); + code_ += " pub const {{OFFSET_NAME}}: flatbuffers::VOffsetT = " + "{{OFFSET_VALUE}};"; + } + code_ += ""; + } + + // Generate the accessors. Each has one of two forms: + // + // If a value can be None: + // pub fn name(&'a self) -> Option { + // self._tab.get::(offset, defaultval) + // } + // + // If a value is always Some: + // pub fn name(&'a self) -> user_facing_type { + // self._tab.get::(offset, defaultval).unwrap() + // } + const std::string offset_prefix = Name(struct_def); + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (field.deprecated) { + // Deprecated fields won't be accessible. 
+ continue; + } + + code_.SetValue("FIELD_NAME", Name(field)); + code_.SetValue("RETURN_TYPE", + GenTableAccessorFuncReturnType(field, "'a")); + code_.SetValue("FUNC_BODY", + GenTableAccessorFuncBody(field, "'a", offset_prefix)); + + GenComment(field.doc_comment, " "); + code_ += " #[inline]"; + code_ += " pub fn {{FIELD_NAME}}(&'a self) -> {{RETURN_TYPE}} {"; + code_ += " {{FUNC_BODY}}"; + code_ += " }"; + + // Generate a comparison function for this field if it is a key. + if (field.key) { + GenKeyFieldMethods(field); + } + + // Generate a nested flatbuffer field, if applicable. + auto nested = field.attributes.Lookup("nested_flatbuffer"); + if (nested) { + std::string qualified_name = nested->constant; + auto nested_root = parser_.LookupStruct(nested->constant); + if (nested_root == nullptr) { + qualified_name = parser_.current_namespace_->GetFullyQualifiedName( + nested->constant); + nested_root = parser_.LookupStruct(qualified_name); + } + FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser. 
+ (void)nested_root; + + code_.SetValue("OFFSET_NAME", + offset_prefix + "::" + GetFieldOffsetName(field)); + code_ += " pub fn {{FIELD_NAME}}_nested_flatbuffer(&'a self) -> " + " Option<{{STRUCT_NAME}}<'a>> {"; + code_ += " match self.{{FIELD_NAME}}() {"; + code_ += " None => { None }"; + code_ += " Some(data) => {"; + code_ += " use self::flatbuffers::Follow;"; + code_ += " Some(>>::follow(data, 0))"; + code_ += " },"; + code_ += " }"; + code_ += " }"; + } + } + + // Explicit specializations for union accessors + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (field.deprecated || field.value.type.base_type != BASE_TYPE_UNION) { + continue; + } + + auto u = field.value.type.enum_def; + if (u->uses_type_aliases) continue; + + code_.SetValue("FIELD_NAME", Name(field)); + + for (auto u_it = u->vals.vec.begin(); u_it != u->vals.vec.end(); ++u_it) { + auto &ev = **u_it; + if (ev.union_type.base_type == BASE_TYPE_NONE) { continue; } + + auto table_init_type = WrapInNameSpace( + ev.union_type.struct_def->defined_namespace, + ev.union_type.struct_def->name); + + code_.SetValue("U_ELEMENT_ENUM_TYPE", + WrapInNameSpace(u->defined_namespace, GetEnumValUse(*u, ev))); + code_.SetValue("U_ELEMENT_TABLE_TYPE", table_init_type); + code_.SetValue("U_ELEMENT_NAME", MakeSnakeCase(Name(ev))); + + code_ += "#[inline]"; + code_ += "#[allow(non_snake_case)]"; + code_ += "pub fn {{FIELD_NAME}}_as_{{U_ELEMENT_NAME}}(&'a self) -> " + "Option<{{U_ELEMENT_TABLE_TYPE}}> {"; + code_ += " if self.{{FIELD_NAME}}_type() == {{U_ELEMENT_ENUM_TYPE}} {"; + code_ += " self.{{FIELD_NAME}}().map(|u| " + "{{U_ELEMENT_TABLE_TYPE}}::init_from_table(u))"; + code_ += " } else {"; + code_ += " None"; + code_ += " }"; + code_ += "}"; + code_ += ""; + } + } + + code_ += "}"; // End of table impl. 
+ code_ += ""; + + // Generate an args struct: + code_ += "pub struct {{STRUCT_NAME}}Args<'a> {"; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (!field.deprecated) { + code_.SetValue("PARAM_NAME", Name(field)); + code_.SetValue("PARAM_TYPE", TableBuilderArgsDefnType(field, "'a ")); + code_ += " pub {{PARAM_NAME}}: {{PARAM_TYPE}},"; + } + } + code_ += " pub _phantom: PhantomData<&'a ()>, // pub for default trait"; + code_ += "}"; + + // Generate an impl of Default for the *Args type: + code_ += "impl<'a> Default for {{STRUCT_NAME}}Args<'a> {"; + code_ += " fn default() -> Self {"; + code_ += " {{STRUCT_NAME}}Args {"; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (!field.deprecated) { + code_.SetValue("PARAM_VALUE", TableBuilderArgsDefaultValue(field)); + //code_.SetValue("PARAM_VALUE", "None"); + if (field.required) { + code_ += " // required"; + } + code_.SetValue("PARAM_NAME", Name(field)); + code_ += " {{PARAM_NAME}}: {{PARAM_VALUE}},"; + } + } + code_ += " _phantom: PhantomData,"; + code_ += " }"; + code_ += " }"; + code_ += "}"; + + // Generate a builder struct: + code_ += "pub struct {{STRUCT_NAME}}Builder<'a: 'b, 'b> {"; + code_ += " fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,"; + code_ += " start_: flatbuffers::WIPOffset<" + "flatbuffers::TableUnfinishedWIPOffset>,"; + code_ += "}"; + + // Generate builder functions: + code_ += "impl<'a: 'b, 'b> {{STRUCT_NAME}}Builder<'a, 'b> {"; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (!field.deprecated) { + const bool is_scalar = IsScalar(field.value.type.base_type); + + std::string offset = GetFieldOffsetName(field); + std::string name = GenUnderlyingCast(field, false, Name(field)); + std::string value = GetDefaultScalarValue(field); + + // Generate functions to add data, 
which take one of two forms. + // + // If a value has a default: + // fn add_x(x_: type) { + // fbb_.push_slot::(offset, x_, Some(default)); + // } + // + // If a value does not have a default: + // fn add_x(x_: type) { + // fbb_.push_slot_always::(offset, x_); + // } + code_.SetValue("FIELD_NAME", Name(field)); + code_.SetValue("FIELD_OFFSET", Name(struct_def) + "::" + offset); + code_.SetValue("FIELD_TYPE", TableBuilderArgsAddFuncType(field, "'b ")); + code_.SetValue("FUNC_BODY", TableBuilderArgsAddFuncBody(field)); + code_ += " #[inline]"; + code_ += " pub fn add_{{FIELD_NAME}}(&mut self, {{FIELD_NAME}}: " + "{{FIELD_TYPE}}) {"; + if (is_scalar) { + code_.SetValue("FIELD_DEFAULT_VALUE", + TableBuilderAddFuncDefaultValue(field)); + code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}}, " + "{{FIELD_DEFAULT_VALUE}});"; + } else { + code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}});"; + } + code_ += " }"; + } + } + + // Struct initializer (all fields required); + code_ += + " pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> " + "{{STRUCT_NAME}}Builder<'a, 'b> {"; + code_.SetValue("NUM_FIELDS", NumToString(struct_def.fields.vec.size())); + code_ += " let start = _fbb.start_table();"; + code_ += " {{STRUCT_NAME}}Builder {"; + code_ += " fbb_: _fbb,"; + code_ += " start_: start,"; + code_ += " }"; + code_ += " }"; + + // finish() function. 
+ code_ += " pub fn finish(self) -> " + "flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>> {"; + code_ += " let o = self.fbb_.end_table(self.start_);"; + + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (!field.deprecated && field.required) { + code_.SetValue("FIELD_NAME", MakeSnakeCase(Name(field))); + code_.SetValue("OFFSET_NAME", GetFieldOffsetName(field)); + code_ += " self.fbb_.required(o, {{STRUCT_NAME}}::{{OFFSET_NAME}}," + "\"{{FIELD_NAME}}\");"; + } + } + code_ += " flatbuffers::WIPOffset::new(o.value())"; + code_ += " }"; + code_ += "}"; + code_ += ""; + } + + // Generate functions to compare tables and structs by key. This function + // must only be called if the field key is defined. + void GenKeyFieldMethods(const FieldDef &field) { + FLATBUFFERS_ASSERT(field.key); + const bool is_string = (field.value.type.base_type == BASE_TYPE_STRING); + + if (is_string) { + code_.SetValue("KEY_TYPE", "Option<&str>"); + } else { + FLATBUFFERS_ASSERT(IsScalar(field.value.type.base_type)); + auto type = GetTypeBasic(field.value.type); + if (parser_.opts.scoped_enums && field.value.type.enum_def && + IsScalar(field.value.type.base_type)) { + type = GetTypeGet(field.value.type); + } + code_.SetValue("KEY_TYPE", type); + } + + code_ += " pub fn key_compare_less_than(&self, o: &{{STRUCT_NAME}}) -> " + " bool {"; + code_ += " self.{{FIELD_NAME}}() < o.{{FIELD_NAME}}()"; + code_ += " }"; + code_ += ""; + code_ += " pub fn key_compare_with_value(&self, val: {{KEY_TYPE}}) -> " + " ::std::cmp::Ordering {"; + code_ += " let key = self.{{FIELD_NAME}}();"; + code_ += " key.cmp(&val)"; + code_ += " }"; + } + + // Generate functions for accessing the root table object. This function + // must only be called if the root table is defined. 
+ void GenRootTableFuncs(const StructDef &struct_def) { + FLATBUFFERS_ASSERT(parser_.root_struct_def_ && "root table not defined"); + auto name = Name(struct_def); + + code_.SetValue("STRUCT_NAME", name); + code_.SetValue("STRUCT_NAME_SNAKECASE", MakeSnakeCase(name)); + code_.SetValue("STRUCT_NAME_CAPS", MakeUpper(MakeSnakeCase(name))); + + // The root datatype accessors: + code_ += "#[inline]"; + code_ += + "pub fn get_root_as_{{STRUCT_NAME_SNAKECASE}}<'a>(buf: &'a [u8])" + " -> {{STRUCT_NAME}}<'a> {"; + code_ += " flatbuffers::get_root::<{{STRUCT_NAME}}<'a>>(buf)"; + code_ += "}"; + code_ += ""; + + code_ += "#[inline]"; + code_ += "pub fn get_size_prefixed_root_as_{{STRUCT_NAME_SNAKECASE}}" + "<'a>(buf: &'a [u8]) -> {{STRUCT_NAME}}<'a> {"; + code_ += " flatbuffers::get_size_prefixed_root::<{{STRUCT_NAME}}<'a>>" + "(buf)"; + code_ += "}"; + code_ += ""; + + if (parser_.file_identifier_.length()) { + // Declare the identifier + code_ += "pub const {{STRUCT_NAME_CAPS}}_IDENTIFIER: &'static str\\"; + code_ += " = \"" + parser_.file_identifier_ + "\";"; + code_ += ""; + + // Check if a buffer has the identifier. 
+ code_ += "#[inline]"; + code_ += "pub fn {{STRUCT_NAME_SNAKECASE}}_buffer_has_identifier\\"; + code_ += "(buf: &[u8]) -> bool {"; + code_ += " return flatbuffers::buffer_has_identifier(buf, \\"; + code_ += "{{STRUCT_NAME_CAPS}}_IDENTIFIER, false);"; + code_ += "}"; + code_ += ""; + code_ += "#[inline]"; + code_ += "pub fn {{STRUCT_NAME_SNAKECASE}}_size_prefixed\\"; + code_ += "_buffer_has_identifier(buf: &[u8]) -> bool {"; + code_ += " return flatbuffers::buffer_has_identifier(buf, \\"; + code_ += "{{STRUCT_NAME_CAPS}}_IDENTIFIER, true);"; + code_ += "}"; + code_ += ""; + } + + if (parser_.file_extension_.length()) { + // Return the extension + code_ += "pub const {{STRUCT_NAME_CAPS}}_EXTENSION: &'static str = \\"; + code_ += "\"" + parser_.file_extension_ + "\";"; + code_ += ""; + } + + // Finish a buffer with a given root object: + code_.SetValue("OFFSET_TYPELABEL", Name(struct_def) + "Offset"); + code_ += "#[inline]"; + code_ += "pub fn finish_{{STRUCT_NAME_SNAKECASE}}_buffer<'a, 'b>("; + code_ += " fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,"; + code_ += " root: flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>>) {"; + if (parser_.file_identifier_.length()) { + code_ += " fbb.finish(root, Some({{STRUCT_NAME_CAPS}}_IDENTIFIER));"; + } else { + code_ += " fbb.finish(root, None);"; + } + code_ += "}"; + code_ += ""; + code_ += "#[inline]"; + code_ += "pub fn finish_size_prefixed_{{STRUCT_NAME_SNAKECASE}}_buffer" + "<'a, 'b>(" + "fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, " + "root: flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>>) {"; + if (parser_.file_identifier_.length()) { + code_ += " fbb.finish_size_prefixed(root, " + "Some({{STRUCT_NAME_CAPS}}_IDENTIFIER));"; + } else { + code_ += " fbb.finish_size_prefixed(root, None);"; + } + code_ += "}"; + } + + static void GenPadding( + const FieldDef &field, std::string *code_ptr, int *id, + const std::function &f) { + if (field.padding) { + for (int i = 0; i < 4; i++) { + if (static_cast(field.padding) & (1 << i)) { 
+ f((1 << i) * 8, code_ptr, id); + } + } + assert(!(field.padding & ~0xF)); + } + } + + static void PaddingDefinition(int bits, std::string *code_ptr, int *id) { + *code_ptr += " padding" + NumToString((*id)++) + "__: u" + \ + NumToString(bits) + ","; + } + + static void PaddingInitializer(int bits, std::string *code_ptr, int *id) { + (void)bits; + *code_ptr += "padding" + NumToString((*id)++) + "__: 0,"; + } + + // Generate an accessor struct with constructor for a flatbuffers struct. + void GenStruct(const StructDef &struct_def) { + // Generates manual padding and alignment. + // Variables are private because they contain little endian data on all + // platforms. + GenComment(struct_def.doc_comment); + code_.SetValue("ALIGN", NumToString(struct_def.minalign)); + code_.SetValue("STRUCT_NAME", Name(struct_def)); + + code_ += "// struct {{STRUCT_NAME}}, aligned to {{ALIGN}}"; + code_ += "#[repr(C, packed)]"; + + // PartialEq is useful to derive because we can correctly compare structs + // for equality by just comparing their underlying byte data. This doesn't + // hold for PartialOrd/Ord. + code_ += "#[derive(Clone, Copy, Debug, PartialEq)]"; + code_ += "pub struct {{STRUCT_NAME}} {"; + + int padding_id = 0; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + code_.SetValue("FIELD_TYPE", GetTypeGet(field.value.type)); + code_.SetValue("FIELD_NAME", Name(field)); + code_ += " {{FIELD_NAME}}_: {{FIELD_TYPE}},"; + + if (field.padding) { + std::string padding; + GenPadding(field, &padding, &padding_id, PaddingDefinition); + code_ += padding; + } + } + + code_ += "} // pub struct {{STRUCT_NAME}}"; + + // Generate impls for SafeSliceAccess (because all structs are endian-safe), + // Follow for the value type, Follow for the reference type, Push for the + // value type, and Push for the reference type. 
+ code_ += "impl flatbuffers::SafeSliceAccess for {{STRUCT_NAME}} {}"; + code_ += "impl<'a> flatbuffers::Follow<'a> for {{STRUCT_NAME}} {"; + code_ += " type Inner = &'a {{STRUCT_NAME}};"; + code_ += " #[inline]"; + code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {"; + code_ += " <&'a {{STRUCT_NAME}}>::follow(buf, loc)"; + code_ += " //flatbuffers::follow_cast_ref::<{{STRUCT_NAME}}>(buf, loc)"; + code_ += " }"; + code_ += "}"; + code_ += "impl<'a> flatbuffers::Follow<'a> for &'a {{STRUCT_NAME}} {"; + code_ += " type Inner = &'a {{STRUCT_NAME}};"; + code_ += " #[inline]"; + code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {"; + code_ += " flatbuffers::follow_cast_ref::<{{STRUCT_NAME}}>(buf, loc)"; + code_ += " }"; + code_ += "}"; + code_ += "impl<'b> flatbuffers::Push for {{STRUCT_NAME}} {"; + code_ += " type Output = {{STRUCT_NAME}};"; + code_ += " #[inline]"; + code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {"; + code_ += " (&self).push(dst, _rest)"; + code_ += " }"; + code_ += " #[inline]"; + code_ += " fn size(&self) -> usize {"; + code_ += " ::std::mem::size_of::<{{STRUCT_NAME}}>()"; + code_ += " }"; + code_ += "}"; + code_ += "impl<'b> flatbuffers::Push for &'b {{STRUCT_NAME}} {"; + code_ += " type Output = {{STRUCT_NAME}};"; + code_ += ""; + code_ += " #[inline]"; + code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {"; + code_ += " let src = unsafe {"; + code_ += " ::std::slice::from_raw_parts(" + "*self as *const {{STRUCT_NAME}} as *const u8, self.size())"; + code_ += " };"; + code_ += " dst.copy_from_slice(src);"; + code_ += " }"; + code_ += " #[inline]"; + code_ += " fn size(&self) -> usize {"; + code_ += " ::std::mem::size_of::<{{STRUCT_NAME}}>()"; + code_ += " }"; + code_ += "}"; + code_ += ""; + code_ += ""; + + // Generate a constructor that takes all fields as arguments. 
+ code_ += "impl {{STRUCT_NAME}} {"; + std::string arg_list; + std::string init_list; + padding_id = 0; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + const auto member_name = Name(field) + "_"; + const auto reference = StructMemberAccessNeedsCopy(field.value.type) + ? "" : "&'a "; + const auto arg_name = "_" + Name(field); + const auto arg_type = reference + GetTypeGet(field.value.type); + + if (it != struct_def.fields.vec.begin()) { + arg_list += ", "; + } + arg_list += arg_name + ": "; + arg_list += arg_type; + init_list += " " + member_name; + if (StructMemberAccessNeedsCopy(field.value.type)) { + init_list += ": " + arg_name + ".to_little_endian(),\n"; + } else { + init_list += ": *" + arg_name + ",\n"; + } + } + + code_.SetValue("ARG_LIST", arg_list); + code_.SetValue("INIT_LIST", init_list); + code_ += " pub fn new<'a>({{ARG_LIST}}) -> Self {"; + code_ += " {{STRUCT_NAME}} {"; + code_ += "{{INIT_LIST}}"; + padding_id = 0; + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + if (field.padding) { + std::string padding; + GenPadding(field, &padding, &padding_id, PaddingInitializer); + code_ += " " + padding; + } + } + code_ += " }"; + code_ += " }"; + + // Generate accessor methods for the struct. + for (auto it = struct_def.fields.vec.begin(); + it != struct_def.fields.vec.end(); ++it) { + const auto &field = **it; + + auto field_type = TableBuilderArgsAddFuncType(field, "'a"); + auto member = "self." + Name(field) + "_"; + auto value = StructMemberAccessNeedsCopy(field.value.type) ? + member + ".from_little_endian()" : member; + + code_.SetValue("FIELD_NAME", Name(field)); + code_.SetValue("FIELD_TYPE", field_type); + code_.SetValue("FIELD_VALUE", GenUnderlyingCast(field, true, value)); + code_.SetValue("REF", IsStruct(field.value.type) ? 
"&" : ""); + + GenComment(field.doc_comment, " "); + code_ += " pub fn {{FIELD_NAME}}<'a>(&'a self) -> {{FIELD_TYPE}} {"; + code_ += " {{REF}}{{FIELD_VALUE}}"; + code_ += " }"; + + // Generate a comparison function for this field if it is a key. + if (field.key) { + GenKeyFieldMethods(field); + } + } + code_ += "}"; + code_ += ""; + } + + // Set up the correct namespace. This opens a namespace if the current + // namespace is different from the target namespace. This function + // closes and opens the namespaces only as necessary. + // + // The file must start and end with an empty (or null) namespace so that + // namespaces are properly opened and closed. + void SetNameSpace(const Namespace *ns) { + if (cur_name_space_ == ns) { return; } + + // Compute the size of the longest common namespace prefix. + // If cur_name_space is A::B::C::D and ns is A::B::E::F::G, + // the common prefix is A::B:: and we have old_size = 4, new_size = 5 + // and common_prefix_size = 2 + size_t old_size = cur_name_space_ ? cur_name_space_->components.size() : 0; + size_t new_size = ns ? ns->components.size() : 0; + + size_t common_prefix_size = 0; + while (common_prefix_size < old_size && common_prefix_size < new_size && + ns->components[common_prefix_size] == + cur_name_space_->components[common_prefix_size]) { + common_prefix_size++; + } + + // Close cur_name_space in reverse order to reach the common prefix. + // In the previous example, D then C are closed. 
+ for (size_t j = old_size; j > common_prefix_size; --j) { + code_ += "} // pub mod " + cur_name_space_->components[j - 1]; + } + if (old_size != common_prefix_size) { code_ += ""; } + + // open namespace parts to reach the ns namespace + // in the previous example, E, then F, then G are opened + for (auto j = common_prefix_size; j != new_size; ++j) { + code_ += "pub mod " + MakeSnakeCase(ns->components[j]) + " {"; + code_ += " #![allow(dead_code)]"; + code_ += " #![allow(unused_imports)]"; + code_ += ""; + code_ += " use std::mem;"; + code_ += " use std::marker::PhantomData;"; + code_ += " use std::cmp::Ordering;"; + code_ += ""; + code_ += " extern crate flatbuffers;"; + code_ += " use self::flatbuffers::EndianScalar;"; + } + if (new_size != common_prefix_size) { code_ += ""; } + + cur_name_space_ = ns; + } +}; + +} // namespace rust + +bool GenerateRust(const Parser &parser, const std::string &path, + const std::string &file_name) { + rust::RustGenerator generator(parser, path, file_name); + return generator.generate(); +} + +std::string RustMakeRule(const Parser &parser, const std::string &path, + const std::string &file_name) { + std::string filebase = + flatbuffers::StripPath(flatbuffers::StripExtension(file_name)); + std::string make_rule = GeneratedFileName(path, filebase) + ": "; + + auto included_files = parser.GetIncludedFilesRecursive(file_name); + for (auto it = included_files.begin(); it != included_files.end(); ++it) { + make_rule += " " + *it; + } + return make_rule; +} + +} // namespace flatbuffers + +// TODO(rw): Generated code should import other generated files. +// TODO(rw): Generated code should refer to namespaces in included files in a +// way that makes them referrable. +// TODO(rw): Generated code should indent according to nesting level. +// TODO(rw): Generated code could use a Rust-only enum type to access unions, +// instead of making the user use _type() to manually switch. 
diff --git a/src/idl_gen_text.cpp b/src/idl_gen_text.cpp index 41d19125ecbb..563f6901309e 100644 --- a/src/idl_gen_text.cpp +++ b/src/idl_gen_text.cpp @@ -131,7 +131,7 @@ bool Print(const void *val, Type type, int indent, switch (type.base_type) { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: \ if (!PrintVector( \ *reinterpret_cast *>(val), \ @@ -223,7 +223,7 @@ static bool GenStruct(const StructDef &struct_def, const Table *table, switch (fd.value.type.base_type) { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: \ if (!GenField(fd, table, struct_def.fixed, \ opts, indent + Indent(opts), _text)) { \ @@ -234,7 +234,7 @@ static bool GenStruct(const StructDef &struct_def, const Table *table, #undef FLATBUFFERS_TD // Generate drop-thru case statements for all pointer types: #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: FLATBUFFERS_GEN_TYPES_POINTER(FLATBUFFERS_TD) #undef FLATBUFFERS_TD diff --git a/src/idl_parser.cpp b/src/idl_parser.cpp index 366a77be8e76..d10a2e7e948e 100644 --- a/src/idl_parser.cpp +++ b/src/idl_parser.cpp @@ -30,7 +30,7 @@ const double kPi = 3.14159265358979323846; const char *const kTypeNames[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ IDLTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD @@ -41,7 +41,7 @@ const char *const kTypeNames[] = { const char kTypeSizes[] = { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ sizeof(CTYPE), FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD 
@@ -217,7 +217,7 @@ static std::string TokenToString(int t) { FLATBUFFERS_GEN_TOKENS(FLATBUFFERS_TOKEN) #undef FLATBUFFERS_TOKEN #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ IDLTYPE, FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD) #undef FLATBUFFERS_TD @@ -1077,7 +1077,7 @@ CheckedError Parser::ParseTable(const StructDef &struct_def, std::string *value, switch (field_value.type.base_type) { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: \ builder_.Pad(field->padding); \ if (struct_def.fixed) { \ @@ -1094,7 +1094,7 @@ CheckedError Parser::ParseTable(const StructDef &struct_def, std::string *value, FLATBUFFERS_GEN_TYPES_SCALAR(FLATBUFFERS_TD); #undef FLATBUFFERS_TD #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: \ builder_.Pad(field->padding); \ if (IsStruct(field->value.type)) { \ @@ -1176,7 +1176,7 @@ CheckedError Parser::ParseVector(const Type &type, uoffset_t *ovalue) { switch (val.type.base_type) { // clang-format off #define FLATBUFFERS_TD(ENUM, IDLTYPE, \ - CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \ + CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \ case BASE_TYPE_ ## ENUM: \ if (IsStruct(val.type)) SerializeStruct(*val.type.struct_def, val); \ else { \ diff --git a/tests/RustTest.sh b/tests/RustTest.sh new file mode 100755 index 000000000000..8388701cec38 --- /dev/null +++ b/tests/RustTest.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -ex +# +# Copyright 2018 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +cd ./rust_usage_test +cargo test $1 +TEST_RESULT=$? +if [[ $TEST_RESULT == 0 ]]; then + echo "OK: Rust tests passed." +else + echo "KO: Rust tests failed." + exit 1 +fi + +cargo bench diff --git a/tests/generate_code.sh b/tests/generate_code.sh index 8e060dbd707f..f25366b0e214 100755 --- a/tests/generate_code.sh +++ b/tests/generate_code.sh @@ -14,8 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json -../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs +../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --rust --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json +../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --rust --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs ../flatc --cpp --js --ts 
--php --gen-mutable --reflect-names --gen-object-api --cpp-ptr-type flatbuffers::unique_ptr -o union_vector ./union_vector/union_vector.fbs ../flatc -b --schema --bfbs-comments -I include_test monster_test.fbs ../flatc --jsonschema --schema -I include_test monster_test.fbs diff --git a/tests/monster_test_generated.rs b/tests/monster_test_generated.rs new file mode 100644 index 000000000000..3d0b14527b79 --- /dev/null +++ b/tests/monster_test_generated.rs @@ -0,0 +1,1671 @@ +pub mod my_game { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +pub enum InParentNamespaceOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct InParentNamespace<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for InParentNamespace<'a> { + type Inner = InParentNamespace<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> InParentNamespace<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + InParentNamespace { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + _args: &'args InParentNamespaceArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = InParentNamespaceBuilder::new(_fbb); + builder.finish() + } + +} + +pub struct InParentNamespaceArgs<'a> { + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for InParentNamespaceArgs<'a> { + fn default() -> Self { + InParentNamespaceArgs { + _phantom: PhantomData, + } + } +} +pub struct InParentNamespaceBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: 
flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> InParentNamespaceBuilder<'a, 'b> { + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> InParentNamespaceBuilder<'a, 'b> { + let start = _fbb.start_table(); + InParentNamespaceBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +pub mod example_2 { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +pub enum MonsterOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct Monster<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for Monster<'a> { + type Inner = Monster<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> Monster<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Monster { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + _args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = MonsterBuilder::new(_fbb); + builder.finish() + } + +} + +pub struct MonsterArgs<'a> { + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for MonsterArgs<'a> { + fn default() -> Self { + MonsterArgs { + _phantom: PhantomData, + } + } +} +pub struct MonsterBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> { + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> { + let 
start = _fbb.start_table(); + MonsterBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +} // pub mod Example2 + +pub mod example { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +#[allow(non_camel_case_types)] +#[repr(i8)] +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum Color { + Red = 1, + Green = 2, + Blue = 8 +} + +const ENUM_MIN_COLOR: i8 = 1; +const ENUM_MAX_COLOR: i8 = 8; + +impl<'a> flatbuffers::Follow<'a> for Color { + type Inner = Self; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::read_scalar_at::(buf, loc) + } +} + +impl flatbuffers::EndianScalar for Color { + #[inline] + fn to_little_endian(self) -> Self { + let n = i8::to_le(self as i8); + let p = &n as *const i8 as *const Color; + unsafe { *p } + } + #[inline] + fn from_little_endian(self) -> Self { + let n = i8::from_le(self as i8); + let p = &n as *const i8 as *const Color; + unsafe { *p } + } +} + +impl flatbuffers::Push for Color { + type Output = Color; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + flatbuffers::emplace_scalar::(dst, *self); + } +} + +#[allow(non_camel_case_types)] +const ENUM_VALUES_COLOR:[Color; 3] = [ + Color::Red, + Color::Green, + Color::Blue +]; + +#[allow(non_camel_case_types)] +const ENUM_NAMES_COLOR:[&'static str; 8] = [ + "Red", + "Green", + "", + "", + "", + "", + "", + "Blue" +]; + +pub fn enum_name_color(e: Color) -> &'static str { + let index: usize = e as usize - Color::Red as usize; + ENUM_NAMES_COLOR[index] +} + +#[allow(non_camel_case_types)] +#[repr(u8)] +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum Any { + NONE = 0, + Monster = 1, + TestSimpleTableWithEnum = 2, + MyGame_Example2_Monster = 3 +} + +const 
ENUM_MIN_ANY: u8 = 0; +const ENUM_MAX_ANY: u8 = 3; + +impl<'a> flatbuffers::Follow<'a> for Any { + type Inner = Self; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::read_scalar_at::(buf, loc) + } +} + +impl flatbuffers::EndianScalar for Any { + #[inline] + fn to_little_endian(self) -> Self { + let n = u8::to_le(self as u8); + let p = &n as *const u8 as *const Any; + unsafe { *p } + } + #[inline] + fn from_little_endian(self) -> Self { + let n = u8::from_le(self as u8); + let p = &n as *const u8 as *const Any; + unsafe { *p } + } +} + +impl flatbuffers::Push for Any { + type Output = Any; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + flatbuffers::emplace_scalar::(dst, *self); + } +} + +#[allow(non_camel_case_types)] +const ENUM_VALUES_ANY:[Any; 4] = [ + Any::NONE, + Any::Monster, + Any::TestSimpleTableWithEnum, + Any::MyGame_Example2_Monster +]; + +#[allow(non_camel_case_types)] +const ENUM_NAMES_ANY:[&'static str; 4] = [ + "NONE", + "Monster", + "TestSimpleTableWithEnum", + "MyGame_Example2_Monster" +]; + +pub fn enum_name_any(e: Any) -> &'static str { + let index: usize = e as usize; + ENUM_NAMES_ANY[index] +} + +pub struct AnyUnionTableOffset {} +// struct Test, aligned to 2 +#[repr(C, packed)] +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Test { + a_: i16, + b_: i8, + padding0__: u8, +} // pub struct Test +impl flatbuffers::SafeSliceAccess for Test {} +impl<'a> flatbuffers::Follow<'a> for Test { + type Inner = &'a Test; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + <&'a Test>::follow(buf, loc) + //flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'a> flatbuffers::Follow<'a> for &'a Test { + type Inner = &'a Test; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'b> flatbuffers::Push for Test { + type Output = Test; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + (&self).push(dst, _rest) 
+ } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} +impl<'b> flatbuffers::Push for &'b Test { + type Output = Test; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let src = unsafe { + ::std::slice::from_raw_parts(*self as *const Test as *const u8, self.size()) + }; + dst.copy_from_slice(src); + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} + + +impl Test { + pub fn new<'a>(_a: i16, _b: i8) -> Self { + Test { + a_: _a.to_little_endian(), + b_: _b.to_little_endian(), + + padding0__: 0, + } + } + pub fn a<'a>(&'a self) -> i16 { + self.a_.from_little_endian() + } + pub fn b<'a>(&'a self) -> i8 { + self.b_.from_little_endian() + } +} + +// struct Vec3, aligned to 16 +#[repr(C, packed)] +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Vec3 { + x_: f32, + y_: f32, + z_: f32, + padding0__: u32, + test1_: f64, + test2_: Color, + padding1__: u8, + test3_: Test, + padding2__: u16, +} // pub struct Vec3 +impl flatbuffers::SafeSliceAccess for Vec3 {} +impl<'a> flatbuffers::Follow<'a> for Vec3 { + type Inner = &'a Vec3; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + <&'a Vec3>::follow(buf, loc) + //flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'a> flatbuffers::Follow<'a> for &'a Vec3 { + type Inner = &'a Vec3; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'b> flatbuffers::Push for Vec3 { + type Output = Vec3; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + (&self).push(dst, _rest) + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} +impl<'b> flatbuffers::Push for &'b Vec3 { + type Output = Vec3; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let src = unsafe { + ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, self.size()) + }; + dst.copy_from_slice(src); + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() 
+ } +} + + +impl Vec3 { + pub fn new<'a>(_x: f32, _y: f32, _z: f32, _test1: f64, _test2: Color, _test3: &'a Test) -> Self { + Vec3 { + x_: _x.to_little_endian(), + y_: _y.to_little_endian(), + z_: _z.to_little_endian(), + test1_: _test1.to_little_endian(), + test2_: _test2.to_little_endian(), + test3_: *_test3, + + padding0__: 0, + padding1__: 0, + padding2__: 0, + } + } + pub fn x<'a>(&'a self) -> f32 { + self.x_.from_little_endian() + } + pub fn y<'a>(&'a self) -> f32 { + self.y_.from_little_endian() + } + pub fn z<'a>(&'a self) -> f32 { + self.z_.from_little_endian() + } + pub fn test1<'a>(&'a self) -> f64 { + self.test1_.from_little_endian() + } + pub fn test2<'a>(&'a self) -> Color { + self.test2_.from_little_endian() + } + pub fn test3<'a>(&'a self) -> &'a Test { + &self.test3_ + } +} + +// struct Ability, aligned to 4 +#[repr(C, packed)] +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Ability { + id_: u32, + distance_: u32, +} // pub struct Ability +impl flatbuffers::SafeSliceAccess for Ability {} +impl<'a> flatbuffers::Follow<'a> for Ability { + type Inner = &'a Ability; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + <&'a Ability>::follow(buf, loc) + //flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'a> flatbuffers::Follow<'a> for &'a Ability { + type Inner = &'a Ability; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'b> flatbuffers::Push for Ability { + type Output = Ability; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + (&self).push(dst, _rest) + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} +impl<'b> flatbuffers::Push for &'b Ability { + type Output = Ability; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let src = unsafe { + ::std::slice::from_raw_parts(*self as *const Ability as *const u8, self.size()) + }; + dst.copy_from_slice(src); + } + #[inline] + fn size(&self) -> usize { 
+ ::std::mem::size_of::() + } +} + + +impl Ability { + pub fn new<'a>(_id: u32, _distance: u32) -> Self { + Ability { + id_: _id.to_little_endian(), + distance_: _distance.to_little_endian(), + + } + } + pub fn id<'a>(&'a self) -> u32 { + self.id_.from_little_endian() + } + pub fn key_compare_less_than(&self, o: &Ability) -> bool { + self.id() < o.id() + } + + pub fn key_compare_with_value(&self, val: u32) -> ::std::cmp::Ordering { + let key = self.id(); + key.cmp(&val) + } + pub fn distance<'a>(&'a self) -> u32 { + self.distance_.from_little_endian() + } +} + +pub enum TestSimpleTableWithEnumOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct TestSimpleTableWithEnum<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for TestSimpleTableWithEnum<'a> { + type Inner = TestSimpleTableWithEnum<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> TestSimpleTableWithEnum<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + TestSimpleTableWithEnum { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TestSimpleTableWithEnumArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = TestSimpleTableWithEnumBuilder::new(_fbb); + builder.add_color(args.color); + builder.finish() + } + + pub const VT_COLOR: flatbuffers::VOffsetT = 4; + + #[inline] + pub fn color(&'a self) -> Color { + self._tab.get::(TestSimpleTableWithEnum::VT_COLOR, Some(Color::Green)).unwrap() + } +} + +pub struct TestSimpleTableWithEnumArgs<'a> { + pub color: Color, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for TestSimpleTableWithEnumArgs<'a> { + fn default() -> Self { + 
TestSimpleTableWithEnumArgs { + color: Color::Green, + _phantom: PhantomData, + } + } +} +pub struct TestSimpleTableWithEnumBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> TestSimpleTableWithEnumBuilder<'a, 'b> { + #[inline] + pub fn add_color(&mut self, color: Color) { + self.fbb_.push_slot::(TestSimpleTableWithEnum::VT_COLOR, color, Color::Green); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TestSimpleTableWithEnumBuilder<'a, 'b> { + let start = _fbb.start_table(); + TestSimpleTableWithEnumBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +pub enum StatOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct Stat<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for Stat<'a> { + type Inner = Stat<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> Stat<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Stat { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args StatArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = StatBuilder::new(_fbb); + builder.add_val(args.val); + if let Some(x) = args.id { builder.add_id(x); } + builder.add_count(args.count); + builder.finish() + } + + pub const VT_ID: flatbuffers::VOffsetT = 4; + pub const VT_VAL: flatbuffers::VOffsetT = 6; + pub const VT_COUNT: flatbuffers::VOffsetT = 8; + + #[inline] + pub fn id(&'a self) -> Option<&'a str> { + self._tab.get::>(Stat::VT_ID, None) + } + #[inline] + 
pub fn val(&'a self) -> i64 { + self._tab.get::(Stat::VT_VAL, Some(0)).unwrap() + } + #[inline] + pub fn count(&'a self) -> u16 { + self._tab.get::(Stat::VT_COUNT, Some(0)).unwrap() + } +} + +pub struct StatArgs<'a> { + pub id: Option>, + pub val: i64, + pub count: u16, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for StatArgs<'a> { + fn default() -> Self { + StatArgs { + id: None, + val: 0, + count: 0, + _phantom: PhantomData, + } + } +} +pub struct StatBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> StatBuilder<'a, 'b> { + #[inline] + pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::>(Stat::VT_ID, id); + } + #[inline] + pub fn add_val(&mut self, val: i64) { + self.fbb_.push_slot::(Stat::VT_VAL, val, 0); + } + #[inline] + pub fn add_count(&mut self, count: u16) { + self.fbb_.push_slot::(Stat::VT_COUNT, count, 0); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> StatBuilder<'a, 'b> { + let start = _fbb.start_table(); + StatBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +pub enum ReferrableOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct Referrable<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for Referrable<'a> { + type Inner = Referrable<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> Referrable<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + Referrable { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + 
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args ReferrableArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = ReferrableBuilder::new(_fbb); + builder.add_id(args.id); + builder.finish() + } + + pub const VT_ID: flatbuffers::VOffsetT = 4; + + #[inline] + pub fn id(&'a self) -> u64 { + self._tab.get::(Referrable::VT_ID, Some(0)).unwrap() + } + pub fn key_compare_less_than(&self, o: &Referrable) -> bool { + self.id() < o.id() + } + + pub fn key_compare_with_value(&self, val: u64) -> ::std::cmp::Ordering { + let key = self.id(); + key.cmp(&val) + } +} + +pub struct ReferrableArgs<'a> { + pub id: u64, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for ReferrableArgs<'a> { + fn default() -> Self { + ReferrableArgs { + id: 0, + _phantom: PhantomData, + } + } +} +pub struct ReferrableBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> ReferrableBuilder<'a, 'b> { + #[inline] + pub fn add_id(&mut self, id: u64) { + self.fbb_.push_slot::(Referrable::VT_ID, id, 0); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ReferrableBuilder<'a, 'b> { + let start = _fbb.start_table(); + ReferrableBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +/// an example documentation comment: monster object +pub enum MonsterOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct Monster<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for Monster<'a> { + type Inner = Monster<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> Monster<'a> { + #[inline] + pub fn init_from_table(table: 
flatbuffers::Table<'a>) -> Self { + Monster { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = MonsterBuilder::new(_fbb); + builder.add_non_owning_reference(args.non_owning_reference); + builder.add_co_owning_reference(args.co_owning_reference); + builder.add_single_weak_reference(args.single_weak_reference); + builder.add_testhashu64_fnv1a(args.testhashu64_fnv1a); + builder.add_testhashs64_fnv1a(args.testhashs64_fnv1a); + builder.add_testhashu64_fnv1(args.testhashu64_fnv1); + builder.add_testhashs64_fnv1(args.testhashs64_fnv1); + if let Some(x) = args.vector_of_non_owning_references { builder.add_vector_of_non_owning_references(x); } + if let Some(x) = args.vector_of_co_owning_references { builder.add_vector_of_co_owning_references(x); } + if let Some(x) = args.vector_of_strong_referrables { builder.add_vector_of_strong_referrables(x); } + if let Some(x) = args.vector_of_weak_references { builder.add_vector_of_weak_references(x); } + if let Some(x) = args.vector_of_referrables { builder.add_vector_of_referrables(x); } + if let Some(x) = args.parent_namespace_test { builder.add_parent_namespace_test(x); } + if let Some(x) = args.vector_of_doubles { builder.add_vector_of_doubles(x); } + if let Some(x) = args.vector_of_longs { builder.add_vector_of_longs(x); } + if let Some(x) = args.test5 { builder.add_test5(x); } + if let Some(x) = args.flex { builder.add_flex(x); } + if let Some(x) = args.testarrayofsortedstruct { builder.add_testarrayofsortedstruct(x); } + if let Some(x) = args.testarrayofstring2 { builder.add_testarrayofstring2(x); } + builder.add_testf3(args.testf3); + builder.add_testf2(args.testf2); + builder.add_testf(args.testf); + if let Some(x) = args.testarrayofbools { builder.add_testarrayofbools(x); } + 
builder.add_testhashu32_fnv1a(args.testhashu32_fnv1a); + builder.add_testhashs32_fnv1a(args.testhashs32_fnv1a); + builder.add_testhashu32_fnv1(args.testhashu32_fnv1); + builder.add_testhashs32_fnv1(args.testhashs32_fnv1); + if let Some(x) = args.testempty { builder.add_testempty(x); } + if let Some(x) = args.testnestedflatbuffer { builder.add_testnestedflatbuffer(x); } + if let Some(x) = args.enemy { builder.add_enemy(x); } + if let Some(x) = args.testarrayoftables { builder.add_testarrayoftables(x); } + if let Some(x) = args.testarrayofstring { builder.add_testarrayofstring(x); } + if let Some(x) = args.test4 { builder.add_test4(x); } + if let Some(x) = args.test { builder.add_test(x); } + if let Some(x) = args.inventory { builder.add_inventory(x); } + if let Some(x) = args.name { builder.add_name(x); } + if let Some(x) = args.pos { builder.add_pos(x); } + builder.add_hp(args.hp); + builder.add_mana(args.mana); + builder.add_testbool(args.testbool); + builder.add_test_type(args.test_type); + builder.add_color(args.color); + builder.finish() + } + + pub const VT_POS: flatbuffers::VOffsetT = 4; + pub const VT_MANA: flatbuffers::VOffsetT = 6; + pub const VT_HP: flatbuffers::VOffsetT = 8; + pub const VT_NAME: flatbuffers::VOffsetT = 10; + pub const VT_INVENTORY: flatbuffers::VOffsetT = 14; + pub const VT_COLOR: flatbuffers::VOffsetT = 16; + pub const VT_TEST_TYPE: flatbuffers::VOffsetT = 18; + pub const VT_TEST: flatbuffers::VOffsetT = 20; + pub const VT_TEST4: flatbuffers::VOffsetT = 22; + pub const VT_TESTARRAYOFSTRING: flatbuffers::VOffsetT = 24; + pub const VT_TESTARRAYOFTABLES: flatbuffers::VOffsetT = 26; + pub const VT_ENEMY: flatbuffers::VOffsetT = 28; + pub const VT_TESTNESTEDFLATBUFFER: flatbuffers::VOffsetT = 30; + pub const VT_TESTEMPTY: flatbuffers::VOffsetT = 32; + pub const VT_TESTBOOL: flatbuffers::VOffsetT = 34; + pub const VT_TESTHASHS32_FNV1: flatbuffers::VOffsetT = 36; + pub const VT_TESTHASHU32_FNV1: flatbuffers::VOffsetT = 38; + pub const 
VT_TESTHASHS64_FNV1: flatbuffers::VOffsetT = 40; + pub const VT_TESTHASHU64_FNV1: flatbuffers::VOffsetT = 42; + pub const VT_TESTHASHS32_FNV1A: flatbuffers::VOffsetT = 44; + pub const VT_TESTHASHU32_FNV1A: flatbuffers::VOffsetT = 46; + pub const VT_TESTHASHS64_FNV1A: flatbuffers::VOffsetT = 48; + pub const VT_TESTHASHU64_FNV1A: flatbuffers::VOffsetT = 50; + pub const VT_TESTARRAYOFBOOLS: flatbuffers::VOffsetT = 52; + pub const VT_TESTF: flatbuffers::VOffsetT = 54; + pub const VT_TESTF2: flatbuffers::VOffsetT = 56; + pub const VT_TESTF3: flatbuffers::VOffsetT = 58; + pub const VT_TESTARRAYOFSTRING2: flatbuffers::VOffsetT = 60; + pub const VT_TESTARRAYOFSORTEDSTRUCT: flatbuffers::VOffsetT = 62; + pub const VT_FLEX: flatbuffers::VOffsetT = 64; + pub const VT_TEST5: flatbuffers::VOffsetT = 66; + pub const VT_VECTOR_OF_LONGS: flatbuffers::VOffsetT = 68; + pub const VT_VECTOR_OF_DOUBLES: flatbuffers::VOffsetT = 70; + pub const VT_PARENT_NAMESPACE_TEST: flatbuffers::VOffsetT = 72; + pub const VT_VECTOR_OF_REFERRABLES: flatbuffers::VOffsetT = 74; + pub const VT_SINGLE_WEAK_REFERENCE: flatbuffers::VOffsetT = 76; + pub const VT_VECTOR_OF_WEAK_REFERENCES: flatbuffers::VOffsetT = 78; + pub const VT_VECTOR_OF_STRONG_REFERRABLES: flatbuffers::VOffsetT = 80; + pub const VT_CO_OWNING_REFERENCE: flatbuffers::VOffsetT = 82; + pub const VT_VECTOR_OF_CO_OWNING_REFERENCES: flatbuffers::VOffsetT = 84; + pub const VT_NON_OWNING_REFERENCE: flatbuffers::VOffsetT = 86; + pub const VT_VECTOR_OF_NON_OWNING_REFERENCES: flatbuffers::VOffsetT = 88; + + #[inline] + pub fn pos(&'a self) -> Option<&'a Vec3> { + self._tab.get::(Monster::VT_POS, None) + } + #[inline] + pub fn mana(&'a self) -> i16 { + self._tab.get::(Monster::VT_MANA, Some(150)).unwrap() + } + #[inline] + pub fn hp(&'a self) -> i16 { + self._tab.get::(Monster::VT_HP, Some(100)).unwrap() + } + #[inline] + pub fn name(&'a self) -> Option<&'a str> { + self._tab.get::>(Monster::VT_NAME, None) + } + pub fn key_compare_less_than(&self, o: 
&Monster) -> bool { + self.name() < o.name() + } + + pub fn key_compare_with_value(&self, val: Option<&str>) -> ::std::cmp::Ordering { + let key = self.name(); + key.cmp(&val) + } + #[inline] + pub fn inventory(&'a self) -> Option<&'a [u8]> { + self._tab.get::>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice()) + } + #[inline] + pub fn color(&'a self) -> Color { + self._tab.get::(Monster::VT_COLOR, Some(Color::Blue)).unwrap() + } + #[inline] + pub fn test_type(&'a self) -> Any { + self._tab.get::(Monster::VT_TEST_TYPE, Some(Any::NONE)).unwrap() + } + #[inline] + pub fn test(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_TEST, None) + } + #[inline] + pub fn test4(&'a self) -> Option<&'a [Test]> { + self._tab.get::>>(Monster::VT_TEST4, None).map(|v| v.safe_slice() ) + } + #[inline] + pub fn testarrayofstring(&'a self) -> Option>> { + self._tab.get::>>>(Monster::VT_TESTARRAYOFSTRING, None) + } + /// an example documentation comment: this will end up in the generated code + /// multiline too + #[inline] + pub fn testarrayoftables(&'a self) -> Option>>> { + self._tab.get::>>>>(Monster::VT_TESTARRAYOFTABLES, None) + } + #[inline] + pub fn enemy(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_ENEMY, None) + } + #[inline] + pub fn testnestedflatbuffer(&'a self) -> Option<&'a [u8]> { + self._tab.get::>>(Monster::VT_TESTNESTEDFLATBUFFER, None).map(|v| v.safe_slice()) + } + pub fn testnestedflatbuffer_nested_flatbuffer(&'a self) -> Option> { + match self.testnestedflatbuffer() { + None => { None } + Some(data) => { + use self::flatbuffers::Follow; + Some(>>::follow(data, 0)) + }, + } + } + #[inline] + pub fn testempty(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_TESTEMPTY, None) + } + #[inline] + pub fn testbool(&'a self) -> bool { + self._tab.get::(Monster::VT_TESTBOOL, Some(false)).unwrap() + } + #[inline] + pub fn testhashs32_fnv1(&'a self) -> i32 { + self._tab.get::(Monster::VT_TESTHASHS32_FNV1, Some(0)).unwrap() + } + #[inline] + pub fn 
testhashu32_fnv1(&'a self) -> u32 { + self._tab.get::(Monster::VT_TESTHASHU32_FNV1, Some(0)).unwrap() + } + #[inline] + pub fn testhashs64_fnv1(&'a self) -> i64 { + self._tab.get::(Monster::VT_TESTHASHS64_FNV1, Some(0)).unwrap() + } + #[inline] + pub fn testhashu64_fnv1(&'a self) -> u64 { + self._tab.get::(Monster::VT_TESTHASHU64_FNV1, Some(0)).unwrap() + } + #[inline] + pub fn testhashs32_fnv1a(&'a self) -> i32 { + self._tab.get::(Monster::VT_TESTHASHS32_FNV1A, Some(0)).unwrap() + } + #[inline] + pub fn testhashu32_fnv1a(&'a self) -> u32 { + self._tab.get::(Monster::VT_TESTHASHU32_FNV1A, Some(0)).unwrap() + } + #[inline] + pub fn testhashs64_fnv1a(&'a self) -> i64 { + self._tab.get::(Monster::VT_TESTHASHS64_FNV1A, Some(0)).unwrap() + } + #[inline] + pub fn testhashu64_fnv1a(&'a self) -> u64 { + self._tab.get::(Monster::VT_TESTHASHU64_FNV1A, Some(0)).unwrap() + } + #[inline] + pub fn testarrayofbools(&'a self) -> Option<&'a [bool]> { + self._tab.get::>>(Monster::VT_TESTARRAYOFBOOLS, None).map(|v| v.safe_slice()) + } + #[inline] + pub fn testf(&'a self) -> f32 { + self._tab.get::(Monster::VT_TESTF, Some(3.14159)).unwrap() + } + #[inline] + pub fn testf2(&'a self) -> f32 { + self._tab.get::(Monster::VT_TESTF2, Some(3.0)).unwrap() + } + #[inline] + pub fn testf3(&'a self) -> f32 { + self._tab.get::(Monster::VT_TESTF3, Some(0.0)).unwrap() + } + #[inline] + pub fn testarrayofstring2(&'a self) -> Option>> { + self._tab.get::>>>(Monster::VT_TESTARRAYOFSTRING2, None) + } + #[inline] + pub fn testarrayofsortedstruct(&'a self) -> Option<&'a [Ability]> { + self._tab.get::>>(Monster::VT_TESTARRAYOFSORTEDSTRUCT, None).map(|v| v.safe_slice() ) + } + #[inline] + pub fn flex(&'a self) -> Option<&'a [u8]> { + self._tab.get::>>(Monster::VT_FLEX, None).map(|v| v.safe_slice()) + } + #[inline] + pub fn test5(&'a self) -> Option<&'a [Test]> { + self._tab.get::>>(Monster::VT_TEST5, None).map(|v| v.safe_slice() ) + } + #[inline] + pub fn vector_of_longs(&'a self) -> Option> { + 
self._tab.get::>>(Monster::VT_VECTOR_OF_LONGS, None) + } + #[inline] + pub fn vector_of_doubles(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_VECTOR_OF_DOUBLES, None) + } + #[inline] + pub fn parent_namespace_test(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_PARENT_NAMESPACE_TEST, None) + } + #[inline] + pub fn vector_of_referrables(&'a self) -> Option>>> { + self._tab.get::>>>>(Monster::VT_VECTOR_OF_REFERRABLES, None) + } + #[inline] + pub fn single_weak_reference(&'a self) -> u64 { + self._tab.get::(Monster::VT_SINGLE_WEAK_REFERENCE, Some(0)).unwrap() + } + #[inline] + pub fn vector_of_weak_references(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_VECTOR_OF_WEAK_REFERENCES, None) + } + #[inline] + pub fn vector_of_strong_referrables(&'a self) -> Option>>> { + self._tab.get::>>>>(Monster::VT_VECTOR_OF_STRONG_REFERRABLES, None) + } + #[inline] + pub fn co_owning_reference(&'a self) -> u64 { + self._tab.get::(Monster::VT_CO_OWNING_REFERENCE, Some(0)).unwrap() + } + #[inline] + pub fn vector_of_co_owning_references(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_VECTOR_OF_CO_OWNING_REFERENCES, None) + } + #[inline] + pub fn non_owning_reference(&'a self) -> u64 { + self._tab.get::(Monster::VT_NON_OWNING_REFERENCE, Some(0)).unwrap() + } + #[inline] + pub fn vector_of_non_owning_references(&'a self) -> Option> { + self._tab.get::>>(Monster::VT_VECTOR_OF_NON_OWNING_REFERENCES, None) + } +#[inline] +#[allow(non_snake_case)] +pub fn test_as_monster(&'a self) -> Option { + if self.test_type() == Any::Monster { + self.test().map(|u| Monster::init_from_table(u)) + } else { + None + } +} + +#[inline] +#[allow(non_snake_case)] +pub fn test_as_test_simple_table_with_enum(&'a self) -> Option { + if self.test_type() == Any::TestSimpleTableWithEnum { + self.test().map(|u| TestSimpleTableWithEnum::init_from_table(u)) + } else { + None + } +} + +#[inline] +#[allow(non_snake_case)] +pub fn test_as_my_game___example_2___monster(&'a self) -> Option { + 
if self.test_type() == Any::MyGame_Example2_Monster { + self.test().map(|u| super::example_2::Monster::init_from_table(u)) + } else { + None + } +} + +} + +pub struct MonsterArgs<'a> { + pub pos: Option<&'a Vec3>, + pub mana: i16, + pub hp: i16, + pub name: Option>, + pub inventory: Option>>, + pub color: Color, + pub test_type: Any, + pub test: Option>, + pub test4: Option>>, + pub testarrayofstring: Option>>>, + pub testarrayoftables: Option>>>>, + pub enemy: Option>>, + pub testnestedflatbuffer: Option>>, + pub testempty: Option>>, + pub testbool: bool, + pub testhashs32_fnv1: i32, + pub testhashu32_fnv1: u32, + pub testhashs64_fnv1: i64, + pub testhashu64_fnv1: u64, + pub testhashs32_fnv1a: i32, + pub testhashu32_fnv1a: u32, + pub testhashs64_fnv1a: i64, + pub testhashu64_fnv1a: u64, + pub testarrayofbools: Option>>, + pub testf: f32, + pub testf2: f32, + pub testf3: f32, + pub testarrayofstring2: Option>>>, + pub testarrayofsortedstruct: Option>>, + pub flex: Option>>, + pub test5: Option>>, + pub vector_of_longs: Option>>, + pub vector_of_doubles: Option>>, + pub parent_namespace_test: Option>>, + pub vector_of_referrables: Option>>>>, + pub single_weak_reference: u64, + pub vector_of_weak_references: Option>>, + pub vector_of_strong_referrables: Option>>>>, + pub co_owning_reference: u64, + pub vector_of_co_owning_references: Option>>, + pub non_owning_reference: u64, + pub vector_of_non_owning_references: Option>>, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for MonsterArgs<'a> { + fn default() -> Self { + MonsterArgs { + pos: None, + mana: 150, + hp: 100, + // required + name: None, + inventory: None, + color: Color::Blue, + test_type: Any::NONE, + test: None, + test4: None, + testarrayofstring: None, + testarrayoftables: None, + enemy: None, + testnestedflatbuffer: None, + testempty: None, + testbool: false, + testhashs32_fnv1: 0, + testhashu32_fnv1: 0, + testhashs64_fnv1: 0, + testhashu64_fnv1: 0, + 
testhashs32_fnv1a: 0, + testhashu32_fnv1a: 0, + testhashs64_fnv1a: 0, + testhashu64_fnv1a: 0, + testarrayofbools: None, + testf: 3.14159, + testf2: 3.0, + testf3: 0.0, + testarrayofstring2: None, + testarrayofsortedstruct: None, + flex: None, + test5: None, + vector_of_longs: None, + vector_of_doubles: None, + parent_namespace_test: None, + vector_of_referrables: None, + single_weak_reference: 0, + vector_of_weak_references: None, + vector_of_strong_referrables: None, + co_owning_reference: 0, + vector_of_co_owning_references: None, + non_owning_reference: 0, + vector_of_non_owning_references: None, + _phantom: PhantomData, + } + } +} +pub struct MonsterBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> { + #[inline] + pub fn add_pos(&mut self, pos: &'b Vec3) { + self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos); + } + #[inline] + pub fn add_mana(&mut self, mana: i16) { + self.fbb_.push_slot::(Monster::VT_MANA, mana, 150); + } + #[inline] + pub fn add_hp(&mut self, hp: i16) { + self.fbb_.push_slot::(Monster::VT_HP, hp, 100); + } + #[inline] + pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::>(Monster::VT_NAME, name); + } + #[inline] + pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_INVENTORY, inventory); + } + #[inline] + pub fn add_color(&mut self, color: Color) { + self.fbb_.push_slot::(Monster::VT_COLOR, color, Color::Blue); + } + #[inline] + pub fn add_test_type(&mut self, test_type: Any) { + self.fbb_.push_slot::(Monster::VT_TEST_TYPE, test_type, Any::NONE); + } + #[inline] + pub fn add_test(&mut self, test: flatbuffers::WIPOffset) { + self.fbb_.push_slot_always::>(Monster::VT_TEST, test); + } + #[inline] + pub fn add_test4(&mut self, test4: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TEST4, test4); + } + 
#[inline] + pub fn add_testarrayofstring(&mut self, testarrayofstring: flatbuffers::WIPOffset>>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTARRAYOFSTRING, testarrayofstring); + } + #[inline] + pub fn add_testarrayoftables(&mut self, testarrayoftables: flatbuffers::WIPOffset>>>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTARRAYOFTABLES, testarrayoftables); + } + #[inline] + pub fn add_enemy(&mut self, enemy: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_ENEMY, enemy); + } + #[inline] + pub fn add_testnestedflatbuffer(&mut self, testnestedflatbuffer: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTNESTEDFLATBUFFER, testnestedflatbuffer); + } + #[inline] + pub fn add_testempty(&mut self, testempty: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTEMPTY, testempty); + } + #[inline] + pub fn add_testbool(&mut self, testbool: bool) { + self.fbb_.push_slot::(Monster::VT_TESTBOOL, testbool, false); + } + #[inline] + pub fn add_testhashs32_fnv1(&mut self, testhashs32_fnv1: i32) { + self.fbb_.push_slot::(Monster::VT_TESTHASHS32_FNV1, testhashs32_fnv1, 0); + } + #[inline] + pub fn add_testhashu32_fnv1(&mut self, testhashu32_fnv1: u32) { + self.fbb_.push_slot::(Monster::VT_TESTHASHU32_FNV1, testhashu32_fnv1, 0); + } + #[inline] + pub fn add_testhashs64_fnv1(&mut self, testhashs64_fnv1: i64) { + self.fbb_.push_slot::(Monster::VT_TESTHASHS64_FNV1, testhashs64_fnv1, 0); + } + #[inline] + pub fn add_testhashu64_fnv1(&mut self, testhashu64_fnv1: u64) { + self.fbb_.push_slot::(Monster::VT_TESTHASHU64_FNV1, testhashu64_fnv1, 0); + } + #[inline] + pub fn add_testhashs32_fnv1a(&mut self, testhashs32_fnv1a: i32) { + self.fbb_.push_slot::(Monster::VT_TESTHASHS32_FNV1A, testhashs32_fnv1a, 0); + } + #[inline] + pub fn add_testhashu32_fnv1a(&mut self, testhashu32_fnv1a: u32) { + self.fbb_.push_slot::(Monster::VT_TESTHASHU32_FNV1A, testhashu32_fnv1a, 0); + } + #[inline] + pub fn 
add_testhashs64_fnv1a(&mut self, testhashs64_fnv1a: i64) { + self.fbb_.push_slot::(Monster::VT_TESTHASHS64_FNV1A, testhashs64_fnv1a, 0); + } + #[inline] + pub fn add_testhashu64_fnv1a(&mut self, testhashu64_fnv1a: u64) { + self.fbb_.push_slot::(Monster::VT_TESTHASHU64_FNV1A, testhashu64_fnv1a, 0); + } + #[inline] + pub fn add_testarrayofbools(&mut self, testarrayofbools: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTARRAYOFBOOLS, testarrayofbools); + } + #[inline] + pub fn add_testf(&mut self, testf: f32) { + self.fbb_.push_slot::(Monster::VT_TESTF, testf, 3.14159); + } + #[inline] + pub fn add_testf2(&mut self, testf2: f32) { + self.fbb_.push_slot::(Monster::VT_TESTF2, testf2, 3.0); + } + #[inline] + pub fn add_testf3(&mut self, testf3: f32) { + self.fbb_.push_slot::(Monster::VT_TESTF3, testf3, 0.0); + } + #[inline] + pub fn add_testarrayofstring2(&mut self, testarrayofstring2: flatbuffers::WIPOffset>>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTARRAYOFSTRING2, testarrayofstring2); + } + #[inline] + pub fn add_testarrayofsortedstruct(&mut self, testarrayofsortedstruct: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TESTARRAYOFSORTEDSTRUCT, testarrayofsortedstruct); + } + #[inline] + pub fn add_flex(&mut self, flex: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_FLEX, flex); + } + #[inline] + pub fn add_test5(&mut self, test5: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_TEST5, test5); + } + #[inline] + pub fn add_vector_of_longs(&mut self, vector_of_longs: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_LONGS, vector_of_longs); + } + #[inline] + pub fn add_vector_of_doubles(&mut self, vector_of_doubles: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_DOUBLES, vector_of_doubles); + } + #[inline] + pub fn add_parent_namespace_test(&mut self, parent_namespace_test: flatbuffers::WIPOffset>) { + 
self.fbb_.push_slot_always::>(Monster::VT_PARENT_NAMESPACE_TEST, parent_namespace_test); + } + #[inline] + pub fn add_vector_of_referrables(&mut self, vector_of_referrables: flatbuffers::WIPOffset>>>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_REFERRABLES, vector_of_referrables); + } + #[inline] + pub fn add_single_weak_reference(&mut self, single_weak_reference: u64) { + self.fbb_.push_slot::(Monster::VT_SINGLE_WEAK_REFERENCE, single_weak_reference, 0); + } + #[inline] + pub fn add_vector_of_weak_references(&mut self, vector_of_weak_references: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_WEAK_REFERENCES, vector_of_weak_references); + } + #[inline] + pub fn add_vector_of_strong_referrables(&mut self, vector_of_strong_referrables: flatbuffers::WIPOffset>>>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_STRONG_REFERRABLES, vector_of_strong_referrables); + } + #[inline] + pub fn add_co_owning_reference(&mut self, co_owning_reference: u64) { + self.fbb_.push_slot::(Monster::VT_CO_OWNING_REFERENCE, co_owning_reference, 0); + } + #[inline] + pub fn add_vector_of_co_owning_references(&mut self, vector_of_co_owning_references: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_CO_OWNING_REFERENCES, vector_of_co_owning_references); + } + #[inline] + pub fn add_non_owning_reference(&mut self, non_owning_reference: u64) { + self.fbb_.push_slot::(Monster::VT_NON_OWNING_REFERENCE, non_owning_reference, 0); + } + #[inline] + pub fn add_vector_of_non_owning_references(&mut self, vector_of_non_owning_references: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(Monster::VT_VECTOR_OF_NON_OWNING_REFERENCES, vector_of_non_owning_references); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> { + let start = _fbb.start_table(); + MonsterBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + self.fbb_.required(o, Monster::VT_NAME,"name"); + flatbuffers::WIPOffset::new(o.value()) + } +} + +pub enum TypeAliasesOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct TypeAliases<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for TypeAliases<'a> { + type Inner = TypeAliases<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> TypeAliases<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + TypeAliases { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TypeAliasesArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = TypeAliasesBuilder::new(_fbb); + builder.add_f64_(args.f64_); + builder.add_u64_(args.u64_); + builder.add_i64_(args.i64_); + if let Some(x) = args.vf64 { builder.add_vf64(x); } + if let Some(x) = args.v8 { builder.add_v8(x); } + builder.add_f32_(args.f32_); + builder.add_u32_(args.u32_); + builder.add_i32_(args.i32_); + builder.add_u16_(args.u16_); + builder.add_i16_(args.i16_); + builder.add_u8_(args.u8_); + builder.add_i8_(args.i8_); + builder.finish() + } + + pub const VT_I8_: flatbuffers::VOffsetT = 4; + pub const VT_U8_: flatbuffers::VOffsetT = 6; + pub const VT_I16_: flatbuffers::VOffsetT = 8; + pub const VT_U16_: flatbuffers::VOffsetT = 10; + pub const VT_I32_: flatbuffers::VOffsetT = 12; + pub const VT_U32_: flatbuffers::VOffsetT = 14; + pub const VT_I64_: flatbuffers::VOffsetT = 16; + pub const VT_U64_: flatbuffers::VOffsetT = 18; + pub const VT_F32_: flatbuffers::VOffsetT = 20; + pub const VT_F64_: flatbuffers::VOffsetT = 22; + pub const VT_V8: flatbuffers::VOffsetT = 24; + pub const VT_VF64: 
flatbuffers::VOffsetT = 26; + + #[inline] + pub fn i8_(&'a self) -> i8 { + self._tab.get::(TypeAliases::VT_I8_, Some(0)).unwrap() + } + #[inline] + pub fn u8_(&'a self) -> u8 { + self._tab.get::(TypeAliases::VT_U8_, Some(0)).unwrap() + } + #[inline] + pub fn i16_(&'a self) -> i16 { + self._tab.get::(TypeAliases::VT_I16_, Some(0)).unwrap() + } + #[inline] + pub fn u16_(&'a self) -> u16 { + self._tab.get::(TypeAliases::VT_U16_, Some(0)).unwrap() + } + #[inline] + pub fn i32_(&'a self) -> i32 { + self._tab.get::(TypeAliases::VT_I32_, Some(0)).unwrap() + } + #[inline] + pub fn u32_(&'a self) -> u32 { + self._tab.get::(TypeAliases::VT_U32_, Some(0)).unwrap() + } + #[inline] + pub fn i64_(&'a self) -> i64 { + self._tab.get::(TypeAliases::VT_I64_, Some(0)).unwrap() + } + #[inline] + pub fn u64_(&'a self) -> u64 { + self._tab.get::(TypeAliases::VT_U64_, Some(0)).unwrap() + } + #[inline] + pub fn f32_(&'a self) -> f32 { + self._tab.get::(TypeAliases::VT_F32_, Some(0.0)).unwrap() + } + #[inline] + pub fn f64_(&'a self) -> f64 { + self._tab.get::(TypeAliases::VT_F64_, Some(0.0)).unwrap() + } + #[inline] + pub fn v8(&'a self) -> Option<&'a [i8]> { + self._tab.get::>>(TypeAliases::VT_V8, None).map(|v| v.safe_slice()) + } + #[inline] + pub fn vf64(&'a self) -> Option> { + self._tab.get::>>(TypeAliases::VT_VF64, None) + } +} + +pub struct TypeAliasesArgs<'a> { + pub i8_: i8, + pub u8_: u8, + pub i16_: i16, + pub u16_: u16, + pub i32_: i32, + pub u32_: u32, + pub i64_: i64, + pub u64_: u64, + pub f32_: f32, + pub f64_: f64, + pub v8: Option>>, + pub vf64: Option>>, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for TypeAliasesArgs<'a> { + fn default() -> Self { + TypeAliasesArgs { + i8_: 0, + u8_: 0, + i16_: 0, + u16_: 0, + i32_: 0, + u32_: 0, + i64_: 0, + u64_: 0, + f32_: 0.0, + f64_: 0.0, + v8: None, + vf64: None, + _phantom: PhantomData, + } + } +} +pub struct TypeAliasesBuilder<'a: 'b, 'b> { + fbb_: &'b mut 
flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> TypeAliasesBuilder<'a, 'b> { + #[inline] + pub fn add_i8_(&mut self, i8_: i8) { + self.fbb_.push_slot::(TypeAliases::VT_I8_, i8_, 0); + } + #[inline] + pub fn add_u8_(&mut self, u8_: u8) { + self.fbb_.push_slot::(TypeAliases::VT_U8_, u8_, 0); + } + #[inline] + pub fn add_i16_(&mut self, i16_: i16) { + self.fbb_.push_slot::(TypeAliases::VT_I16_, i16_, 0); + } + #[inline] + pub fn add_u16_(&mut self, u16_: u16) { + self.fbb_.push_slot::(TypeAliases::VT_U16_, u16_, 0); + } + #[inline] + pub fn add_i32_(&mut self, i32_: i32) { + self.fbb_.push_slot::(TypeAliases::VT_I32_, i32_, 0); + } + #[inline] + pub fn add_u32_(&mut self, u32_: u32) { + self.fbb_.push_slot::(TypeAliases::VT_U32_, u32_, 0); + } + #[inline] + pub fn add_i64_(&mut self, i64_: i64) { + self.fbb_.push_slot::(TypeAliases::VT_I64_, i64_, 0); + } + #[inline] + pub fn add_u64_(&mut self, u64_: u64) { + self.fbb_.push_slot::(TypeAliases::VT_U64_, u64_, 0); + } + #[inline] + pub fn add_f32_(&mut self, f32_: f32) { + self.fbb_.push_slot::(TypeAliases::VT_F32_, f32_, 0.0); + } + #[inline] + pub fn add_f64_(&mut self, f64_: f64) { + self.fbb_.push_slot::(TypeAliases::VT_F64_, f64_, 0.0); + } + #[inline] + pub fn add_v8(&mut self, v8: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(TypeAliases::VT_V8, v8); + } + #[inline] + pub fn add_vf64(&mut self, vf64: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(TypeAliases::VT_VF64, vf64); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TypeAliasesBuilder<'a, 'b> { + let start = _fbb.start_table(); + TypeAliasesBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +#[inline] +pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> { + flatbuffers::get_root::>(buf) +} + +#[inline] +pub fn 
get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> { + flatbuffers::get_size_prefixed_root::>(buf) +} + +pub const MONSTER_IDENTIFIER: &'static str = "MONS"; + +#[inline] +pub fn monster_buffer_has_identifier(buf: &[u8]) -> bool { + return flatbuffers::buffer_has_identifier(buf, MONSTER_IDENTIFIER, false); +} + +#[inline] +pub fn monster_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + return flatbuffers::buffer_has_identifier(buf, MONSTER_IDENTIFIER, true); +} + +pub const MONSTER_EXTENSION: &'static str = "mon"; + +#[inline] +pub fn finish_monster_buffer<'a, 'b>( + fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, + root: flatbuffers::WIPOffset>) { + fbb.finish(root, Some(MONSTER_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(MONSTER_IDENTIFIER)); +} +} // pub mod Example +} // pub mod MyGame + diff --git a/tests/namespace_test/namespace_test1_generated.rs b/tests/namespace_test/namespace_test1_generated.rs new file mode 100644 index 000000000000..96ab59a778ef --- /dev/null +++ b/tests/namespace_test/namespace_test1_generated.rs @@ -0,0 +1,231 @@ +pub mod namespace_a { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; +pub mod namespace_b { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +#[allow(non_camel_case_types)] +#[repr(i8)] +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum EnumInNestedNS { + A = 0, + B = 1, + C = 2 +} + +const ENUM_MIN_ENUM_IN_NESTED_N_S: i8 = 0; +const ENUM_MAX_ENUM_IN_NESTED_N_S: i8 = 2; + +impl<'a> flatbuffers::Follow<'a> for EnumInNestedNS { + type Inner = Self; + 
#[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::read_scalar_at::(buf, loc) + } +} + +impl flatbuffers::EndianScalar for EnumInNestedNS { + #[inline] + fn to_little_endian(self) -> Self { + let n = i8::to_le(self as i8); + let p = &n as *const i8 as *const EnumInNestedNS; + unsafe { *p } + } + #[inline] + fn from_little_endian(self) -> Self { + let n = i8::from_le(self as i8); + let p = &n as *const i8 as *const EnumInNestedNS; + unsafe { *p } + } +} + +impl flatbuffers::Push for EnumInNestedNS { + type Output = EnumInNestedNS; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + flatbuffers::emplace_scalar::(dst, *self); + } +} + +#[allow(non_camel_case_types)] +const ENUM_VALUES_ENUM_IN_NESTED_N_S:[EnumInNestedNS; 3] = [ + EnumInNestedNS::A, + EnumInNestedNS::B, + EnumInNestedNS::C +]; + +#[allow(non_camel_case_types)] +const ENUM_NAMES_ENUM_IN_NESTED_N_S:[&'static str; 3] = [ + "A", + "B", + "C" +]; + +pub fn enum_name_enum_in_nested_n_s(e: EnumInNestedNS) -> &'static str { + let index: usize = e as usize; + ENUM_NAMES_ENUM_IN_NESTED_N_S[index] +} + +// struct StructInNestedNS, aligned to 4 +#[repr(C, packed)] +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct StructInNestedNS { + a_: i32, + b_: i32, +} // pub struct StructInNestedNS +impl flatbuffers::SafeSliceAccess for StructInNestedNS {} +impl<'a> flatbuffers::Follow<'a> for StructInNestedNS { + type Inner = &'a StructInNestedNS; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + <&'a StructInNestedNS>::follow(buf, loc) + //flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'a> flatbuffers::Follow<'a> for &'a StructInNestedNS { + type Inner = &'a StructInNestedNS; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } +} +impl<'b> flatbuffers::Push for StructInNestedNS { + type Output = StructInNestedNS; + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + (&self).push(dst, 
_rest) + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} +impl<'b> flatbuffers::Push for &'b StructInNestedNS { + type Output = StructInNestedNS; + + #[inline] + fn push(&self, dst: &mut [u8], _rest: &[u8]) { + let src = unsafe { + ::std::slice::from_raw_parts(*self as *const StructInNestedNS as *const u8, self.size()) + }; + dst.copy_from_slice(src); + } + #[inline] + fn size(&self) -> usize { + ::std::mem::size_of::() + } +} + + +impl StructInNestedNS { + pub fn new<'a>(_a: i32, _b: i32) -> Self { + StructInNestedNS { + a_: _a.to_little_endian(), + b_: _b.to_little_endian(), + + } + } + pub fn a<'a>(&'a self) -> i32 { + self.a_.from_little_endian() + } + pub fn b<'a>(&'a self) -> i32 { + self.b_.from_little_endian() + } +} + +pub enum TableInNestedNSOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct TableInNestedNS<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for TableInNestedNS<'a> { + type Inner = TableInNestedNS<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> TableInNestedNS<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + TableInNestedNS { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TableInNestedNSArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = TableInNestedNSBuilder::new(_fbb); + builder.add_foo(args.foo); + builder.finish() + } + + pub const VT_FOO: flatbuffers::VOffsetT = 4; + + #[inline] + pub fn foo(&'a self) -> i32 { + self._tab.get::(TableInNestedNS::VT_FOO, Some(0)).unwrap() + } +} + +pub struct TableInNestedNSArgs<'a> { + pub foo: i32, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} 
+impl<'a> Default for TableInNestedNSArgs<'a> { + fn default() -> Self { + TableInNestedNSArgs { + foo: 0, + _phantom: PhantomData, + } + } +} +pub struct TableInNestedNSBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> TableInNestedNSBuilder<'a, 'b> { + #[inline] + pub fn add_foo(&mut self, foo: i32) { + self.fbb_.push_slot::(TableInNestedNS::VT_FOO, foo, 0); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInNestedNSBuilder<'a, 'b> { + let start = _fbb.start_table(); + TableInNestedNSBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +} // pub mod NamespaceB +} // pub mod NamespaceA + diff --git a/tests/namespace_test/namespace_test2_generated.rs b/tests/namespace_test/namespace_test2_generated.rs new file mode 100644 index 000000000000..b72f459fe0bb --- /dev/null +++ b/tests/namespace_test/namespace_test2_generated.rs @@ -0,0 +1,296 @@ +pub mod namespace_a { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +pub enum TableInFirstNSOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct TableInFirstNS<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for TableInFirstNS<'a> { + type Inner = TableInFirstNS<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> TableInFirstNS<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + TableInFirstNS { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 
'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TableInFirstNSArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = TableInFirstNSBuilder::new(_fbb); + if let Some(x) = args.foo_struct { builder.add_foo_struct(x); } + if let Some(x) = args.foo_table { builder.add_foo_table(x); } + builder.add_foo_enum(args.foo_enum); + builder.finish() + } + + pub const VT_FOO_TABLE: flatbuffers::VOffsetT = 4; + pub const VT_FOO_ENUM: flatbuffers::VOffsetT = 6; + pub const VT_FOO_STRUCT: flatbuffers::VOffsetT = 8; + + #[inline] + pub fn foo_table(&'a self) -> Option> { + self._tab.get::>>(TableInFirstNS::VT_FOO_TABLE, None) + } + #[inline] + pub fn foo_enum(&'a self) -> namespace_b::EnumInNestedNS { + self._tab.get::(TableInFirstNS::VT_FOO_ENUM, Some(namespace_b::EnumInNestedNS::A)).unwrap() + } + #[inline] + pub fn foo_struct(&'a self) -> Option<&'a namespace_b::StructInNestedNS> { + self._tab.get::(TableInFirstNS::VT_FOO_STRUCT, None) + } +} + +pub struct TableInFirstNSArgs<'a> { + pub foo_table: Option>>, + pub foo_enum: namespace_b::EnumInNestedNS, + pub foo_struct: Option<&'a namespace_b::StructInNestedNS>, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for TableInFirstNSArgs<'a> { + fn default() -> Self { + TableInFirstNSArgs { + foo_table: None, + foo_enum: namespace_b::EnumInNestedNS::A, + foo_struct: None, + _phantom: PhantomData, + } + } +} +pub struct TableInFirstNSBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> TableInFirstNSBuilder<'a, 'b> { + #[inline] + pub fn add_foo_table(&mut self, foo_table: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(TableInFirstNS::VT_FOO_TABLE, foo_table); + } + #[inline] + pub fn add_foo_enum(&mut self, foo_enum: namespace_b::EnumInNestedNS) { + self.fbb_.push_slot::(TableInFirstNS::VT_FOO_ENUM, foo_enum, namespace_b::EnumInNestedNS::A); + } + 
#[inline] + pub fn add_foo_struct(&mut self, foo_struct: &'b namespace_b::StructInNestedNS) { + self.fbb_.push_slot_always::<&namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, foo_struct); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInFirstNSBuilder<'a, 'b> { + let start = _fbb.start_table(); + TableInFirstNSBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +pub enum SecondTableInAOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct SecondTableInA<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for SecondTableInA<'a> { + type Inner = SecondTableInA<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> SecondTableInA<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + SecondTableInA { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args SecondTableInAArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = SecondTableInABuilder::new(_fbb); + if let Some(x) = args.refer_to_c { builder.add_refer_to_c(x); } + builder.finish() + } + + pub const VT_REFER_TO_C: flatbuffers::VOffsetT = 4; + + #[inline] + pub fn refer_to_c(&'a self) -> Option> { + self._tab.get::>>(SecondTableInA::VT_REFER_TO_C, None) + } +} + +pub struct SecondTableInAArgs<'a> { + pub refer_to_c: Option>>, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for SecondTableInAArgs<'a> { + fn default() -> Self { + SecondTableInAArgs { + refer_to_c: None, + _phantom: PhantomData, + } + } +} +pub struct 
SecondTableInABuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> SecondTableInABuilder<'a, 'b> { + #[inline] + pub fn add_refer_to_c(&mut self, refer_to_c: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(SecondTableInA::VT_REFER_TO_C, refer_to_c); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SecondTableInABuilder<'a, 'b> { + let start = _fbb.start_table(); + SecondTableInABuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +} // pub mod NamespaceA + +pub mod namespace_c { + #![allow(dead_code)] + #![allow(unused_imports)] + + use std::mem; + use std::marker::PhantomData; + use std::cmp::Ordering; + + extern crate flatbuffers; + use self::flatbuffers::EndianScalar; + +pub enum TableInCOffset {} +#[derive(Copy, Clone, Debug, PartialEq)] + +pub struct TableInC<'a> { + pub _tab: flatbuffers::Table<'a>, + _phantom: PhantomData<&'a ()>, +} + +impl<'a> flatbuffers::Follow<'a> for TableInC<'a> { + type Inner = TableInC<'a>; + #[inline] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { + _tab: flatbuffers::Table { buf: buf, loc: loc }, + _phantom: PhantomData, + } + } +} + +impl<'a> TableInC<'a> { + #[inline] + pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { + TableInC { + _tab: table, + _phantom: PhantomData, + } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( + _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, + args: &'args TableInCArgs<'args>) -> flatbuffers::WIPOffset> { + let mut builder = TableInCBuilder::new(_fbb); + if let Some(x) = args.refer_to_a2 { builder.add_refer_to_a2(x); } + if let Some(x) = args.refer_to_a1 { builder.add_refer_to_a1(x); } + builder.finish() + } + + pub const VT_REFER_TO_A1: flatbuffers::VOffsetT = 4; + pub 
const VT_REFER_TO_A2: flatbuffers::VOffsetT = 6; + + #[inline] + pub fn refer_to_a1(&'a self) -> Option> { + self._tab.get::>>(TableInC::VT_REFER_TO_A1, None) + } + #[inline] + pub fn refer_to_a2(&'a self) -> Option> { + self._tab.get::>>(TableInC::VT_REFER_TO_A2, None) + } +} + +pub struct TableInCArgs<'a> { + pub refer_to_a1: Option>>, + pub refer_to_a2: Option>>, + pub _phantom: PhantomData<&'a ()>, // pub for default trait +} +impl<'a> Default for TableInCArgs<'a> { + fn default() -> Self { + TableInCArgs { + refer_to_a1: None, + refer_to_a2: None, + _phantom: PhantomData, + } + } +} +pub struct TableInCBuilder<'a: 'b, 'b> { + fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, + start_: flatbuffers::WIPOffset, +} +impl<'a: 'b, 'b> TableInCBuilder<'a, 'b> { + #[inline] + pub fn add_refer_to_a1(&mut self, refer_to_a1: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(TableInC::VT_REFER_TO_A1, refer_to_a1); + } + #[inline] + pub fn add_refer_to_a2(&mut self, refer_to_a2: flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::>(TableInC::VT_REFER_TO_A2, refer_to_a2); + } + pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInCBuilder<'a, 'b> { + let start = _fbb.start_table(); + TableInCBuilder { + fbb_: _fbb, + start_: start, + } + } + pub fn finish(self) -> flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + flatbuffers::WIPOffset::new(o.value()) + } +} + +} // pub mod NamespaceC + diff --git a/tests/rust_usage_test/Cargo.lock b/tests/rust_usage_test/Cargo.lock new file mode 100644 index 000000000000..b0edc9328fda --- /dev/null +++ b/tests/rust_usage_test/Cargo.lock @@ -0,0 +1,285 @@ +[[package]] +name = "aho-corasick" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "atty" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+dependencies = [ + "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", + "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bencher" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cfg-if" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "env_logger" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "atty 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "flatbuffers" +version = "0.1.0" +dependencies = [ + "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fuchsia-zircon" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "humantime" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "lazy_static" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.42" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "log" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "memchr" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "quick-error" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "quickcheck" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "redox_syscall" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "redox_termios" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.6.5 
(registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "regex-syntax" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rust_usage_test" +version = "0.1.0" +dependencies = [ + "bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "flatbuffers 0.1.0", + "quickcheck 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "smallvec" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "termcolor" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "termion" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread_local" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ucd-util" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unreachable" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "utf8-ranges" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "wincolor" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum aho-corasick 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f0ba20154ea1f47ce2793322f049c5646cc6d0fa9759d5f333f286e507bf8080" +"checksum atty 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2fc4a1aa4c24c0718a250f0681885c1af91419d242f29eb8f2ab28502d80dbd1" +"checksum bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7dfdb4953a096c551ce9ace855a604d702e6e62d77fac690575ae347571717f5" +"checksum bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789" +"checksum cfg-if 
0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efe5c877e17a9c717a0bf3613b2709f723202c4e4675cc8f12926ded29bcb17e" +"checksum env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0e6e40ebb0e66918a37b38c7acab4e10d299e0463fe2af5d29b9cc86710cfd2a" +"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e" +"checksum lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e6412c5e2ad9584b0b8e979393122026cdd6d2a80b933f890dcd694ddbe73739" +"checksum libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)" = "b685088df2b950fccadf07a7187c8ef846a959c142338a48f9dc0b94517eb5f1" +"checksum log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "61bd98ae7f7b754bc53dca7d44b604f733c6bba044ea6f41bc8d89272d8161d2" +"checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d" +"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" +"checksum quickcheck 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c01babc5ffd48a2a83744b3024814bb46dfd4f2a4705ccb44b1b60e644fdcab7" +"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" +"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1" +"checksum 
redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" +"checksum regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13c93d55961981ba9226a213b385216f83ab43bd6ac53ab16b2eeb47e337cf4e" +"checksum regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05b06a75f5217880fc5e905952a42750bf44787e56a6c6d6852ed0992f5e1d54" +"checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d" +"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83" +"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" +"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963" +"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d" +"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" +"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" +"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +"checksum winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 
+"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +"checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767" diff --git a/tests/rust_usage_test/Cargo.toml b/tests/rust_usage_test/Cargo.toml new file mode 100644 index 000000000000..9392b12c0f77 --- /dev/null +++ b/tests/rust_usage_test/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "rust_usage_test" +version = "0.1.0" +authors = ["Robert Winslow ", "FlatBuffers Maintainers"] + +[dependencies] +flatbuffers = { path = "../../rust/flatbuffers" } + +[[bin]] +name = "monster_example" +path = "bin/monster_example.rs" + + +[dev-dependencies] +quickcheck = "0.6" +# TODO(rw): look into moving to criterion.rs +bencher = "0.1.5" + +[[bench]] +# setup for bencher +name = "flatbuffers_benchmarks" +harness = false diff --git a/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs b/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs new file mode 100644 index 000000000000..3ad45c287851 --- /dev/null +++ b/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs @@ -0,0 +1,218 @@ +/* + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#[macro_use] +extern crate bencher; +use bencher::Bencher; + +extern crate flatbuffers; + +#[path = "../../monster_test_generated.rs"] +mod monster_test_generated; +pub use monster_test_generated::my_game; + +fn traverse_canonical_buffer(bench: &mut Bencher) { + let owned_data = { + let mut builder = &mut flatbuffers::FlatBufferBuilder::new(); + create_serialized_example_with_generated_code(&mut builder, true); + builder.finished_data().to_vec() + }; + let data = &owned_data[..]; + let n = data.len() as u64; + bench.iter(|| { + traverse_serialized_example_with_generated_code(data); + }); + bench.bytes = n; +} + +fn create_canonical_buffer_then_reset(bench: &mut Bencher) { + let mut builder = &mut flatbuffers::FlatBufferBuilder::new(); + // warmup + create_serialized_example_with_generated_code(&mut builder, true); + let n = builder.finished_data().len() as u64; + builder.reset(); + + bench.iter(|| { + let _ = create_serialized_example_with_generated_code(&mut builder, true); + builder.reset(); + }); + + bench.bytes = n; +} + +#[inline(always)] +fn create_serialized_example_with_generated_code(builder: &mut flatbuffers::FlatBufferBuilder, finish: bool) -> usize{ + let s0 = builder.create_string("test1"); + let s1 = builder.create_string("test2"); + let t0_name = builder.create_string("Barney"); + let t1_name = builder.create_string("Fred"); + let t2_name = builder.create_string("Wilma"); + let t0 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{ + hp: 1000, + name: Some(t0_name), + ..Default::default() + }); + let t1 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{ + name: Some(t1_name), + ..Default::default() + }); + let t2 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{ + name: Some(t2_name), + ..Default::default() + }); + let mon = { + let name = builder.create_string("MyMonster"); + let fred_name = builder.create_string("Fred"); + let inventory = 
builder.create_vector_direct(&[0u8, 1, 2, 3, 4]); + let test4 = builder.create_vector_direct(&[my_game::example::Test::new(10, 20), + my_game::example::Test::new(30, 40)]); + let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8)); + let args = my_game::example::MonsterArgs{ + hp: 80, + mana: 150, + name: Some(name), + pos: Some(&pos), + test_type: my_game::example::Any::Monster, + test: Some(my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{ + name: Some(fred_name), + ..Default::default() + }).as_union_value()), + inventory: Some(inventory), + test4: Some(test4), + testarrayofstring: Some(builder.create_vector(&[s0, s1])), + testarrayoftables: Some(builder.create_vector(&[t0, t1, t2])), + ..Default::default() + }; + my_game::example::Monster::create(builder, &args) + }; + if finish { + my_game::example::finish_monster_buffer(builder, mon); + } + + builder.finished_data().len() + + // make it do some work + // if builder.finished_data().len() == 0 { panic!("bad benchmark"); } +} + +#[inline(always)] +fn blackbox(t: T) -> T { + // encapsulate this in case we need to turn it into a noop + bencher::black_box(t) +} + +#[inline(always)] +fn traverse_serialized_example_with_generated_code(bytes: &[u8]) { + let m = my_game::example::get_root_as_monster(bytes); + blackbox(m.hp()); + blackbox(m.mana()); + blackbox(m.name()); + let pos = m.pos().unwrap(); + blackbox(pos.x()); + blackbox(pos.y()); + blackbox(pos.z()); + blackbox(pos.test1()); + blackbox(pos.test2()); + let pos_test3 = pos.test3(); + blackbox(pos_test3.a()); + blackbox(pos_test3.b()); + blackbox(m.test_type()); + let table2 = m.test().unwrap(); + let monster2 = my_game::example::Monster::init_from_table(table2); + blackbox(monster2.name()); + blackbox(m.inventory()); + blackbox(m.test4()); + let testarrayoftables = m.testarrayoftables().unwrap(); + blackbox(testarrayoftables.get(0).hp()); + 
blackbox(testarrayoftables.get(0).name()); + blackbox(testarrayoftables.get(1).name()); + blackbox(testarrayoftables.get(2).name()); + let testarrayofstring = m.testarrayofstring().unwrap(); + blackbox(testarrayofstring.get(0)); + blackbox(testarrayofstring.get(1)); +} + +fn create_string_10(bench: &mut Bencher) { + let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20); + let mut i = 0; + bench.iter(|| { + builder.create_string("foobarbaz"); // zero-terminated -> 10 bytes + i += 1; + if i == 10000 { + builder.reset(); + i = 0; + } + }); + + bench.bytes = 10; +} + +fn create_string_100(bench: &mut Bencher) { + let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20); + let s_owned = (0..99).map(|_| "x").collect::(); + let s: &str = &s_owned; + + let mut i = 0; + bench.iter(|| { + builder.create_string(s); // zero-terminated -> 100 bytes + i += 1; + if i == 1000 { + builder.reset(); + i = 0; + } + }); + + bench.bytes = s.len() as u64; +} + +fn create_byte_vector_100_naive(bench: &mut Bencher) { + let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20); + let v_owned = (0u8..100).map(|i| i).collect::>(); + let v: &[u8] = &v_owned; + + let mut i = 0; + bench.iter(|| { + builder.create_vector(v); // zero-terminated -> 100 bytes + i += 1; + if i == 10000 { + builder.reset(); + i = 0; + } + }); + + bench.bytes = v.len() as u64; +} + +fn create_byte_vector_100_optimal(bench: &mut Bencher) { + let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20); + let v_owned = (0u8..100).map(|i| i).collect::>(); + let v: &[u8] = &v_owned; + + let mut i = 0; + bench.iter(|| { + builder.create_vector_direct(v); + i += 1; + if i == 10000 { + builder.reset(); + i = 0; + } + }); + + bench.bytes = v.len() as u64; +} + +benchmark_group!(benches, create_byte_vector_100_naive, create_byte_vector_100_optimal, traverse_canonical_buffer, create_canonical_buffer_then_reset, create_string_10, create_string_100); 
+benchmark_main!(benches); diff --git a/tests/rust_usage_test/src/lib.rs b/tests/rust_usage_test/src/lib.rs new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/tests/rust_usage_test/src/lib.rs @@ -0,0 +1 @@ + diff --git a/tests/rust_usage_test/test_bench_output.txt b/tests/rust_usage_test/test_bench_output.txt new file mode 100644 index 000000000000..cf93bdbb77e5 --- /dev/null +++ b/tests/rust_usage_test/test_bench_output.txt @@ -0,0 +1,184 @@ +running 170 tests +test builder_asserts::create_byte_string_should_panic_when_in_table ... ok +test builder_asserts::create_string_should_panic_when_in_table ... ok +test builder_asserts::end_table_should_panic_when_not_in_table ... ok +test builder_asserts::finished_bytes_should_panic_when_table_is_not_finished ... ok +test builder_asserts::push_struct_slot_should_panic_when_not_in_table ... ok +test builder_asserts::required_panics_when_field_not_set ... ok +test builder_collapses_into_vec ... ok +test builder_initializes_with_maximum_buffer_size ... ok +test byte_layouts::layout_01_basic_numbers ... ok +test byte_layouts::layout_01b_bigger_numbers ... ok +test byte_layouts::layout_02_1xbyte_vector ... ok +test byte_layouts::layout_03_2xbyte_vector ... ok +test byte_layouts::layout_03b_11xbyte_vector_matches_builder_size ... ok +test byte_layouts::layout_04_1xuint16_vector ... ok +test byte_layouts::layout_05_2xuint16_vector ... ok +test byte_layouts::layout_06_create_string ... ok +test byte_layouts::layout_06b_create_string_unicode ... ok +test byte_layouts::layout_06c_create_byte_string ... ok +test byte_layouts::layout_07_empty_vtable ... ok +test byte_layouts::layout_08_vtable_with_one_true_bool ... ok +test byte_layouts::layout_09_vtable_with_one_default_bool ... ok +test byte_layouts::layout_10_vtable_with_one_int16 ... ok +test byte_layouts::layout_11_vtable_with_two_int16 ... ok +test byte_layouts::layout_12_vtable_with_int16_and_bool ... 
ok +test byte_layouts::layout_12b_vtable_with_empty_vector ... ok +test byte_layouts::layout_12c_vtable_with_empty_vector_of_byte_and_some_scalars ... ok +test byte_layouts::layout_13_vtable_with_1_int16_and_2_vector_of_i16 ... ok +test byte_layouts::layout_14_vtable_with_1_struct_of_int8_and_int16_and_int32 ... ok +test byte_layouts::layout_15_vtable_with_1_vector_of_2_struct_2_int8 ... ok +test byte_layouts::layout_16_table_with_some_elements ... ok +test byte_layouts::layout_17_one_unfinished_table_and_one_finished_table ... ok +test byte_layouts::layout_18_a_bunch_of_bools ... ok +test byte_layouts::layout_19_three_bools ... ok +test byte_layouts::layout_20_some_floats ... ok +test byte_layouts::layout_21_vtable_defaults ... ok +test byte_layouts::layout_22_root ... ok +test byte_layouts::layout_23_varied_slots_and_root ... ok +test follow_impls::offset_to_byte_string ... ok +test follow_impls::offset_to_byte_vector ... ok +test follow_impls::offset_to_f32 ... ok +test follow_impls::offset_to_ref_u16 ... ok +test follow_impls::offset_to_ref_u8 ... ok +test follow_impls::offset_to_slice_of_u16 ... ok +test follow_impls::offset_to_string ... ok +test follow_impls::offset_to_struct ... ok +test follow_impls::offset_to_u16 ... ok +test follow_impls::offset_to_u8 ... ok +test follow_impls::offset_to_vector_of_u16 ... ok +test follow_impls::root_to_empty_table ... ok +test follow_impls::slice_of_struct_elements ... ok +test follow_impls::table_get_slot_scalar_u8 ... ok +test follow_impls::table_get_slot_scalar_u8_default_via_vtable_len ... ok +test follow_impls::table_get_slot_scalar_u8_default_via_vtable_zero ... ok +test follow_impls::table_get_slot_string_multiple_types ... ok +test follow_impls::table_get_slot_string_multiple_types_default_via_vtable_len ... ok +test follow_impls::table_get_slot_string_multiple_types_default_via_vtable_zero ... ok +test follow_impls::vector_of_offset_to_string_elements ... ok +test follow_impls::vector_of_struct_elements ... 
ok +test framing_format::test_size_prefixed_buffer ... ok +test generated_code_alignment_and_padding::vec3_is_aligned_to_mod_16 ... ok +test generated_code_alignment_and_padding::vec3_is_padded_to_mod_16 ... ok +test generated_code_asserts::monster_builder_fails_when_name_is_missing ... ok +test generated_constants::monster_file_extension ... ok +test generated_constants::monster_identifier ... ok +test generated_key_comparisons::struct_ability_key_compare_less_than ... ok +test generated_key_comparisons::struct_key_compare_less_than ... ok +test generated_key_comparisons::struct_key_compare_with_value ... ok +test generated_key_comparisons::table_key_compare_less_than ... ok +test generated_key_comparisons::table_key_compare_with_value ... ok +test push_impls::push_byte_slice_with_alignment ... ok +test push_impls::push_f64 ... ok +test push_impls::push_generated_struct ... ok +test push_impls::push_string ... ok +test push_impls::push_u64 ... ok +test push_impls::push_u8 ... ok +test push_impls::push_u8_slice_with_alignment ... ok +test read_examples_from_other_language_ports::gold_cpp_example_data_is_accessible_and_correct ... ok +test read_examples_from_other_language_ports::java_wire_example_data_is_accessible_and_correct ... ok +test read_examples_from_other_language_ports::java_wire_size_prefixed_example_data_is_accessible_and_correct ... ok +test roundtrip_generated_code::enum_default ... ok +test roundtrip_generated_code::enum_store ... ok +test roundtrip_generated_code::nested_flatbuffer_default ... ok +test roundtrip_generated_code::nested_flatbuffer_store ... ok +test roundtrip_generated_code::scalar_default ... ok +test roundtrip_generated_code::scalar_store ... ok +test roundtrip_generated_code::string_store ... ok +test roundtrip_generated_code::struct_default ... ok +test roundtrip_generated_code::struct_store ... ok +test roundtrip_generated_code::table_default ... ok +test roundtrip_generated_code::table_full_namespace_default ... 
ok +test roundtrip_generated_code::table_full_namespace_store ... ok +test roundtrip_generated_code::table_store ... ok +test roundtrip_generated_code::union_default ... ok +test roundtrip_generated_code::union_store ... ok +test roundtrip_generated_code::vector_of_bool_store ... ok +test roundtrip_generated_code::vector_of_f64_store ... ok +test roundtrip_generated_code::vector_of_string_store_helper_build ... ok +test roundtrip_generated_code::vector_of_string_store_manual_build ... ok +test roundtrip_generated_code::vector_of_struct_store ... ok +test roundtrip_generated_code::vector_of_table_store ... ok +test roundtrip_generated_code::vector_of_ubyte_store ... ok +test roundtrip_push_follow_scalars::fuzz_bool ... ok +test roundtrip_push_follow_scalars::fuzz_f32 ... ok +test roundtrip_push_follow_scalars::fuzz_f64 ... ok +test roundtrip_push_follow_scalars::fuzz_i16 ... ok +test roundtrip_push_follow_scalars::fuzz_i32 ... ok +test roundtrip_push_follow_scalars::fuzz_i64 ... ok +test roundtrip_push_follow_scalars::fuzz_i8 ... ok +test roundtrip_push_follow_scalars::fuzz_u16 ... ok +test roundtrip_push_follow_scalars::fuzz_u32 ... ok +test roundtrip_push_follow_scalars::fuzz_u64 ... ok +test roundtrip_push_follow_scalars::fuzz_u8 ... ok +test roundtrip_scalars::fuzz_bool ... ok +test roundtrip_scalars::fuzz_f32 ... ok +test roundtrip_scalars::fuzz_f64 ... ok +test roundtrip_scalars::fuzz_i16 ... ok +test roundtrip_scalars::fuzz_i32 ... ok +test roundtrip_scalars::fuzz_i64 ... ok +test roundtrip_scalars::fuzz_i8 ... ok +test roundtrip_scalars::fuzz_u16 ... ok +test roundtrip_scalars::fuzz_u32 ... ok +test roundtrip_scalars::fuzz_u64 ... ok +test roundtrip_scalars::fuzz_u8 ... ok +test roundtrip_table::fuzz_table_of_strings ... ok +test roundtrip_table::table_of_byte_strings_fuzz ... ok +test roundtrip_table::table_of_mixed_scalars_fuzz ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_bool ... 
ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_f32 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_f64 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_i16 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_i32 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_i64 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_i8 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_u16 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_u32 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_u64 ... ok +test roundtrip_table::table_of_vectors_of_scalars::fuzz_u8 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_f32 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_f64 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_i16 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_i32 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_i64 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_u16 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_u32 ... ok +test roundtrip_vectors::create_vector_direct::host_is_le::test_u64 ... ok +test roundtrip_vectors::create_vector_direct::test_bool ... ok +test roundtrip_vectors::create_vector_direct::test_i8 ... ok +test roundtrip_vectors::create_vector_direct::test_u8 ... ok +test roundtrip_vectors::scalar::easy_u8 ... ok +test roundtrip_vectors::scalar::fuzz_bool ... ok +test roundtrip_vectors::scalar::fuzz_f32 ... ok +test roundtrip_vectors::scalar::fuzz_f64 ... ok +test roundtrip_vectors::scalar::fuzz_i16 ... ok +test roundtrip_vectors::scalar::fuzz_i32 ... ok +test roundtrip_vectors::scalar::fuzz_i64 ... ok +test roundtrip_vectors::scalar::fuzz_i8 ... ok +test roundtrip_vectors::scalar::fuzz_u16 ... ok +test roundtrip_vectors::scalar::fuzz_u32 ... 
ok +test roundtrip_vectors::scalar::fuzz_u64 ... ok +test roundtrip_vectors::scalar::fuzz_u8 ... ok +test roundtrip_vectors::string_manual_build::fuzz ... ok +test roundtrip_vectors::ubyte::fuzz_manual_build ... ok +test vtable_deduplication::many_identical_tables_use_few_vtables ... ok +test vtable_deduplication::one_empty_table ... ok +test vtable_deduplication::two_empty_tables_are_deduplicated ... ok +test vtable_deduplication::two_tables_with_two_conveniently_sized_inline_elements_are_deduplicated ... ok +test write_and_read_examples::generated_code_creates_correct_example ... ok +test write_and_read_examples::generated_code_creates_correct_example_repeatedly_with_reset ... ok +test write_and_read_examples::library_code_creates_correct_example ... ok +test write_and_read_examples::library_code_creates_correct_example_repeatedly_with_reset ... ok + +test result: ok. 170 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + + +running 6 tests +test create_byte_vector_100_naive ... bench: 1,000 ns/iter (+/- 168) = 100 MB/s +test create_byte_vector_100_optimal ... bench: 24 ns/iter (+/- 3) = 4166 MB/s +test create_canonical_buffer_then_reset ... bench: 794 ns/iter (+/- 143) = 403 MB/s +test create_string_10 ... bench: 10 ns/iter (+/- 4) = 1000 MB/s +test create_string_100 ... bench: 23 ns/iter (+/- 4) = 4304 MB/s +test traverse_canonical_buffer ... bench: 146 ns/iter (+/- 18) = 2191 MB/s + +test result: ok. 0 passed; 0 failed; 0 ignored; 6 measured diff --git a/tests/rust_usage_test/tests/integration_test.rs b/tests/rust_usage_test/tests/integration_test.rs new file mode 100644 index 000000000000..6b8529c2bdd8 --- /dev/null +++ b/tests/rust_usage_test/tests/integration_test.rs @@ -0,0 +1,2473 @@ +/* + * + * Copyright 2018 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +extern crate quickcheck; + +extern crate flatbuffers; + +#[path = "../../monster_test_generated.rs"] +mod monster_test_generated; +pub use monster_test_generated::my_game; + +// Include simple random number generator to ensure results will be the +// same across platforms. +// http://en.wikipedia.org/wiki/Park%E2%80%93Miller_random_number_generator +struct LCG(u64); +impl LCG { + fn new() -> Self { + LCG { 0: 48271 } + } + fn next(&mut self) -> u64 { + let old = self.0; + self.0 = (self.0 * 279470273u64) % 4294967291u64; + old + } + fn reset(&mut self) { + self.0 = 48271 + } +} + +fn create_serialized_example_with_generated_code(builder: &mut flatbuffers::FlatBufferBuilder) { + let mon = { + let s0 = builder.create_string("test1"); + let s1 = builder.create_string("test2"); + let fred_name = builder.create_string("Fred"); + let inventory = builder.create_vector(&[0u8, 1, 2, 3, 4][..]); + let test4 = builder.create_vector(&[my_game::example::Test::new(10, 20), + my_game::example::Test::new(30, 40)]); + let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8)); + let args = my_game::example::MonsterArgs{ + hp: 80, + mana: 150, + name: Some(builder.create_string("MyMonster")), + pos: Some(&pos), + test_type: my_game::example::Any::Monster, + // TODO(rw): better offset ergonomics + test: Some(my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{ + name: Some(fred_name), + ..Default::default() + }).as_union_value()), + inventory: Some(inventory), + 
test4: Some(test4), + testarrayofstring: Some(builder.create_vector(&[s0, s1])), + ..Default::default() + }; + my_game::example::Monster::create(builder, &args) + }; + my_game::example::finish_monster_buffer(builder, mon); +} +fn create_serialized_example_with_library_code(builder: &mut flatbuffers::FlatBufferBuilder) { + let nested_union_mon = { + let name = builder.create_string("Fred"); + let table_start = builder.start_table(); + builder.push_slot_always(my_game::example::Monster::VT_NAME, name); + builder.end_table(table_start) + }; + let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8)); + let inv = builder.create_vector(&[0u8, 1, 2, 3, 4]); + + let test4 = builder.create_vector(&[my_game::example::Test::new(10, 20), + my_game::example::Test::new(30, 40)][..]); + + let name = builder.create_string("MyMonster"); + let testarrayofstring = builder.create_vector_of_strings(&["test1", "test2"][..]); + + // begin building + + let table_start = builder.start_table(); + builder.push_slot(my_game::example::Monster::VT_HP, 80i16, 100); + builder.push_slot_always(my_game::example::Monster::VT_NAME, name); + builder.push_slot_always(my_game::example::Monster::VT_POS, &pos); + builder.push_slot(my_game::example::Monster::VT_TEST_TYPE, my_game::example::Any::Monster, my_game::example::Any::NONE); + builder.push_slot_always(my_game::example::Monster::VT_TEST, nested_union_mon); + builder.push_slot_always(my_game::example::Monster::VT_INVENTORY, inv); + builder.push_slot_always(my_game::example::Monster::VT_TEST4, test4); + builder.push_slot_always(my_game::example::Monster::VT_TESTARRAYOFSTRING, testarrayofstring); + let root = builder.end_table(table_start); + builder.finish(root, Some(my_game::example::MONSTER_IDENTIFIER)); +} + +fn serialized_example_is_accessible_and_correct(bytes: &[u8], identifier_required: bool, size_prefixed: bool) -> Result<(), &'static str> { + if identifier_required { + 
let correct = if size_prefixed { + my_game::example::monster_size_prefixed_buffer_has_identifier(bytes) + } else { + my_game::example::monster_buffer_has_identifier(bytes) + }; + if !correct { + return Err("incorrect buffer identifier"); + } + } + let monster1 = if size_prefixed { + my_game::example::get_size_prefixed_root_as_monster(bytes) + } else { + my_game::example::get_root_as_monster(bytes) + }; + for m in vec![monster1] { + if m.hp() != 80 { assert_eq!(80, m.hp()); return Err("bad m.hp"); } + if m.mana() != 150 { return Err("bad m.mana"); } + match m.name() { + Some("MyMonster") => { } + _ => { return Err("bad m.name"); } + } + let pos = match m.pos() { + None => { return Err("bad m.pos"); } + Some(x) => { x } + }; + if pos.x() != 1.0f32 { return Err("bad pos.x"); } + if pos.y() != 2.0f32 { return Err("bad pos.y"); } + if pos.z() != 3.0f32 { return Err("bad pos.z"); } + if pos.test1() != 3.0f64 { return Err("bad pos.test1"); } + if pos.test2() != my_game::example::Color::Green { return Err("bad pos.test2"); } + + let pos_test3 = pos.test3(); + if pos_test3.a() != 5i16 { return Err("bad pos_test3.a"); } + if pos_test3.b() != 6i8 { return Err("bad pos_test3.b"); } + + match m.enemy() { + None => { + println!("missing m.enemy, most language ports do not generate this yet"); + } + Some(e) => { + match e.name() { + Some("Fred") => { /* woot */ } + _ => { println!("missing m.enemy.name, most language ports do not generate this yet") } + } + } + } + + if m.test_type() != my_game::example::Any::Monster { return Err("bad m.test_type"); } + + let table2 = match m.test() { + None => { return Err("bad m.test"); } + Some(x) => { x } + }; + + let monster2 = my_game::example::Monster::init_from_table(table2); + + match monster2.name() { + Some("Fred") => { } + _ => { return Err("bad monster2.name"); } + } + + let inv: &[u8] = match m.inventory() { + None => { return Err("bad m.inventory"); } + Some(x) => { x } + }; + if inv.len() != 5 { return Err("bad m.inventory len"); 
} + let invsum: u8 = inv.iter().sum(); + if invsum != 10 { return Err("bad m.inventory sum"); } + + { + let test4 = match m.test4() { + None => { return Err("bad m.test4"); } + Some(x) => { x } + }; + if test4.len() != 2 { return Err("bad m.test4 len"); } + + let x = test4[0]; + let y = test4[1]; + let xy_sum = x.a() as i32 + x.b() as i32 + y.a() as i32 + y.b() as i32; + if xy_sum != 100 { return Err("bad m.test4 item sum"); } + } + + { + match m.testarrayoftables() { + None => { println!("not all monster examples have testarrayoftables, skipping"); } + Some(x) => { + println!("foo: {:?}", x.get(0).name()); + if x.get(0).name() != Some("Barney") { return Err("bad testarrayoftables.get(0).name()") } + if x.get(1).name() != Some("Frodo") { return Err("bad testarrayoftables.get(1).name()") } + if x.get(2).name() != Some("Wilma") { return Err("bad testarrayoftables.get(2).name()") } + } + } + } + + let testarrayofstring = match m.testarrayofstring() { + None => { return Err("bad m.testarrayofstring"); } + Some(x) => { x } + }; + if testarrayofstring.len() != 2 { return Err("bad monster.testarrayofstring len"); } + if testarrayofstring.get(0) != "test1" { return Err("bad monster.testarrayofstring.get(0)"); } + if testarrayofstring.get(1) != "test2" { return Err("bad monster.testarrayofstring.get(1)"); } + } + Ok(()) +} + +#[test] +fn builder_initializes_with_maximum_buffer_size() { + flatbuffers::FlatBufferBuilder::new_with_capacity(flatbuffers::FLATBUFFERS_MAX_BUFFER_SIZE); +} + +#[test] +fn builder_collapses_into_vec() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + create_serialized_example_with_generated_code(&mut b); + let (backing_buf, head) = b.collapse(); + serialized_example_is_accessible_and_correct(&backing_buf[head..], true, false).unwrap(); +} + +#[cfg(test)] +mod generated_constants { + extern crate flatbuffers; + use super::my_game; + + #[test] + fn monster_identifier() { + assert_eq!("MONS", my_game::example::MONSTER_IDENTIFIER); + } + + #[test] 
+ fn monster_file_extension() { + assert_eq!("mon", my_game::example::MONSTER_EXTENSION); + } +} + +#[cfg(test)] +mod roundtrip_generated_code { + extern crate flatbuffers; + + use super::my_game; + + fn build_mon<'a, 'b>(builder: &'a mut flatbuffers::FlatBufferBuilder, args: &'b my_game::example::MonsterArgs) -> my_game::example::Monster<'a> { + let mon = my_game::example::Monster::create(builder, &args); + my_game::example::finish_monster_buffer(builder, mon); + my_game::example::get_root_as_monster(builder.finished_data()) + } + + #[test] + fn scalar_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{hp: 123, name: Some(name), ..Default::default()}); + assert_eq!(m.hp(), 123); + } + #[test] + fn scalar_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert_eq!(m.hp(), 100); + } + #[test] + fn string_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foobar"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert_eq!(m.name(), Some("foobar")); + } + #[test] + fn struct_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + pos: Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0, + my_game::example::Color::Green, + &my_game::example::Test::new(98, 99))), + ..Default::default() + }); + assert_eq!(m.pos(), Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0, + my_game::example::Color::Green, + &my_game::example::Test::new(98, 99)))); + } + #[test] + fn struct_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = 
build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert_eq!(m.pos(), None); + } + #[test] + fn enum_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), color: my_game::example::Color::Red, ..Default::default()}); + assert_eq!(m.color(), my_game::example::Color::Red); + } + #[test] + fn enum_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert_eq!(m.color(), my_game::example::Color::Blue); + } + #[test] + fn union_store() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + { + let name_inner = b.create_string("foo"); + let name_outer = b.create_string("bar"); + + let inner = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name_inner), + ..Default::default() + }); + let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name_outer), + test_type: my_game::example::Any::Monster, + test: Some(inner.as_union_value()), + ..Default::default() + }); + my_game::example::finish_monster_buffer(b, outer); + } + + let mon = my_game::example::get_root_as_monster(b.finished_data()); + assert_eq!(mon.name(), Some("bar")); + assert_eq!(mon.test_type(), my_game::example::Any::Monster); + assert_eq!(my_game::example::Monster::init_from_table(mon.test().unwrap()).name(), + Some("foo")); + assert_eq!(mon.test_as_monster().unwrap().name(), Some("foo")); + assert_eq!(mon.test_as_test_simple_table_with_enum(), None); + assert_eq!(mon.test_as_my_game___example_2___monster(), None); + } + #[test] + fn union_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), 
..Default::default()}); + assert_eq!(m.test_type(), my_game::example::Any::NONE); + assert_eq!(m.test(), None); + } + #[test] + fn table_full_namespace_store() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + { + let name_inner = b.create_string("foo"); + let name_outer = b.create_string("bar"); + + let inner = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name_inner), + ..Default::default() + }); + let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name_outer), + enemy: Some(inner), + ..Default::default() + }); + my_game::example::finish_monster_buffer(b, outer); + } + + let mon = my_game::example::get_root_as_monster(b.finished_data()); + assert_eq!(mon.name(), Some("bar")); + assert_eq!(mon.enemy().unwrap().name(), Some("foo")); + } + #[test] + fn table_full_namespace_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert_eq!(m.enemy(), None); + } + #[test] + fn table_store() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + { + let id_inner = b.create_string("foo"); + let name_outer = b.create_string("bar"); + + let inner = my_game::example::Stat::create(b, &my_game::example::StatArgs{ + id: Some(id_inner), + ..Default::default() + }); + let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name_outer), + testempty: Some(inner), + ..Default::default() + }); + my_game::example::finish_monster_buffer(b, outer); + } + + let mon = my_game::example::get_root_as_monster(b.finished_data()); + assert_eq!(mon.name(), Some("bar")); + assert_eq!(mon.testempty().unwrap().id(), Some("foo")); + } + #[test] + fn table_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: 
Some(name), ..Default::default()}); + assert_eq!(m.testempty(), None); + } + #[test] + fn nested_flatbuffer_store() { + let b0 = { + let mut b0 = flatbuffers::FlatBufferBuilder::new(); + let args = my_game::example::MonsterArgs{ + hp: 123, + name: Some(b0.create_string("foobar")), + ..Default::default() + }; + let mon = my_game::example::Monster::create(&mut b0, &args); + my_game::example::finish_monster_buffer(&mut b0, mon); + b0 + }; + + let b1 = { + let mut b1 = flatbuffers::FlatBufferBuilder::new(); + let args = my_game::example::MonsterArgs{ + testnestedflatbuffer: Some(b1.create_vector(b0.finished_data())), + name: Some(b1.create_string("foo")), + ..Default::default() + }; + let mon = my_game::example::Monster::create(&mut b1, &args); + my_game::example::finish_monster_buffer(&mut b1, mon); + b1 + }; + + let m = my_game::example::get_root_as_monster(b1.finished_data()); + + assert!(m.testnestedflatbuffer().is_some()); + assert_eq!(m.testnestedflatbuffer().unwrap(), b0.finished_data()); + + let m2_a = my_game::example::get_root_as_monster(m.testnestedflatbuffer().unwrap()); + assert_eq!(m2_a.hp(), 123); + assert_eq!(m2_a.name(), Some("foobar")); + + assert!(m.testnestedflatbuffer_nested_flatbuffer().is_some()); + let m2_b = m.testnestedflatbuffer_nested_flatbuffer().unwrap(); + + assert_eq!(m2_b.hp(), 123); + assert_eq!(m2_b.name(), Some("foobar")); + } + #[test] + fn nested_flatbuffer_default() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()}); + assert!(m.testnestedflatbuffer().is_none()); + } + #[test] + fn vector_of_string_store_helper_build() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let v = b.create_vector_of_strings(&["foobar", "baz"]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + testarrayofstring: Some(v), 
..Default::default()}); + assert_eq!(m.testarrayofstring().unwrap().len(), 2); + assert_eq!(m.testarrayofstring().unwrap().get(0), "foobar"); + assert_eq!(m.testarrayofstring().unwrap().get(1), "baz"); + } + #[test] + fn vector_of_string_store_manual_build() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let s0 = b.create_string("foobar"); + let s1 = b.create_string("baz"); + let v = b.create_vector(&[s0, s1]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + testarrayofstring: Some(v), ..Default::default()}); + assert_eq!(m.testarrayofstring().unwrap().len(), 2); + assert_eq!(m.testarrayofstring().unwrap().get(0), "foobar"); + assert_eq!(m.testarrayofstring().unwrap().get(1), "baz"); + } + #[test] + fn vector_of_ubyte_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let v = b.create_vector(&[123u8, 234u8][..]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + inventory: Some(v), ..Default::default()}); + assert_eq!(m.inventory().unwrap(), &[123, 234][..]); + } + #[test] + fn vector_of_bool_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let v = b.create_vector::(&[false, true, false, true][..]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + testarrayofbools: Some(v), ..Default::default()}); + assert_eq!(m.testarrayofbools().unwrap(), &[false, true, false, true][..]); + } + #[test] + fn vector_of_f64_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let v = b.create_vector::(&[3.14159265359][..]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + vector_of_doubles: Some(v), ..Default::default()}); + assert_eq!(m.vector_of_doubles().unwrap().len(), 1); + assert_eq!(m.vector_of_doubles().unwrap().get(0), 3.14159265359f64); + } + 
#[test] + fn vector_of_struct_store() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let v = b.create_vector::(&[my_game::example::Test::new(127, -128), my_game::example::Test::new(3, 123)][..]); + let name = b.create_string("foo"); + let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + name: Some(name), + test4: Some(v), ..Default::default()}); + assert_eq!(m.test4().unwrap(), &[my_game::example::Test::new(127, -128), my_game::example::Test::new(3, 123)][..]); + } + // TODO(rw) this passes, but I don't want to change the monster test schema right now + // #[test] + // fn vector_of_enum_store() { + // let mut b = flatbuffers::FlatBufferBuilder::new(); + // let v = b.create_vector::(&[my_game::example::Color::Red, my_game::example::Color::Green][..]); + // let name = b.create_string("foo"); + // let m = build_mon(&mut b, &my_game::example::MonsterArgs{ + // name: Some(name), + // vector_of_enum: Some(v), ..Default::default()}); + // assert_eq!(m.vector_of_enum().unwrap().len(), 2); + // assert_eq!(m.vector_of_enum().unwrap().get(0), my_game::example::Color::Red); + // assert_eq!(m.vector_of_enum().unwrap().get(1), my_game::example::Color::Green); + // } + #[test] + fn vector_of_table_store() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + let t0 = { + let name = b.create_string("foo"); + let args = my_game::example::MonsterArgs{hp: 55, name: Some(name), ..Default::default()}; + my_game::example::Monster::create(b, &args) + }; + let t1 = { + let name = b.create_string("bar"); + let args = my_game::example::MonsterArgs{name: Some(name), ..Default::default()}; + my_game::example::Monster::create(b, &args) + }; + let v = b.create_vector(&[t0, t1][..]); + let name = b.create_string("foo"); + let m = build_mon(b, &my_game::example::MonsterArgs{ + name: Some(name), + testarrayoftables: Some(v), ..Default::default()}); + assert_eq!(m.testarrayoftables().unwrap().len(), 2); + assert_eq!(m.testarrayoftables().unwrap().get(0).hp(), 55); + 
assert_eq!(m.testarrayoftables().unwrap().get(0).name(), Some("foo")); + assert_eq!(m.testarrayoftables().unwrap().get(1).hp(), 100); + assert_eq!(m.testarrayoftables().unwrap().get(1).name(), Some("bar")); + } +} + +#[cfg(test)] +mod generated_code_alignment_and_padding { + extern crate flatbuffers; + use super::my_game; + + #[test] + fn vec3_is_padded_to_mod_16() { + assert_eq!(::std::mem::size_of::() % 16, 0); + } + + #[test] + fn vec3_is_aligned_to_mod_16() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + { + let name = b.create_string("foo"); + let mon = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{ + name: Some(name), + pos: Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0, + my_game::example::Color::Green, + &my_game::example::Test::new(98, 99))), + ..Default::default()}); + my_game::example::finish_monster_buffer(b, mon); + } + let buf = b.finished_data(); + let mon = my_game::example::get_root_as_monster(buf); + let vec3 = mon.pos().unwrap(); + + let start_ptr = buf.as_ptr() as usize; + let vec3_ptr = vec3 as *const my_game::example::Vec3 as usize; + + assert!(vec3_ptr > start_ptr); + assert_eq!((vec3_ptr - start_ptr) % 16, 0); + } +} + +#[cfg(test)] +mod roundtrip_vectors { + + #[cfg(test)] + mod scalar { + extern crate quickcheck; + extern crate flatbuffers; + + const N: u64 = 20; + + fn prop(xs: Vec) { + use flatbuffers::Follow; + + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(xs.len(), ::std::mem::size_of::()); + for i in (0..xs.len()).rev() { + b.push::(xs[i]); + } + let vecend = b.end_vector::(xs.len()); + b.finish_minimal(vecend); + + let buf = b.finished_data(); + + let got = >::follow(buf, 0); + assert_eq!(got, &xs[..]); + } + + #[test] + fn easy_u8() { + prop::(vec![]); + prop::(vec![1u8]); + prop::(vec![1u8, 2u8]); + prop::(vec![1u8, 2u8, 3u8]); + prop::(vec![1u8, 2u8, 3u8, 4u8]); + } + + #[test] + fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as 
fn(Vec<_>)); } + #[test] + fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + #[test] + fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(Vec<_>)); } + } + + #[cfg(test)] + mod create_vector_direct { + extern crate quickcheck; + extern crate flatbuffers; + + const N: u64 = 20; + + // This uses a macro because lifetimes for the trait-bounded function get too + // complicated. + macro_rules! 
impl_prop { + ($test_name:ident, $fn_name:ident, $ty:ident) => ( + fn $fn_name(xs: Vec<$ty>) { + use flatbuffers::Follow; + + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.create_vector_direct(&xs[..]); + let buf = b.unfinished_data(); + + let got = >::follow(&buf[..], 0).safe_slice(); + assert_eq!(got, &xs[..]); + } + #[test] + fn $test_name() { quickcheck::QuickCheck::new().max_tests(N).quickcheck($fn_name as fn(Vec<_>)); } + ) + } + + impl_prop!(test_bool, prop_bool, bool); + impl_prop!(test_u8, prop_u8, u8); + impl_prop!(test_i8, prop_i8, i8); + + #[cfg(test)] + #[cfg(target_endian = "little")] + mod host_is_le { + const N: u64 = 20; + use super::flatbuffers; + use super::quickcheck; + impl_prop!(test_u16, prop_u16, u16); + impl_prop!(test_u32, prop_u32, u32); + impl_prop!(test_u64, prop_u64, u64); + impl_prop!(test_i16, prop_i16, i16); + impl_prop!(test_i32, prop_i32, i32); + impl_prop!(test_i64, prop_i64, i64); + impl_prop!(test_f32, prop_f32, f32); + impl_prop!(test_f64, prop_f64, f64); + } + } + + #[cfg(test)] + mod string_manual_build { + extern crate quickcheck; + extern crate flatbuffers; + + fn prop(xs: Vec) { + use flatbuffers::Follow; + + let mut b = flatbuffers::FlatBufferBuilder::new(); + let mut offsets = Vec::new(); + for s in xs.iter().rev() { + offsets.push(b.create_string(s.as_str())); + } + + b.start_vector(flatbuffers::SIZE_UOFFSET, xs.len()); + for &i in offsets.iter() { + b.push(i); + } + let vecend = b.end_vector::>(xs.len()); + + b.finish_minimal(vecend); + + let buf = b.finished_data(); + let got = >>>::follow(buf, 0); + + assert_eq!(got.len(), xs.len()); + for i in 0..xs.len() { + assert_eq!(got.get(i), &xs[i][..]); + } + } + + #[test] + fn fuzz() { + quickcheck::QuickCheck::new().max_tests(20).quickcheck(prop as fn(Vec<_>)); + } + } + + #[cfg(test)] + mod ubyte { + extern crate quickcheck; + extern crate flatbuffers; + + #[test] + fn fuzz_manual_build() { + fn prop(vec: Vec) { + let xs = &vec[..]; + + let mut b1 = 
flatbuffers::FlatBufferBuilder::new(); + b1.start_vector(flatbuffers::SIZE_U8, xs.len()); + + for i in (0..xs.len()).rev() { + b1.push(xs[i]); + } + b1.end_vector::<&u8>(xs.len()); + + let mut b2 = flatbuffers::FlatBufferBuilder::new(); + b2.create_vector(xs); + assert_eq!(b1.unfinished_data(), b2.unfinished_data()); + } + quickcheck::QuickCheck::new().max_tests(20).quickcheck(prop as fn(Vec<_>)); + } + } +} + +#[cfg(test)] +mod framing_format { + extern crate flatbuffers; + + use super::my_game; + + #[test] + fn test_size_prefixed_buffer() { + // Create size prefixed buffer. + let mut b = flatbuffers::FlatBufferBuilder::new(); + let args = &my_game::example::MonsterArgs{ + mana: 200, + hp: 300, + name: Some(b.create_string("bob")), + ..Default::default() + }; + let mon = my_game::example::Monster::create(&mut b, &args); + b.finish_size_prefixed(mon, None); + + // Access it. + let buf = b.finished_data(); + let m = flatbuffers::get_size_prefixed_root::(buf); + assert_eq!(m.mana(), 200); + assert_eq!(m.hp(), 300); + assert_eq!(m.name(), Some("bob")); + } +} + +#[cfg(test)] +mod roundtrip_table { + use std::collections::HashMap; + + extern crate flatbuffers; + extern crate quickcheck; + + use super::LCG; + + #[test] + fn table_of_mixed_scalars_fuzz() { + // Values we're testing against: chosen to ensure no bits get chopped + // off anywhere, and also be different from eachother. + let bool_val: bool = true; + let char_val: i8 = -127; // 0x81 + let uchar_val: u8 = 0xFF; + let short_val: i16 = -32222; // 0x8222; + let ushort_val: u16 = 0xFEEE; + let int_val: i32 = unsafe { ::std::mem::transmute(0x83333333u32) }; + let uint_val: u32 = 0xFDDDDDDD; + let long_val: i64 = unsafe { ::std::mem::transmute(0x8444444444444444u64) }; // TODO: byte literal? 
+ let ulong_val: u64 = 0xFCCCCCCCCCCCCCCCu64; + let float_val: f32 = 3.14159; + let double_val: f64 = 3.14159265359; + + let test_value_types_max: isize = 11; + let max_fields_per_object: flatbuffers::VOffsetT = 100; + let num_fuzz_objects: isize = 1000; // The higher, the more thorough :) + + let mut builder = flatbuffers::FlatBufferBuilder::new(); + let mut lcg = LCG::new(); + + let mut objects: Vec = vec![0; num_fuzz_objects as usize]; + + // Generate num_fuzz_objects random objects each consisting of + // fields_per_object fields, each of a random type. + for i in 0..(num_fuzz_objects as usize) { + let fields_per_object = (lcg.next() % (max_fields_per_object as u64)) as flatbuffers::VOffsetT; + let start = builder.start_table(); + + for j in 0..fields_per_object { + let choice = lcg.next() % (test_value_types_max as u64); + + let f = flatbuffers::field_index_to_field_offset(j); + + match choice { + 0 => {builder.push_slot::(f, bool_val, false);} + 1 => {builder.push_slot::(f, char_val, 0);} + 2 => {builder.push_slot::(f, uchar_val, 0);} + 3 => {builder.push_slot::(f, short_val, 0);} + 4 => {builder.push_slot::(f, ushort_val, 0);} + 5 => {builder.push_slot::(f, int_val, 0);} + 6 => {builder.push_slot::(f, uint_val, 0);} + 7 => {builder.push_slot::(f, long_val, 0);} + 8 => {builder.push_slot::(f, ulong_val, 0);} + 9 => {builder.push_slot::(f, float_val, 0.0);} + 10 => {builder.push_slot::(f, double_val, 0.0);} + _ => { panic!("unknown choice: {}", choice); } + } + } + objects[i] = builder.end_table(start).value(); + } + + // Do some bookkeeping to generate stats on fuzzes: + let mut stats: HashMap = HashMap::new(); + let mut values_generated: u64 = 0; + + // Embrace PRNG determinism: + lcg.reset(); + + // Test that all objects we generated are readable and return the + // expected values. 
We generate random objects in the same order + // so this is deterministic: + for i in 0..(num_fuzz_objects as usize) { + let table = { + let buf = builder.unfinished_data(); + let loc = buf.len() as flatbuffers::UOffsetT - objects[i]; + flatbuffers::Table::new(buf, loc as usize) + }; + + let fields_per_object = (lcg.next() % (max_fields_per_object as u64)) as flatbuffers::VOffsetT; + for j in 0..fields_per_object { + let choice = lcg.next() % (test_value_types_max as u64); + + *stats.entry(choice).or_insert(0) += 1; + values_generated += 1; + + let f = flatbuffers::field_index_to_field_offset(j); + + match choice { + 0 => { assert_eq!(bool_val, table.get::(f, Some(false)).unwrap()); } + 1 => { assert_eq!(char_val, table.get::(f, Some(0)).unwrap()); } + 2 => { assert_eq!(uchar_val, table.get::(f, Some(0)).unwrap()); } + 3 => { assert_eq!(short_val, table.get::(f, Some(0)).unwrap()); } + 4 => { assert_eq!(ushort_val, table.get::(f, Some(0)).unwrap()); } + 5 => { assert_eq!(int_val, table.get::(f, Some(0)).unwrap()); } + 6 => { assert_eq!(uint_val, table.get::(f, Some(0)).unwrap()); } + 7 => { assert_eq!(long_val, table.get::(f, Some(0)).unwrap()); } + 8 => { assert_eq!(ulong_val, table.get::(f, Some(0)).unwrap()); } + 9 => { assert_eq!(float_val, table.get::(f, Some(0.0)).unwrap()); } + 10 => { assert_eq!(double_val, table.get::(f, Some(0.0)).unwrap()); } + _ => { panic!("unknown choice: {}", choice); } + } + } + } + + // Assert that we tested all the fuzz cases enough: + let min_tests_per_choice = 1000; + assert!(values_generated > 0); + assert!(min_tests_per_choice > 0); + for i in 0..test_value_types_max as u64 { + assert!(stats[&i] >= min_tests_per_choice, + format!("inadequately-tested fuzz case: {}", i)); + } + } + + #[test] + fn table_of_byte_strings_fuzz() { + fn prop(vec: Vec>) { + use flatbuffers::field_index_to_field_offset as fi2fo; + use flatbuffers::Follow; + + let xs = &vec[..]; + + // build + let mut b = flatbuffers::FlatBufferBuilder::new(); + let 
str_offsets: Vec> = xs.iter().map(|s| b.create_byte_string(&s[..])).collect(); + let table_start = b.start_table(); + + for i in 0..xs.len() { + b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), str_offsets[i]); + } + let root = b.end_table(table_start); + b.finish_minimal(root); + + // use + let buf = b.finished_data(); + let tab = >::follow(buf, 0); + + for i in 0..xs.len() { + let v = tab.get::>(fi2fo(i as flatbuffers::VOffsetT), None); + assert_eq!(v, Some(&xs[i][..])); + } + } + prop(vec![vec![1,2,3]]); + + let n = 20; + quickcheck::QuickCheck::new().max_tests(n).quickcheck(prop as fn(Vec<_>)); + } + + #[test] + fn fuzz_table_of_strings() { + fn prop(vec: Vec) { + use flatbuffers::field_index_to_field_offset as fi2fo; + use flatbuffers::Follow; + + let xs = &vec[..]; + + // build + let mut b = flatbuffers::FlatBufferBuilder::new(); + let str_offsets: Vec> = xs.iter().map(|s| b.create_string(&s[..])).collect(); + let table_start = b.start_table(); + + for i in 0..xs.len() { + b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), str_offsets[i]); + } + let root = b.end_table(table_start); + b.finish_minimal(root); + + // use + let buf = b.finished_data(); + let tab = >::follow(buf, 0); + + for i in 0..xs.len() { + let v = tab.get::>(fi2fo(i as flatbuffers::VOffsetT), None); + assert_eq!(v, Some(&xs[i][..])); + } + } + let n = 20; + quickcheck::QuickCheck::new().max_tests(n).quickcheck(prop as fn(Vec)); + } + + mod table_of_vectors_of_scalars { + extern crate flatbuffers; + extern crate quickcheck; + + const N: u64 = 20; + + fn prop<'a, T: flatbuffers::Follow<'a> + 'a + flatbuffers::EndianScalar + flatbuffers::Push + ::std::fmt::Debug>(vecs: Vec>) { + use flatbuffers::field_index_to_field_offset as fi2fo; + use flatbuffers::Follow; + + // build + let mut b = flatbuffers::FlatBufferBuilder::new(); + let mut offs = vec![]; + for vec in &vecs { + b.start_vector(vec.len(), ::std::mem::size_of::()); + + let xs = &vec[..]; + for i in (0..xs.len()).rev() { + 
b.push::(xs[i]); + } + let vecend = b.end_vector::(xs.len()); + offs.push(vecend); + } + + let table_start = b.start_table(); + + for i in 0..vecs.len() { + b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), offs[i]); + } + let root = b.end_table(table_start); + b.finish_minimal(root); + + // use + let buf = b.finished_data(); + let tab = >::follow(buf, 0); + + for i in 0..vecs.len() { + let got = tab.get::>(fi2fo(i as flatbuffers::VOffsetT), None); + assert!(got.is_some()); + let got2 = got.unwrap(); + assert_eq!(&vecs[i][..], got2); + } + } + + #[test] + fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + + #[test] + fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + + #[test] + fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + + #[test] + fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + #[test] + fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec>)); } + } +} + +#[cfg(test)] +mod roundtrip_scalars { + extern crate flatbuffers; + extern crate quickcheck; + + const N: u64 = 1000; + + fn prop(x: T) { + let mut buf = vec![0u8; ::std::mem::size_of::()]; + flatbuffers::emplace_scalar(&mut buf[..], x); + let y = flatbuffers::read_scalar(&buf[..]); + 
assert_eq!(x, y); + } + + #[test] + fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + + #[test] + fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + + #[test] + fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + + #[test] + fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + + #[test] + fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } + #[test] + fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop:: as fn(_)); } +} + +#[cfg(test)] +mod roundtrip_push_follow_scalars { + extern crate flatbuffers; + extern crate quickcheck; + + use flatbuffers::Push; + + const N: u64 = 1000; + + // This uses a macro because lifetimes for a trait-bounded function get too + // complicated. + macro_rules! 
impl_prop { + ($fn_name:ident, $ty:ident) => ( + fn $fn_name(x: $ty) { + let mut buf = vec![0u8; ::std::mem::size_of::<$ty>()]; + x.push(&mut buf[..], &[][..]); + let fs: flatbuffers::FollowStart<$ty> = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&buf[..], 0), x); + } + ) + } + + impl_prop!(prop_bool, bool); + impl_prop!(prop_u8, u8); + impl_prop!(prop_i8, i8); + impl_prop!(prop_u16, u16); + impl_prop!(prop_i16, i16); + impl_prop!(prop_u32, u32); + impl_prop!(prop_i32, i32); + impl_prop!(prop_u64, u64); + impl_prop!(prop_i64, i64); + impl_prop!(prop_f32, f32); + impl_prop!(prop_f64, f64); + + #[test] + fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_bool as fn(bool)); } + #[test] + fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u8 as fn(u8)); } + #[test] + fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i8 as fn(i8)); } + #[test] + fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u16 as fn(u16)); } + #[test] + fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i16 as fn(i16)); } + #[test] + fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u32 as fn(u32)); } + #[test] + fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i32 as fn(i32)); } + #[test] + fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u64 as fn(u64)); } + #[test] + fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i64 as fn(i64)); } + #[test] + fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f32 as fn(f32)); } + #[test] + fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f64 as fn(f64)); } +} + + +#[cfg(test)] +mod write_and_read_examples { + extern crate flatbuffers; + + use super::create_serialized_example_with_library_code; + use super::create_serialized_example_with_generated_code; + use 
super::serialized_example_is_accessible_and_correct; + + #[test] + fn generated_code_creates_correct_example() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + create_serialized_example_with_generated_code(b); + let buf = b.finished_data(); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + + #[test] + fn generated_code_creates_correct_example_repeatedly_with_reset() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + for _ in 0..100 { + create_serialized_example_with_generated_code(b); + { + let buf = b.finished_data(); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + b.reset(); + } + } + + #[test] + fn library_code_creates_correct_example() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + create_serialized_example_with_library_code(b); + let buf = b.finished_data(); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + + #[test] + fn library_code_creates_correct_example_repeatedly_with_reset() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + for _ in 0..100 { + create_serialized_example_with_library_code(b); + { + let buf = b.finished_data(); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + b.reset(); + } + } +} + +#[cfg(test)] +mod read_examples_from_other_language_ports { + extern crate flatbuffers; + + use super::load_file; + use super::serialized_example_is_accessible_and_correct; + + #[test] + fn gold_cpp_example_data_is_accessible_and_correct() { + let buf = load_file("../monsterdata_test.mon"); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + #[test] + fn java_wire_example_data_is_accessible_and_correct() { + let buf = load_file("../monsterdata_java_wire.mon"); + serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap(); + } + #[test] + fn java_wire_size_prefixed_example_data_is_accessible_and_correct() { + let buf = 
load_file("../monsterdata_java_wire_sp.mon"); + serialized_example_is_accessible_and_correct(&buf[..], true, true).unwrap(); + } +} + +#[cfg(test)] +mod generated_code_asserts { + extern crate flatbuffers; + + use super::my_game; + + #[test] + #[should_panic] + fn monster_builder_fails_when_name_is_missing() { + let b = &mut flatbuffers::FlatBufferBuilder::new(); + my_game::example::Monster::create(b, &my_game::example::MonsterArgs{..Default::default()}); + } +} + +#[cfg(test)] +mod generated_key_comparisons { + extern crate flatbuffers; + + use super::my_game; + + #[test] + fn struct_ability_key_compare_less_than() { + let a = my_game::example::Ability::new(1, 2); + let b = my_game::example::Ability::new(2, 1); + let c = my_game::example::Ability::new(3, 3); + + assert_eq!(a.key_compare_less_than(&a), false); + assert_eq!(b.key_compare_less_than(&b), false); + assert_eq!(c.key_compare_less_than(&c), false); + + assert_eq!(a.key_compare_less_than(&b), true); + assert_eq!(a.key_compare_less_than(&c), true); + + assert_eq!(b.key_compare_less_than(&a), false); + assert_eq!(b.key_compare_less_than(&c), true); + + assert_eq!(c.key_compare_less_than(&a), false); + assert_eq!(c.key_compare_less_than(&b), false); + } + + #[test] + fn struct_key_compare_with_value() { + let a = my_game::example::Ability::new(1, 2); + + assert_eq!(a.key_compare_with_value(0), ::std::cmp::Ordering::Greater); + assert_eq!(a.key_compare_with_value(1), ::std::cmp::Ordering::Equal); + assert_eq!(a.key_compare_with_value(2), ::std::cmp::Ordering::Less); + } + + #[test] + fn struct_key_compare_less_than() { + let a = my_game::example::Ability::new(1, 2); + let b = my_game::example::Ability::new(2, 1); + let c = my_game::example::Ability::new(3, 3); + + assert_eq!(a.key_compare_less_than(&a), false); + assert_eq!(b.key_compare_less_than(&b), false); + assert_eq!(c.key_compare_less_than(&c), false); + + assert_eq!(a.key_compare_less_than(&b), true); + assert_eq!(a.key_compare_less_than(&c), true); + 
+ assert_eq!(b.key_compare_less_than(&a), false); + assert_eq!(b.key_compare_less_than(&c), true); + + assert_eq!(c.key_compare_less_than(&a), false); + assert_eq!(c.key_compare_less_than(&b), false); + } + + #[test] + fn table_key_compare_with_value() { + // setup + let builder = &mut flatbuffers::FlatBufferBuilder::new(); + super::create_serialized_example_with_library_code(builder); + let buf = builder.finished_data(); + let a = my_game::example::get_root_as_monster(buf); + + // preconditions + assert_eq!(a.name(), Some("MyMonster")); + + assert_eq!(a.key_compare_with_value(None), ::std::cmp::Ordering::Greater); + + assert_eq!(a.key_compare_with_value(Some("AAA")), ::std::cmp::Ordering::Greater); + assert_eq!(a.key_compare_with_value(Some("MyMonster")), ::std::cmp::Ordering::Equal); + assert_eq!(a.key_compare_with_value(Some("ZZZ")), ::std::cmp::Ordering::Less); + } + + #[test] + fn table_key_compare_less_than() { + // setup + let builder = &mut flatbuffers::FlatBufferBuilder::new(); + super::create_serialized_example_with_library_code(builder); + let buf = builder.finished_data(); + let a = my_game::example::get_root_as_monster(buf); + let b = a.test_as_monster().unwrap(); + + // preconditions + assert_eq!(a.name(), Some("MyMonster")); + assert_eq!(b.name(), Some("Fred")); + + assert_eq!(a.key_compare_less_than(&a), false); + assert_eq!(a.key_compare_less_than(&b), false); + + assert_eq!(b.key_compare_less_than(&a), true); + assert_eq!(b.key_compare_less_than(&b), false); + } +} + +#[cfg(test)] +mod included_schema_generated_code { + extern crate flatbuffers; + + //extern crate rust_usage_test; + + //#[test] + //fn namespace_test_mod_is_importable() { + // use rust_usage_test::namespace_test; + //} + //#[test] + //fn namespace_test1_mod_is_importable() { + // use rust_usage_test::namespace_test::namespace_test1_generated; + //} + //#[test] + //fn namespace_test2_mod_is_importable() { + // use rust_usage_test::namespace_test::namespace_test2_generated; + //} +} 
+ +#[cfg(test)] +mod builder_asserts { + extern crate flatbuffers; + + #[test] + #[should_panic] + fn end_table_should_panic_when_not_in_table() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.end_table(flatbuffers::WIPOffset::new(0)); + } + + #[test] + #[should_panic] + fn create_string_should_panic_when_in_table() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_table(); + b.create_string("foo"); + } + + #[test] + #[should_panic] + fn create_byte_string_should_panic_when_in_table() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_table(); + b.create_byte_string(b"foo"); + } + + #[test] + #[should_panic] + fn push_struct_slot_should_panic_when_not_in_table() { + #[derive(Copy, Clone, Debug, PartialEq)] + #[repr(C, packed)] + struct foo { } + impl<'b> flatbuffers::Push for &'b foo { + type Output = foo; + fn push<'a>(&'a self, _dst: &'a mut [u8], _rest: &'a [u8]) { } + } + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push_slot_always(0, &foo{}); + } + + #[test] + #[should_panic] + fn finished_bytes_should_panic_when_table_is_not_finished() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_table(); + b.finished_data(); + } + + #[test] + #[should_panic] + fn required_panics_when_field_not_set() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let start = b.start_table(); + let o = b.end_table(start); + b.required(o, 4 /* byte offset to first field */, "test field"); + } +} + +#[cfg(test)] +mod follow_impls { + extern crate flatbuffers; + use flatbuffers::Follow; + use flatbuffers::field_index_to_field_offset as fi2fo; + + #[test] + fn offset_to_ref_u8() { + let vec: Vec = vec![255, 3]; + let fs: flatbuffers::FollowStart = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&vec[..], 1), 3); + } + + #[test] + fn offset_to_u8() { + let vec: Vec = vec![255, 3]; + let fs: flatbuffers::FollowStart = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&vec[..], 1), 3); + } + + 
#[test] + fn offset_to_ref_u16() { + let vec: Vec = vec![255, 255, 3, 4]; + let fs: flatbuffers::FollowStart = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&vec[..], 2), 1027); + } + + #[test] + fn offset_to_u16() { + let vec: Vec = vec![255, 255, 3, 4]; + let fs: flatbuffers::FollowStart = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&vec[..], 2), 1027); + } + + #[test] + fn offset_to_f32() { + let vec: Vec = vec![255, 255, 255, 255, /* start of value */ 208, 15, 73, 64]; + let fs: flatbuffers::FollowStart = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&vec[..], 4), 3.14159); + } + + #[test] + fn offset_to_string() { + let vec: Vec = vec![255,255,255,255, 3, 0, 0, 0, 'f' as u8, 'o' as u8, 'o' as u8, 0]; + let off: flatbuffers::FollowStart<&str> = flatbuffers::FollowStart::new(); + assert_eq!(off.self_follow(&vec[..], 4), "foo"); + } + + #[test] + fn offset_to_byte_vector() { + let vec: Vec = vec![255, 255, 255, 255, 4, 0, 0, 0, 1, 2, 3, 4]; + let off: flatbuffers::FollowStart<&[u8]> = flatbuffers::FollowStart::new(); + assert_eq!(off.self_follow(&vec[..], 4), &[1, 2, 3, 4][..]); + } + + #[test] + fn offset_to_byte_string() { + let vec: Vec = vec![255, 255, 255, 255, 3, 0, 0, 0, 1, 2, 3, 0]; + let off: flatbuffers::FollowStart<&[u8]> = flatbuffers::FollowStart::new(); + assert_eq!(off.self_follow(&vec[..], 4), &[1, 2, 3][..]); + } + + #[cfg(target_endian = "little")] + #[test] + fn offset_to_slice_of_u16() { + let vec: Vec = vec![255, 255, 255, 255, 2, 0, 0, 0, 1, 2, 3, 4]; + let off: flatbuffers::FollowStart<&[u16]> = flatbuffers::FollowStart::new(); + assert_eq!(off.self_follow(&vec[..], 4), &vec![513, 1027][..]); + } + + #[test] + fn offset_to_vector_of_u16() { + let vec: Vec = vec![255, 255, 255, 255, 2, 0, 0, 0, 1, 2, 3, 4]; + let off: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + assert_eq!(off.self_follow(&vec[..], 4).len(), 2); + assert_eq!(off.self_follow(&vec[..], 4).get(0), 513); + 
assert_eq!(off.self_follow(&vec[..], 4).get(1), 1027); + } + + #[test] + fn offset_to_struct() { + #[derive(Copy, Clone, Debug, PartialEq)] + #[repr(C, packed)] + struct FooStruct { + a: i8, + b: u8, + c: i16, + } + impl<'a> flatbuffers::Follow<'a> for &'a FooStruct { + type Inner = &'a FooStruct; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } + } + + let vec: Vec = vec![255, 255, 255, 255, 1, 2, 3, 4]; + let off: flatbuffers::FollowStart<&FooStruct> = flatbuffers::FollowStart::new(); + assert_eq!(*off.self_follow(&vec[..], 4), FooStruct{a: 1, b: 2, c: 1027}); + } + + #[test] + fn vector_of_offset_to_string_elements() { + let buf: Vec = vec![/* vec len */ 1, 0, 0, 0, /* offset to string */ 4, 0, 0, 0, /* str length */ 3, 0, 0, 0, 'f' as u8, 'o' as u8, 'o' as u8, 0]; + let s: flatbuffers::FollowStart>> = flatbuffers::FollowStart::new(); + assert_eq!(s.self_follow(&buf[..], 0).len(), 1); + assert_eq!(s.self_follow(&buf[..], 0).get(0), "foo"); + } + + #[test] + fn slice_of_struct_elements() { + #[derive(Copy, Clone, Debug, PartialEq)] + #[repr(C, packed)] + struct FooStruct { + a: i8, + b: u8, + c: i16, + } + impl flatbuffers::SafeSliceAccess for FooStruct {} + impl<'a> flatbuffers::Follow<'a> for FooStruct { + type Inner = &'a FooStruct; + #[inline(always)] + fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } + } + + let buf: Vec = vec![1, 0, 0, 0, /* struct data */ 1, 2, 3, 4]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&buf[..], 0).safe_slice(), &vec![FooStruct{a: 1, b: 2, c: 1027}][..]); + } + + #[test] + fn vector_of_struct_elements() { + #[derive(Copy, Clone, Debug, PartialEq)] + #[repr(C, packed)] + struct FooStruct { + a: i8, + b: u8, + c: i16, + } + impl<'a> flatbuffers::Follow<'a> for FooStruct { + type Inner = &'a FooStruct; + #[inline(always)] + fn follow(buf: &'a [u8], loc: 
usize) -> Self::Inner { + flatbuffers::follow_cast_ref::(buf, loc) + } + } + + let buf: Vec = vec![1, 0, 0, 0, /* struct data */ 1, 2, 3, 4]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&buf[..], 0).len(), 1); + assert_eq!(fs.self_follow(&buf[..], 0).get(0), &FooStruct{a: 1, b: 2, c: 1027}); + } + + #[test] + fn root_to_empty_table() { + let buf: Vec = vec![ + 12, 0, 0, 0, // offset to root table + // enter vtable + 4, 0, // vtable len + 0, 0, // inline size + 255, 255, 255, 255, // canary + // enter table + 8, 0, 0, 0, // vtable location + ]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + assert_eq!(fs.self_follow(&buf[..], 0), flatbuffers::Table::new(&buf[..], 12)); + } + + #[test] + fn table_get_slot_scalar_u8() { + let buf: Vec = vec![ + 14, 0, 0, 0, // offset to root table + // enter vtable + 6, 0, // vtable len + 2, 0, // inline size + 5, 0, // value loc + 255, 255, 255, 255, // canary + // enter table + 10, 0, 0, 0, // vtable location + 0, 99 // value (with padding) + ]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + let tab = fs.self_follow(&buf[..], 0); + assert_eq!(tab.get::(fi2fo(0), Some(123)), Some(99)); + } + + #[test] + fn table_get_slot_scalar_u8_default_via_vtable_len() { + let buf: Vec = vec![ + 12, 0, 0, 0, // offset to root table + // enter vtable + 4, 0, // vtable len + 2, 0, // inline size + 255, 255, 255, 255, // canary + // enter table + 8, 0, 0, 0, // vtable location + ]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + let tab = fs.self_follow(&buf[..], 0); + assert_eq!(tab.get::(fi2fo(0), Some(123)), Some(123)); + } + + #[test] + fn table_get_slot_scalar_u8_default_via_vtable_zero() { + let buf: Vec = vec![ + 14, 0, 0, 0, // offset to root table + // enter vtable + 6, 0, // vtable len + 2, 0, // inline size + 0, 0, // zero means use the default value + 255, 255, 255, 255, // canary + // enter table + 10, 0, 0, 
0, // vtable location + ]; + let fs: flatbuffers::FollowStart> = flatbuffers::FollowStart::new(); + let tab = fs.self_follow(&buf[..], 0); + assert_eq!(tab.get::(fi2fo(0), Some(123)), Some(123)); + } + + #[test] + fn table_get_slot_string_multiple_types() { + let buf: Vec = vec![ + 14, 0, 0, 0, // offset to root table + // enter vtable + 6, 0, // vtable len + 2, 0, // inline size + 4, 0, // value loc + 255, 255, 255, 255, // canary + // enter table + 10, 0, 0, 0, // vtable location + 8, 0, 0, 0, // offset to string + // leave table + 255, 255, 255, 255, // canary + // enter string + 3, 0, 0, 0, 109, 111, 111, 0 // string length and contents + ]; + let tab = >::follow(&buf[..], 0); + assert_eq!(tab.get::>(fi2fo(0), None), Some("moo")); + assert_eq!(tab.get::>(fi2fo(0), None), Some(&vec![109, 111, 111][..])); + let v = tab.get::>>(fi2fo(0), None).unwrap(); + assert_eq!(v.len(), 3); + assert_eq!(v.get(0), 109); + assert_eq!(v.get(1), 111); + assert_eq!(v.get(2), 111); + } + + #[test] + fn table_get_slot_string_multiple_types_default_via_vtable_len() { + let buf: Vec = vec![ + 12, 0, 0, 0, // offset to root table + // enter vtable + 4, 0, // vtable len + 4, 0, // table inline len + 255, 255, 255, 255, // canary + // enter table + 8, 0, 0, 0, // vtable location + ]; + let tab = >::follow(&buf[..], 0); + assert_eq!(tab.get::>(fi2fo(0), Some("abc")), Some("abc")); + assert_eq!(tab.get::>(fi2fo(0), Some(&vec![70, 71, 72][..])), Some(&vec![70, 71, 72][..])); + + let default_vec_buf: Vec = vec![3, 0, 0, 0, 70, 71, 72, 0]; + let default_vec = flatbuffers::Vector::new(&default_vec_buf[..], 0); + let v = tab.get::>>(fi2fo(0), Some(default_vec)).unwrap(); + assert_eq!(v.len(), 3); + assert_eq!(v.get(0), 70); + assert_eq!(v.get(1), 71); + assert_eq!(v.get(2), 72); + } + + #[test] + fn table_get_slot_string_multiple_types_default_via_vtable_zero() { + let buf: Vec = vec![ + 14, 0, 0, 0, // offset to root table + // enter vtable + 6, 0, // vtable len + 2, 0, // inline size + 0, 0, 
// value loc + 255, 255, 255, 255, // canary + // enter table + 10, 0, 0, 0, // vtable location + ]; + let tab = >::follow(&buf[..], 0); + assert_eq!(tab.get::>(fi2fo(0), Some("abc")), Some("abc")); + assert_eq!(tab.get::>(fi2fo(0), Some(&vec![70, 71, 72][..])), Some(&vec![70, 71, 72][..])); + + let default_vec_buf: Vec = vec![3, 0, 0, 0, 70, 71, 72, 0]; + let default_vec = flatbuffers::Vector::new(&default_vec_buf[..], 0); + let v = tab.get::>>(fi2fo(0), Some(default_vec)).unwrap(); + assert_eq!(v.len(), 3); + assert_eq!(v.get(0), 70); + assert_eq!(v.get(1), 71); + assert_eq!(v.get(2), 72); + } +} + +#[cfg(test)] +mod push_impls { + extern crate flatbuffers; + + use super::my_game; + + fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) { + let got = b.unfinished_data(); + assert_eq!(want, got); + } + + #[test] + fn push_u8() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push(123u8); + check(&b, &[123]); + } + + #[test] + fn push_u64() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push(0x12345678); + check(&b, &[0x78, 0x56, 0x34, 0x12]); + } + + #[test] + fn push_f64() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push(3.14159265359f64); + check(&b, &[234, 46, 68, 84, 251, 33, 9, 64]); + } + + #[test] + fn push_generated_struct() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push(my_game::example::Test::new(10, 20)); + check(&b, &[10, 0, 20, 0]); + } + + #[test] + fn push_string() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push("foo"); + check(&b, &[3, 0, 0, 0, 102, 111, 111, 0]); + } + + #[test] + fn push_byte_slice_with_alignment() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.push(&[1u8, 2, 3, 4, 5][..]); + check(&b, &[5, 0, 0, 0, 1, 2, 3, 4, 5, 0, 0, 0]); + } + + #[test] + fn push_u8_slice_with_alignment() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.create_vector(&[1u8, 2, 3, 4, 5, 6, 7, 8, 9][..]); + b.push(off); + check(&b, &[/* loc */ 4, 
0, 0, 0, /* len */ 9, 0, 0, 0, /* val */ 1, 2, 3, 4, 5, 6, 7, 8, 9, /* padding */ 0, 0, 0]); + } +} + +#[cfg(test)] +mod vtable_deduplication { + extern crate flatbuffers; + use flatbuffers::field_index_to_field_offset as fi2fo; + + fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) { + let got = b.unfinished_data(); + assert_eq!(want, got); + } + + #[test] + fn one_empty_table() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let start0 = b.start_table(); + b.end_table(start0); + check(&b, &[ + 4, 0, // vtable size in bytes + 4, 0, // object inline data in bytes + + 4, 0, 0, 0, // backwards offset to vtable + ]); + } + + #[test] + fn two_empty_tables_are_deduplicated() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let start0 = b.start_table(); + b.end_table(start0); + let start1 = b.start_table(); + b.end_table(start1); + check(&b, &[ + 252, 255, 255, 255, // forwards offset to vtable + + 4, 0, // vtable size in bytes + 4, 0, // object inline data in bytes + + 4, 0, 0, 0, // backwards offset to vtable + ]); + } + + #[test] + fn two_tables_with_two_conveniently_sized_inline_elements_are_deduplicated() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let start0 = b.start_table(); + b.push_slot::(fi2fo(0), 100, 0); + b.push_slot::(fi2fo(1), 101, 0); + b.end_table(start0); + let start1 = b.start_table(); + b.push_slot::(fi2fo(0), 200, 0); + b.push_slot::(fi2fo(1), 201, 0); + b.end_table(start1); + check(&b, &[ + 240, 255, 255, 255, // forwards offset to vtable + + 201, 0, 0, 0, // value #1 + 200, 0, 0, 0, 0, 0, 0, 0, // value #0 + + 8, 0, // vtable size in bytes + 16, 0, // object inline data in bytes + 8, 0, // offset in object for value #0 + 4, 0, // offset in object for value #1 + + 8, 0, 0, 0, // backwards offset to vtable + 101, 0, 0, 0, // value #1 + 100, 0, 0, 0, 0, 0, 0, 0 // value #0 + ]); + } + + #[test] + fn many_identical_tables_use_few_vtables() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + for _ in 
0..1000 { + let start = b.start_table(); + b.push_slot::(fi2fo(0), 100, 0); + b.push_slot::(fi2fo(1), 101, 0); + b.end_table(start); + } + assert!(b.num_written_vtables() <= 10); + } +} + +#[cfg(test)] +mod byte_layouts { + extern crate flatbuffers; + use flatbuffers::field_index_to_field_offset as fi2fo; + + fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) { + let got = b.unfinished_data(); + assert_eq!(want, got); + } + + //fn run(1); + check(&b, &[1, 0, 0, 0, 1, 0, 0, 0]); // padding + } + + #[test] + fn layout_03_2xbyte_vector() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(flatbuffers::SIZE_U8, 2); + check(&b, &[0, 0]); // align to 4bytes + b.push(1u8); + check(&b, &[1, 0, 0]); + b.push(2u8); + check(&b, &[2, 1, 0, 0]); + b.end_vector::<&u8>(2); + check(&b, &[2, 0, 0, 0, 2, 1, 0, 0]); // padding + } + + #[test] + fn layout_03b_11xbyte_vector_matches_builder_size() { + let mut b = flatbuffers::FlatBufferBuilder::new_with_capacity(12); + b.start_vector(flatbuffers::SIZE_U8, 8); + + let mut gold = vec![0u8; 0]; + check(&b, &gold[..]); + + for i in 1u8..=8 { + b.push(i); + gold.insert(0, i); + check(&b, &gold[..]); + } + b.end_vector::<&u8>(8); + let want = vec![8u8, 0, 0, 0, 8, 7, 6, 5, 4, 3, 2, 1]; + check(&b, &want[..]); + } + #[test] + fn layout_04_1xuint16_vector() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(flatbuffers::SIZE_U16, 1); + check(&b, &[0, 0]); // align to 4bytes + b.push(1u16); + check(&b, &[1, 0, 0, 0]); + b.end_vector::<&u16>(1); + check(&b, &[1, 0, 0, 0, 1, 0, 0, 0]); // padding + } + + #[test] + fn layout_05_2xuint16_vector() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let _off = b.start_vector(flatbuffers::SIZE_U16, 2); + check(&b, &[]); // align to 4bytes + b.push(0xABCDu16); + check(&b, &[0xCD, 0xAB]); + b.push(0xDCBAu16); + check(&b, &[0xBA, 0xDC, 0xCD, 0xAB]); + b.end_vector::<&u16>(2); + check(&b, &[2, 0, 0, 0, 0xBA, 0xDC, 0xCD, 0xAB]); + } + + #[test] 
+ fn layout_06_create_string() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off0 = b.create_string("foo"); + assert_eq!(8, off0.value()); + check(&b, b"\x03\x00\x00\x00foo\x00"); // 0-terminated, no pad + let off1 = b.create_string("moop"); + assert_eq!(20, off1.value()); + check(&b, b"\x04\x00\x00\x00moop\x00\x00\x00\x00\ + \x03\x00\x00\x00foo\x00"); // 0-terminated, 3-byte pad + } + + #[test] + fn layout_06b_create_string_unicode() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + // These characters are chinese from blog.golang.org/strings + // We use escape codes here so that editors without unicode support + // aren't bothered: + let uni_str = "\u{65e5}\u{672c}\u{8a9e}"; + let off0 = b.create_string(uni_str); + assert_eq!(16, off0.value()); + check(&b, &[9, 0, 0, 0, 230, 151, 165, 230, 156, 172, 232, 170, 158, 0, // null-terminated, 2-byte pad + 0, 0]); + } + + #[test] + fn layout_06c_create_byte_string() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off0 = b.create_byte_string(b"foo"); + assert_eq!(8, off0.value()); + check(&b, b"\x03\x00\x00\x00foo\x00"); // 0-terminated, no pad + let off1 = b.create_byte_string(b"moop"); + assert_eq!(20, off1.value()); + check(&b, b"\x04\x00\x00\x00moop\x00\x00\x00\x00\ + \x03\x00\x00\x00foo\x00"); // 0-terminated, 3-byte pad + } + + #[test] + fn layout_07_empty_vtable() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off0 = b.start_table(); + check(&b, &[]); + b.end_table(off0); + check(&b, &[4, 0, // vtable length + 4, 0, // length of table including vtable offset + 4, 0, 0, 0]); // offset for start of vtable + } + + #[test] + fn layout_08_vtable_with_one_true_bool() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + check(&b, &[]); + let off0 = b.start_table(); + assert_eq!(0, off0.value()); + check(&b, &[]); + b.push_slot(fi2fo(0), true, false); + check(&b, &[1]); + let off1 = b.end_table(off0); + assert_eq!(8, off1.value()); + check(&b, &[ + 6, 0, // vtable bytes 
+ 8, 0, // length of object including vtable offset + 7, 0, // start of bool value + 6, 0, 0, 0, // offset for start of vtable (int32) + 0, 0, 0, // padded to 4 bytes + 1, // bool value + ]); + } + + #[test] + fn layout_09_vtable_with_one_default_bool() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + check(&b, &[]); + let off = b.start_table(); + check(&b, &[]); + b.push_slot(fi2fo(0), false, false); + b.end_table(off); + check(&b, &[ + 4, 0, // vtable bytes + 4, 0, // end of object from here + // entry 1 is zero and not stored. + 4, 0, 0, 0, // offset for start of vtable (int32) + ]); + } + + #[test] + fn layout_10_vtable_with_one_int16() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + check(&b, &[]); + let off = b.start_table(); + b.push_slot(fi2fo(0), 0x789Ai16, 0); + b.end_table(off); + check(&b, &[ + 6, 0, // vtable bytes + 8, 0, // end of object from here + 6, 0, // offset to value + 6, 0, 0, 0, // offset for start of vtable (int32) + 0, 0, // padding to 4 bytes + 0x9A, 0x78, + ]); + } + + #[test] + fn layout_11_vtable_with_two_int16() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), 0x3456i16, 0); + b.push_slot(fi2fo(1), 0x789Ai16, 0); + b.end_table(off); + check(&b, &[ + 8, 0, // vtable bytes + 8, 0, // end of object from here + 6, 0, // offset to value 0 + 4, 0, // offset to value 1 + 8, 0, 0, 0, // offset for start of vtable (int32) + 0x9A, 0x78, // value 1 + 0x56, 0x34, // value 0 + ]); + } + + #[test] + fn layout_12_vtable_with_int16_and_bool() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), 0x3456i16, 0); + b.push_slot(fi2fo(1), true, false); + b.end_table(off); + check(&b, &[ + 8, 0, // vtable bytes + 8, 0, // end of object from here + 6, 0, // offset to value 0 + 5, 0, // offset to value 1 + 8, 0, 0, 0, // offset for start of vtable (int32) + 0, // padding + 1, // value 1 + 0x56, 0x34, // value 0 + ]); + } + + 
#[test] + fn layout_12b_vtable_with_empty_vector() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(flatbuffers::SIZE_U8, 0); + let vecend = b.end_vector::<&u8>(0); + let off = b.start_table(); + b.push_slot_always(fi2fo(0), vecend); + b.end_table(off); + check(&b, &[ + 6, 0, // vtable bytes + 8, 0, + 4, 0, // offset to vector offset + 6, 0, 0, 0, // offset for start of vtable (int32) + 4, 0, 0, 0, + 0, 0, 0, 0, // length of vector (not in struct) + ]); + } + + #[test] + fn layout_12c_vtable_with_empty_vector_of_byte_and_some_scalars() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(flatbuffers::SIZE_U8, 0); + let vecend = b.end_vector::<&u8>(0); + let off = b.start_table(); + b.push_slot::(fi2fo(0), 55i16, 0); + b.push_slot_always::>(fi2fo(1), vecend); + b.end_table(off); + check(&b, &[ + 8, 0, // vtable bytes + 12, 0, + 10, 0, // offset to value 0 + 4, 0, // offset to vector offset + 8, 0, 0, 0, // vtable loc + 8, 0, 0, 0, // value 1 + 0, 0, 55, 0, // value 0 + + 0, 0, 0, 0, // length of vector (not in struct) + ]); + } + #[test] + fn layout_13_vtable_with_1_int16_and_2_vector_of_i16() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(flatbuffers::SIZE_I16, 2); + b.push(0x1234i16); + b.push(0x5678i16); + let vecend = b.end_vector::<&i16>(2); + let off = b.start_table(); + b.push_slot_always(fi2fo(1), vecend); + b.push_slot(fi2fo(0), 55i16, 0); + b.end_table(off); + check(&b, &[ + 8, 0, // vtable bytes + 12, 0, // length of object + 6, 0, // start of value 0 from end of vtable + 8, 0, // start of value 1 from end of buffer + 8, 0, 0, 0, // offset for start of vtable (int32) + 0, 0, // padding + 55, 0, // value 0 + 4, 0, 0, 0, // vector position from here + 2, 0, 0, 0, // length of vector (uint32) + 0x78, 0x56, // vector value 1 + 0x34, 0x12, // vector value 0 + ]); + } + #[test] + fn layout_14_vtable_with_1_struct_of_int8_and_int16_and_int32() { + #[derive(Copy, Clone, Debug, Eq, PartialEq)] + 
#[repr(C, packed)] + struct foo { + a: i32, + _pad0: [u8; 2], + b: i16, + _pad1: [u8; 3], + c: i8, + _pad2: [u8; 4], + } + assert_eq!(::std::mem::size_of::<foo>(), 16); + impl<'b> flatbuffers::Push for &'b foo { + type Output = foo; + fn push<'a>(&'a self, dst: &'a mut [u8], _rest: &'a [u8]) { + let src = unsafe { + ::std::slice::from_raw_parts(*self as *const foo as *const u8, self.size()) + }; + dst.copy_from_slice(src); + } + fn size(&self) -> usize { + ::std::mem::size_of::<foo>() + } + fn alignment(&self) -> usize { + self.size() + } + } + + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + let x = foo{a: 0x12345678i32.to_le(), _pad0: [0,0], b: 0x1234i16.to_le(), _pad1: [0, 0, 0], c: 0x12i8.to_le(), _pad2: [0, 0, 0, 0]}; + b.push_slot_always(fi2fo(0), &x); + b.end_table(off); + check(&b, &[ + 6, 0, // vtable bytes + 20, 0, // end of object from here + 4, 0, // start of struct from here + 6, 0, 0, 0, // offset for start of vtable (int32) + + 0x78, 0x56, 0x34, 0x12, // value a + 0, 0, // padding + 0x34, 0x12, // value b + 0, 0, 0, // padding + 0x12, // value c + 0, 0, 0, 0, // padding + ]); + } + // test 15: vtable with 1 vector of 2 struct of 2 int8 + #[test] + fn layout_15_vtable_with_1_vector_of_2_struct_2_int8() { + #[allow(dead_code)] + struct FooStruct { + a: i8, + b: i8, + } + let mut b = flatbuffers::FlatBufferBuilder::new(); + b.start_vector(::std::mem::size_of::<FooStruct>(), 2); + b.push(33i8); + b.push(44i8); + b.push(55i8); + b.push(66i8); + let vecend = b.end_vector::<&FooStruct>(2); + let off = b.start_table(); + b.push_slot_always(fi2fo(0), vecend); + b.end_table(off); + check(&b, &[ + 6, 0, // vtable bytes + 8, 0, + 4, 0, // offset of vector offset + 6, 0, 0, 0, // offset for start of vtable (int32) + 4, 0, 0, 0, // vector start offset + + 2, 0, 0, 0, // vector length + 66, // vector value 1,1 + 55, // vector value 1,0 + 44, // vector value 0,1 + 33, // vector value 0,0 + ]); + } + + #[test] + fn layout_16_table_with_some_elements() { 
+ let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), 33i8, 0); + b.push_slot(fi2fo(1), 66i16, 0); + let off2 = b.end_table(off); + b.finish_minimal(off2); + + check(&b, &[ + 12, 0, 0, 0, // root of table: points to vtable offset + + 8, 0, // vtable bytes + 8, 0, // end of object from here + 7, 0, // start of value 0 + 4, 0, // start of value 1 + + 8, 0, 0, 0, // offset for start of vtable (int32) + + 66, 0, // value 1 + 0, // padding + 33, // value 0 + ]); + } + + #[test] + fn layout_17_one_unfinished_table_and_one_finished_table() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + { + let off = b.start_table(); + b.push_slot(fi2fo(0), 33i8, 0); + b.push_slot(fi2fo(1), 44i8, 0); + b.end_table(off); + } + + { + let off = b.start_table(); + b.push_slot(fi2fo(0), 55i8, 0); + b.push_slot(fi2fo(1), 66i8, 0); + b.push_slot(fi2fo(2), 77i8, 0); + let off2 = b.end_table(off); + b.finish_minimal(off2); + } + + check(&b, &[ + 16, 0, 0, 0, // root of table: points to object + 0, 0, // padding + + 10, 0, // vtable bytes + 8, 0, // size of object + 7, 0, // start of value 0 + 6, 0, // start of value 1 + 5, 0, // start of value 2 + 10, 0, 0, 0, // offset for start of vtable (int32) + 0, // padding + 77, // value 2 + 66, // value 1 + 55, // value 0 + + //12, 0, 0, 0, // root of table: points to object + + 8, 0, // vtable bytes + 8, 0, // size of object + 7, 0, // start of value 0 + 6, 0, // start of value 1 + 8, 0, 0, 0, // offset for start of vtable (int32) + 0, 0, // padding + 44, // value 1 + 33, // value 0 + ]); + } + + #[test] + fn layout_18_a_bunch_of_bools() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), true, false); + b.push_slot(fi2fo(1), true, false); + b.push_slot(fi2fo(2), true, false); + b.push_slot(fi2fo(3), true, false); + b.push_slot(fi2fo(4), true, false); + b.push_slot(fi2fo(5), true, false); + b.push_slot(fi2fo(6), true, false); + 
b.push_slot(fi2fo(7), true, false); + let off2 = b.end_table(off); + b.finish_minimal(off2); + + check(&b, &[ + 24, 0, 0, 0, // root of table: points to vtable offset + + 20, 0, // vtable bytes + 12, 0, // size of object + 11, 0, // start of value 0 + 10, 0, // start of value 1 + 9, 0, // start of value 2 + 8, 0, // start of value 3 + 7, 0, // start of value 4 + 6, 0, // start of value 5 + 5, 0, // start of value 6 + 4, 0, // start of value 7 + 20, 0, 0, 0, // vtable offset + + 1, // value 7 + 1, // value 6 + 1, // value 5 + 1, // value 4 + 1, // value 3 + 1, // value 2 + 1, // value 1 + 1, // value 0 + ]); + } + + #[test] + fn layout_19_three_bools() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), true, false); + b.push_slot(fi2fo(1), true, false); + b.push_slot(fi2fo(2), true, false); + let off2 = b.end_table(off); + b.finish_minimal(off2); + + check(&b, &[ + 16, 0, 0, 0, // root of table: points to vtable offset + + 0, 0, // padding + + 10, 0, // vtable bytes + 8, 0, // size of object + 7, 0, // start of value 0 + 6, 0, // start of value 1 + 5, 0, // start of value 2 + 10, 0, 0, 0, // vtable offset from here + + 0, // padding + 1, // value 2 + 1, // value 1 + 1, // value 0 + ]); + } + + #[test] + fn layout_20_some_floats() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot(fi2fo(0), 1.0f32, 0.0); + b.end_table(off); + + check(&b, &[ + 6, 0, // vtable bytes + 8, 0, // size of object + 4, 0, // start of value 0 + 6, 0, 0, 0, // vtable offset + + 0, 0, 128, 63, // value 0 + ]); + } + + #[test] + fn layout_21_vtable_defaults() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot::<i8>(fi2fo(0), 1, 1); + b.push_slot::<i8>(fi2fo(1), 3, 2); + b.push_slot::<i8>(fi2fo(2), 3, 3); + b.end_table(off); + check(&b, &[ + 8, 0, // vtable size in bytes + 8, 0, // object inline data in bytes + 0, 0, // entry 1/3: 0 => default + 7, 0, // entry 
2/3: 7 => table start + 7 bytes + // entry 3/3: not present => default + 8, 0, 0, 0, + 0, 0, 0, + 3, + ]); + } + + #[test] + fn layout_22_root() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + // skipped: b.push_slot_scalar::<i16>(0, 1, 1); + b.push_slot::<i16>(fi2fo(1), 3, 2); + b.push_slot::<i16>(fi2fo(2), 3, 3); + let table_end = b.end_table(off); + b.finish_minimal(table_end); + check(&b, &[ + 12, 0, 0, 0, // root + + 8, 0, // vtable size in bytes + 8, 0, // object inline data in bytes + 0, 0, // entry 1/3: 0 => default + 6, 0, // entry 2/3: 6 => table start + 6 bytes + // entry 3/3: not present => default + 8, 0, 0, 0, // size of table data in bytes + 0, 0, // padding + 3, 0, // value 2/3 + ]); + } + #[test] + fn layout_23_varied_slots_and_root() { + let mut b = flatbuffers::FlatBufferBuilder::new(); + let off = b.start_table(); + b.push_slot::<i16>(fi2fo(0), 1, 0); + b.push_slot::<u8>(fi2fo(1), 2, 0); + b.push_slot::<f32>(fi2fo(2), 3.0, 0.0); + let table_end = b.end_table(off); + b.finish_minimal(table_end); + check(&b, &[ + 16, 0, 0, 0, // root + 0, 0, // padding + 10, 0, // vtable bytes + 12, 0, // object inline data size + 10, 0, // offset to value #1 (i16) + 9, 0, // offset to value #2 (u8) + 4, 0, // offset to value #3 (f32) + 10, 0, // offset to vtable + 0, 0, // padding + 0, 0, 64, 64, // value #3 => 3.0 (float32) + 0, 2, // value #1 => 2 (u8) + 1, 0, // value #0 => 1 (int16) + ]); + } +} + +fn load_file(filename: &str) -> Vec<u8> { + use std::io::Read; + let mut f = std::fs::File::open(filename).expect("file does not exist"); + let mut buf = Vec::new(); + f.read_to_end(&mut buf).expect("file reading failed"); + buf +}