Resolves binast#176 - Getting rid of TokenWriter::{list, tagged_tuple, Tree} and the companion code in ast.rs

This changeset simplifies trait `io::TokenWriter`, getting rid of some baggage that makes work on entropy
needlessly complicated. For backwards compatibility reasons, we keep an `io::deprecated::TokenWriterWithTree`,
which is essentially a copy of the old `io::TokenWriter`, along with an `io::deprecated::TokenWriterTreeAdapter`,
which converts from the old form to the new one (see the usage sketch below).
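
As a rough illustration, here is a minimal sketch of the adapter in use, based on the hunks in crates/binjs_es6/src/io.rs below; the `binjs_es6::io::Serializer` import path is an assumption, while the calls themselves appear in the diff:

```rust
// Minimal sketch (assumed import paths): wrap an old-style writer in the adapter
// so it can be used wherever the new `TokenWriter` trait is expected.
use binjs_es6::io::Serializer;        // assumption: Serializer is public at this path
use binjs_io::TokenWriterTreeAdapter;

fn simple_serializer() -> Serializer<TokenWriterTreeAdapter<binjs_io::simple::TreeTokenWriter>> {
    // An old-style writer, which now implements `io::deprecated::TokenWriterWithTree`...
    let writer = binjs_io::simple::TreeTokenWriter::new();
    // ...is wrapped by the adapter before being handed to the serializer.
    Serializer::new(TokenWriterTreeAdapter::new(writer))
}
```

This is the pattern `Encoder::encode` now follows for the `Simple`, `Multipart` and `XML` formats.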

This change should make future work on entropy a bit simpler and improve performance; it has already helped locate a few bugs in the generic implementation of encoding.

This change will need to be followed up by corresponding work on `TokenReader`, once we have a clear idea of exactly what needs to be done.

1. `TokenWriter` doesn't define a type `Tree` anymore. Rather, on success, all methods return `()`. Consequently,
	all implementations of `Serializer` that returned `TokenWriter::Tree` on success now return `()`.
	This includes both the hardcoded implementations and the implementations extracted from the webidl.

2. `TokenWriter` doesn't take a `Vec` of children for lists and tagged tuples anymore. Consequently, we adapt the
	code generator so that it no longer builds these `Vec`s of children (see the consolidated sketch after this list).

3. Similarly, the generic implementation of encoding (which may not be useful anymore, but is still here for the moment)
	needs to be adapted to the new `TokenWriter`. This requires a few contortions in binjs_generic/src/io/encode.rs.

4. Both `Encoder::encode` methods, which give access to all token writers, now wrap older implementations of
	`TokenWriterWithTree` in a `TokenWriterTreeAdapter`.

5. We adapt the entropy dictionary builder to this new `TokenWriter`, which essentially means removing now-useless code.

6. Other implementations of `TokenWriter` move to `TokenWriterWithTree`, which essentially only changes the name of the interface they implement.

7. We add a `from_rc_string` method to each type derived from `SharedString`.

8. We adapt the tests to the API changes.
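
To make the new shape of the generated serializers easier to read than the interleaved diff, here is a consolidated sketch assembled from the hunks in crates/binjs_generate_library/src/lib.rs below. `SomeInterface`, `someField`/`some_field` and `ListOfSomeInterface` are hypothetical stand-ins for the template placeholders, so the block illustrates the calling convention rather than compiling on its own:

```rust
// Generated code for a tagged tuple under the new `TokenWriter`: field names are
// passed to enter/exit, no `Vec` of children is built, and success is just `()`.
impl<'a, W> Serialization<W, &'a SomeInterface> for Serializer<W> where W: TokenWriter {
    fn serialize(&mut self, value: &'a SomeInterface, path: &mut IOPath) -> Result<(), TokenWriterError> {
        let interface_name = InterfaceName::from_str("SomeInterface"); // String is shared
        let field_names = [&FieldName::from_str("someField")];
        self.writer.enter_tagged_tuple_at(&interface_name, &field_names, path)?;
        path.enter_interface(interface_name.clone());

        // One block per field: enter the field on the path, serialize the child, exit.
        let field_name = FieldName::from_str("someField");
        let path_item = (0, field_name.clone()); // String is shared
        path.enter_field(path_item.clone());
        let result = (self as &mut Serialization<W, &'a _>).serialize(&value.some_field, path);
        path.exit_field(path_item);
        result?;

        path.exit_interface(interface_name.clone());
        self.writer.exit_tagged_tuple_at(&interface_name, &field_names, path)?;
        Ok(())
    }
}

// Generated code for a list: children are written one by one between
// `enter_list_at` and `exit_list_at`, without collecting them first.
impl<'a, W> Serialization<W, &'a ListOfSomeInterface> for Serializer<W> where W: TokenWriter {
    fn serialize(&mut self, value: &'a ListOfSomeInterface, path: &mut IOPath) -> Result<(), TokenWriterError> {
        self.writer.enter_list_at(value.len(), path)?;
        for child in value {
            // All the children of the list share the same path.
            self.serialize(child, path)?;
        }
        self.writer.exit_list_at(path)?;
        Ok(())
    }
}
```
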
Yoric committed Oct 11, 2018
1 parent dbbd2fb commit 4c58661
Showing 16 changed files with 444 additions and 223 deletions.
54 changes: 27 additions & 27 deletions crates/binjs_es6/src/io.rs
@@ -1,4 +1,4 @@
use binjs_io::{ self, Deserialization, Guard, TokenReader, TokenReaderError, TokenWriterError };
use binjs_io::{ self, Deserialization, Guard, TokenReader, TokenReaderError, TokenWriterTreeAdapter, TokenWriterError };
pub use binjs_io::{ Serialization, TokenSerializer, TokenWriter };
use binjs_shared::{ FieldName, IdentifierName, InterfaceName, Offset, PropertyKey, SharedString, self };

@@ -131,7 +131,7 @@ impl<W> Serializer<W> where W: TokenWriter {
writer
}
}
pub fn serialize<T>(&mut self, value: T, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> where Self: Serialization<W, T> {
pub fn serialize<T>(&mut self, value: T, path: &mut IOPath) -> Result<(), TokenWriterError> where Self: Serialization<W, T> {
(self as &mut Serialization<W, T>).serialize(value, path)
}
}
@@ -143,99 +143,99 @@ impl<W> TokenSerializer<W> for Serializer<W> where W: TokenWriter {
}

impl<W> Serialization<W, Option<bool>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: Option<bool>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: Option<bool>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.bool_at(value, path)
}
}
impl<W> Serialization<W, bool> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: bool, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: bool, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.bool_at(Some(value), path)
}
}
impl<W> Serialization<W, Option<f64>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: Option<f64>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: Option<f64>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.float_at(value, path)
}
}
impl<W> Serialization<W, f64> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: f64, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: f64, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.float_at(Some(value), path)
}
}
impl<W> Serialization<W, u32> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: u32, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: u32, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.unsigned_long_at(value, path)
}
}
impl<'a, W> Serialization<W, &'a Option<bool>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a Option<bool>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a Option<bool>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.bool_at(value.clone(), path)
}
}
impl<'a, W> Serialization<W, &'a bool> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a bool, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a bool, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.bool_at(Some(*value), path)
}
}
impl<'a, W> Serialization<W, &'a Option<f64>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a Option<f64>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a Option<f64>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.float_at(value.clone(), path)
}
}
impl<'a, W> Serialization<W, &'a f64> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a f64, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a f64, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.float_at(Some(*value), path)
}
}
impl<'a, W> Serialization<W, &'a u32> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a u32, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a u32, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.unsigned_long_at(value.clone(), path)
}
}
/*
impl<'a, W> Serialization<W, Option<&'a str>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: Option<&'a str>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: Option<&'a str>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.string_at(value, path)
}
}
impl<'a, W> Serialization<W, &'a str> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a str, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a str, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.string_at(Some(value), path)
}
}
*/
impl<'a, W> Serialization<W, &'a SharedString> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a SharedString, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a SharedString, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.string_at(Some(value), path)
}
}
impl<'a, W> Serialization<W, &'a Option<SharedString>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a Option<SharedString>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a Option<SharedString>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.string_at(value.as_ref(), path)
}
}
impl<'a, W> Serialization<W, &'a IdentifierName> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a IdentifierName, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a IdentifierName, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.identifier_name_at(Some(&value), path)
}
}
impl<'a, W> Serialization<W, &'a PropertyKey> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a PropertyKey, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a PropertyKey, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.property_key_at(Some(&value), path)
}
}
impl<'a, W> Serialization<W, &'a Option<IdentifierName>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a Option<IdentifierName>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a Option<IdentifierName>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.identifier_name_at(value.as_ref(), path)
}
}
impl<'a, W> Serialization<W, &'a Option<PropertyKey>> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, value: &'a Option<PropertyKey>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, value: &'a Option<PropertyKey>, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.property_key_at(value.as_ref(), path)
}
}
impl<'a, W> Serialization<W, &'a Offset> for Serializer<W> where W: TokenWriter {
fn serialize(&mut self, _: &'a Offset, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {
fn serialize(&mut self, _: &'a Offset, path: &mut IOPath) -> Result<(), TokenWriterError> {
self.writer.offset_at(path)
}
}
@@ -275,9 +275,9 @@ impl Encoder {
}
pub fn encode<'a, AST>(&self, format: &'a mut binjs_io::Format, ast: &'a AST) -> Result<Box<AsRef<[u8]>>, TokenWriterError>
where
Serializer<binjs_io::simple::TreeTokenWriter> : Serialization<binjs_io::simple::TreeTokenWriter, &'a AST>,
Serializer<binjs_io::multipart::TreeTokenWriter> : Serialization<binjs_io::multipart::TreeTokenWriter, &'a AST>,
Serializer<binjs_io::xml::Encoder> : Serialization<binjs_io::xml::Encoder, &'a AST>,
Serializer<TokenWriterTreeAdapter<binjs_io::simple::TreeTokenWriter>> : Serialization<TokenWriterTreeAdapter<binjs_io::simple::TreeTokenWriter>, &'a AST>,
Serializer<TokenWriterTreeAdapter<binjs_io::multipart::TreeTokenWriter>> : Serialization<TokenWriterTreeAdapter<binjs_io::multipart::TreeTokenWriter>, &'a AST>,
Serializer<TokenWriterTreeAdapter<binjs_io::xml::Encoder>> : Serialization<TokenWriterTreeAdapter<binjs_io::xml::Encoder>, &'a AST>,
// Serializer<binjs_io::entropy::write::TreeTokenWriter<'a>> : Serialization<binjs_io::entropy::write::TreeTokenWriter<'a>, &'a AST>
/*
#[cfg(multistream)]
@@ -290,14 +290,14 @@ impl Encoder {
match *format {
binjs_io::Format::Simple { .. } => {
let writer = binjs_io::simple::TreeTokenWriter::new();
let mut serializer = Serializer::new(writer);
let mut serializer = Serializer::new(TokenWriterTreeAdapter::new(writer));
serializer.serialize(ast, &mut path)?;
let (data, _) = serializer.done()?;
Ok(Box::new(data))
}
binjs_io::Format::Multipart { ref mut targets, .. } => {
let writer = binjs_io::multipart::TreeTokenWriter::new(targets.clone());
let mut serializer = Serializer::new(writer);
let mut serializer = Serializer::new(TokenWriterTreeAdapter::new(writer));
serializer.serialize(ast, &mut path)?;
let (data, _) = serializer.done()?;
Ok(Box::new(data))
@@ -323,7 +323,7 @@ impl Encoder {
}
binjs_io::Format::XML => {
let writer = binjs_io::xml::Encoder::new();
let mut serializer = Serializer::new(writer);
let mut serializer = Serializer::new(TokenWriterTreeAdapter::new(writer));
serializer.serialize(ast, &mut path)?;
let (data, _) = serializer.done()?;
Ok(Box::new(data))
54 changes: 26 additions & 28 deletions crates/binjs_generate_library/src/lib.rs
@@ -275,7 +275,7 @@ impl FromJSON for {name} {{

let to_writer = format!("
impl<'a, W> Serialization<W, &'a {name}> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing string enum {name}\");
let str = match *value {{
{variants}
@@ -574,22 +574,21 @@ impl ToJSON for {name} {{

let to_writer = format!("
impl<'a, W> Serialization<W, &'a Option<{name}>> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, value: &'a Option<{name}>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, value: &'a Option<{name}>, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing optional sum {name}\");
match *value {{
None => {{
let interface_name = InterfaceName::from_str(\"{null}\");
self.writer.enter_tagged_tuple_at(&interface_name, 0, path)?;
let result = self.writer.tagged_tuple_at(&interface_name, &[], path)?;
self.writer.exit_tagged_tuple_at(&interface_name, path)?;
Ok(result)
self.writer.enter_tagged_tuple_at(&interface_name, &[], path)?;
self.writer.exit_tagged_tuple_at(&interface_name, &[], path)?;
Ok(())
}}
Some(ref sum) => (self as &mut Serialization<W, &'a {name}>).serialize(sum, path)
}}
}}
}}
impl<'a, W> Serialization<W, &'a {name}> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing sum {name}\");
match *value {{
{variants}
@@ -841,17 +840,15 @@ impl<'a> Walker<'a> for ViewMut{name}<'a> {{
impl<'a, W> Serialization<W, &'a {name}> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, value: &'a {name}, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing list {name}\");
self.writer.enter_list_at(value.len(), path)?;
let mut children = Vec::with_capacity(value.len());
for child in value {{
// All the children of the list share the same path.
children.push(self.serialize(child, path)?);
self.serialize(child, path)?;
}}
let result = self.writer.list_at(children, path)?;
self.writer.exit_list_at(path)?;
Ok(result)
Ok(())
}}
}}
",
@@ -1059,43 +1056,44 @@ impl<R> Deserialization<R, Option<{name}>> for Deserializer<R> where R: TokenRea
.len();
let to_writer = format!("
impl<'a, W> Serialization<W, &'a Option<{name}>> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, value: &'a Option<{name}>, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, value: &'a Option<{name}>, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing optional tagged tuple {name}\");
match *value {{
None => {{
let interface_name = InterfaceName::from_str(\"{null}\");
self.writer.enter_tagged_tuple_at(&interface_name, 0, path)?;
let result = self.writer.tagged_tuple_at(&interface_name, &[], path)?;
self.writer.exit_tagged_tuple_at(&interface_name, path)?;
Ok(result)
self.writer.enter_tagged_tuple_at(&interface_name, &[], path)?;
self.writer.exit_tagged_tuple_at(&interface_name, &[], path)?;
Ok(())
}}
Some(ref sum) => (self as &mut Serialization<W, &'a {name}>).serialize(sum, path)
}}
}}
}}
impl<'a, W> Serialization<W, &'a {name}> for Serializer<W> where W: TokenWriter {{
fn serialize(&mut self, {value}: &'a {name}, path: &mut IOPath) -> Result<W::Tree, TokenWriterError> {{
fn serialize(&mut self, {value}: &'a {name}, path: &mut IOPath) -> Result<(), TokenWriterError> {{
debug!(target: \"serialize_es6\", \"Serializing tagged tuple {name}\");
let interface_name = InterfaceName::from_str(\"{name}\"); // String is shared
let field_names = [{field_names}];
self.writer.enter_tagged_tuple_at(&interface_name, {len}, path)?;
self.writer.enter_tagged_tuple_at(&interface_name, &field_names, path)?;
path.enter_interface(interface_name.clone());
let {mut} children = Vec::with_capacity({len});
{fields}
let result = self.writer.{tagged_tuple}(&interface_name, &children, path);
path.exit_interface(interface_name.clone());
self.writer.exit_tagged_tuple_at(&interface_name, path)?;
self.writer.exit_tagged_tuple_at(&interface_name, &field_names, path)?;
result
Ok(())
}}
}}
",
mut = if len > 0 { "mut" } else { "" },
value = if len > 0 { "value" } else { "_" },
null = null_name,
name = name,
len = len,
tagged_tuple = if interface.is_scope() { "tagged_scoped_tuple_at" } else { "tagged_tuple_at" },
field_names = interface.contents()
.fields()
.iter()
.map(|field| format!("&FieldName::from_str(\"{field_name}\")",
field_name = field.name().to_str()))
.format(", "),
fields = interface.contents()
.fields()
.iter()
Expand All @@ -1105,9 +1103,9 @@ impl<'a, W> Serialization<W, &'a {name}> for Serializer<W> where W: TokenWriter
let field_name = FieldName::from_str(\"{field_name}\");
let path_item = ({index}, field_name.clone()); // String is shared
path.enter_field(path_item.clone());
let child = (self as &mut Serialization<W, &'a _>).serialize(&value.{rust_field_name}, path);
let result = (self as &mut Serialization<W, &'a _>).serialize(&value.{rust_field_name}, path);
path.exit_field(path_item);
children.push((field_name, child?));",
result?;",
index = index,
field_name = field.name().to_str(),
rust_field_name = field.name().to_rust_identifier_case()))