use range::Range;
use std::rc::Rc;
use std::cell::RefCell;

use {
    ret_err,
    update,
    DebugId,
    MetaData,
    ParseResult,
    Rule,
    Tokenizer,
    TokenizerState,
};
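
/// A named parse rule. Other rules can refer to a `Node` through `NodeRef`,
/// which allows recursive grammars (see the `node_ref` test below).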
pub struct Node {
    /// The name of the node.
    pub name: Rc<String>,
    /// The rule this node parses.
    pub rule: Rule,
    /// A debug id for tracing errors back to this node.
    pub debug_id: DebugId,
}

impl Node {
    /// Parses the node: emits a `StartNode` token, runs the node's rule,
    /// then emits a matching `EndNode` token covering the consumed range.
    pub fn parse(
        &self,
        tokenizer: &mut Tokenizer,
        state: &TokenizerState,
        mut chars: &[char],
        start_offset: usize
    ) -> ParseResult<TokenizerState> {
        let mut offset = start_offset;
        // Mark where the node starts with an empty range at the current offset.
        let mut state = tokenizer.data(
            MetaData::StartNode(self.name.clone()),
            state,
            Range::empty(offset)
        );
        let mut opt_error = None;
        state = match self.rule.parse(tokenizer, &state, chars, offset) {
            Err(err) => { return Err(ret_err(err, opt_error)); }
            Ok((range, state, err)) => {
                // Advance past the parsed range and remember any error
                // reported alongside the successful parse.
                update(range, err, &mut chars, &mut offset, &mut opt_error);
                state
            }
        };
        // The node spans everything from the start offset up to the new offset.
        let range = Range::new(start_offset, offset - start_offset);
        Ok((
            range,
            tokenizer.data(MetaData::EndNode(self.name.clone()), &state, range),
            opt_error
        ))
    }
}
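
/// A reference to a `Node`, either unresolved (looked up by name) or
/// resolved to the node itself.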
#[derive(Clone)]
pub enum NodeRef {
    /// Refers to a node by name; resolved later via `update_refs`.
    Name(Rc<String>, DebugId),
    /// A direct reference to the node, together with its visit state.
    Ref(Rc<RefCell<Node>>, NodeVisit),
}
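
/// Marks whether a node reference has been visited, preventing repeated
/// visits when walking recursive references.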
#[derive(Clone)]
pub enum NodeVisit {
    /// The node has not been visited.
    Unvisited,
    /// The node has been visited.
    Visited
}

#[cfg(test)]
mod tests {
    use super::super::*;
    use range::Range;
    use std::rc::Rc;
    use std::cell::RefCell;

    #[test]
    fn node_ref() {
        // Grammar: a `foo` node is a number, optionally followed by
        // whitespace and a recursive `foo`, so "1 2 3" nests three deep.
        let foo: Rc<String> = Rc::new("foo".into());
        let num: Rc<String> = Rc::new("num".into());
        let node = Rc::new(RefCell::new(Node {
            debug_id: 0,
            name: foo.clone(),
            rule: Rule::Sequence(Sequence {
                debug_id: 1,
                args: vec![
                    Rule::Number(Number {
                        debug_id: 2,
                        property: Some(num.clone())
                    }),
                    Rule::Optional(Box::new(Optional {
                        debug_id: 3,
                        rule: Rule::Sequence(Sequence {
                            debug_id: 4,
                            args: vec![
                                Rule::Whitespace(Whitespace {
                                    debug_id: 3,
                                    optional: false
                                }),
                                // Recursive reference to `foo` by name.
                                Rule::Node(NodeRef::Name(foo.clone(), 3)),
                            ]
                        }),
                    })),
                ],
            }),
        }));
        // Resolve the by-name reference so `foo` points back to itself.
        let refs = vec![(foo.clone(), node.clone())];
        node.borrow_mut().rule.update_refs(&refs);

        let text = "1 2 3";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let res = node.borrow().parse(&mut tokenizer, &s, &chars, 0);
        // The whole input parses; the trailing error records that the
        // optional part stopped at the end of input (no whitespace after "3").
        assert_eq!(res, Ok((Range::new(0, 5), TokenizerState(9),
            Some((Range::new(5, 0), ParseError::ExpectedWhitespace(3))))));
        // Three nested `foo` nodes, each emitting one number, then the
        // three matching end tokens.
        assert_eq!(tokenizer.tokens.len(), 9);
        assert_eq!(&tokenizer.tokens[0].0, &MetaData::StartNode(foo.clone()));
        assert_eq!(&tokenizer.tokens[1].0, &MetaData::F64(num.clone(), 1.0));
        assert_eq!(&tokenizer.tokens[2].0, &MetaData::StartNode(foo.clone()));
        assert_eq!(&tokenizer.tokens[3].0, &MetaData::F64(num.clone(), 2.0));
        assert_eq!(&tokenizer.tokens[4].0, &MetaData::StartNode(foo.clone()));
        assert_eq!(&tokenizer.tokens[5].0, &MetaData::F64(num.clone(), 3.0));
        assert_eq!(&tokenizer.tokens[6].0, &MetaData::EndNode(foo.clone()));
        assert_eq!(&tokenizer.tokens[7].0, &MetaData::EndNode(foo.clone()));
        assert_eq!(&tokenizer.tokens[8].0, &MetaData::EndNode(foo.clone()));
    }
}