//! Based on a set of rules, validate a token stream and collect the
//! tokens by type.
//!
//! See the "rules" module for definitions of keywords types and
//! per-keyword rules.
//! The key types in this module are SectionRules, which explains how to
//! validate and partition a stream of Item, and Section, which contains
//! a validated set of Item, ready to be interpreted.
//! # Example
//! (This is an internal API, so see the routerdesc.rs source for an
//! example of use.)
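//!
//! As a rough sketch (using the `Fruit` keyword type from the test module
//! at the bottom of this file; real callers use their own Keyword types),
//! a rule set is built, fed a token stream, and then queried:
//!
//! ```ignore
//! // `input` is any &str holding the document text.
//! let mut rules = SectionRules::new();
//! rules.add(Fruit::ORANGE.rule().args(1..));
//! let mut reader: NetDocReader<'_, Fruit> = NetDocReader::new(input);
//! let section = rules.parse(&mut reader.iter())?;
//! let orange = section.get(Fruit::ORANGE);
//! ```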
use crate::parse::keyword::Keyword;
use crate::parse::rules::*;
use crate::parse::tokenize::*;
use crate::{ParseErrorKind as EK, Result};
/// Describe the rules for one section of a document.
///
/// The rules are represented as a mapping from token index to
/// rules::TokenFmt.
#[derive(Clone)]
pub(crate) struct SectionRules<T: Keyword> {
/// A set of rules for decoding a series of tokens into a Section
/// object. Each element of this array corresponds to the
/// token with the corresponding index value.
///
/// When an array element is None, the corresponding keyword is
/// not allowed in this kind of section. Otherwise, the array
/// element is a TokenFmt describing how many of the corresponding
/// token may appear, and what they need to look like.
rules: Vec<Option<TokenFmt<T>>>,
}
/// The entry or entries for a particular keyword within a document.
#[derive(Clone)]
enum TokVal<'a, K: Keyword> {
/// No value has been found.
None,
/// A single value has been found; we're storing it in place.
/// We use a one-element array here so that we can return a slice
/// of the array.
Some([Item<'a, K>; 1]),
/// Multiple values have been found; they go in a vector.
Multi(Vec<Item<'a, K>>),
}
impl<'a, K: Keyword> TokVal<'a, K> {
/// Return the number of Items for this value.
fn count(&self) -> usize {
match self {
TokVal::None => 0,
TokVal::Some(_) => 1,
TokVal::Multi(v) => v.len(),
}
}
/// Return the first Item for this value, or None if there wasn't one.
fn first(&self) -> Option<&Item<'a, K>> {
match self {
TokVal::None => None,
TokVal::Some([t]) => Some(t),
TokVal::Multi(v) => Some(&v[0]),
}
}
/// Return the Item for this value, if there is exactly one.
fn singleton(&self) -> Option<&Item<'a, K>> {
match self {
TokVal::None => None,
TokVal::Some([t]) => Some(t),
TokVal::Multi(_) => None,
}
}
/// Return all the Items for this value, as a slice.
fn as_slice(&self) -> &[Item<'a, K>] {
match self {
TokVal::None => &[],
TokVal::Some(t) => &t[..],
TokVal::Multi(v) => &v[..],
}
}
/// Return the last Item for this value, if any.
fn last(&self) -> Option<&Item<'a, K>> {
match self {
TokVal::None => None,
TokVal::Some([t]) => Some(t),
TokVal::Multi(v) => Some(&v[v.len() - 1]),
}
}
}
/// A Section is the result of sorting a document's entries by keyword.
/// TODO: I'd rather have this be pub(crate), but I haven't figured out
/// how to make that work.
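///
/// As a sketch, typical accessor use on a parsed `Section<'_, Fruit>` (like
/// the one built in the tests below) looks like:
///
/// ```ignore
/// let tasty = sec.required(Fruit::ANN_TASTY)?; // error unless exactly one
/// let orange = sec.get(Fruit::ORANGE);         // Some(..) only if exactly one
/// let pits = sec.slice(Fruit::STONEFRUIT);     // all matching items, in order
/// ```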
pub struct Section<'a, T: Keyword> {
/// Map from Keyword index to TokVal
v: Vec<TokVal<'a, T>>,
/// The keyword that appeared first in this section. This will
/// be set if `v` is nonempty.
first: Option<T>,
/// The keyword that appeared last in this section. This will
/// be set if `v` is nonempty.
last: Option<T>,
}
impl<'a, T: Keyword> Section<'a, T> {
/// Make a new empty Section.
fn new() -> Self {
let n = T::n_vals();
let mut v = Vec::with_capacity(n);
v.resize(n, TokVal::None);
Section {
v,
first: None,
last: None,
}
}
/// Helper: return the tokval for some Keyword.
fn tokval(&self, t: T) -> &TokVal<'a, T> {
let idx = t.idx();
&self.v[idx]
}
/// Return all the Items for some Keyword, as a slice.
pub(crate) fn slice(&self, t: T) -> &[Item<'a, T>] {
self.tokval(t).as_slice()
}
/// Return a single Item for some Keyword, if there is exactly one.
pub(crate) fn get(&self, t: T) -> Option<&Item<'a, T>> {
self.tokval(t).singleton()
}
/// Return a single Item for some Keyword, giving an error if there
/// is not exactly one.
/// It is usually a mistake to use this function on a Keyword that is
/// not required.
pub(crate) fn required(&self, t: T) -> Result<&Item<'a, T>> {
self.get(t)
.ok_or_else(|| EK::MissingToken.with_msg(t.to_str()))
}
/// Return a proxy MaybeItem object for some keyword.
///
/// A MaybeItem is used to represent an object that might or might
/// not be there.
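///
/// As a sketch (mirroring the tests below), this keeps lookups on a keyword
/// that may be absent `Option`-shaped:
///
/// ```ignore
/// let trees = sec.maybe(Fruit::GUAVA).parse_arg::<u32>(2);
/// ```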
pub(crate) fn maybe<'b>(&'b self, t: T) -> MaybeItem<'b, 'a, T> {
MaybeItem::from_option(self.get(t))
}
/// Return the first item that was accepted for this section, or None
/// if no items were accepted for this section.
pub(crate) fn first_item(&self) -> Option<&Item<'a, T>> {
match self.first {
None => None,
Some(t) => self.tokval(t).first(),
}
}
/// Return the last item that was accepted for this section, or None
/// if no items were accepted for this section.
pub(crate) fn last_item(&self) -> Option<&Item<'a, T>> {
match self.last {
None => None,
Some(t) => self.tokval(t).last(),
}
}
/// Insert an `item`.
/// The `item` must have parsed Keyword `t`.
fn add_tok(&mut self, t: T, item: Item<'a, T>) {
let idx = Keyword::idx(t);
if idx >= self.v.len() {
self.v.resize(idx + 1, TokVal::None);
}
let m = &mut self.v[idx];
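// Promote the entry for this keyword as needed: None -> Some (one item)
// -> Multi (a vector of items).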
match m {
TokVal::None => *m = TokVal::Some([item]),
TokVal::Some([x]) => {
*m = TokVal::Multi(vec![x.clone(), item]);
}
TokVal::Multi(ref mut v) => {
v.push(item);
}
};
if self.first.is_none() {
self.first = Some(t);
}
self.last = Some(t);
}
}
impl<T: Keyword> SectionRules<T> {
/// Create a new SectionRules with no rules.
/// By default, no Keyword is allowed by this SectionRules.
pub(crate) fn new() -> Self {
let n = T::n_vals();
let mut rules = Vec::with_capacity(n);
rules.resize(n, None);
SectionRules { rules }
}
/// Add a rule to this SectionRules, based on a TokenFmtBuilder.
/// Requires that no rule yet exists for the provided keyword.
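///
/// For example (a sketch mirroring the `FRUIT_SALAD` rules in the tests
/// below):
///
/// ```ignore
/// rules.add(Fruit::LEMON.rule().no_args().obj_required());
/// ```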
pub(crate) fn add(&mut self, t: TokenFmtBuilder<T>) {
let rule: TokenFmt<_> = t.into();
let idx = rule.kwd().idx();
assert!(self.rules[idx].is_none());
self.rules[idx] = Some(rule);
}
/// Parse a stream of tokens into a Section object without (fully)
/// verifying them.
/// Some errors are detected early, but others only show up later
/// when we validate more carefully.
fn parse_unverified<'a, I>(&self, tokens: &mut I, section: &mut Section<'a, T>) -> Result<()>
where
I: Iterator<Item = Result<Item<'a, T>>>,
{
for item in tokens {
let item = item?;
let tok = item.kwd();
let tok_idx = tok.idx();
if let Some(rule) = &self.rules[tok_idx] {
// we want this token.
assert!(rule.kwd() == tok);
section.add_tok(tok, item);
rule.check_multiplicity(section.v[tok_idx].as_slice())?;
} else {
// We don't have a rule for this token.
return Err(EK::UnexpectedToken
.with_msg(tok.to_str())
.at_pos(item.pos()));
}
}
Ok(())
}
/// Check whether the tokens in a section we've parsed conform to
/// these rules.
fn validate<'a>(&self, s: &Section<'a, T>) -> Result<()> {
// These vectors are both generated from T::n_vals().
assert_eq!(s.v.len(), self.rules.len());
// Iterate over every item, and make sure it matches the
// corresponding rule.
for (rule, t) in self.rules.iter().zip(s.v.iter()) {
match rule {
None => {
// We aren't supposed to have any of these.
if t.count() > 0 {
unreachable!(
"This item should have been rejected earlier, in parse_unverified()"
);
}
}
Some(rule) => {
// We're allowed to have this. Is the number right?
rule.check_multiplicity(t.as_slice())?;
// The number is right. Check each individual item.
for item in t.as_slice() {
rule.check_item(item)?;
}
}
}
}
Ok(())
}
/// Check all the base64-encoded objects on a given keyword.
/// We use this to validate objects on unrecognized items, since
/// otherwise nothing would check that they are well-formed.
fn validate_objects<'a>(&self, s: &Section<'a, T>, kwd: T) -> Result<()> {
for item in s.slice(kwd).iter() {
let _ = item.obj_raw()?;
}
Ok(())
}
/// Parse a stream of tokens into a validated section.
pub(crate) fn parse<'a, I>(&self, tokens: &mut I) -> Result<Section<'a, T>>
where
I: Iterator<Item = Result<Item<'a, T>>>,
{
let mut section = Section::new();
self.parse_unverified(tokens, &mut section)?;
self.validate(&section)?;
self.validate_objects(&section, T::unrecognized())?;
self.validate_objects(&section, T::ann_unrecognized())?;
Ok(section)
}
}
#[cfg(test)]
mod test {
#![allow(clippy::unwrap_used)]
use super::SectionRules;
use crate::parse::macros::test::Fruit;
use crate::parse::tokenize::{Item, NetDocReader};
use crate::{Error, ParseErrorKind as EK, Result};
use once_cell::sync::Lazy;
/// Rules for parsing a fruit salad (a small test document format).
static FRUIT_SALAD: Lazy<SectionRules<Fruit>> = Lazy::new(|| {
use Fruit::*;
let mut rules = SectionRules::new();
rules.add(ANN_TASTY.rule().required().args(1..=1));
rules.add(ORANGE.rule().args(1..));
rules.add(STONEFRUIT.rule().may_repeat());
rules.add(GUAVA.rule().obj_optional());
rules.add(LEMON.rule().no_args().obj_required());
rules
});
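// In words: "@tasty" must appear exactly once, with exactly one argument;
// "orange" takes at least one argument; stonefruits may repeat; "guava" may
// carry an object; "lemon" takes no arguments but must carry an object.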
#[test]
fn parse_section() -> Result<()> {
use Fruit::*;
let s = "\
@tasty yes
orange soda
cherry cobbler
cherry pie
plum compote
guava fresh from 7 trees
-----BEGIN GUAVA MANIFESTO-----
VGhlIGd1YXZhIGVtb2ppIGlzIG5vdCBjdXJyZW50bHkgc3VwcG9ydGVkIGluI
HVuaWNvZGUgMTMuMC4gTGV0J3MgZmlnaHQgYWdhaW5zdCBhbnRpLWd1YXZhIG
JpYXMu
-----END GUAVA MANIFESTO-----
lemon
-----BEGIN LEMON-----
8J+Niw==
-----END LEMON-----
";
let mut r: NetDocReader<'_, Fruit> = NetDocReader::new(s);
let sec = FRUIT_SALAD.parse(&mut r.iter()).unwrap();
assert_eq!(sec.required(ANN_TASTY)?.arg(0), Some("yes"));
assert!(sec.get(ORANGE).is_some());
assert_eq!(sec.get(ORANGE).unwrap().args_as_str(), "soda");
let stonefruit_slice = sec.slice(STONEFRUIT);
assert_eq!(stonefruit_slice.len(), 3);
let kwds: Vec<&str> = stonefruit_slice.iter().map(Item::kwd_str).collect();
assert_eq!(kwds, &["cherry", "cherry", "plum"]);
assert_eq!(sec.maybe(GUAVA).args_as_str(), Some("fresh from 7 trees"));
assert_eq!(sec.maybe(GUAVA).parse_arg::<u32>(2).unwrap(), Some(7));
assert!(sec.maybe(GUAVA).parse_arg::<u32>(1).is_err());
assert_eq!(sec.get(GUAVA).unwrap().obj("GUAVA MANIFESTO").unwrap(),
&b"The guava emoji is not currently supported in unicode 13.0. Let's fight against anti-guava bias."[..]);
assert_eq!(
sec.get(ANN_TASTY).unwrap() as *const Item<'_, _>,
sec.first_item().unwrap() as *const Item<'_, _>
);
assert_eq!(
sec.get(LEMON).unwrap() as *const Item<'_, _>,
sec.last_item().unwrap() as *const Item<'_, _>
);
Ok(())
}
#[test]
fn rejected() {
use crate::Pos;
fn check(s: &str, e: &Error) {
let mut r: NetDocReader<'_, Fruit> = NetDocReader::new(s);
let res = FRUIT_SALAD.parse(&mut r.iter());
assert!(res.is_err());
assert_eq!(&res.err().unwrap().within(s), e);
}
// unrecognized tokens aren't allowed here
check(
"orange foo\nfoobar x\n@tasty yes\n",
&EK::UnexpectedToken
.with_msg("<unrecognized>")
.at_pos(Pos::from_line(2, 1)),
);
// Only one orange per customer.
"@tasty yes\norange foo\norange bar\n",
&EK::DuplicateToken
.with_msg("orange")
.at_pos(Pos::from_line(3, 1)),
);
// There needs to be a declaration of tastiness.
check("orange foo\n", &EK::MissingToken.with_msg("@tasty"));
// You can't have an orange without an argument.
"@tasty nope\norange\n",
&EK::TooFewArguments
.with_msg("orange")
.at_pos(Pos::from_line(2, 1)),
);
// You can't have more than one argument on "tasty".
check(
"@tasty yup indeed\norange normal\n",
&EK::TooManyArguments
.with_msg("@tasty")
.at_pos(Pos::from_line(1, 1)),
);
// Every lemon needs an object
"@tasty yes\nlemon\norange no\n",
&EK::MissingObject
.with_msg("lemon")