tor_netdoc/parse/tokenize.rs

//! Break a string into a set of directory-object Items.
//!
//! This module defines Item, which represents a basic entry in a
//! directory document, and NetDocReader, which is used to break a
//! string into Items.
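//!
//! # Example
//!
//! A minimal sketch of how the reader is used inside this crate, assuming
//! some `Keyword` implementation `K` (these types are crate-private, so this
//! is not compiled as a doctest):
//!
//! ```ignore
//! let reader: NetDocReader<'_, K> = NetDocReader::new(input)?;
//! for item in reader {
//!     let item = item?;
//!     println!("{} has {} arguments", item.kwd_str(), item.n_args());
//! }
//! ```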

use crate::parse::keyword::Keyword;
use crate::types::misc::FromBytes;
use crate::util::PeekableIterator;
use crate::{Error, NetdocErrorKind as EK, Pos, Result};
use base64ct::{Base64, Encoding};
use itertools::Itertools;
use std::cell::{Ref, RefCell};
use std::iter::Peekable;
use std::str::FromStr;
use tor_error::internal;

/// Useful constants for netdoc object syntax
pub(crate) mod object {
    /// indicates the start of an object
    pub(crate) const BEGIN_STR: &str = "-----BEGIN ";
    /// indicates the end of an object
    pub(crate) const END_STR: &str = "-----END ";
    /// indicates the end of a begin or end tag.
    pub(crate) const TAG_END: &str = "-----";
    /// Maximum PEM base64 line length (not enforced during parsing)
    #[cfg(feature = "hs-service")]
    pub(crate) const BASE64_PEM_MAX_LINE: usize = 64;
}

/// Return true iff a given character is "space" according to the rules
/// of dir-spec.txt
pub(crate) fn is_sp(c: char) -> bool {
    c == ' ' || c == '\t'
}
/// Check that all the characters in `s` are valid base64.
///
/// This is not a perfect check for base64ness -- it is mainly meant
/// to help us recover after unterminated base64.
fn b64check(s: &str) -> Result<()> {
    for b in s.bytes() {
        match b {
            b'=' => (),
            b'a'..=b'z' => (),
            b'A'..=b'Z' => (),
            b'0'..=b'9' => (),
            b'/' | b'+' => (),
            _ => {
                return Err(EK::BadObjectBase64.at_pos(Pos::at(s)));
            }
        };
    }
    Ok(())
}

/// A tagged object that is part of a directory Item.
///
/// This represents a single blob within a pair of "-----BEGIN
/// FOO-----" and "-----END FOO-----".  The data is not guaranteed to
/// be actual base64 when this object is created: doing so would
/// require either that we parse the base64 twice, or that we allocate
/// a buffer to hold the data before it's needed.
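///
/// For example, the test documents in this module encode an object as:
///
/// ```text
/// -----BEGIN CHERRY SYNOPSIS-----
/// 8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
/// -----END CHERRY SYNOPSIS-----
/// ```
///
/// Here the tag is `CHERRY SYNOPSIS` and the data is the base64 line
/// between the BEGIN and END lines.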
#[derive(Clone, Copy, Debug)]
pub(crate) struct Object<'a> {
    /// Reference to the "tag" string (the 'foo') in the BEGIN line.
    tag: &'a str,
    /// Reference to the allegedly base64-encoded data.  This may or
    /// may not actually be base64 at this point.
    data: &'a str,
    /// Reference to the END line for this object.  This doesn't
    /// need to be parsed, but it's used to find where this object
    /// ends.
    endline: &'a str,
}

/// A single part of a directory object.
///
/// Each Item -- called an "entry" in dir-spec.txt -- has a keyword, a
/// (possibly empty) set of arguments, and an optional object.
///
/// This is a zero-copy implementation that points to slices within a
/// containing string.
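///
/// For example, in the test documents below, the line
///
/// ```text
/// @tasty very much so
/// ```
///
/// is a single Item whose keyword is the annotation `@tasty` and whose
/// three arguments are `very`, `much`, and `so`; an Item may also be
/// followed by an [`Object`].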
#[derive(Clone, Debug)]
pub(crate) struct Item<'a, K: Keyword> {
    /// The keyword that determines the type of this item.
    kwd: K,
    /// A reference to the actual string that defines the keyword for
    /// this item.
    kwd_str: &'a str,
    /// Reference to the arguments that appear in the same line after the
    /// keyword.  Does not include the terminating newline or the
    /// space that separates the keyword from its arguments.
    args: &'a str,
    /// The arguments, split by whitespace.  This vector is constructed
    /// as needed, using interior mutability.
    split_args: RefCell<Option<Vec<&'a str>>>,
    /// If present, a base-64-encoded object that appeared at the end
    /// of this item.
    object: Option<Object<'a>>,
}

/// A cursor into a string that returns Items one by one.
///
/// (This type isn't used directly, but is returned wrapped in a Peekable.)
#[derive(Debug)]
struct NetDocReaderBase<'a, K: Keyword> {
    /// The string we're parsing.
    s: &'a str,
    /// Our position within the string.
    off: usize,
    /// Tells Rust it's okay that we are parameterizing on K.
    _k: std::marker::PhantomData<K>,
}

impl<'a, K: Keyword> NetDocReaderBase<'a, K> {
    /// Create a new NetDocReader to split a string into tokens.
    fn new(s: &'a str) -> Result<Self> {
        Ok(NetDocReaderBase {
            s: validate_utf_8_rules(s)?,
            off: 0,
            _k: std::marker::PhantomData,
        })
    }
    /// Return the current Pos within the string.
    fn pos(&self, pos: usize) -> Pos {
        Pos::from_offset(self.s, pos)
    }
    /// Skip forward by n bytes.
    ///
    /// (Note that standard caveats with byte-oriented processing of
    /// UTF-8 strings apply.)
    fn advance(&mut self, n: usize) -> Result<()> {
        if n > self.remaining() {
            return Err(
                Error::from(internal!("tried to advance past end of document"))
                    .at_pos(Pos::from_offset(self.s, self.off)),
            );
        }
        self.off += n;
        Ok(())
    }
    /// Return the remaining number of bytes in this reader.
    fn remaining(&self) -> usize {
        self.s.len() - self.off
    }

    /// Return true if the next characters in this reader are `s`
    fn starts_with(&self, s: &str) -> bool {
        self.s[self.off..].starts_with(s)
    }
    /// Try to extract a NL-terminated line from this reader.  Always
    /// remove data if the reader is nonempty.
    fn line(&mut self) -> Result<&'a str> {
        let remainder = &self.s[self.off..];
        if let Some(nl_pos) = remainder.find('\n') {
            self.advance(nl_pos + 1)?;
            let line = &remainder[..nl_pos];

            // TODO: we should probably detect \r and do something about it.
            // Just ignoring it isn't the right answer, though.
            Ok(line)
        } else {
            self.advance(remainder.len())?; // drain everything.
            Err(EK::TruncatedLine.at_pos(self.pos(self.s.len())))
        }
    }

    /// Try to extract a line that begins with a keyword from this reader.
    ///
    /// Returns a (kwd, args) tuple on success.
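    ///
    /// For example, the line `opt apple 77` yields `("apple", "77")`: a
    /// leading `opt ` prefix is stripped, and the rest of the line is split
    /// at the first space or tab.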
    fn kwdline(&mut self) -> Result<(&'a str, &'a str)> {
        let pos = self.off;
        let line = self.line()?;
        if line.is_empty() {
            return Err(EK::EmptyLine.at_pos(self.pos(pos)));
        }
        let (line, anno_ok) = if let Some(rem) = line.strip_prefix("opt ") {
            (rem, false)
        } else {
            (line, true)
        };
        let mut parts_iter = line.splitn(2, [' ', '\t']);
        let kwd = match parts_iter.next() {
            Some(k) => k,
            // This case seems like it can't happen: split always returns
            // something, apparently.
            None => return Err(EK::MissingKeyword.at_pos(self.pos(pos))),
        };
        if !keyword_ok(kwd, anno_ok) {
            return Err(EK::BadKeyword.at_pos(self.pos(pos)));
        }
        // TODO(nickm): dir-spec does not yet allow unicode in the arguments, but we're
        // assuming that proposal 285 is accepted.
        let args = match parts_iter.next() {
            Some(a) => a,
            // take a zero-length slice, so it will be within the string.
            None => &kwd[kwd.len()..],
        };
        Ok((kwd, args))
    }

    /// Try to extract an Object wrapped within BEGIN/END tags.
    ///
    /// Returns Ok(Some(Object(...))) on success if an object is
    /// found, Ok(None) if no object is found, and Err only if a
    /// corrupt object is found.
    fn object(&mut self) -> Result<Option<Object<'a>>> {
        use object::*;

        let pos = self.off;
        if !self.starts_with(BEGIN_STR) {
            return Ok(None);
        }
        let line = self.line()?;
        if !line.ends_with(TAG_END) {
            return Err(EK::BadObjectBeginTag.at_pos(self.pos(pos)));
        }
        let tag = &line[BEGIN_STR.len()..(line.len() - TAG_END.len())];
        if !tag_keywords_ok(tag) {
            return Err(EK::BadObjectBeginTag.at_pos(self.pos(pos)));
        }
        let datapos = self.off;
        let (endlinepos, endline) = loop {
            let p = self.off;
            let line = self.line()?;
            if line.starts_with(END_STR) {
                break (p, line);
            }
            // Exit if this line isn't plausible base64.  Otherwise,
            // an unterminated base64 block could potentially
            // "consume" all the rest of the string, which would stop
            // us from recovering.
            b64check(line).map_err(|e| e.within(self.s))?;
        };
        let data = &self.s[datapos..endlinepos];
        if !endline.ends_with(TAG_END) {
            return Err(EK::BadObjectEndTag.at_pos(self.pos(endlinepos)));
        }
        let endtag = &endline[END_STR.len()..(endline.len() - TAG_END.len())];
        if endtag != tag {
            return Err(EK::BadObjectMismatchedTag.at_pos(self.pos(endlinepos)));
        }
        Ok(Some(Object { tag, data, endline }))
    }

    /// Read the next Item from this NetDocReaderBase.
    ///
    /// If successful, returns Ok(Some(Item)), or Ok(None) if exhausted.
    /// Returns Err on failure.
    ///
    /// Always consumes at least one line if possible; always ends on a
    /// line boundary if one exists.
    fn item(&mut self) -> Result<Option<Item<'a, K>>> {
        if self.remaining() == 0 {
            return Ok(None);
        }
        let (kwd_str, args) = self.kwdline()?;
        let object = self.object()?;
        let split_args = RefCell::new(None);
        let kwd = K::from_str(kwd_str);
        Ok(Some(Item {
            kwd,
            kwd_str,
            args,
            split_args,
            object,
        }))
    }
}

/// Return true iff 's' is a valid keyword or annotation.
///
/// (Only allow annotations if `anno_ok` is true.)
fn keyword_ok(mut s: &str, anno_ok: bool) -> bool {
    /// Helper: return true if this character can appear in keywords.
    fn kwd_char_ok(c: char) -> bool {
        matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '-')
    }

    if s.is_empty() {
        return false;
    }
    if anno_ok && s.starts_with('@') {
        s = &s[1..];
    }
    if s.starts_with('-') {
        return false;
    }
    s.chars().all(kwd_char_ok)
}

/// Return true iff 's' is a valid keywords string for a BEGIN/END tag.
pub(crate) fn tag_keywords_ok(s: &str) -> bool {
    s.split(' ').all(|w| keyword_ok(w, false))
}

/// When used as an Iterator, returns a sequence of `Result<Item>`.
impl<'a, K: Keyword> Iterator for NetDocReaderBase<'a, K> {
    type Item = Result<Item<'a, K>>;
    fn next(&mut self) -> Option<Self::Item> {
        self.item().transpose()
    }
}

/// Helper: as base64::decode(), but allows newlines in the middle of the
/// encoded object.
fn base64_decode_multiline(s: &str) -> std::result::Result<Vec<u8>, base64ct::Error> {
    // base64 module hates whitespace.
    let mut s = s.to_string();
    s.retain(|ch| ch != '\n');
    let v = Base64::decode_vec(&s)?;
    Ok(v)
}

impl<'a, K: Keyword> Item<'a, K> {
    /// Return the parsed keyword part of this item.
    pub(crate) fn kwd(&self) -> K {
        self.kwd
    }
    /// Return the keyword part of this item, as a string.
    pub(crate) fn kwd_str(&self) -> &'a str {
        self.kwd_str
    }
    /// Return true if the keyword for this item is in 'ks'.
    pub(crate) fn has_kwd_in(&self, ks: &[K]) -> bool {
        ks.contains(&self.kwd)
    }
    /// Return the arguments of this item, as a single string.
    pub(crate) fn args_as_str(&self) -> &'a str {
        self.args
    }
    /// Return the arguments of this item as a vector.
    fn args_as_vec(&self) -> Ref<'_, Vec<&'a str>> {
        // We're using an interior mutability pattern here to lazily
        // construct the vector.
        if self.split_args.borrow().is_none() {
            self.split_args.replace(Some(self.args().collect()));
        }
        Ref::map(self.split_args.borrow(), |opt| match opt {
            Some(v) => v,
            None => panic!(),
        })
    }
    /// Return an iterator over the arguments of this item.
    pub(crate) fn args(&self) -> impl Iterator<Item = &'a str> {
        self.args.split(is_sp).filter(|s| !s.is_empty())
    }
    /// Return the nth argument of this item, if there is one.
    pub(crate) fn arg(&self, idx: usize) -> Option<&'a str> {
        self.args_as_vec().get(idx).copied()
    }
    /// Return the nth argument of this item, or an error if it isn't there.
    pub(crate) fn required_arg(&self, idx: usize) -> Result<&'a str> {
        self.arg(idx)
            .ok_or_else(|| EK::MissingArgument.at_pos(Pos::at(self.args)))
    }
    /// Try to parse the nth argument (if it exists) into some type
    /// that supports FromStr.
    ///
    /// Returns Ok(None) if the argument doesn't exist.
    pub(crate) fn parse_optional_arg<V: FromStr>(&self, idx: usize) -> Result<Option<V>>
    where
        Error: From<V::Err>,
    {
        match self.arg(idx) {
            None => Ok(None),
            Some(s) => match s.parse() {
                Ok(r) => Ok(Some(r)),
                Err(e) => {
                    let e: Error = e.into();
                    Err(e.or_at_pos(Pos::at(s)))
                }
            },
        }
    }
    /// Try to parse the nth argument (if it exists) into some type
    /// that supports FromStr.
    ///
    /// Return an error if the argument doesn't exist.
    pub(crate) fn parse_arg<V: FromStr>(&self, idx: usize) -> Result<V>
    where
        Error: From<V::Err>,
    {
        match self.parse_optional_arg(idx) {
            Ok(Some(v)) => Ok(v),
            Ok(None) => Err(EK::MissingArgument.at_pos(self.arg_pos(idx))),
            Err(e) => Err(e),
        }
    }
    /// Return the number of arguments for this Item
    pub(crate) fn n_args(&self) -> usize {
        self.args().count()
    }
    /// Return true iff this Item has an associated object.
    pub(crate) fn has_obj(&self) -> bool {
        self.object.is_some()
    }
    /// Return the tag of this item's associated object, if it has one.
    pub(crate) fn obj_tag(&self) -> Option<&'a str> {
        self.object.map(|o| o.tag)
    }
    /// Try to decode the base64 contents of this Item's associated object.
    ///
    /// On success, return the object's tag and decoded contents.
    pub(crate) fn obj_raw(&self) -> Result<Option<(&'a str, Vec<u8>)>> {
        match self.object {
            None => Ok(None),
            Some(obj) => {
                let decoded = base64_decode_multiline(obj.data)
                    .map_err(|_| EK::BadObjectBase64.at_pos(Pos::at(obj.data)))?;
                Ok(Some((obj.tag, decoded)))
            }
        }
    }
    /// Try to decode the base64 contents of this Item's associated object,
    /// and make sure that its tag matches 'want_tag'.
    pub(crate) fn obj(&self, want_tag: &str) -> Result<Vec<u8>> {
        match self.obj_raw()? {
            None => Err(EK::MissingObject
                .with_msg(self.kwd.to_str())
                .at_pos(self.end_pos())),
            Some((tag, decoded)) => {
                if tag != want_tag {
                    Err(EK::WrongObject.at_pos(Pos::at(tag)))
                } else {
                    Ok(decoded)
                }
            }
        }
    }
    /// Try to decode the base64 contents of this item's associated object
    /// as a given type that implements FromBytes.
    pub(crate) fn parse_obj<V: FromBytes>(&self, want_tag: &str) -> Result<V> {
        let bytes = self.obj(want_tag)?;
        // This unwrap is safe: `.obj()` above returns an Error unless
        // an object with the wanted tag is present.
        #[allow(clippy::unwrap_used)]
        let p = Pos::at(self.object.unwrap().data);
        V::from_vec(bytes, p).map_err(|e| e.at_pos(p))
    }
    /// Return the position of this item.
    ///
    /// This position won't be useful unless it is later contextualized
    /// with the containing string.
    pub(crate) fn pos(&self) -> Pos {
        Pos::at(self.kwd_str)
    }
    /// Return the position of this Item in a string.
    ///
    /// Returns None if this item doesn't actually belong to the string.
    pub(crate) fn offset_in(&self, s: &str) -> Option<usize> {
        crate::util::str::str_offset(s, self.kwd_str)
    }
    /// Return the position of the n'th argument of this item.
    ///
    /// If this item does not have a n'th argument, return the
    /// position of the end of the final argument.
    pub(crate) fn arg_pos(&self, n: usize) -> Pos {
        let args = self.args_as_vec();
        if n < args.len() {
            Pos::at(args[n])
        } else {
            self.last_arg_end_pos()
        }
    }
    /// Return the position at the end of the last argument.  (This will
    /// point to a newline.)
    fn last_arg_end_pos(&self) -> Pos {
        let args = self.args_as_vec();
        if let Some(last_arg) = args.last() {
            Pos::at_end_of(last_arg)
        } else {
            Pos::at_end_of(self.kwd_str)
        }
    }
    /// Return the position of the end of this object. (This will point to a
    /// newline.)
    pub(crate) fn end_pos(&self) -> Pos {
        match self.object {
            Some(o) => Pos::at_end_of(o.endline),
            None => self.last_arg_end_pos(),
        }
    }
    /// If this item occurs within s, return the byte offset
    /// immediately after the end of this item.
    pub(crate) fn offset_after(&self, s: &str) -> Option<usize> {
        self.end_pos().offset_within(s).map(|nl_pos| nl_pos + 1)
    }
}

/// Represents an Item that might not be present, whose arguments we
/// want to inspect.  If the Item is there, this acts like a proxy to the
/// item; otherwise, it treats the item as having no arguments.
pub(crate) struct MaybeItem<'a, 'b, K: Keyword>(Option<&'a Item<'b, K>>);

// All methods here are as for Item.
impl<'a, 'b, K: Keyword> MaybeItem<'a, 'b, K> {
    /// Return the position of this item, if it has one.
    fn pos(&self) -> Pos {
        match self.0 {
            Some(item) => item.pos(),
            None => Pos::None,
        }
    }
    /// Construct a MaybeItem from an Option reference to an item.
    pub(crate) fn from_option(opt: Option<&'a Item<'b, K>>) -> Self {
        MaybeItem(opt)
    }

    /// If this item is present, parse its argument at position `idx`.
    /// Treat the absence or malformedness of the argument as an error,
    /// but treat the absence of this item as acceptable.
    #[cfg(any(test, feature = "routerdesc"))]
    pub(crate) fn parse_arg<V: FromStr>(&self, idx: usize) -> Result<Option<V>>
    where
        Error: From<V::Err>,
    {
        match self.0 {
            Some(item) => match item.parse_arg(idx) {
                Ok(v) => Ok(Some(v)),
                Err(e) => Err(e.or_at_pos(self.pos())),
            },
            None => Ok(None),
        }
    }
    /// If this item is present, return its arguments as a single string.
    pub(crate) fn args_as_str(&self) -> Option<&str> {
        self.0.map(|item| item.args_as_str())
    }
    /// If this item is present, parse all of its arguments as a
    /// single string.
    pub(crate) fn parse_args_as_str<V: FromStr>(&self) -> Result<Option<V>>
    where
        Error: From<V::Err>,
    {
        match self.0 {
            Some(item) => match item.args_as_str().parse::<V>() {
                Ok(v) => Ok(Some(v)),
                Err(e) => {
                    let e: Error = e.into();
                    Err(e.or_at_pos(self.pos()))
                }
            },
            None => Ok(None),
        }
    }
}

/// Extension trait for `Result<Item>` -- makes it convenient to implement
/// PauseAt predicates
pub(crate) trait ItemResult<K: Keyword> {
    /// Return true if this is an ok result with an annotation.
    fn is_ok_with_annotation(&self) -> bool;
    /// Return true if this is an ok result with a non-annotation.
    fn is_ok_with_non_annotation(&self) -> bool;
    /// Return true if this is an ok result with the keyword 'k'
    fn is_ok_with_kwd(&self, k: K) -> bool {
        self.is_ok_with_kwd_in(&[k])
    }
    /// Return true if this is an ok result with a keyword in the slice 'ks'
    fn is_ok_with_kwd_in(&self, ks: &[K]) -> bool;
    /// Return true if this is an ok result with a keyword not in the slice 'ks'
    fn is_ok_with_kwd_not_in(&self, ks: &[K]) -> bool;
    /// Return true if this is an empty-line error.
    fn is_empty_line(&self) -> bool;
}

impl<'a, K: Keyword> ItemResult<K> for Result<Item<'a, K>> {
    fn is_ok_with_annotation(&self) -> bool {
        match self {
            Ok(item) => item.kwd().is_annotation(),
            Err(_) => false,
        }
    }
    fn is_ok_with_non_annotation(&self) -> bool {
        match self {
            Ok(item) => !item.kwd().is_annotation(),
            Err(_) => false,
        }
    }
    fn is_ok_with_kwd_in(&self, ks: &[K]) -> bool {
        match self {
            Ok(item) => item.has_kwd_in(ks),
            Err(_) => false,
        }
    }
    fn is_ok_with_kwd_not_in(&self, ks: &[K]) -> bool {
        match self {
            Ok(item) => !item.has_kwd_in(ks),
            Err(_) => false,
        }
    }
    fn is_empty_line(&self) -> bool {
        matches!(
            self,
            Err(e) if e.netdoc_error_kind() == crate::err::NetdocErrorKind::EmptyLine
        )
    }
}

/// A peekable cursor into a string that returns Items one by one.
///
/// This is an [`Iterator`], yielding [`Item`]s.
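///
/// A sketch of typical internal use, assuming a `Keyword` type `K` with a
/// hypothetical variant `K::SomeKeyword` (illustrative only; not compiled
/// as a doctest):
///
/// ```ignore
/// let mut reader: NetDocReader<'_, K> = NetDocReader::new(s)?;
/// // Read items up to (but not including) the first SomeKeyword item.
/// let header: Vec<_> = reader
///     .pause_at(|item| item.is_ok_with_kwd(K::SomeKeyword))
///     .collect();
/// // The SomeKeyword item is still available from `reader` itself.
/// let item = reader.next();
/// ```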
#[derive(Debug)]
pub(crate) struct NetDocReader<'a, K: Keyword> {
    // TODO: I wish there were some way around having this string
    // reference, since we already need one inside NetDocReaderBase.
    /// The underlying string being parsed.
    s: &'a str,
    /// A stream of tokens being parsed by this NetDocReader.
    tokens: Peekable<NetDocReaderBase<'a, K>>,
}

impl<'a, K: Keyword> NetDocReader<'a, K> {
    /// Construct a new NetDocReader to read tokens from `s`.
    pub(crate) fn new(s: &'a str) -> Result<Self> {
        Ok(NetDocReader {
            s,
            tokens: NetDocReaderBase::new(s)?.peekable(),
        })
    }
    /// Return a reference to the string used for this NetDocReader.
    pub(crate) fn str(&self) -> &'a str {
        self.s
    }
    /// Return a wrapper around the peekable iterator in this
    /// NetDocReader that reads tokens until it reaches an element where
    /// 'f' is true.
    pub(crate) fn pause_at<'f, 'r, F>(
        &mut self,
        mut f: F,
    ) -> itertools::PeekingTakeWhile<'_, Self, impl FnMut(&Result<Item<'a, K>>) -> bool + 'f>
    where
        'f: 'r,
        F: FnMut(&Result<Item<'a, K>>) -> bool + 'f,
        K: 'f,
    {
        self.peeking_take_while(move |i| !f(i))
    }

    /// Return true if there are no more items in this NetDocReader.
    // The implementation sadly needs to mutate the inner state, even if it's not *semantically*
    // mutated.  We don't want inner mutability just to placate clippy for an internal API.
    #[allow(clippy::wrong_self_convention)]
    #[allow(dead_code)] // TODO perhaps we should remove this ?
    pub(crate) fn is_exhausted(&mut self) -> bool {
        self.peek().is_none()
    }

    /// Give an error if there are remaining tokens in this NetDocReader.
    pub(crate) fn should_be_exhausted(&mut self) -> Result<()> {
        match self.peek() {
            None => Ok(()),
            Some(Ok(t)) => Err(EK::UnexpectedToken
                .with_msg(t.kwd().to_str())
                .at_pos(t.pos())),
            Some(Err(e)) => Err(e.clone()),
        }
    }

    /// Give an error if there are remaining tokens in this NetDocReader.
    ///
    /// Like [`should_be_exhausted`](Self::should_be_exhausted),
    /// but permit empty lines at the end of the document.
    #[cfg(feature = "routerdesc")]
    pub(crate) fn should_be_exhausted_but_for_empty_lines(&mut self) -> Result<()> {
        use crate::err::NetdocErrorKind as K;
        while let Some(Err(e)) = self.peek() {
            if e.netdoc_error_kind() == K::EmptyLine {
                let _ignore = self.next();
            } else {
                break;
            }
        }
        self.should_be_exhausted()
    }

    /// Return the position from which the underlying reader is about to take
    /// the next token.  Use to make sure that the reader is progressing.
    pub(crate) fn pos(&mut self) -> Pos {
        match self.tokens.peek() {
            Some(Ok(tok)) => tok.pos(),
            Some(Err(e)) => e.pos(),
            None => Pos::at_end_of(self.s),
        }
    }
}

impl<'a, K: Keyword> Iterator for NetDocReader<'a, K> {
    type Item = Result<Item<'a, K>>;
    fn next(&mut self) -> Option<Self::Item> {
        self.tokens.next()
    }
}

impl<'a, K: Keyword> PeekableIterator for NetDocReader<'a, K> {
    fn peek(&mut self) -> Option<&Self::Item> {
        self.tokens.peek()
    }
}

impl<'a, K: Keyword> itertools::PeekingNext for NetDocReader<'a, K> {
    fn peeking_next<F>(&mut self, f: F) -> Option<Self::Item>
    where
        F: FnOnce(&Self::Item) -> bool,
    {
        if f(self.peek()?) {
            self.next()
        } else {
            None
        }
    }
}

/// Check additional UTF-8 rules that the netdoc metaformat imposes on
/// our documents.
//
// NOTE: We might decide in the future to loosen our rules here
// for parsers that handle concatenated documents:
// we might want to reject only those documents that contain NULs.
// But with luck that will never be necessary.
fn validate_utf_8_rules(s: &str) -> Result<&str> {
    // No BOM, or mangled BOM, is allowed.
    let first_char = s.chars().next();
    if [Some('\u{feff}'), Some('\u{fffe}')].contains(&first_char) {
        return Err(EK::BomMarkerFound.at_pos(Pos::at(s)));
    }
    // No NUL bytes are allowed.
    if let Some(nul_pos) = memchr::memchr(0, s.as_bytes()) {
        return Err(EK::NulFound.at_pos(Pos::from_byte(nul_pos)));
    }
    Ok(s)
}

#[cfg(test)]
mod test {
    // @@ begin test lint list maintained by maint/add_warning @@
    #![allow(clippy::bool_assert_comparison)]
    #![allow(clippy::clone_on_copy)]
    #![allow(clippy::dbg_macro)]
    #![allow(clippy::mixed_attributes_style)]
    #![allow(clippy::print_stderr)]
    #![allow(clippy::print_stdout)]
    #![allow(clippy::single_char_pattern)]
    #![allow(clippy::unwrap_used)]
    #![allow(clippy::unchecked_duration_subtraction)]
    #![allow(clippy::useless_vec)]
    #![allow(clippy::needless_pass_by_value)]
    //! <!-- @@ end test lint list maintained by maint/add_warning @@ -->
    #![allow(clippy::cognitive_complexity)]
    use super::*;
    use crate::parse::macros::test::Fruit;
    use crate::{NetdocErrorKind as EK, Pos, Result};

    #[test]
    fn read_simple() {
        use Fruit::*;

        let s = "\
@tasty very much so
opt apple 77
banana 60
cherry 6
-----BEGIN CHERRY SYNOPSIS-----
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END CHERRY SYNOPSIS-----
plum hello there
";
        let mut r: NetDocReader<'_, Fruit> = NetDocReader::new(s).unwrap();

        assert_eq!(r.str(), s);
        assert!(r.should_be_exhausted().is_err()); // it's not exhausted.

        let toks: Result<Vec<_>> = r.by_ref().collect();
        assert!(r.should_be_exhausted().is_ok());

        let toks = toks.unwrap();
        assert_eq!(toks.len(), 5);
        assert_eq!(toks[0].kwd(), ANN_TASTY);
        assert_eq!(toks[0].n_args(), 3);
        assert_eq!(toks[0].args_as_str(), "very much so");
        assert_eq!(toks[0].arg(1), Some("much"));
        {
            let a: Vec<_> = toks[0].args().collect();
            assert_eq!(a, vec!["very", "much", "so"]);
        }
        assert!(toks[0].parse_arg::<usize>(0).is_err());
        assert!(toks[0].parse_arg::<usize>(10).is_err());
        assert!(!toks[0].has_obj());
        assert_eq!(toks[0].obj_tag(), None);

        assert_eq!(toks[2].pos().within(s), Pos::from_line(3, 1));
        assert_eq!(toks[2].arg_pos(0).within(s), Pos::from_line(3, 8));
        assert_eq!(toks[2].last_arg_end_pos().within(s), Pos::from_line(3, 10));
        assert_eq!(toks[2].end_pos().within(s), Pos::from_line(3, 10));

        assert_eq!(toks[3].kwd(), STONEFRUIT);
        assert_eq!(toks[3].kwd_str(), "cherry"); // not cherry/plum!
        assert_eq!(toks[3].n_args(), 1);
        assert_eq!(toks[3].required_arg(0), Ok("6"));
        assert_eq!(toks[3].parse_arg::<usize>(0), Ok(6));
        assert_eq!(toks[3].parse_optional_arg::<usize>(0), Ok(Some(6)));
        assert_eq!(toks[3].parse_optional_arg::<usize>(3), Ok(None));
        assert!(toks[3].has_obj());
        assert_eq!(toks[3].obj_tag(), Some("CHERRY SYNOPSIS"));
        assert_eq!(
            &toks[3].obj("CHERRY SYNOPSIS").unwrap()[..],
            "🍒🍒🍒🍒🍒🍒".as_bytes()
        );
        assert!(toks[3].obj("PLUOT SYNOPSIS").is_err());
        // this "end-pos" value is questionable!
        assert_eq!(toks[3].end_pos().within(s), Pos::from_line(7, 30));
    }
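
    /// Example added to illustrate `pause_at`: read items until a predicate
    /// matches, leaving the matching item unconsumed.  (The document text is
    /// borrowed from `read_simple` above.)
    #[test]
    fn pause_at_example() {
        use Fruit::*;

        let s = "\
@tasty very much so
opt apple 77
banana 60
cherry 6
-----BEGIN CHERRY SYNOPSIS-----
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END CHERRY SYNOPSIS-----
plum hello there
";
        let mut r: NetDocReader<'_, Fruit> = NetDocReader::new(s).unwrap();

        // Take items until we reach the first STONEFRUIT ("cherry").
        let before: Result<Vec<_>> = r.pause_at(|i| i.is_ok_with_kwd(STONEFRUIT)).collect();
        let before = before.unwrap();
        assert_eq!(before.len(), 3);

        // The "cherry" item has not been consumed; it is the next item.
        let next = r.next().unwrap().unwrap();
        assert_eq!(next.kwd(), STONEFRUIT);
        assert_eq!(next.kwd_str(), "cherry");
    }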

    #[test]
    fn test_badtoks() {
        use Fruit::*;

        let s = "\
-foobar 9090
apple 3.14159
$hello
unrecognized 127.0.0.1 foo
plum
-----BEGIN WHATEVER-----
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END SOMETHING ELSE-----
orange
orange
-----BEGIN WHATEVER-----
not! base64!
-----END WHATEVER-----
guava paste
opt @annotation
orange
-----BEGIN LOBSTER
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END SOMETHING ELSE-----
orange
-----BEGIN !!!!!!-----
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END !!!!!!-----
cherry
-----BEGIN CHERRY SYNOPSIS-----
8J+NkvCfjZLwn42S8J+NkvCfjZLwn42S
-----END CHERRY SYNOPSIS

truncated line";

        let r: NetDocReader<'_, Fruit> = NetDocReader::new(s).unwrap();
        let toks: Vec<_> = r.collect();

        assert!(toks[0].is_err());
        assert_eq!(
            toks[0].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(1, 1))
        );

        assert!(toks[1].is_ok());
        assert!(toks[1].is_ok_with_non_annotation());
        assert!(!toks[1].is_ok_with_annotation());
        assert!(toks[1].is_ok_with_kwd_in(&[APPLE, ORANGE]));
        assert!(toks[1].is_ok_with_kwd_not_in(&[ORANGE, UNRECOGNIZED]));
        let t = toks[1].as_ref().unwrap();
        assert_eq!(t.kwd(), APPLE);
        assert_eq!(t.arg(0), Some("3.14159"));

        assert!(toks[2].is_err());
        assert!(!toks[2].is_ok_with_non_annotation());
        assert!(!toks[2].is_ok_with_annotation());
        assert!(!toks[2].is_ok_with_kwd_in(&[APPLE, ORANGE]));
        assert!(!toks[2].is_ok_with_kwd_not_in(&[ORANGE, UNRECOGNIZED]));
        assert_eq!(
            toks[2].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(3, 1))
        );

        assert!(toks[3].is_ok());
        let t = toks[3].as_ref().unwrap();
        assert_eq!(t.kwd(), UNRECOGNIZED);
        assert_eq!(t.arg(1), Some("foo"));

        assert!(toks[4].is_err());
        assert_eq!(
            toks[4].as_ref().err().unwrap(),
            &EK::BadObjectMismatchedTag.at_pos(Pos::from_line(8, 1))
        );

        assert!(toks[5].is_ok());
        let t = toks[5].as_ref().unwrap();
        assert_eq!(t.kwd(), ORANGE);
        assert_eq!(t.args_as_str(), "");

        // This blob counts as two errors: a bad base64 blob, and
        // then a stray end line (which is reported as a bad keyword).
        assert!(toks[6].is_err());
        assert_eq!(
            toks[6].as_ref().err().unwrap(),
            &EK::BadObjectBase64.at_pos(Pos::from_line(12, 1))
        );

        assert!(toks[7].is_err());
        assert_eq!(
            toks[7].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(13, 1))
        );

        assert!(toks[8].is_ok());
        let t = toks[8].as_ref().unwrap();
        assert_eq!(t.kwd(), GUAVA);

        // this is an error because you can't use opt with annotations.
        assert!(toks[9].is_err());
        assert_eq!(
            toks[9].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(15, 1))
        );

        // this looks like a few errors.
        assert!(toks[10].is_err());
        assert_eq!(
            toks[10].as_ref().err().unwrap(),
            &EK::BadObjectBeginTag.at_pos(Pos::from_line(17, 1))
        );
        assert!(toks[11].is_err());
        assert_eq!(
            toks[11].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(18, 1))
        );
        assert!(toks[12].is_err());
        assert_eq!(
            toks[12].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(19, 1))
        );

        // so does this.
        assert!(toks[13].is_err());
        assert_eq!(
            toks[13].as_ref().err().unwrap(),
            &EK::BadObjectBeginTag.at_pos(Pos::from_line(21, 1))
        );
        assert!(toks[14].is_err());
        assert_eq!(
            toks[14].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(22, 1))
        );
        assert!(toks[15].is_err());
        assert_eq!(
            toks[15].as_ref().err().unwrap(),
            &EK::BadKeyword.at_pos(Pos::from_line(23, 1))
        );

        // not this.
        assert!(toks[16].is_err());
        assert_eq!(
            toks[16].as_ref().err().unwrap(),
            &EK::BadObjectEndTag.at_pos(Pos::from_line(27, 1))
        );

        assert!(toks[17].is_err());
        assert_eq!(
            toks[17].as_ref().err().unwrap(),
            &EK::EmptyLine.at_pos(Pos::from_line(28, 1))
        );

        assert!(toks[18].is_err());
        assert_eq!(
            toks[18].as_ref().err().unwrap(),
            &EK::TruncatedLine.at_pos(Pos::from_line(29, 15))
        );
    }
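
    /// Extra checks, added as examples of the small helper functions in
    /// this module.
    #[test]
    fn helpers() {
        // is_sp: only space and tab count as "space" per dir-spec.txt.
        assert!(is_sp(' '));
        assert!(is_sp('\t'));
        assert!(!is_sp('\n'));

        // keyword_ok: keywords may not be empty, may not start with '-',
        // and may contain only [A-Za-z0-9-]; a leading '@' (an annotation)
        // is accepted only when anno_ok is true.
        assert!(keyword_ok("router", false));
        assert!(keyword_ok("@source", true));
        assert!(!keyword_ok("@source", false));
        assert!(!keyword_ok("-foo", false));
        assert!(!keyword_ok("", false));

        // tag_keywords_ok: a BEGIN/END tag is a space-separated list of keywords.
        assert!(tag_keywords_ok("CHERRY SYNOPSIS"));
        assert!(!tag_keywords_ok("!!!!!!"));

        // b64check accepts plausible base64 characters and rejects others.
        assert!(b64check("abcXYZ019+/=").is_ok());
        assert!(b64check("not! base64!").is_err());

        // base64_decode_multiline ignores newlines inside the encoded data.
        let decoded = base64_decode_multiline("aGVsbG8g\nd29ybGQ=").unwrap();
        assert_eq!(&decoded[..], &b"hello world"[..]);
    }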

    #[test]
    fn test_validate_strings() {
        use validate_utf_8_rules as v;
        assert_eq!(v(""), Ok(""));
        assert_eq!(v("hello world"), Ok("hello world"));
        // We don't have to test a lot more valid cases, since this function is called before
        // parsing any string.

        for s in ["\u{feff}", "\u{feff}hello world", "\u{fffe}hello world"] {
            let e = v(s).unwrap_err();
            assert_eq!(e.netdoc_error_kind(), EK::BomMarkerFound);
            assert_eq!(e.pos().offset_within(s), Some(0));
        }

        for s in [
            "\0hello world",
            "\0",
            "\0\0\0",
            "hello\0world",
            "hello world\0",
        ] {
            let e = v(s).unwrap_err();
            assert_eq!(e.netdoc_error_kind(), EK::NulFound);
            let nul_pos = e.pos().offset_within(s).unwrap();
            assert_eq!(s.as_bytes()[nul_pos], 0);
        }
    }
}