feat: add support for multi-line inline formatting
nfejzic committed Nov 21, 2022
1 parent 3282eb4 commit bc9badb
Showing 4 changed files with 37 additions and 64 deletions.
12 changes: 6 additions & 6 deletions inline/src/lexer/mod.rs
@@ -265,7 +265,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn resolved(self) -> TokenResolver<'a> {
+    fn resolved(self) -> TokenResolver {
         TokenResolver::new(self.iter())
     }
 }
@@ -680,21 +680,21 @@ impl<'a> Iterator for TokenIterator<'a> {
 
 /// TODO: write docs
 #[derive(Debug, Clone)]
-pub struct Tokens<'a> {
-    iter: resolver::IntoIter<'a>,
+pub struct Tokens {
+    iter: resolver::IntoIter,
     cache: Option<RawToken>,
 }
 
-impl<'a> Tokens<'a> {
-    pub(crate) fn new(resolver: TokenResolver<'a>) -> Self {
+impl Tokens {
+    pub(crate) fn new(resolver: TokenResolver) -> Self {
         Self {
             iter: resolver.into_iter(),
             cache: None,
         }
     }
 }
 
-impl Iterator for Tokens<'_> {
+impl Iterator for Tokens {
     type Item = Token;
 
     fn next(&mut self) -> Option<Self::Item> {
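Note on the change above: dropping the `'a` lifetime means `Tokens` now owns its resolved token buffer instead of borrowing from the `Lexer`. A minimal crate-internal sketch of what this enables (the helper `owned_tokens` is hypothetical; `resolved` and `Tokens::new` are the crate-private functions shown in the diff):

    // Hypothetical crate-internal helper: with no lifetime on `Tokens`,
    // the token stream can be returned even though the `Lexer` that
    // produced it goes out of scope here.
    fn owned_tokens(lexer: Lexer<'_>) -> Tokens {
        Tokens::new(lexer.resolved())
    }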
63 changes: 14 additions & 49 deletions inline/src/lexer/resolver/mod.rs
@@ -65,44 +65,25 @@ impl TokenMap {
 /// stored into token's tail.
 /// - Every time a token pair is matched, all non-resolved tokens between them are marked as plain
 #[derive(Debug, Clone)]
-pub(crate) struct TokenResolver<'a> {
-    iter: TokenIterator<'a>,
+pub(crate) struct TokenResolver {
     curr_scope: usize,
     interrupted: Vec<Range<usize>>,
     pub(crate) tokens: Vec<RawToken>,
 }
 
-impl<'a> TokenResolver<'a> {
-    pub(crate) fn new(iter: TokenIterator<'a>) -> Self {
-        Self {
-            iter,
+impl TokenResolver {
+    pub(crate) fn new(iter: TokenIterator<'_>) -> Self {
+        let mut new = Self {
             curr_scope: 0,
             interrupted: Vec::default(),
-            tokens: Vec::default(),
-        }
-    }
-
-    fn consume_line(&mut self) {
-        for token in self.iter.by_ref() {
-            let should_break = matches!(token.kind, TokenKind::EndOfLine | TokenKind::Newline);
-
-            self.tokens.push(RawToken {
-                token,
-                state: Resolved::Neither,
-                tail: None,
-            });
+            tokens: iter.map(RawToken::new).collect(),
+        };
 
-            if should_break {
-                break;
-            }
-        }
+        new.resolve();
+        new
     }
 
-    pub(crate) fn resolve(&mut self) {
-        if self.tokens.is_empty() {
-            self.consume_line();
-        }
-
+    fn resolve(&mut self) {
         // map found tokens to their index in tokens vector
         let mut token_map: TokenMap = TokenMap::new();

@@ -272,38 +253,22 @@ impl<'a> TokenResolver<'a> {
         None
     }
 
-    pub(crate) fn into_iter(self) -> IntoIter<'a> {
+    pub(crate) fn into_iter(self) -> IntoIter {
         IntoIter {
-            resolver: self,
-            iter: Vec::new().into_iter(),
+            iter: self.tokens.into_iter(),
         }
     }
 }
 
 #[derive(Debug, Clone)]
-pub(crate) struct IntoIter<'a> {
-    resolver: TokenResolver<'a>,
+pub(crate) struct IntoIter {
     iter: vec::IntoIter<RawToken>,
 }
 
-impl IntoIter<'_> {
-    fn next_token(&mut self) -> Option<RawToken> {
-        if let Some(token) = self.iter.next() {
-            return Some(token);
-        }
-
-        self.resolver.resolve();
-        self.iter = std::mem::take(&mut self.resolver.tokens).into_iter();
-        self.iter.next()
-    }
-}
-
-impl Iterator for IntoIter<'_> {
+impl Iterator for IntoIter {
     type Item = RawToken;
 
     fn next(&mut self) -> Option<Self::Item> {
-        let next_token = self.next_token()?;
-
-        Some(next_token)
+        self.iter.next()
     }
 }
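This file carries the structural heart of the commit: the resolver previously pulled tokens one line at a time (`consume_line`, stopping at `EndOfLine`/`Newline`) and `IntoIter` lazily re-resolved whenever its buffer ran dry, so delimiter pairs could never match across a line break. Now `TokenResolver::new` drains the whole `TokenIterator` up front and resolves once. A minimal sketch of the eager resolve-then-iterate pattern, using placeholder types rather than the crate's real signatures:

    // Placeholder types: `Item` stands in for `RawToken`, and `resolve`
    // for the real delimiter-matching logic over the whole buffer.
    struct Item;

    struct EagerResolver {
        items: Vec<Item>,
    }

    impl EagerResolver {
        fn new(iter: impl Iterator<Item = Item>) -> Self {
            // Collect everything first, so matching can see past line breaks.
            let mut new = Self { items: iter.collect() };
            new.resolve();
            new
        }

        fn resolve(&mut self) { /* match opening/closing delimiters */ }

        fn into_iter(self) -> std::vec::IntoIter<Item> {
            // Iteration is now a plain owned vector iterator; no re-resolving.
            self.items.into_iter()
        }
    }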
8 changes: 8 additions & 0 deletions inline/src/lexer/resolver/raw_token.rs
@@ -26,6 +26,14 @@ pub(crate) struct RawToken {
 }
 
 impl RawToken {
+    pub(crate) fn new(token: Token) -> Self {
+        Self {
+            token,
+            state: Resolved::Neither,
+            tail: None,
+        }
+    }
+
     fn order(&mut self) {
         if let Some(ref sec_part) = self.tail {
             match (self.state, sec_part.state) {
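The new constructor replaces the struct literal that `consume_line` used to build inline, and it lets the resolver construct its buffer point-free, as in `resolver/mod.rs` above (crate-internal sketch):

    // The constructor is passed to `map` as a plain function value,
    // equivalent to `iter.map(|token| RawToken::new(token))`.
    let tokens: Vec<RawToken> = iter.map(RawToken::new).collect();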
18 changes: 9 additions & 9 deletions inline/src/parser/mod.rs
@@ -26,11 +26,11 @@ impl Deref for ParserStack {
 /// [`Iterator`]: Iterator
 /// [`Inline`]: crate::Inline
 #[derive(Debug, Clone)]
-pub struct Parser<'i> {
+pub struct Parser {
     /// Iterator over [`Token`]s found in Unimarkup input.
     ///
     /// [`Token`]: crate::Token
-    iter: Tokens<'i>,
+    iter: Tokens,
 
     /// Storage of [`Token`] already yielded from [`TokenIterator`] but not consumed in current
     /// iteration of parsing.
@@ -44,7 +44,7 @@ pub struct Parser<'i> {
     inline_cache: VecDeque<Inline>,
 }
 
-impl Parser<'_> {
+impl Parser {
     /// Returns the next [`Token`] either from [`Lexer`] directly or from internal token cache.
     ///
     /// [`Token`]: crate::Token
@@ -87,7 +87,7 @@ impl Parser<'_> {
             let nested = self.parse_inline(next_token).unwrap();
             content.append_inline(nested);
         } else {
-            if next_token.kind != TokenKind::Plain {
+            if kind == TokenKind::Plain && next_token.kind != TokenKind::Plain {
                 self.token_cache = Some(next_token);
                 break;
             }
@@ -103,7 +103,7 @@ impl Parser<'_> {
     }
 }
 
-impl Iterator for Parser<'_> {
+impl Iterator for Parser {
     type Item = Inline;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -134,16 +134,16 @@ impl Iterator for Parser<'_> {
 ///
 /// [`Parser`]: self::Parser
 /// [`Tokenize`]: crate::Tokenize
-pub trait ParseUnimarkupInlines<'p> {
+pub trait ParseUnimarkupInlines {
     /// Returns a parser over this type.
-    fn parse_unimarkup_inlines(&'p self) -> Parser<'p>;
+    fn parse_unimarkup_inlines(&self) -> Parser;
 }
 
-impl<T> ParseUnimarkupInlines<'_> for T
+impl<T> ParseUnimarkupInlines for T
 where
     T: Tokenize,
 {
-    fn parse_unimarkup_inlines(&self) -> Parser<'_> {
+    fn parse_unimarkup_inlines(&self) -> Parser {
         Parser {
             iter: self.tokens(),
             token_cache: None,
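With the lifetime parameters gone from the public API, the commit's headline feature can be exercised end to end. A hedged usage sketch: the import path `unimarkup_inline` is assumed, as is that `&str` implements `Tokenize` (so `parse_unimarkup_inlines` is available on string slices) and that `Inline` derives `Debug`:

    use unimarkup_inline::ParseUnimarkupInlines;

    fn main() {
        // Bold markup that opens on one line and closes on the next —
        // the multi-line inline formatting this commit adds.
        let input = "some **bold\ntext** and more";

        for inline in input.parse_unimarkup_inlines() {
            println!("{:?}", inline);
        }
    }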
