Skip to content

Commit

Permalink
fix: clean up after merging changes from main branch
Browse files Browse the repository at this point in the history
  • Loading branch information
nfejzic committed Jan 6, 2023
1 parent a3055c9 commit ee61856
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 65 deletions.
23 changes: 3 additions & 20 deletions core/src/elements/heading_block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use crate::log_id::{LogId, SetLog};
use crate::middleend::{AsIrLines, ContentIrLine};

use super::error::ElementError;
use super::log_id::{AtomicErrLogId, GeneralErrLogId, InlineWarnLogId};
use super::log_id::{AtomicErrLogId, GeneralErrLogId};

/// Enum of possible heading levels for unimarkup headings
#[derive(Eq, PartialEq, Debug, strum_macros::Display, EnumString, Clone, Copy)]
Expand Down Expand Up @@ -279,27 +279,10 @@ impl ParseFromIr for HeadingBlock {
ir_line.fallback_attributes
};

let try_inline = parse_with_offset(
&content,
Position {
line: ir_line.line_nr,
column: get_column_offset_from_level(level),
},
);
let parsed_inline;
match try_inline {
Ok(inline) => parsed_inline = inline,
Err(_) => {
parsed_inline = flat_inline(&content);
(InlineWarnLogId::InlineParsingFailed as LogId)
.set_log(&format!("Inline parsing failed for heading-id {} => content taken as plain as fallback", ir_line.id), file!(), line!());
}
}

let block = HeadingBlock {
id: ir_line.id,
level,
content: parsed_inline,
content,
attributes,
line_nr: ir_line.line_nr,
};
Expand Down Expand Up @@ -425,7 +408,7 @@ mod tests {
let html = heading.render_html().unwrap();

let expected = format!(
"<h{} id='{}'><pre><code>This</code></pre> <em>is <sub>a</sub></em> <strong>heading</strong></h{}>",
"<h{} id='{}'><code>This</code> <em>is <sub>a</sub></em> <strong>heading</strong></h{}>",
level, id, level
);
assert_eq!(html, expected);
Expand Down
38 changes: 8 additions & 30 deletions core/src/elements/paragraph_block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,7 @@ use pest::iterators::Pairs;
use pest::Span;
use unimarkup_inline::{Inline, ParseUnimarkupInlines};

use unimarkup_inline::{flat_inline, parse_with_offset, FlattenInlineKind, Inline, Position};

use super::{
error::ElementError,
log_id::{GeneralErrLogId, InlineWarnLogId},
};
use super::{error::ElementError, log_id::GeneralErrLogId};

/// Structure of a Unimarkup paragraph element.
#[derive(Debug, Default, Clone)]
Expand Down Expand Up @@ -95,7 +90,7 @@ impl UmParse for ParagraphBlock {

let paragraph_block = ParagraphBlock {
id,
content: flat_inline(&content),
content,
attributes: serde_json::to_string(&attributes.unwrap_or_default()).unwrap(),
line_nr,
};
Expand Down Expand Up @@ -140,26 +135,9 @@ impl ParseFromIr for ParagraphBlock {
ir_line.fallback_attributes
};

let try_inline = parse_with_offset(
&content,
Position {
line: ir_line.line_nr,
..Default::default()
},
);
let parsed_inline;
match try_inline {
Ok(inline) => parsed_inline = inline,
Err(_) => {
parsed_inline = flat_inline(&content);
(InlineWarnLogId::InlineParsingFailed as LogId)
.set_log(&format!("Inline parsing failed for paragraph-id {} => content taken as plain as fallback", ir_line.id), file!(), line!());
}
}

let block = ParagraphBlock {
id: ir_line.id,
content: parsed_inline,
content,
attributes,
line_nr: ir_line.line_nr,
};
Expand Down Expand Up @@ -225,7 +203,7 @@ impl AsIrLines<ContentIrLine> for ParagraphBlock {
#[allow(non_snake_case)]
#[cfg(test)]
mod tests {
use unimarkup_inline::parse;
use std::collections::VecDeque;

use unimarkup_inline::{Inline, ParseUnimarkupInlines};

Expand All @@ -246,7 +224,7 @@ mod tests {

let block = ParagraphBlock {
id: id.clone(),
content: flat_inline(&content),
content: content.clone(),
attributes: "{}".into(),
line_nr: 0,
};
Expand Down Expand Up @@ -277,7 +255,7 @@ mod tests {

assert_eq!(paragraph.id, test_id);
assert_eq!(paragraph.line_nr, 0);
assert_eq!(paragraph.content, parse(&content).unwrap());
assert_eq!(paragraph.content, content);
assert_eq!(paragraph.attributes, String::from("{}"));
}

Expand All @@ -290,13 +268,13 @@ mod tests {

let block = ParagraphBlock {
id: id.clone(),
content: parse(&content).unwrap(),
content,
attributes: "{}".into(),
line_nr: 0,
};

let expected_html = format!(
"<p id='{}'>This is <pre><code>the</code></pre> <em>content</em> <strong>of <sub>the</sub> paragraph</strong></p>",
"<p id='{}'>This is <code>the</code> <em>content</em> <strong>of <sub>the</sub> paragraph</strong></p>",
id
);

Expand Down
2 changes: 1 addition & 1 deletion core/tests/backend/backend_run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use clap::StructOpt;
use unimarkup_core::{
backend::{self, Render},
config::Config,
elements::{get_column_offset_from_level, HeadingBlock, HeadingLevel},
elements::{HeadingBlock, HeadingLevel},
middleend::{self, AsIrLines, ContentIrLine},
};
use unimarkup_inline::ParseUnimarkupInlines;
Expand Down
30 changes: 16 additions & 14 deletions inline/src/lexer/tests/mod.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,21 @@
use super::*;

macro_rules! assert_token {
($token:ident with $kind:expr, $spacing:expr, $span:expr) => {
assert_eq!($token.kind(), $kind);
assert_eq!($token.spacing(), $spacing);
assert_eq!($token.span(), crate::Span::from($span));
true
};

($token:ident with $kind:expr, $spacing:expr, $span:expr, $content:expr) => {
assert_token!($token with $kind, $spacing, $span);
assert_eq!($token.as_str(), $content);
true
}
}
// TODO: rewrite lexer tests

// macro_rules! assert_token {
// ($token:ident with $kind:expr, $spacing:expr, $span:expr) => {
// assert_eq!($token.kind(), $kind);
// assert_eq!($token.spacing(), $spacing);
// assert_eq!($token.span(), crate::Span::from($span));
// true
// };
//
// ($token:ident with $kind:expr, $spacing:expr, $span:expr, $content:expr) => {
// assert_token!($token with $kind, $spacing, $span);
// assert_eq!($token.as_str(), $content);
// true
// }
// }

// mod brace;
// mod bracket;
Expand Down

0 comments on commit ee61856

Please sign in to comment.