Add cargo fmt and cargo clippy checks to CI #384

Merged · 1 commit · Apr 16, 2024
7 changes: 5 additions & 2 deletions .github/workflows/main.yml
@@ -7,7 +7,7 @@ on:
workflow_dispatch:
merge_group:
types: [checks_requested]

jobs:
linux-ci:
name: Linux
@@ -36,7 +36,10 @@ jobs:
profile: minimal
toolchain: ${{ matrix.toolchain }}
override: true
components: ${{ matrix.toolchain == 'nightly' && 'miri,rust-src' || '' }}
components: rustfmt, clippy, ${{ matrix.toolchain == 'nightly' && 'miri,rust-src' || '' }}

- name: Cargo format & lint
run: cargo fmt --check && cargo clippy -- -Dwarnings

- name: Cargo build
run: cargo build ${{ matrix.features }}
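Note on the new step: because clippy runs with `-Dwarnings`, any lint warning now fails the Linux job instead of merely being printed. As a rough, hypothetical illustration (not code from this repository), this is the kind of pattern the gate would reject — the same `map_clone` shape that is cleaned up in `src/tests.rs` further down in this diff:

```rust
// Hypothetical example: under `cargo clippy -- -Dwarnings`, this
// `clippy::map_clone` warning becomes a hard CI failure.
pub fn first_owned(values: &[String]) -> Option<String> {
    // clippy suggests `values.first().cloned()` instead of the clone closure
    values.first().map(|s| s.clone())
}
```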
1 change: 1 addition & 0 deletions color/lib.rs
@@ -25,6 +25,7 @@ use std::str::FromStr;
/// Matching is case-insensitive in the ASCII range.
/// CSS escaping (if relevant) should be resolved before calling this function.
/// (For example, the value of an `Ident` token is fine.)
#[allow(clippy::result_unit_err)]
#[inline]
pub fn parse_color_keyword<Output>(ident: &str) -> Result<Output, ()>
where
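The new `#[allow(clippy::result_unit_err)]` silences a lint that fires on public functions returning `Result<_, ()>`, where clippy would rather see a dedicated error type; keeping `()` preserves the existing public API. A minimal sketch (function name made up) of the pattern the lint targets and how the allow is applied:

```rust
// Sketch only: a public fallible function with `()` as its error type.
// Without the attribute, clippy warns that `()` conveys no error detail
// and recommends a custom error type instead.
#[allow(clippy::result_unit_err)]
pub fn parse_on_off(input: &str) -> Result<bool, ()> {
    match input {
        "on" => Ok(true),
        "off" => Ok(false),
        _ => Err(()),
    }
}
```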
4 changes: 3 additions & 1 deletion src/color.rs
@@ -76,7 +76,7 @@ pub fn serialize_color_alpha(

/// A Predefined color space specified in:
/// <https://drafts.csswg.org/css-color-4/#predefined>
#[derive(Clone, Copy, PartialEq, Debug)]
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(tag = "type"))]
pub enum PredefinedColorSpace {
@@ -143,6 +143,7 @@ impl ToCss for PredefinedColorSpace {
}

/// Parse a color hash, without the leading '#' character.
#[allow(clippy::result_unit_err)]
#[inline]
pub fn parse_hash_color(value: &[u8]) -> Result<(u8, u8, u8, f32), ()> {
Ok(match value.len() {
@@ -330,6 +331,7 @@ ascii_case_insensitive_phf_map! {

/// Returns the named color with the given name.
/// <https://drafts.csswg.org/css-color-4/#typedef-named-color>
#[allow(clippy::result_unit_err)]
#[inline]
pub fn parse_named_color(ident: &str) -> Result<(u8, u8, u8), ()> {
named_colors::get(ident).copied().ok_or(())
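On the `Eq` derive: `Eq` adds no methods on top of `PartialEq`; it is a marker asserting that equality is a total equivalence relation (no `NaN`-like values), which is what hash-based collections and exact-match comparisons expect. A self-contained illustration with a stand-in enum, not the crate's own type:

```rust
use std::collections::HashSet;

// Stand-in enum mirroring the derive pattern used above; `Hash` is added
// here only so the set example compiles.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ColorSpaceKind {
    Srgb,
    DisplayP3,
}

fn main() {
    // `HashSet` keys require `Eq + Hash`, so `PartialEq` alone would not do.
    let supported: HashSet<ColorSpaceKind> =
        [ColorSpaceKind::Srgb, ColorSpaceKind::DisplayP3].into_iter().collect();
    assert!(supported.contains(&ColorSpaceKind::Srgb));
}
```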
3 changes: 2 additions & 1 deletion src/parser.rs
@@ -53,7 +53,7 @@ impl ParserState {
///
/// Would need to scan the whole {} block to find a semicolon, only for parsing getting restarted
/// as a qualified rule later.
#[derive(Clone, Copy, Debug, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ParseUntilErrorBehavior {
/// Consume until we see the relevant delimiter or the end of the stream.
Consume,
@@ -606,6 +606,7 @@ impl<'i: 't, 't> Parser<'i, 't> {
/// See the `Parser::parse_nested_block` method to parse the content of functions or blocks.
///
/// This only returns a closing token when it is unmatched (and therefore an error).
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
self.skip_whitespace();
self.next_including_whitespace_and_comments()
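`clippy::should_implement_trait` warns when an inherent method shares its name with a standard trait method — here `Iterator::next` — without the trait being implemented. `Parser::next` skips whitespace and returns a `Result` borrowed from the parser, so it is not a drop-in `Iterator`, and the allow is the pragmatic choice. A stripped-down sketch (types and fields invented) of what triggers the lint:

```rust
pub struct TokenStream {
    tokens: Vec<String>,
    pos: usize,
}

impl TokenStream {
    // An inherent method named `next` that is not `Iterator::next`;
    // clippy's `should_implement_trait` flags exactly this shape.
    #[allow(clippy::should_implement_trait)]
    pub fn next(&mut self) -> Option<&str> {
        let token = self.tokens.get(self.pos)?;
        self.pos += 1;
        Some(token.as_str())
    }
}
```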
1 change: 1 addition & 0 deletions src/rules_and_declarations.rs
@@ -106,6 +106,7 @@ pub trait AtRuleParser<'i> {
/// This is only called when `parse_prelude` returned `WithoutBlock`, and
/// either the `;` semicolon indeed follows the prelude, or parser is at
/// the end of the input.
#[allow(clippy::result_unit_err)]
fn rule_without_block(
&mut self,
prelude: Self::Prelude,
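Same lint as in `color/lib.rs`: the default `rule_without_block` reports failure as `Err(())`. What clippy nudges toward instead is a dedicated error type; the sketch below shows that style purely for illustration — the PR keeps `()`, presumably to avoid a breaking change to the public trait:

```rust
use std::error::Error;
use std::fmt;

// Hypothetical zero-sized error type of the kind `result_unit_err` prefers:
// it carries no more data than `()`, but names the failure and implements
// the standard error traits.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RuleWithoutBlockError;

impl fmt::Display for RuleWithoutBlockError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("at-rule is not allowed without a block")
    }
}

impl Error for RuleWithoutBlockError {}
```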
4 changes: 2 additions & 2 deletions src/serializer.rs
@@ -338,7 +338,7 @@

macro_rules! impl_tocss_for_int {
($T: ty) => {
impl<'a> ToCss for $T {
impl ToCss for $T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
W: fmt::Write,
@@ -361,7 +361,7 @@ impl_tocss_for_int!(u64);

macro_rules! impl_tocss_for_float {
($T: ty) => {
impl<'a> ToCss for $T {
impl ToCss for $T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
W: fmt::Write,
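The only change in these two macros is dropping the `<'a>` lifetime parameter, which nothing in the impl ever used — the sort of thing clippy reports as `extra_unused_lifetimes`. The generated impls are otherwise identical. A reduced illustration with a stand-in trait (not the crate's `ToCss`):

```rust
trait ToCssLike {
    fn to_css_string(&self) -> String;
}

// Before: `impl<'a> ToCssLike for u64 { ... }` declared a lifetime that no
// receiver, bound, or method used. After: the equivalent impl without it.
impl ToCssLike for u64 {
    fn to_css_string(&self) -> String {
        self.to_string()
    }
}
```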
10 changes: 4 additions & 6 deletions src/tests.rs
@@ -430,11 +430,9 @@ fn serializer(preserve_comments: bool) {
preserve_comments: bool,
) {
while let Ok(token) = if preserve_comments {
input
.next_including_whitespace_and_comments()
.map(|t| t.clone())
input.next_including_whitespace_and_comments().cloned()
} else {
input.next_including_whitespace().map(|t| t.clone())
input.next_including_whitespace().cloned()
} {
let token_type = token.serialization_type();
if !preserve_comments && previous_token.needs_separator_when_before(token_type)
@@ -856,7 +854,7 @@ impl<'i> DeclarationParser<'i> for JsonParser {
let mut important = false;
loop {
let start = input.state();
if let Ok(mut token) = input.next_including_whitespace().map(|t| t.clone()) {
if let Ok(mut token) = input.next_including_whitespace().cloned() {
// Hack to deal with css-parsing-tests assuming that
// `!important` in the middle of a declaration value is OK.
// This can never happen per spec
@@ -959,7 +957,7 @@ impl<'i> RuleBodyItemParser<'i, Value, ()> for JsonParser {

fn component_values_to_json(input: &mut Parser) -> Vec<Value> {
let mut values = vec![];
while let Ok(token) = input.next_including_whitespace().map(|t| t.clone()) {
while let Ok(token) = input.next_including_whitespace().cloned() {
values.push(one_component_value_to_json(token, input));
}
values
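All three test changes are the same rewrite: `.map(|t| t.clone())` on a `Result<&Token, _>` becomes `.cloned()`, which is what clippy's `map_clone` lint suggests and what `Result::cloned`/`Option::cloned` exist for. A standalone before/after sketch with a made-up token type and helper:

```rust
#[derive(Clone, Debug, PartialEq)]
struct Token(String);

// Made-up helper standing in for `Parser::next_including_whitespace`.
fn peek(tok: &Token) -> Result<&Token, ()> {
    Ok(tok)
}

fn main() {
    let tok = Token("ident".to_owned());
    // Before: manual clone in a closure (flagged by `clippy::map_clone`).
    let a: Result<Token, ()> = peek(&tok).map(|t| t.clone());
    // After: `Result::cloned` expresses the same conversion directly.
    let b: Result<Token, ()> = peek(&tok).cloned();
    assert_eq!(a, b);
}
```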
8 changes: 4 additions & 4 deletions src/tokenizer.rs
@@ -566,11 +566,11 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
b'#' => {
tokenizer.advance(1);
if is_ident_start(tokenizer) { IDHash(consume_name(tokenizer)) }
else if !tokenizer.is_eof() && match tokenizer.next_byte_unchecked() {
else if !tokenizer.is_eof() &&
matches!(tokenizer.next_byte_unchecked(), b'0'..=b'9' | b'-') {
// Any other valid case here already resulted in IDHash.
b'0'..=b'9' | b'-' => true,
_ => false,
} { Hash(consume_name(tokenizer)) }
Hash(consume_name(tokenizer))
}
else { Delim('#') }
},
b'$' => {
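The tokenizer change collapses a boolean `match` into the `matches!` macro — the rewrite suggested by clippy's `match_like_matches_macro` lint — with identical behavior. A reduced sketch of the same transformation (function names invented for illustration):

```rust
// Before: a two-arm boolean `match`, which clippy's
// `match_like_matches_macro` lint flags.
fn is_hash_number_start_old(byte: u8) -> bool {
    match byte {
        b'0'..=b'9' | b'-' => true,
        _ => false,
    }
}

// After: the equivalent `matches!` form used in the diff above.
fn is_hash_number_start_new(byte: u8) -> bool {
    matches!(byte, b'0'..=b'9' | b'-')
}

fn main() {
    for b in [b'7', b'-', b'a'] {
        assert_eq!(is_hash_number_start_old(b), is_hash_number_start_new(b));
    }
}
```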