Auto merge of #432 - svenfoo:fix-clippy-warnings, r=jdm
Fix clippy warnings

Another wave of changes suggested by Clippy. There are some warnings left, but this is as far as I will get today.
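
Most of the edits below are mechanical applications of a few recurring lint suggestions. As a quick orientation, here is a minimal sketch of the three most common before/after patterns in this commit; the names are invented for illustration and do not come from the repository:

```rust
fn lint_examples(items: Vec<u32>) {
    // clippy::len_zero: compare with `is_empty()` instead of `len() == 0`.
    if items.is_empty() { /* nothing to do */ }

    // clippy::redundant_closure: a constructor is already a function,
    // so `.map(|x| Some(x))` can be written as `.map(Some)`.
    let wrapped: Vec<Option<u32>> = items.iter().copied().map(Some).collect();

    // clippy::single_char_pattern: a `char` pattern avoids a &str comparison.
    assert!("&amp;".starts_with('&'));

    let _ = wrapped;
}

fn main() {
    lint_examples(vec![1, 2, 3]);
}
```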
bors-servo authored Mar 4, 2021
2 parents 210bba7 + d1206da commit b1b3e8a
Showing 15 changed files with 47 additions and 47 deletions.
3 changes: 1 addition & 2 deletions html5ever/macros/match_token.rs
@@ -108,7 +108,6 @@ use std::collections::HashSet;
 use std::fs::File;
 use std::io::{Read, Write};
 use std::path::Path;
-use syn;
 use syn::ext::IdentExt;
 use syn::fold::Fold;
 use syn::parse::{Parse, ParseStream, Result};
@@ -254,7 +253,7 @@ impl Parse for MatchToken {
 pub fn expand_match_token(body: &TokenStream) -> syn::Expr {
     let match_token = syn::parse2::<MatchToken>(body.clone());
     let ast = expand_match_token_macro(match_token.unwrap());
-    syn::parse2(ast.into()).unwrap()
+    syn::parse2(ast).unwrap()
 }
 
 fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
1 change: 1 addition & 0 deletions html5ever/src/driver.rs
@@ -130,6 +130,7 @@ impl<Sink: TreeSink> Parser<Sink> {
     ///
     /// Use this when your input is bytes that are known to be in the UTF-8 encoding.
     /// Decoding is lossy, like `String::from_utf8_lossy`.
+    #[allow(clippy::wrong_self_convention)]
     pub fn from_utf8(self) -> Utf8LossyDecoder<Self> {
         Utf8LossyDecoder::new(self)
     }
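
A note on the `#[allow]` above (my gloss, not part of the commit): `clippy::wrong_self_convention` fires because methods named `from_*` conventionally take no receiver, while this `from_utf8` deliberately consumes the parser to wrap it in a lossy UTF-8 decoder. Silencing the lint preserves the established public API name. A minimal sketch of the pattern, with invented types:

```rust
struct Parser;
struct Utf8LossyDecoder<P>(P);

impl Parser {
    // Clippy flags `from_*` methods that take `self`; here consuming the
    // receiver is intentional, so the lint is allowed rather than renaming
    // a public method.
    #[allow(clippy::wrong_self_convention)]
    fn from_utf8(self) -> Utf8LossyDecoder<Self> {
        Utf8LossyDecoder(self)
    }
}

fn main() {
    let _decoder = Parser.from_utf8();
}
```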
10 changes: 2 additions & 8 deletions html5ever/src/serialize/mod.rs
@@ -53,8 +53,7 @@ impl Default for SerializeOpts {
 #[derive(Default)]
 struct ElemInfo {
     html_name: Option<LocalName>,
-    ignore_children: bool,
-    processed_first_child: bool,
+    ignore_children: bool
 }
 
 pub struct HtmlSerializer<Wr: Write> {
@@ -87,13 +86,12 @@ impl<Wr: Write> HtmlSerializer<Wr> {
             stack: vec![ElemInfo {
                 html_name,
                 ignore_children: false,
-                processed_first_child: false,
             }],
         }
     }
 
     fn parent(&mut self) -> &mut ElemInfo {
-        if self.stack.len() == 0 {
+        if self.stack.is_empty() {
             if self.opts.create_missing_parent {
                 warn!("ElemInfo stack empty, creating new parent");
                 self.stack.push(Default::default());
@@ -133,7 +131,6 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {
             self.stack.push(ElemInfo {
                 html_name,
                 ignore_children: true,
-                processed_first_child: false,
             });
             return Ok(());
         }
@@ -189,12 +186,9 @@ impl<Wr: Write> Serializer for HtmlSerializer<Wr> {
             _ => false,
         };
 
-        self.parent().processed_first_child = true;
-
         self.stack.push(ElemInfo {
             html_name,
             ignore_children,
-            processed_first_child: false,
         });
 
         Ok(())
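
Two independent cleanups in this file, as I read the diff: the write-only `processed_first_child` field is deleted outright, and a `len() == 0` test becomes `is_empty()` per `clippy::len_zero`. A small sketch of the latter, with an invented stack type:

```rust
fn parent_or_default(stack: &mut Vec<String>) -> &mut String {
    // clippy::len_zero: `is_empty()` says what is meant and is never
    // slower than computing a length first.
    if stack.is_empty() {
        // Mirrors the `create_missing_parent` fallback: push a default.
        stack.push(String::default());
    }
    stack.last_mut().expect("stack is non-empty here")
}

fn main() {
    let mut stack = Vec::new();
    parent_or_default(&mut stack).push_str("html");
    assert_eq!(stack, vec!["html".to_string()]);
}
```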
4 changes: 2 additions & 2 deletions html5ever/src/tokenizer/char_ref/mod.rs
@@ -85,13 +85,13 @@ impl CharRefTokenizer {
         self.result.expect("get_result called before done")
     }
 
-    fn name_buf<'t>(&'t self) -> &'t StrTendril {
+    fn name_buf(&self) -> &StrTendril {
         self.name_buf_opt
             .as_ref()
             .expect("name_buf missing in named character reference")
     }
 
-    fn name_buf_mut<'t>(&'t mut self) -> &'t mut StrTendril {
+    fn name_buf_mut(&mut self) -> &mut StrTendril {
         self.name_buf_opt
             .as_mut()
             .expect("name_buf missing in named character reference")
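
`clippy::needless_lifetimes` drives the two signature changes above: when a method borrows only from `&self`, the elision rules already tie the output lifetime to the receiver, so spelling out `'t` adds nothing. A minimal sketch, with `String` standing in for `StrTendril`:

```rust
struct CharRefTokenizer {
    name_buf_opt: Option<String>,
}

impl CharRefTokenizer {
    // Before: fn name_buf<'t>(&'t self) -> &'t String
    // Elision infers exactly the same input/output lifetime relationship.
    fn name_buf(&self) -> &String {
        self.name_buf_opt
            .as_ref()
            .expect("name_buf missing in named character reference")
    }
}

fn main() {
    let t = CharRefTokenizer { name_buf_opt: Some("amp".into()) };
    assert_eq!(t.name_buf(), "amp");
}
```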
9 changes: 5 additions & 4 deletions html5ever/src/tokenizer/mod.rs
@@ -300,13 +300,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         // It shouldn't matter because the fallback `FromSet` case should
         // always do the same thing as the `NotFromSet` case.
         if self.opts.exact_errors || self.reconsume || self.ignore_lf {
-            return self.get_char(input).map(|x| FromSet(x));
+            return self.get_char(input).map(FromSet);
         }
 
         let d = input.pop_except_from(set);
         debug!("got characters {:?}", d);
         match d {
-            Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(|x| FromSet(x)),
+            Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet),
 
             // NB: We don't set self.current_char for a run of characters not
             // in the set. It shouldn't matter for the codepaths that use
@@ -495,7 +495,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     }
 
     fn finish_attribute(&mut self) {
-        if self.current_attr_name.len() == 0 {
+        if self.current_attr_name.is_empty() {
             return;
         }
 
@@ -530,7 +530,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         self.process_token_and_continue(DoctypeToken(doctype));
     }
 
-    fn doctype_id<'a>(&'a mut self, kind: DoctypeIdKind) -> &'a mut Option<StrTendril> {
+    fn doctype_id(&mut self, kind: DoctypeIdKind) -> &mut Option<StrTendril> {
         match kind {
             Public => &mut self.current_doctype.public_id,
             System => &mut self.current_doctype.system_id,
@@ -683,6 +683,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     // Run the state machine for a while.
     // Return true if we should be immediately re-invoked
     // (this just simplifies control flow vs. break / continue).
+    #[allow(clippy::never_loop)]
     fn step(&mut self, input: &mut BufferQueue) -> ProcessResult<Sink::Handle> {
         if self.char_ref_tokenizer.is_some() {
             return self.step_char_ref_tokenizer(input);
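
Two lints meet in this file. `.map(|x| FromSet(x))` becoming `.map(FromSet)` is `clippy::redundant_closure`: an enum variant constructor is itself a function and can be passed by name. The `#[allow(clippy::never_loop)]` on `step` accepts that its `loop` exists only to simplify control flow, as the comment above it already explains. A sketch of the closure lint with invented types:

```rust
#[derive(Debug, PartialEq)]
enum SetResult {
    FromSet(char),
}
use SetResult::FromSet;

fn main() {
    let got = Some('x');
    // Before: got.map(|x| FromSet(x)). The closure only forwards its
    // argument, so the variant constructor can be passed directly.
    assert_eq!(got.map(FromSet), Some(FromSet('x')));
}
```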
2 changes: 1 addition & 1 deletion html5ever/src/tree_builder/mod.rs
@@ -1429,7 +1429,7 @@ where
             return false;
         }
 
-        if self.open_elems.len() == 0 {
+        if self.open_elems.is_empty() {
             return false;
         }
 
2 changes: 1 addition & 1 deletion markup5ever/build.rs
@@ -87,7 +87,7 @@ fn named_entities_to_phf(to: &Path) {
     let mut entities: HashMap<&str, (u32, u32)> = entities::NAMED_ENTITIES
         .iter()
         .map(|(name, cp1, cp2)| {
-            assert!(name.starts_with("&"));
+            assert!(name.starts_with('&'));
             (&name[1..], (*cp1, *cp2))
         })
         .collect();
10 changes: 4 additions & 6 deletions markup5ever/util/buffer_queue.rs
@@ -95,8 +95,7 @@ impl BufferQueue {
         debug_assert!(
             self.buffers
                 .iter()
-                .skip_while(|el| el.len32() != 0)
-                .next()
+                .find(|el| el.len32() == 0)
                 .is_none(),
             "invariant \"all buffers in the queue are non-empty\" failed"
         );
@@ -202,15 +201,14 @@ impl BufferQueue {
     pub fn eat<F: Fn(&u8, &u8) -> bool>(&mut self, pat: &str, eq: F) -> Option<bool> {
         let mut buffers_exhausted = 0;
         let mut consumed_from_last = 0;
-        if self.buffers.front().is_none() {
-            return None;
-        }
+
+        self.buffers.front()?;
 
         for pattern_byte in pat.bytes() {
             if buffers_exhausted >= self.buffers.len() {
                 return None;
             }
-            let ref buf = self.buffers[buffers_exhausted];
+            let buf = &self.buffers[buffers_exhausted];
 
             if !eq(&buf.as_bytes()[consumed_from_last], &pattern_byte) {
                 return Some(false);
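
Three changes in this file, in my reading: `skip_while(p).next()` collapses into a single `find` with the negated predicate (`clippy::skip_while_next`), an explicit `is_none()` check followed by `return None` becomes the `?` operator (`clippy::question_mark`), and the `let ref buf = ...` binding becomes the plainer `let buf = &...`. A compact sketch over `&[String]` buffers:

```rust
fn first_empty(buffers: &[String]) -> Option<&String> {
    // Before: buffers.iter().skip_while(|b| !b.is_empty()).next()
    buffers.iter().find(|b| b.is_empty())
}

fn first_byte(buffers: &[String]) -> Option<u8> {
    // `?` replaces `if buffers.first().is_none() { return None; }`.
    let buf = buffers.first()?;
    buf.as_bytes().first().copied()
}

fn main() {
    let buffers = vec!["ab".to_string(), String::new()];
    assert_eq!(first_empty(&buffers).map(String::len), Some(0));
    assert_eq!(first_byte(&buffers), Some(b'a'));
}
```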
18 changes: 9 additions & 9 deletions rcdom/lib.rs
@@ -118,7 +118,7 @@ impl Node {
     /// Create a new node from its contents
     pub fn new(data: NodeData) -> Rc<Self> {
         Rc::new(Node {
-            data: data,
+            data,
             parent: Cell::new(None),
             children: RefCell::new(Vec::new()),
         })
@@ -280,7 +280,7 @@ impl TreeSink for RcDom {
 
     fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Handle {
         Node::new(NodeData::ProcessingInstruction {
-            target: target,
+            target,
             contents: data,
         })
     }
@@ -467,8 +467,8 @@ impl Serialize for SerializableHandle {
 
         while let Some(op) = ops.pop_front() {
             match op {
-                SerializeOp::Open(handle) => match &handle.data {
-                    &NodeData::Element {
+                SerializeOp::Open(handle) => match handle.data {
+                    NodeData::Element {
                         ref name,
                         ref attrs,
                         ..
@@ -486,20 +486,20 @@
                         }
                     },
 
-                    &NodeData::Doctype { ref name, .. } => serializer.write_doctype(&name)?,
+                    NodeData::Doctype { ref name, .. } => serializer.write_doctype(&name)?,
 
-                    &NodeData::Text { ref contents } => {
+                    NodeData::Text { ref contents } => {
                         serializer.write_text(&contents.borrow())?
                     },
 
-                    &NodeData::Comment { ref contents } => serializer.write_comment(&contents)?,
+                    NodeData::Comment { ref contents } => serializer.write_comment(&contents)?,
 
-                    &NodeData::ProcessingInstruction {
+                    NodeData::ProcessingInstruction {
                         ref target,
                         ref contents,
                     } => serializer.write_processing_instruction(target, contents)?,
 
-                    &NodeData::Document => panic!("Can't serialize Document node itself"),
+                    NodeData::Document => panic!("Can't serialize Document node itself"),
                 },
 
                 SerializeOp::Close(name) => {
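
The rcdom changes are two lints: `data: data` shortens to `data` (`clippy::redundant_field_names`), and matching `&handle.data` against `&NodeData::...` patterns becomes matching the place directly with `ref` bindings, dropping a redundant layer of `&` on every arm. A minimal sketch of the second, with a stand-in enum:

```rust
enum NodeData {
    Text { contents: String },
    Comment { contents: String },
}

fn describe(data: &NodeData) -> &str {
    // Before: `match data { &NodeData::Text { ref contents } => ..., ... }`
    // Matching the dereferenced place makes the `&` in each pattern redundant.
    match *data {
        NodeData::Text { ref contents } => contents,
        NodeData::Comment { ref contents } => contents,
    }
}

fn main() {
    let text = NodeData::Text { contents: "hi".into() };
    let comment = NodeData::Comment { contents: "hello".into() };
    assert_eq!(describe(&text), "hi");
    assert_eq!(describe(&comment), "hello");
}
```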
1 change: 1 addition & 0 deletions xml5ever/src/driver.rs
@@ -82,6 +82,7 @@ impl<Sink: TreeSink> XmlParser<Sink> {
     ///
     /// Use this when your input is bytes that are known to be in the UTF-8 encoding.
     /// Decoding is lossy, like `String::from_utf8_lossy`.
+    #[allow(clippy::wrong_self_convention)]
     pub fn from_utf8(self) -> Utf8LossyDecoder<Self> {
         Utf8LossyDecoder::new(self)
     }
4 changes: 2 additions & 2 deletions xml5ever/src/serialize/mod.rs
@@ -156,13 +156,13 @@ impl<Wr: Write> Serializer for XmlSerializer<Wr> {
         if let Some(current_namespace) = self.namespace_stack.0.last() {
             for (prefix, url_opt) in current_namespace.get_scope_iter() {
                 self.writer.write_all(b" xmlns")?;
-                if let &Some(ref p) = prefix {
+                if let Some(ref p) = *prefix {
                     self.writer.write_all(b":")?;
                     self.writer.write_all(&*p.as_bytes())?;
                 }
 
                 self.writer.write_all(b"=\"")?;
-                let url = if let &Some(ref a) = url_opt {
+                let url = if let Some(ref a) = *url_opt {
                     a.as_bytes()
                 } else {
                     b""
4 changes: 2 additions & 2 deletions xml5ever/src/tokenizer/char_ref/mod.rs
@@ -84,13 +84,13 @@ impl CharRefTokenizer {
         self.result.expect("get_result called before done")
     }
 
-    fn name_buf<'t>(&'t self) -> &'t StrTendril {
+    fn name_buf(&self) -> &StrTendril {
         self.name_buf_opt
             .as_ref()
             .expect("name_buf missing in named character reference")
     }
 
-    fn name_buf_mut<'t>(&'t mut self) -> &'t mut StrTendril {
+    fn name_buf_mut(&mut self) -> &mut StrTendril {
         self.name_buf_opt
             .as_mut()
             .expect("name_buf missing in named character reference")
11 changes: 6 additions & 5 deletions xml5ever/src/tokenizer/mod.rs
@@ -280,13 +280,13 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
         // It shouldn't matter because the fallback `FromSet` case should
         // always do the same thing as the `NotFromSet` case.
         if self.opts.exact_errors || self.reconsume || self.ignore_lf {
-            return self.get_char(input).map(|x| FromSet(x));
+            return self.get_char(input).map(FromSet);
         }
 
         let d = input.pop_except_from(set);
         debug!("got characters {:?}", d);
         match d {
-            Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(|x| FromSet(x)),
+            Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet),
 
             // NB: We don't set self.current_char for a run of characters not
             // in the set. It shouldn't matter for the codepaths that use
@@ -482,7 +482,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
         self.process_token(DoctypeToken(doctype));
     }
 
-    fn doctype_id<'a>(&'a mut self, kind: DoctypeKind) -> &'a mut Option<StrTendril> {
+    fn doctype_id(&mut self, kind: DoctypeKind) -> &mut Option<StrTendril> {
         match kind {
             Public => &mut self.current_doctype.public_id,
             System => &mut self.current_doctype.system_id,
@@ -639,6 +639,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
     // Run the state machine for a while.
     // Return true if we should be immediately re-invoked
     // (this just simplifies control flow vs. break / continue).
+    #[allow(clippy::never_loop)]
     fn step(&mut self, input: &mut BufferQueue) -> bool {
         if self.char_ref_tokenizer.is_some() {
             return self.step_char_ref_tokenizer(input);
@@ -648,7 +649,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
         match self.state {
             XmlState::Quiescent => {
                 self.state = XmlState::Data;
-                return false;
+                false
             },
             //§ data-state
             XmlState::Data => loop {
@@ -1225,7 +1226,7 @@ impl<Sink: TokenSink> XmlTokenizer<Sink> {
     }
 
     fn finish_attribute(&mut self) {
-        if self.current_attr_name.len() == 0 {
+        if self.current_attr_name.is_empty() {
             return;
         }
 
2 changes: 1 addition & 1 deletion xml5ever/src/tokenizer/qname.rs
@@ -31,7 +31,7 @@ impl<'a> QualNameTokenizer<'a> {
     }
 
     pub fn run(&mut self) -> Option<u32> {
-        if self.slice.len() > 0 {
+        if !self.slice.is_empty() {
             loop {
                 if !self.step() {
                     break;
13 changes: 9 additions & 4 deletions xml5ever/src/tree_builder/mod.rs
@@ -74,7 +74,7 @@ impl Debug for NamespaceMap {
     fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
         write!(f, "\nNamespaceMap[")?;
         for (key, value) in &self.scope {
-            write!(f, " {:?} : {:?}\n", key, value)?;
+            writeln!(f, " {:?} : {:?}", key, value)?;
         }
         write!(f, "]")
     }
@@ -248,7 +248,9 @@ where
         for e in self.open_elems.iter() {
             tracer.trace_handle(&e);
         }
-        self.curr_elem.as_ref().map(|h| tracer.trace_handle(&h));
+        if let Some(h) = self.curr_elem.as_ref() {
+            tracer.trace_handle(&h);
+        }
     }
 
     // Debug helper
@@ -360,7 +362,8 @@ where
                 new_attr.push(attr.clone());
             }
         }
-        mem::replace(&mut tag.attrs, new_attr);
+        tag.attrs = new_attr;
+
         // Then we bind the tags namespace.
         self.bind_qname(&mut tag.name);
 
@@ -383,6 +386,8 @@ where
 
         loop {
             let phase = self.phase;
+
+            #[allow(clippy::unused_unit)]
             match self.step(phase, token) {
                 Done => {
                     token = unwrap_or_return!(more_tokens.pop_front(), ());
@@ -497,7 +502,7 @@ where
                 Some(expr) => expr,
                 None => Tendril::new(),
             }
-        };
+        }
         self.sink.append_doctype_to_document(
             get_tendril(doctype.name),
             get_tendril(doctype.public_id),
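
The tree-builder picks up several smaller lints, in my summary: a `write!` with a trailing `\n` becomes `writeln!` (`clippy::write_with_newline`), a side-effecting `Option::map` becomes `if let` (`clippy::option_map_unit_fn`), and a `mem::replace` whose result was discarded becomes a plain assignment. A small sketch of all three:

```rust
use std::fmt::Write;

fn main() {
    // clippy::write_with_newline: prefer writeln! over an embedded "\n".
    let mut out = String::new();
    writeln!(out, " {:?} : {:?}", "key", "value").unwrap();

    // clippy::option_map_unit_fn: don't use `map` just for a side effect.
    let curr_elem: Option<u32> = Some(7);
    if let Some(h) = curr_elem.as_ref() {
        out.push_str(&h.to_string());
    }

    // A discarded `mem::replace(&mut attrs, new_attr)` is just an assignment.
    let mut attrs = vec!["old"];
    if !attrs.is_empty() {
        attrs = vec!["new"];
    }
    assert_eq!(attrs, vec!["new"]);
    println!("{}", out);
}
```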
