cargo clippy fixes

NGnius (Graham) 2022-01-29 16:11:01 -05:00
parent 322d988c0a
commit 9c2a20ef40
27 changed files with 173 additions and 207 deletions
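Almost every hunk below is a mechanical response to a cargo clippy warning rather than a behaviour change. The two most frequent patterns are redundant field names in struct literals and length comparisons against zero. A minimal standalone sketch of both (invented names, not code from the changed files):

    struct Tokenizer {
        buffer: Vec<u8>,
    }

    fn build(buffer: Vec<u8>) -> Tokenizer {
        // before: Tokenizer { buffer: buffer }  (clippy::redundant_field_names)
        Tokenizer { buffer }
    }

    fn has_pending(t: &Tokenizer) -> bool {
        // before: t.buffer.len() != 0  (clippy::len_zero)
        !t.buffer.is_empty()
    }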

View file

@@ -32,7 +32,7 @@ where
 {
     pub fn with_vocab(tokenizer: T, vocab: MpsLanguageDictionary) -> Self {
         Self {
-            tokenizer: tokenizer,
+            tokenizer,
             buffer: VecDeque::new(),
             current_stmt: None,
             vocabulary: vocab,
@@ -42,7 +42,7 @@ where
     pub fn with_standard_vocab(tokenizer: T) -> Self {
         let mut result = Self {
-            tokenizer: tokenizer,
+            tokenizer,
             buffer: VecDeque::new(),
             current_stmt: None,
             vocabulary: MpsLanguageDictionary::default(),
@@ -66,7 +66,7 @@ impl MpsInterpretor<crate::tokens::MpsTokenizer<File>> {
         let file = File::open(path)?;
         let tokenizer = crate::tokens::MpsTokenizer::new(file);
         let mut result = Self {
-            tokenizer: tokenizer,
+            tokenizer,
             buffer: VecDeque::new(),
             current_stmt: None,
             vocabulary: MpsLanguageDictionary::default(),
@@ -90,12 +90,7 @@ where
             if next_item.is_none() {
                 is_stmt_done = true;
             }
-            match next_item {
-                Some(item) => {
-                    Some(item.map_err(|e| box_error_with_ctx(e, self.tokenizer.current_line())))
-                }
-                None => None,
-            }
+            next_item.map(|item| item.map_err(|e| box_error_with_ctx(e, self.tokenizer.current_line())))
         } else {
             /*if self.tokenizer.end_of_file() {
                 return None;
@@ -110,28 +105,23 @@ where
                 Ok(_) => {}
                 Err(x) => return Some(Err(x)),
             }
-            if self.tokenizer.end_of_file() && self.buffer.len() == 0 {
+            if self.tokenizer.end_of_file() && self.buffer.is_empty() {
                 return None;
             }
             let stmt = self.vocabulary.try_build_statement(&mut self.buffer);
             match stmt {
                 Ok(mut stmt) => {
                     #[cfg(debug_assertions)]
-                    if self.buffer.len() != 0 {
+                    if !self.buffer.is_empty() {
                         panic!("Token buffer was not emptied! (rem: {:?})", self.buffer)
                     }
-                    stmt.enter(self.context.take().unwrap_or_else(|| MpsContext::default()));
+                    stmt.enter(self.context.take().unwrap_or_default());
                     self.current_stmt = Some(stmt);
                     let next_item = self.current_stmt.as_mut().unwrap().next();
                     if next_item.is_none() {
                         is_stmt_done = true;
                     }
-                    match next_item {
-                        Some(item) => Some(
-                            item.map_err(|e| box_error_with_ctx(e, self.tokenizer.current_line())),
-                        ),
-                        None => None,
-                    }
+                    next_item.map(|item| item.map_err(|e| box_error_with_ctx(e, self.tokenizer.current_line())))
                 }
                 Err(e) => {
                     Some(Err(e).map_err(|e| box_error_with_ctx(e, self.tokenizer.current_line())))
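The interpretor hunks above also fold hand-written Option matches into Option::map and replace unwrap_or_else with a default-constructing closure by unwrap_or_default. A standalone sketch of both rewrites (names invented):

    fn next_line(next_item: Option<u32>) -> Option<String> {
        // before: match next_item { Some(item) => Some(item.to_string()), None => None }
        // (clippy::manual_map)
        next_item.map(|item| item.to_string())
    }

    fn context_or_default(ctx: Option<String>) -> String {
        // before: ctx.unwrap_or_else(|| String::default())
        ctx.unwrap_or_default()
    }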

View file

@@ -295,7 +295,7 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterStatement<P> {
                     }
                     maybe_result
                 }
-                Err(e) => return Some(Err(e)),
+                Err(e) => Some(Err(e)),
             },
             VariableOrOp::Variable(variable_name) => {
                 let mut variable = match self
@@ -303,13 +303,13 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterStatement<P> {
                     .as_mut()
                     .unwrap()
                     .variables
-                    .remove(&variable_name, &mut op_getter)
+                    .remove(variable_name, &mut op_getter)
                 {
                     Ok(MpsType::Op(op)) => op,
                     Ok(x) => {
                         return Some(Err(RuntimeError {
                             line: 0,
-                            op: (Box::new(self_clone2.clone()) as Box<dyn MpsOp>).into(),
+                            op: (Box::new(self_clone2) as Box<dyn MpsOp>).into(),
                             msg: format!(
                                 "Expected operation/iterable type in variable {}, got {}",
                                 &variable_name, x
@@ -353,11 +353,11 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterStatement<P> {
                     self.context = Some(variable.escape());
                 }
                 match self.context.as_mut().unwrap().variables.declare(
-                    &variable_name,
+                    variable_name,
                     MpsType::Op(variable),
                     &mut op_getter,
                 ) {
-                    Err(e) => return Some(Err(e)),
+                    Err(e) => Some(Err(e)),
                     Ok(_) => maybe_result,
                 }
             }
@@ -405,7 +405,7 @@ impl<P: MpsFilterPredicate + 'static, F: MpsFilterFactory<P> + 'static> BoxedMps
         // single filter
         let tokens2: VecDeque<&MpsToken> =
             VecDeque::from_iter(tokens.range(start_of_predicate..tokens_len - 1));
-        if tokens2.len() != 0 && check_name("if", &tokens2[0]) {
+        if !tokens2.is_empty() && check_name("if", tokens2[0]) {
            // replacement filter
            if let Some(colon_location) = first_colon2(&tokens2) {
                let tokens3 = VecDeque::from_iter(tokens.range(
@@ -543,11 +543,7 @@ fn last_open_bracket_is_after_dot(tokens: &VecDeque<MpsToken>) -> bool {
                inside_brackets -= 1;
            }
        } else if open_bracket_found {
-            if tokens[i].is_dot() {
-                return true;
-            } else {
-                return false;
-            }
+            return tokens[i].is_dot()
        }
    }
    false
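Two smaller lints appear in this file: an if/else that only returns true or false collapses to the condition itself (clippy::needless_bool), and indexing a VecDeque of references already yields a reference, so the extra borrow is dropped (clippy::needless_borrow). An illustrative sketch with invented names:

    use std::collections::VecDeque;

    fn check_name(name: &str, token: &str) -> bool {
        token == name
    }

    fn is_dot(c: char) -> bool {
        // before: if c == '.' { return true; } else { return false; }
        c == '.'
    }

    fn starts_with_if(tokens: &VecDeque<&str>) -> bool {
        // before: check_name("if", &tokens[0])  -- a needless &&str where &str is expected
        !tokens.is_empty() && check_name("if", tokens[0])
    }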

View file

@@ -166,7 +166,7 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterReplaceStatement<P>
                     .as_mut()
                     .unwrap()
                     .variables
-                    .remove(&variable_name, &mut op_getter)
+                    .remove(variable_name, &mut op_getter)
                 {
                     Ok(MpsType::Op(op)) => op,
                     Ok(x) => {
@@ -186,7 +186,7 @@
                 let item = variable.next();
                 self.context = Some(variable.escape());
                 match self.context.as_mut().unwrap().variables.declare(
-                    &variable_name,
+                    variable_name,
                     MpsType::Op(variable),
                     &mut op_getter,
                 ) {
@@ -228,7 +228,7 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterReplaceStatement<P>
                     Ok(_) => {}
                 }
             }
-            while let Some(item) = real_op.next() {
+            for item in real_op.by_ref() {
                 self.item_cache.push_back(item);
             }
             self.context = Some(real_op.escape());
@@ -275,7 +275,7 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterReplaceStatement<P>
                     Ok(_) => {}
                 }
             }
-            while let Some(item) = real_op.next() {
+            for item in real_op.by_ref() {
                 self.item_cache.push_back(item);
             }
             self.context = Some(real_op.escape());
@@ -303,7 +303,7 @@ impl<P: MpsFilterPredicate + 'static> Iterator for MpsFilterReplaceStatement<P>
                     Some(Ok(item))
                 }
             }
-            Err(e) => return Some(Err(e)),
+            Err(e) => Some(Err(e)),
         }
     }
     Some(Err(e)) => Some(Err(e)),

View file

@@ -36,8 +36,8 @@ impl PseudoOp {
     pub fn unwrap_real(self) -> Result<Box<dyn MpsOp>, RuntimeError> {
         match self {
             Self::Real(op) => {
-                let result = Ok(op);
-                result
+                Ok(op)
             }
             Self::Fake(_) => Err(RuntimeError {
                 line: 0,

View file

@@ -43,7 +43,7 @@ pub fn repeated_tokens<X, F1: FnMut(&mut VecDeque<MpsToken>) -> Result<Option<X>
     RepeatedTokens {
         pattern_ingest: ingestor,
         separator_ingest: move |tokens| {
-            if tokens.len() > 0 && check_token_raw(separator.clone(), &tokens[0]) {
+            if !tokens.is_empty() && check_token_raw(separator.clone(), &tokens[0]) {
                 assert_token_raw(separator.clone(), tokens)?;
                 Ok(true)
             } else {

View file

@@ -16,7 +16,7 @@ impl SingleItem {
     pub fn new(item: Result<MpsItem, RuntimeError>) -> Self {
         Self {
             context: None,
-            item: item,
+            item,
             is_complete: false,
         }
     }

View file

@@ -23,7 +23,7 @@ pub fn assert_token<T, F: FnOnce(MpsToken) -> Option<T>>(
     } else {
         Err(SyntaxError {
             line: 0,
-            token: token,
+            token,
             got: Some(result),
         })
     }
@@ -46,7 +46,7 @@ pub fn assert_token_raw(
     } else {
         Err(SyntaxError {
             line: 0,
-            token: token,
+            token,
             got: Some(result),
         })
     }
@@ -69,7 +69,7 @@ pub fn assert_token_raw_back(
     } else {
         Err(SyntaxError {
             line: 0,
-            token: token,
+            token,
             got: Some(result),
         })
     }

View file

@@ -81,7 +81,7 @@ impl SimpleMpsOpFactory<CommentStatement> for CommentStatementFactory {
             tokens,
         )?;
         Ok(CommentStatement {
-            comment: comment,
+            comment,
             context: None,
         })
     }

View file

@@ -56,7 +56,7 @@ impl std::clone::Clone for FilesStatement {
             context: None,
             folder: self.folder.clone(),
             regex: self.regex.clone(),
-            recursive: self.recursive.clone(),
+            recursive: self.recursive,
             file_iter: None,
             has_tried: self.has_tried,
         }
@@ -86,7 +86,7 @@ impl Iterator for FilesStatement {
             });
         }
         match self.file_iter.as_mut().unwrap().next() {
-            Some(Ok(item)) => Some(Ok(item.into())),
+            Some(Ok(item)) => Some(Ok(item)),
             Some(Err(e)) => Some(Err(RuntimeError {
                 line: 0,
                 op: (Box::new(self.clone()) as Box<dyn MpsOp>).into(),
@@ -134,7 +134,7 @@ impl MpsFunctionFactory<FilesStatement> for FilesFunctionFactory {
         let mut root_path = None;
         let mut pattern = None;
         let mut recursive = None;
-        if tokens.len() != 0 {
+        if !tokens.is_empty() {
             if tokens[0].is_literal() {
                 // folder is specified without keyword
                 root_path = Some(assert_token(
@@ -232,7 +232,7 @@ impl MpsFunctionFactory<FilesStatement> for FilesFunctionFactory {
             context: None,
             folder: root_path,
             regex: pattern,
-            recursive: recursive,
+            recursive,
             file_iter: None,
             has_tried: false,
         })

View file

@@ -41,7 +41,7 @@ pub struct EmptyFilterFactory;
 impl MpsFilterFactory<EmptyFilter> for EmptyFilterFactory {
     fn is_filter(&self, tokens: &VecDeque<&MpsToken>) -> bool {
-        tokens.len() == 0
+        tokens.is_empty()
     }
     fn build_filter(

View file

@@ -68,7 +68,7 @@ impl MpsFilterPredicate for FieldFilter {
         op: &mut OpGetter,
     ) -> Result<bool, RuntimeError> {
         let variable = match &self.val {
-            VariableOrValue::Variable(name) => match ctx.variables.get(&name, op)? {
+            VariableOrValue::Variable(name) => match ctx.variables.get(name, op)? {
                 MpsType::Primitive(t) => Ok(t),
                 _ => Err(RuntimeError {
                     line: 0,
@@ -131,12 +131,12 @@ impl MpsFilterFactory<FieldFilter> for FieldFilterFactory {
         (tokens_len == 3 // field > variable OR field < variable
             && tokens[0].is_name()
             && (tokens[1].is_open_angle_bracket() || tokens[1].is_close_angle_bracket())
-            && (tokens[2].is_name() || check_is_type(&tokens[2])))
+            && (tokens[2].is_name() || check_is_type(tokens[2])))
             || (tokens_len == 4 // field >= variable OR field <= variable OR field != variable
                 && tokens[0].is_name()
                 && (tokens[1].is_open_angle_bracket() || tokens[1].is_close_angle_bracket() || tokens[1].is_equals() || tokens[1].is_exclamation())
                 && tokens[2].is_equals()
-                && (tokens[3].is_name() || check_is_type(&tokens[3])))
+                && (tokens[3].is_name() || check_is_type(tokens[3])))
     }
     fn build_filter(

View file

@@ -17,13 +17,13 @@ impl MpsFilterFactory<FieldFilter> for FieldFilterMaybeFactory {
             && tokens[0].is_name()
             && (tokens[1].is_interrogation() || tokens[1].is_exclamation())
             && (tokens[2].is_open_angle_bracket() || tokens[2].is_close_angle_bracket())
-            && (tokens[3].is_name() || check_is_type(&tokens[3])))
+            && (tokens[3].is_name() || check_is_type(tokens[3])))
             || (tokens_len == 5 // field >= variable OR field <= variable OR field != variable
                 && tokens[0].is_name()
                 && (tokens[1].is_interrogation() || tokens[1].is_exclamation())
                 && (tokens[2].is_open_angle_bracket() || tokens[2].is_close_angle_bracket() || tokens[2].is_equals() || tokens[2].is_exclamation())
                 && tokens[3].is_equals()
-                && (tokens[4].is_name() || check_is_type(&tokens[4])))
+                && (tokens[4].is_name() || check_is_type(tokens[4])))
     }
     fn build_filter(

View file

@@ -37,7 +37,7 @@ impl MpsFilterPredicate for FieldLikeFilter {
         op: &mut OpGetter,
     ) -> Result<bool, RuntimeError> {
         let variable = match &self.val {
-            VariableOrValue::Variable(name) => match ctx.variables.get(&name, op)? {
+            VariableOrValue::Variable(name) => match ctx.variables.get(name, op)? {
                 MpsType::Primitive(MpsTypePrimitive::String(s)) => Ok(s),
                 _ => Err(RuntimeError {
                     line: 0,
@@ -50,7 +50,7 @@ impl MpsFilterPredicate for FieldLikeFilter {
             _ => Err(RuntimeError {
                 line: 0,
                 op: op(),
-                msg: format!("Value is not type String"),
+                msg: "Value is not type String".to_string(),
             }),
         }?;
         if let Some(field) = music_item_lut.field(&self.field_name) {
@@ -85,12 +85,12 @@ impl MpsFilterFactory<FieldLikeFilter> for FieldLikeFilterFactory {
         let tokens_len = tokens.len();
         (tokens_len == 3 // field like variable
             && tokens[0].is_name()
-            && check_name("like", &tokens[1])
+            && check_name("like", tokens[1])
             && (tokens[2].is_name() || tokens[2].is_literal()))
             || (tokens_len == 4 // field? like variable OR field! like variable
                 && tokens[0].is_name()
                 && (tokens[1].is_interrogation() || tokens[1].is_exclamation())
-                && check_name("like", &tokens[2])
+                && check_name("like", tokens[2])
                 && (tokens[3].is_name() || tokens[3].is_literal()))
     }

View file

@@ -81,8 +81,8 @@ pub struct IndexFilterFactory;
 impl MpsFilterFactory<IndexFilter> for IndexFilterFactory {
     fn is_filter(&self, tokens: &VecDeque<&MpsToken>) -> bool {
-        (tokens.len() == 1 && Lookup::check_is(&tokens[0]))
-            || (tokens.len() == 2 && tokens[0].is_exclamation() && Lookup::check_is(&tokens[1]))
+        (tokens.len() == 1 && Lookup::check_is(tokens[0]))
+            || (tokens.len() == 2 && tokens[0].is_exclamation() && Lookup::check_is(tokens[1]))
     }
     fn build_filter(

View file

@@ -120,33 +120,33 @@ impl MpsFilterFactory<RangeFilter> for RangeFilterFactory {
         ) || (tokens.len() == 3
             && ((
                 // ..number
-                tokens[0].is_dot() && tokens[1].is_dot() && Lookup::check_is(&tokens[2])
+                tokens[0].is_dot() && tokens[1].is_dot() && Lookup::check_is(tokens[2])
             ) || (
                 // number..
-                Lookup::check_is(&tokens[0]) && tokens[1].is_dot() && tokens[2].is_dot()
+                Lookup::check_is(tokens[0]) && tokens[1].is_dot() && tokens[2].is_dot()
             )))
             || (tokens.len() == 4
                 && ((
                     // number..number
-                    Lookup::check_is(&tokens[0])
+                    Lookup::check_is(tokens[0])
                         && tokens[1].is_dot()
                         && tokens[2].is_dot()
-                        && Lookup::check_is(&tokens[3])
+                        && Lookup::check_is(tokens[3])
                 ) || (
                     // ..=number
                     tokens[0].is_dot()
                         && tokens[1].is_dot()
                         && tokens[2].is_equals()
-                        && Lookup::check_is(&tokens[3])
+                        && Lookup::check_is(tokens[3])
                 )))
             || (
                 // number..=number
                 tokens.len() == 5
-                    && Lookup::check_is(&tokens[0])
+                    && Lookup::check_is(tokens[0])
                     && tokens[1].is_dot()
                     && tokens[2].is_dot()
                     && tokens[3].is_equals()
-                    && Lookup::check_is(&tokens[4])
+                    && Lookup::check_is(tokens[4])
             )
     }
@@ -180,8 +180,8 @@ impl MpsFilterFactory<RangeFilter> for RangeFilterFactory {
         };
         Ok(RangeFilter {
-            start: start,
-            end: end,
+            start,
+            end,
             inclusive_end: equals_at_end,
             current: 0,
             complete: false,

View file

@@ -8,7 +8,7 @@ pub fn assert_comparison_operator(tokens: &mut VecDeque<MpsToken>) -> Result<[i8
     let token1 = tokens.pop_front().unwrap();
     match token1 {
         MpsToken::Equals => {
-            if tokens.len() != 0 && tokens[0].is_equals() {
+            if !tokens.is_empty() && tokens[0].is_equals() {
                 // tokens: ==
                 assert_token_raw(MpsToken::Equals, tokens)?;
                 Ok([0, 0])
@@ -16,7 +16,7 @@ pub fn assert_comparison_operator(tokens: &mut VecDeque<MpsToken>) -> Result<[i8
                 Err(SyntaxError {
                     line: 0,
                     token: MpsToken::Equals,
-                    got: if tokens.len() != 0 {
+                    got: if !tokens.is_empty() {
                         Some(tokens[0].clone())
                     } else {
                         None
@@ -25,7 +25,7 @@ pub fn assert_comparison_operator(tokens: &mut VecDeque<MpsToken>) -> Result<[i8
             }
         }
         MpsToken::OpenAngleBracket => {
-            if tokens.len() != 0 && tokens[0].is_equals() {
+            if !tokens.is_empty() && tokens[0].is_equals() {
                 // tokens: <=
                 assert_token_raw(MpsToken::Equals, tokens)?;
                 Ok([0, -1])
@@ -35,7 +35,7 @@ pub fn assert_comparison_operator(tokens: &mut VecDeque<MpsToken>) -> Result<[i8
             }
         }
         MpsToken::CloseAngleBracket => {
-            if tokens.len() != 0 && tokens[0].is_equals() {
+            if !tokens.is_empty() && tokens[0].is_equals() {
                 // tokens: >=
                 assert_token_raw(MpsToken::Equals, tokens)?;
                 Ok([0, 1])

View file

@@ -69,7 +69,7 @@ impl Iterator for RepeatStatement {
             }
             if real_op.is_resetable() {
                 while self.loop_forever || !self.inner_done {
-                    while let Some(item) = real_op.next() {
+                    for item in real_op.by_ref() {
                         return Some(item);
                     }
                     if !self.loop_forever {
@@ -124,8 +124,7 @@ impl Iterator for RepeatStatement {
         // inner is done
         if self.repetitions == 0 && !self.loop_forever {
             None
-        } else {
-            if self.cache.len() == 0 {
+        } else if self.cache.is_empty() {
                 if self.loop_forever {
                     Some(Err(RuntimeError {
                         line: 0,
@@ -148,7 +147,6 @@
                 }
             }
         }
-        }
     }
 impl MpsOp for RepeatStatement {
@@ -183,8 +181,7 @@ impl MpsOp for RepeatStatement {
                 self.repetitions = self.original_repetitions - 1;
                 self.inner_done = false;
             }
-        } else {
-            if self.inner_done {
+        } else if self.inner_done {
                 self.repetitions = self.original_repetitions;
                 self.cache_position = 0;
             } else {
@@ -196,7 +193,6 @@
                     .to_string(),
                 });
             }
-        }
         Ok(())
     }
 }
@@ -220,7 +216,7 @@ impl MpsFunctionFactory<RepeatStatement> for RepeatFunctionFactory {
         tokens.extend(end_tokens);
         let mut count: Option<usize> = None;
         let mut inner_done = false;
-        if tokens.len() != 0 {
+        if !tokens.is_empty() {
             // repititions specified
             assert_token_raw(MpsToken::Comma, tokens)?;
             count = Some(assert_token(
@@ -244,13 +240,13 @@
         }
         Ok(RepeatStatement {
             inner_statement: inner_statement.into(),
-            inner_done: inner_done,
+            inner_done,
             context: None,
             cache: Vec::new(),
             cache_position: 0,
             repetitions: count.unwrap_or(0),
             loop_forever: count.is_none(),
-            original_repetitions: count.and_then(|c| Some(c + 1)).unwrap_or(0),
+            original_repetitions: count.map(|c| c + 1).unwrap_or(0),
         })
     }
 }
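This file collects the two control-flow lints used throughout the commit: a while let over iterator.next() becomes a for loop over iterator.by_ref() (clippy::while_let_on_iterator), and an else block whose only statement is an if becomes else if (clippy::collapsible_else_if). A standalone sketch, not project code:

    fn drain_into(cache: &mut Vec<i32>, source: &mut std::vec::IntoIter<i32>) {
        // before: while let Some(item) = source.next() { cache.push(item); }
        for item in source.by_ref() {
            cache.push(item);
        }
    }

    fn state(done: bool, cache_empty: bool) -> &'static str {
        if done {
            "done"
        // before: } else { if cache_empty { ... } else { ... } }
        } else if cache_empty {
            "empty"
        } else {
            "cached"
        }
    }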

View file

@@ -31,15 +31,13 @@ impl BlissNextSorter {
     fn get_maybe(&mut self, op: &mut OpGetter) -> Option<MpsIteratorItem> {
         if self.algorithm_done {
             None
-        } else {
-            if let Ok(Some(item)) = self.rx.as_ref().unwrap().recv() {
+        } else if let Ok(Some(item)) = self.rx.as_ref().unwrap().recv() {
             Some(item.map_err(|e| bliss_err(e, op)))
         } else {
             self.algorithm_done = true;
             None
         }
     }
-    }
     fn algorithm(mut items: VecDeque<MpsItem>, results: Sender<Option<Result<MpsItem, bliss_audio::BlissError>>>) {
         let mut song_cache: Option<(Song, String)> = None;
@@ -127,7 +125,7 @@ impl BlissNextSorter {
 impl std::clone::Clone for BlissNextSorter {
     fn clone(&self) -> Self {
         Self {
-            up_to: self.up_to.clone(),
+            up_to: self.up_to,
             rx: None,
             algorithm_done: self.algorithm_done,
         }
@@ -205,7 +203,7 @@ pub struct BlissNextSorterFactory;
 impl MpsSorterFactory<BlissNextSorter> for BlissNextSorterFactory {
     fn is_sorter(&self, tokens: &VecDeque<&MpsToken>) -> bool {
-        tokens.len() == 2 && check_name("advanced", &tokens[0]) && check_name("bliss_next", &tokens[1])
+        tokens.len() == 2 && check_name("advanced", tokens[0]) && check_name("bliss_next", tokens[1])
     }
     fn build_sorter(

View file

@@ -74,7 +74,7 @@ impl BlissSorter {
 impl std::clone::Clone for BlissSorter {
     fn clone(&self) -> Self {
         Self {
-            up_to: self.up_to.clone(),
+            up_to: self.up_to,
             float_map: self.float_map.clone(),
             first_song: self.first_song.clone(),
             rx: None,
@@ -210,7 +210,7 @@ pub struct BlissSorterFactory;
 impl MpsSorterFactory<BlissSorter> for BlissSorterFactory {
     fn is_sorter(&self, tokens: &VecDeque<&MpsToken>) -> bool {
-        tokens.len() == 2 && check_name("advanced", &tokens[0]) && check_name("bliss_first", &tokens[1])
+        tokens.len() == 2 && check_name("advanced", tokens[0]) && check_name("bliss_first", tokens[1])
     }
     fn build_sorter(
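The up_to changes in the two sorters above remove .clone() on a Copy value (clippy::clone_on_copy); cloning such a field is just a slower way of copying it. A minimal sketch with invented names:

    struct Sorter {
        up_to: usize,        // Copy
        labels: Vec<String>, // not Copy, still needs .clone()
    }

    fn duplicate(s: &Sorter) -> Sorter {
        Sorter {
            // before: up_to: s.up_to.clone(),
            up_to: s.up_to,
            labels: s.labels.clone(),
        }
    }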

View file

@@ -34,7 +34,7 @@ pub struct EmptySorterFactory;
 impl MpsSorterFactory<EmptySorter> for EmptySorterFactory {
     fn is_sorter(&self, tokens: &VecDeque<&MpsToken>) -> bool {
-        tokens.len() == 0
+        tokens.is_empty()
     }
     fn build_sorter(

View file

@@ -34,7 +34,7 @@ impl SqlStatement {
         } else {
             //Some(rows[self.current].clone())
             match &rows[self.current] {
-                Ok(item) => Some(Ok(item.clone().into())),
+                Ok(item) => Some(Ok(item.clone())),
                 Err(e) => Some(Err(RuntimeError {
                     line: e.line,
                     op: (Box::new(self.clone()) as Box<dyn MpsOp>).into(),
@@ -46,7 +46,7 @@ impl SqlStatement {
             Some(Err(RuntimeError {
                 line: 0,
                 op: (Box::new(self.clone()) as Box<dyn MpsOp>).into(),
-                msg: format!("Context error: rows is None").into(),
+                msg: "Context error: rows is None".to_string(),
             }))
         }
     }
@@ -99,7 +99,7 @@ impl Iterator for SqlStatement {
             }) {
                 Err(e) => {
                     self.rows = Some(Vec::with_capacity(0));
-                    return Some(Err(e));
+                    Some(Err(e))
                 }
                 Ok(rows) => {
                     self.rows = Some(rows);

View file

@@ -83,7 +83,7 @@ impl SimpleSqlStatement {
         } else {
             //Some(rows[self.current].clone())
             match &rows[self.current] {
-                Ok(item) => Some(Ok(item.clone().into())),
+                Ok(item) => Some(Ok(item.clone())),
                 Err(e) => Some(Err(RuntimeError {
                     line: e.line,
                     op: (Box::new(self.clone()) as Box<dyn MpsOp>).into(),
@@ -95,7 +95,7 @@ impl SimpleSqlStatement {
             Some(Err(RuntimeError {
                 line: 0,
                 op: (Box::new(self.clone()) as Box<dyn MpsOp>).into(),
-                msg: format!("Context error: rows is None").into(),
+                msg: "Context error: rows is None".to_string(),
             }))
         }
     }
@@ -161,7 +161,7 @@ impl Iterator for SimpleSqlStatement {
         match query_result {
             Err(e) => {
                 self.rows = Some(Vec::with_capacity(0));
-                return Some(Err(e));
+                Some(Err(e))
             }
             Ok(rows) => {
                 self.rows = Some(rows);

View file

@@ -33,8 +33,7 @@ impl Tags {
         self.data
             .get("TITLE")
             .unwrap_or(&TagType::Unknown)
-            .str()
-            .and_then(|s| Some(s.to_string()))
+            .str().map(|s| s.to_string())
             .unwrap_or_else(|| self.default_title())
     }
@@ -47,8 +46,7 @@ impl Tags {
             .unwrap_or("");
         self.filename
             .file_name()
-            .and_then(|file| file.to_str())
-            .and_then(|file| Some(file.replacen(&format!(".{}", extension), "", 1)))
+            .and_then(|file| file.to_str()).map(|file| file.replacen(&format!(".{}", extension), "", 1))
             .unwrap_or("Unknown Title".into())
     }
@@ -57,8 +55,7 @@ impl Tags {
         self.data
             .get("ARTIST")
             .unwrap_or(&TagType::Unknown)
-            .str()
-            .and_then(|s| Some(s.to_string()))
+            .str().map(|s| s.to_string())
     }
     #[inline]
@@ -66,8 +63,7 @@ impl Tags {
         self.data
             .get("ALBUM")
             .unwrap_or(&TagType::Unknown)
-            .str()
-            .and_then(|s| Some(s.to_string()))
+            .str().map(|s| s.to_string())
     }
     #[inline]
@@ -75,8 +71,7 @@ impl Tags {
         self.data
             .get("GENRE")
             .unwrap_or(&TagType::Unknown)
-            .str()
-            .and_then(|s| Some(s.to_string()))
+            .str().map(|s| s.to_string())
     }
     #[inline]
@@ -233,7 +228,7 @@ impl TagType {
     fn str(&self) -> Option<&str> {
         match self {
-            Self::Str(s) => Some(&s),
+            Self::Str(s) => Some(s),
             _ => None,
         }
     }
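The tag accessors above replace and_then(|x| Some(...)) chains with map (clippy::bind_instead_of_map) and turn format! calls that have no placeholders into plain .to_string() (clippy::useless_format). A standalone sketch of both (invented names):

    fn title(tag: Option<&str>) -> String {
        // before: tag.and_then(|s| Some(s.to_string()))
        //            .unwrap_or_else(|| format!("Unknown Title"))
        tag.map(|s| s.to_string())
            .unwrap_or_else(|| "Unknown Title".to_string())
    }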

View file

@@ -35,7 +35,7 @@ impl Iterator for SortedReadDir {
     fn next(&mut self) -> Option<Self::Item> {
         if !self.dir_iter_complete {
-            while let Some(dir) = self.dir_iter.next() {
+            for dir in self.dir_iter.by_ref() {
                 match dir {
                     Ok(f) => self.cache.push(f),
                     Err(e) => return Some(Err(e)),
@@ -226,15 +226,14 @@ impl FileIter {
         item: &mut MpsItem,
         path_str: &str,
         captures: Option<regex::Captures>,
-        mut capture_names: regex::CaptureNames,
+        capture_names: regex::CaptureNames,
     ) {
         // populates fields from named capture groups
         if let Some(captures) = captures {
-            while let Some(name_maybe) = capture_names.next() {
+            for name_maybe in capture_names {
                 if let Some(name) = name_maybe {
                     if let Some(value) = captures
-                        .name(name)
-                        .and_then(|m| Some(m.as_str().to_string()))
+                        .name(name).map(|m| m.as_str().to_string())
                     {
                         item.set_field(name, MpsTypePrimitive::parse(value));
                     }
@@ -260,14 +259,10 @@ impl Iterator for FileIter {
     fn next(&mut self) -> Option<Self::Item> {
         if self.is_complete {
             None
-        } else {
-            if self.dir_iters.is_empty() {
+        } else if self.dir_iters.is_empty() {
             if self.root.is_file() {
                 self.is_complete = true;
-                match self.build_item(&self.root) {
-                    None => None,
-                    Some(item) => Some(Ok(item)),
-                }
+                self.build_item(&self.root).map(Ok)
             } else {
                 self.dir_iters.push(match self.root.read_dir() {
                     Ok(x) => x.into(),
@@ -276,7 +271,7 @@ impl Iterator for FileIter {
                         return Some(Err(format!("Directory read error: {}", e)));
                     }
                 });
-                return self.next();
+                self.next()
             }
         } else {
             while !self.dir_iters.is_empty() {
@@ -299,12 +294,10 @@ impl Iterator for FileIter {
                             //return self.next();
                             break 'inner;
                         }
-                    } else {
-                        if let Some(item) = self.build_item(dir_entry.path()) {
+                    } else if let Some(item) = self.build_item(dir_entry.path()) {
                         self.dir_iters.push(dir_iter);
                         return Some(Ok(item));
                     }
-                    }
                 },
                 Err(e) => {
                     self.dir_iters.push(dir_iter);
@@ -317,7 +310,6 @@ impl Iterator for FileIter {
             None
         }
     }
-    }
 }
 pub trait MpsFilesystemQuerier: Debug {

View file

@@ -45,7 +45,7 @@ pub struct MpsSQLiteExecutor {
 impl MpsSQLiteExecutor {
     #[inline]
     fn gen_db_maybe(&mut self, op: &mut QueryOp) -> Result<(), RuntimeError> {
-        if let None = self.sqlite_connection {
+        if self.sqlite_connection.is_none() {
             // connection needs to be created
             match generate_default_db() {
                 Ok(conn) => {
@@ -55,7 +55,7 @@ impl MpsSQLiteExecutor {
                     return Err(RuntimeError {
                         line: 0,
                         op: op(),
-                        msg: format!("SQL connection error: {}", e).into(),
+                        msg: format!("SQL connection error: {}", e),
                     })
                 }
             }
@@ -78,7 +78,7 @@ impl MpsSQLiteExecutor {
                     item.map_err(|e| RuntimeError {
                         line: 0,
                         op: op(),
-                        msg: format!("SQL item mapping error: {}", e).into(),
+                        msg: format!("SQL item mapping error: {}", e),
                     })
                 })
                 .collect()),
@@ -103,7 +103,7 @@ impl MpsDatabaseQuerier for MpsSQLiteExecutor {
                     item.map_err(|e| RuntimeError {
                         line: 0,
                         op: op(),
-                        msg: format!("SQL item mapping error: {}", e).into(),
+                        msg: format!("SQL item mapping error: {}", e),
                     })
                 })
                 .collect()),
@@ -248,8 +248,7 @@ impl std::convert::TryInto<rusqlite::Connection> for SqliteSettings {
     fn try_into(self) -> Result<rusqlite::Connection, Self::Error> {
         let music_path = self
-            .music_path
-            .and_then(|p| Some(std::path::PathBuf::from(p)))
+            .music_path.map(std::path::PathBuf::from)
             .unwrap_or_else(crate::lang::utility::music_folder);
         let sqlite_path = self
             .db_path
View file

@@ -36,7 +36,7 @@ impl<T: MpsTokenReader> MpsRunner<T> {
             interpretor.context(ctx);
         }
         Self {
-            interpretor: interpretor,
+            interpretor,
             new_statement: true,
         }
     }

View file

@@ -29,7 +29,7 @@ where
 {
     pub fn new(reader: R) -> Self {
         Self {
-            reader: reader,
+            reader,
             fsm: ReaderStateMachine::Start {},
             line: 0,
             column: 0,
@@ -73,7 +73,7 @@ where
                     bigger_buf.clear();
                 }
                 ReaderStateMachine::EndToken {} => {
-                    if bigger_buf.len() != 0 {
+                    if !bigger_buf.is_empty() {
                         // ignore consecutive end tokens
                         let token = String::from_utf8(bigger_buf.clone())
                             .map_err(|e| self.error(format!("UTF-8 encoding error: {}", e)))?;
@@ -86,7 +86,7 @@ where
                 }
                 ReaderStateMachine::SingleCharToken { .. } => {
                     let out = bigger_buf.pop().unwrap(); // bracket or comma token
-                    if bigger_buf.len() != 0 {
+                    if !bigger_buf.is_empty() {
                         // bracket tokens can be beside other tokens, without separator
                         let token = String::from_utf8(bigger_buf.clone())
                             .map_err(|e| self.error(format!("UTF-8 encoding error: {}", e)))?;
@@ -118,7 +118,7 @@ where
                     bigger_buf.clear();
                     buf.clear();
                     return match invalid_char {
-                        0 => Err(self.error(format!("EOF"))),
+                        0 => Err(self.error("EOF".to_string())),
                         _ => Err(self.error(format!(
                             "character {:?} ({})",
                             invalid_char as char, invalid_char
@@ -139,7 +139,7 @@ where
             self.fsm = self.fsm.next_state(byte_buf[0]);
         }
         // handle end statement
-        if bigger_buf.len() != 0 {
+        if !bigger_buf.is_empty() {
             // also end of token
             // note: never also end of literal, since those have explicit closing characters
             let token = String::from_utf8(bigger_buf.clone())
@@ -168,7 +168,7 @@ where
         ParseError {
             line: self.current_line(),
             column: self.current_column(),
-            item: item,
+            item,
         }
     }