diff --git a/build_pycompat.py b/build_pycompat.py index a5e79aa..18937ba 100644 --- a/build_pycompat.py +++ b/build_pycompat.py @@ -1,4 +1,5 @@ from dateutil.parser import parse +from dateutil.tz import tzutc from datetime import datetime tests = { @@ -48,7 +49,9 @@ tests = { 'test_parse_offset': [ 'Thu, 25 Sep 2003 10:49:41 -0300', '2003-09-25T10:49:41.5-03:00', '2003-09-25T10:49:41-03:00', '20030925T104941.5-0300', - '20030925T104941-0300' + '20030925T104941-0300', + # dtparse-specific + "10:00:00 UTC+3", "03:36:47 PM GMT-4", "04:15:00 AM Z-02:00" ], 'test_parse_dayfirst': [ '10-09-2003', '10.09.2003', '10/09/2003', '10 09 2003', diff --git a/src/lib.rs b/src/lib.rs index 76d8bf8..ff71d9c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -147,6 +147,7 @@ pub(crate) fn tokenize(parse_string: &str) -> Vec { /// Utility function for `ParserInfo` that helps in constructing /// the attributes that make up the `ParserInfo` container +#[cfg_attr(feature = "cargo-clippy", allow(get_unwrap))] // Recommended suggestion of &vec[0] doesn't compile pub fn parse_info(vec: Vec>) -> HashMap { let mut m = HashMap::new(); @@ -156,7 +157,7 @@ pub fn parse_info(vec: Vec>) -> HashMap { } } else { for (i, val_vec) in vec.into_iter().enumerate() { - for val in val_vec.into_iter() { + for val in val_vec { m.insert(val.to_lowercase(), i); } } @@ -260,8 +261,8 @@ impl Default for ParserInfo { tzoffset: parse_info(vec![vec![]]), dayfirst: false, yearfirst: false, - year: year, - century: century, + year, + century, } } } @@ -272,7 +273,7 @@ impl ParserInfo { } fn weekday_index(&self, name: &str) -> Option { - self.weekday.get(&name.to_lowercase()).map(|i| *i) + self.weekday.get(&name.to_lowercase()).cloned() } fn month_index(&self, name: &str) -> Option { @@ -280,7 +281,7 @@ impl ParserInfo { } fn hms_index(&self, name: &str) -> Option { - self.hms.get(&name.to_lowercase()).map(|i| *i) + self.hms.get(&name.to_lowercase()).cloned() } fn ampm_index(&self, name: &str) -> Option { @@ -477,7 +478,7 
@@ impl YMD { YMDLabel::Day }; - let strids_vals: Vec = strids.values().map(|u| u.clone()).collect(); + let strids_vals: Vec = strids.values().cloned().collect(); let missing_val = if !strids_vals.contains(&0) { 0 } else if !strids_vals.contains(&1) { @@ -506,6 +507,7 @@ impl YMD { )) } + #[cfg_attr(feature = "cargo-clippy", allow(needless_return))] fn resolve_ymd( &mut self, yearfirst: bool, @@ -515,14 +517,14 @@ impl YMD { let mut strids: HashMap = HashMap::new(); self.ystridx - .map(|u| strids.insert(YMDLabel::Year, u.clone())); + .map(|u| strids.insert(YMDLabel::Year, u)); self.mstridx - .map(|u| strids.insert(YMDLabel::Month, u.clone())); + .map(|u| strids.insert(YMDLabel::Month, u)); self.dstridx - .map(|u| strids.insert(YMDLabel::Day, u.clone())); + .map(|u| strids.insert(YMDLabel::Day, u)); // TODO: More Rustiomatic way of doing this? - if len_ymd == strids.len() && strids.len() > 0 + if len_ymd == strids.len() && !strids.is_empty() || (len_ymd == 3 && strids.len() == 2) { return self.resolve_from_stridxs(&mut strids); @@ -660,6 +662,7 @@ impl Parser { /// timezone name support (i.e. "EST", "BRST") is not available by default /// at the moment, they must be added through `tzinfos` at the moment in /// order to be resolved. 
+ #[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] // Need to release a 2.0 for changing public API pub fn parse( &mut self, timestr: &str, @@ -688,6 +691,7 @@ impl Parser { } } + #[cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))] // Imitating Python API is priority fn parse_with_tokens( &mut self, timestr: &str, @@ -747,7 +751,7 @@ impl Parser { && self.info.pertain_index(&l[i + 2]) { // Jan of 01 - if let Some(value) = l[i + 4].parse::().ok() { + if let Ok(value) = l[i + 4].parse::() { let year = self.info.convertyear(value, false); ymd.append(year, &l[i + 4], Some(YMDLabel::Year))?; } @@ -764,7 +768,7 @@ impl Parser { } else if fuzzy { skipped_idxs.push(i); } - } else if self.could_be_tzname(res.hour, res.tzname.clone(), res.tzoffset, &l[i]) { + } else if self.could_be_tzname(res.hour, &res.tzname, res.tzoffset, &l[i]) { res.tzname = Some(l[i].clone()); let tzname = res.tzname.clone().unwrap(); @@ -779,10 +783,9 @@ impl Parser { let item = if l[i + 1] == "+" { "-".to_owned() } else { - "-".to_owned() + "+".to_owned() }; - l.remove(i + 1); - l.insert(i + 1, item); + l[i+1] = item; res.tzoffset = None; @@ -809,7 +812,8 @@ impl Parser { i += 2; } else if len_li <= 2 { // -[0]3 - hour_offset = Some(l[i + 1][..2].parse::().unwrap()); + let range_len = min(l[i + 1].len(), 2); + hour_offset = Some(l[i + 1][..range_len].parse::().unwrap()); min_offset = Some(0); } @@ -819,7 +823,7 @@ impl Parser { let tzname = res.tzname.clone(); if i + 5 < len_l && self.info.jump_index(&l[i + 2]) && l[i + 3] == "(" && l[i + 5] == ")" && 3 <= l[i + 4].len() - && self.could_be_tzname(res.hour, tzname, None, &l[i + 4]) + && self.could_be_tzname(res.hour, &tzname, None, &l[i + 4]) { // (GMT) res.tzname = Some(l[i + 4].clone()); @@ -856,23 +860,20 @@ impl Parser { fn could_be_tzname( &self, hour: Option, - tzname: Option, + tzname: &Option, tzoffset: Option, token: &str, ) -> bool { let all_ascii_upper = token .chars() .all(|c| 65u8 as char <= c && c <= 90u8 
as char); - return hour.is_some() && tzname.is_none() && tzoffset.is_none() && token.len() <= 5 - && all_ascii_upper; + + hour.is_some() && tzname.is_none() && tzoffset.is_none() && token.len() <= 5 + && all_ascii_upper } fn ampm_valid(&self, hour: Option, ampm: Option, fuzzy: bool) -> ParseResult { - let mut val_is_ampm = true; - - if fuzzy && ampm.is_some() { - val_is_ampm = false; - } + let mut val_is_ampm = !(fuzzy && ampm.is_some()); if hour.is_none() { if fuzzy { @@ -892,8 +893,8 @@ } fn build_naive(&self, res: &ParsingResult, default: &NaiveDateTime) -> ParseResult { - let y = res.year.unwrap_or(default.year()); - let m = res.month.unwrap_or(default.month() as i32) as u32; + let y = res.year.unwrap_or_else(|| default.year()); + let m = res.month.unwrap_or_else(|| default.month() as i32) as u32; let d_offset = if res.weekday.is_some() && res.day.is_none() { // TODO: Unwrap not justified @@ -902,7 +903,7 @@ // UNWRAP: We've already check res.weekday() is some let actual_weekday = (res.weekday.unwrap() + 1) % 7; let other = DayOfWeek::from_numeral(actual_weekday as u32); - Duration::days(dow.difference(other) as i64) + Duration::days(i64::from(dow.difference(&other))) } else { Duration::days(0) }; @@ -946,6 +947,7 @@ ) -> ParseResult> { // TODO: Actual timezone support if let Some(offset) = res.tzoffset { + // offset is seconds east of UTC, as FixedOffset::east expects Ok(Some(FixedOffset::east(offset))) } else if res.tzoffset == None && (res.tzname == Some(" ".to_owned()) || res.tzname == Some(".".to_owned()) @@ -954,7 +956,7 @@ Ok(None) } else if res.tzname.is_some() && tzinfos.contains_key(res.tzname.as_ref().unwrap()) { Ok(Some(FixedOffset::east( - tzinfos.get(res.tzname.as_ref().unwrap()).unwrap().clone(), + *tzinfos.get(res.tzname.as_ref().unwrap()).unwrap(), ))) } else if res.tzname.is_some() { // TODO: Dateutil issues a warning/deprecation notice here. Should we force the issue? 
@@ -967,7 +969,7 @@ impl Parser { fn parse_numeric_token( &self, - tokens: &Vec, + tokens: &[String], idx: usize, info: &ParserInfo, ymd: &mut YMD, @@ -994,11 +996,11 @@ impl Parser { if len_li == 4 { res.minute = Some(s[2..4].parse::()?) } - } else if len_li == 6 || (len_li > 6 && tokens[idx].find(".") == Some(6)) { + } else if len_li == 6 || (len_li > 6 && tokens[idx].find('.') == Some(6)) { // YYMMDD or HHMMSS[.ss] let s = &tokens[idx]; - if ymd.len() == 0 && tokens[idx].find(".") == None { + if ymd.len() == 0 && tokens[idx].find('.') == None { ymd.append(s[0..2].parse::().unwrap(), &s[0..2], None)?; ymd.append(s[2..4].parse::().unwrap(), &s[2..4], None)?; ymd.append(s[4..6].parse::().unwrap(), &s[4..6], None)?; @@ -1070,13 +1072,11 @@ impl Parser { if idx + 3 < len_l && &tokens[idx + 3] == sep { if let Some(value) = info.month_index(&tokens[idx + 4]) { ymd.append(value as i32, &tokens[idx + 4], Some(YMDLabel::Month))?; - } else { - if let Ok(val) = tokens[idx + 4].parse::() { + } else if let Ok(val) = tokens[idx + 4].parse::() { ymd.append(val, &tokens[idx + 4], None)?; } else { return Err(ParseError::UnrecognizedFormat); } - } idx += 2; } @@ -1123,8 +1123,8 @@ impl Parser { } fn parsems(&self, seconds_str: &str) -> ParseResult<(i32, i32)> { - if seconds_str.contains(".") { - let split: Vec<&str> = seconds_str.split(".").collect(); + if seconds_str.contains('.') { + let split: Vec<&str> = seconds_str.split('.').collect(); let (i, f): (&str, &str) = (split[0], split[1]); let i_parse = i.parse::()?; @@ -1138,7 +1138,7 @@ impl Parser { fn find_hms_index( &self, idx: usize, - tokens: &Vec, + tokens: &[String], info: &ParserInfo, allow_jump: bool, ) -> Option { @@ -1180,7 +1180,7 @@ impl Parser { fn parse_hms( &self, idx: usize, - tokens: &Vec, + tokens: &[String], info: &ParserInfo, hms_index: Option, ) -> (usize, Option) { @@ -1236,6 +1236,7 @@ impl Parser { (minute, second) } + #[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))] // Need Vec type 
because of mutability in the function that calls us fn recombine_skipped(&self, skipped_idxs: Vec, tokens: Vec) -> Vec { let mut skipped_tokens: Vec = vec![]; @@ -1246,10 +1247,10 @@ impl Parser { if i > 0 && idx - 1 == skipped_idxs[i - 1] { // UNWRAP: Having an initial value and unconditional push at end guarantees value let mut t = skipped_tokens.pop().unwrap(); - t.push_str(tokens[idx.clone()].as_ref()); + t.push_str(tokens[*idx].as_ref()); skipped_tokens.push(t); } else { - skipped_tokens.push(tokens[idx.clone()].to_owned()); + skipped_tokens.push(tokens[*idx].to_owned()); } } diff --git a/src/tests/pycompat_parser.rs b/src/tests/pycompat_parser.rs index ca32d51..92189ca 100644 --- a/src/tests/pycompat_parser.rs +++ b/src/tests/pycompat_parser.rs @@ -1434,6 +1434,42 @@ fn test_parse_offset4() { None, false, &HashMap::new()); } +#[test] +fn test_parse_offset5() { + let info = ParserInfo::default(); + let pdt = PyDateTime { + year: 2018, month: 8, day: 10, + hour: 10, minute: 0, second: 0, + micros: 0, tzo: Some(-10800), + }; + parse_and_assert(pdt, info, "10:00:00 UTC+3", None, None, false, false, + None, false, &HashMap::new()); +} + +#[test] +fn test_parse_offset6() { + let info = ParserInfo::default(); + let pdt = PyDateTime { + year: 2018, month: 8, day: 10, + hour: 15, minute: 36, second: 47, + micros: 0, tzo: Some(14400), + }; + parse_and_assert(pdt, info, "03:36:47 PM GMT-4", None, None, false, false, + None, false, &HashMap::new()); +} + +#[test] +fn test_parse_offset7() { + let info = ParserInfo::default(); + let pdt = PyDateTime { + year: 2018, month: 8, day: 10, + hour: 4, minute: 15, second: 0, + micros: 0, tzo: Some(7200), + }; + parse_and_assert(pdt, info, "04:15:00 AM Z-02:00", None, None, false, false, + None, false, &HashMap::new()); +} + #[test] fn test_parse_dayfirst0() { let info = ParserInfo::default(); diff --git a/src/tests/pycompat_tokenizer.rs b/src/tests/pycompat_tokenizer.rs index c2077d1..644c5cc 100644 --- 
a/src/tests/pycompat_tokenizer.rs +++ b/src/tests/pycompat_tokenizer.rs @@ -708,198 +708,216 @@ fn test_tokenize115() { #[test] fn test_tokenize116() { + let comp = vec!["10", ":", "00", ":", "00", " ", "UTC", "+", "3"]; + tokenize_assert("10:00:00 UTC+3", comp); +} + +#[test] +fn test_tokenize117() { + let comp = vec!["03", ":", "36", ":", "47", " ", "PM", " ", "GMT", "-", "4"]; + tokenize_assert("03:36:47 PM GMT-4", comp); +} + +#[test] +fn test_tokenize118() { + let comp = vec!["04", ":", "15", ":", "00", " ", "AM", " ", "Z", "-", "02", ":", "00"]; + tokenize_assert("04:15:00 AM Z-02:00", comp); +} + +#[test] +fn test_tokenize119() { let comp = vec!["10", "-", "09", "-", "2003"]; tokenize_assert("10-09-2003", comp); } #[test] -fn test_tokenize117() { +fn test_tokenize120() { let comp = vec!["10", ".", "09", ".", "2003"]; tokenize_assert("10.09.2003", comp); } #[test] -fn test_tokenize118() { +fn test_tokenize121() { let comp = vec!["10", "/", "09", "/", "2003"]; tokenize_assert("10/09/2003", comp); } #[test] -fn test_tokenize119() { +fn test_tokenize122() { let comp = vec!["10", " ", "09", " ", "2003"]; tokenize_assert("10 09 2003", comp); } #[test] -fn test_tokenize120() { +fn test_tokenize123() { let comp = vec!["090107"]; tokenize_assert("090107", comp); } #[test] -fn test_tokenize121() { +fn test_tokenize124() { let comp = vec!["2015", " ", "09", " ", "25"]; tokenize_assert("2015 09 25", comp); } #[test] -fn test_tokenize122() { +fn test_tokenize125() { let comp = vec!["10", "-", "09", "-", "03"]; tokenize_assert("10-09-03", comp); } #[test] -fn test_tokenize123() { +fn test_tokenize126() { let comp = vec!["10", ".", "09", ".", "03"]; tokenize_assert("10.09.03", comp); } #[test] -fn test_tokenize124() { +fn test_tokenize127() { let comp = vec!["10", "/", "09", "/", "03"]; tokenize_assert("10/09/03", comp); } #[test] -fn test_tokenize125() { +fn test_tokenize128() { let comp = vec!["10", " ", "09", " ", "03"]; tokenize_assert("10 09 03", comp); } -#[test] -fn 
test_tokenize126() { - let comp = vec!["090107"]; - tokenize_assert("090107", comp); -} - -#[test] -fn test_tokenize127() { - let comp = vec!["2015", " ", "09", " ", "25"]; - tokenize_assert("2015 09 25", comp); -} - -#[test] -fn test_tokenize128() { - let comp = vec!["090107"]; - tokenize_assert("090107", comp); -} - #[test] fn test_tokenize129() { - let comp = vec!["2015", " ", "09", " ", "25"]; - tokenize_assert("2015 09 25", comp); + let comp = vec!["090107"]; + tokenize_assert("090107", comp); } #[test] fn test_tokenize130() { + let comp = vec!["2015", " ", "09", " ", "25"]; + tokenize_assert("2015 09 25", comp); +} + +#[test] +fn test_tokenize131() { + let comp = vec!["090107"]; + tokenize_assert("090107", comp); +} + +#[test] +fn test_tokenize132() { + let comp = vec!["2015", " ", "09", " ", "25"]; + tokenize_assert("2015 09 25", comp); +} + +#[test] +fn test_tokenize133() { let comp = vec!["April", " ", "2009"]; tokenize_assert("April 2009", comp); } #[test] -fn test_tokenize131() { +fn test_tokenize134() { let comp = vec!["Feb", " ", "2007"]; tokenize_assert("Feb 2007", comp); } #[test] -fn test_tokenize132() { +fn test_tokenize135() { let comp = vec!["Feb", " ", "2008"]; tokenize_assert("Feb 2008", comp); } #[test] -fn test_tokenize133() { +fn test_tokenize136() { let comp = vec!["Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28", " ", "BRST", " ", "2003"]; tokenize_assert("Thu Sep 25 10:36:28 BRST 2003", comp); } #[test] -fn test_tokenize134() { +fn test_tokenize137() { let comp = vec!["1996", ".", "07", ".", "10", " ", "AD", " ", "at", " ", "15", ":", "08", ":", "56", " ", "PDT"]; tokenize_assert("1996.07.10 AD at 15:08:56 PDT", comp); } #[test] -fn test_tokenize135() { +fn test_tokenize138() { let comp = vec!["Tuesday", ",", " ", "April", " ", "12", ",", " ", "1952", " ", "AD", " ", "3", ":", "30", ":", "42", "pm", " ", "PST"]; tokenize_assert("Tuesday, April 12, 1952 AD 3:30:42pm PST", comp); } #[test] -fn test_tokenize136() { +fn 
test_tokenize139() { let comp = vec!["November", " ", "5", ",", " ", "1994", ",", " ", "8", ":", "15", ":", "30", " ", "am", " ", "EST"]; tokenize_assert("November 5, 1994, 8:15:30 am EST", comp); } #[test] -fn test_tokenize137() { +fn test_tokenize140() { let comp = vec!["1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "-", "05", ":", "00"]; tokenize_assert("1994-11-05T08:15:30-05:00", comp); } #[test] -fn test_tokenize138() { +fn test_tokenize141() { let comp = vec!["1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "Z"]; tokenize_assert("1994-11-05T08:15:30Z", comp); } #[test] -fn test_tokenize139() { +fn test_tokenize142() { let comp = vec!["1976", "-", "07", "-", "04", "T", "00", ":", "01", ":", "02", "Z"]; tokenize_assert("1976-07-04T00:01:02Z", comp); } #[test] -fn test_tokenize140() { +fn test_tokenize143() { let comp = vec!["Tue", " ", "Apr", " ", "4", " ", "00", ":", "22", ":", "12", " ", "PDT", " ", "1995"]; tokenize_assert("Tue Apr 4 00:22:12 PDT 1995", comp); } #[test] -fn test_tokenize141() { +fn test_tokenize144() { let comp = vec!["Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ", "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ", "with", " ", "timezone", " ", "-", "03", ":", "00", "."]; tokenize_assert("Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.", comp); } #[test] -fn test_tokenize142() { +fn test_tokenize145() { let comp = vec!["Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ", "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ", "with", " ", "timezone", " ", "-", "03", ":", "00", "."]; tokenize_assert("Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.", comp); } #[test] -fn test_tokenize143() { +fn test_tokenize146() { let comp = vec!["I", " ", "have", " ", "a", " ", "meeting", " ", "on", " ", "March", " ", "1", ",", " ", "1974"]; tokenize_assert("I have a 
meeting on March 1, 1974", comp); } #[test] -fn test_tokenize144() { +fn test_tokenize147() { let comp = vec!["On", " ", "June", " ", "8", "th", ",", " ", "2020", ",", " ", "I", " ", "am", " ", "going", " ", "to", " ", "be", " ", "the", " ", "first", " ", "man", " ", "on", " ", "Mars"]; tokenize_assert("On June 8th, 2020, I am going to be the first man on Mars", comp); } #[test] -fn test_tokenize145() { +fn test_tokenize148() { let comp = vec!["Meet", " ", "me", " ", "at", " ", "the", " ", "AM", "/", "PM", " ", "on", " ", "Sunset", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ", "December", " ", "3", "rd", ",", " ", "2003"]; tokenize_assert("Meet me at the AM/PM on Sunset at 3:00 AM on December 3rd, 2003", comp); } #[test] -fn test_tokenize146() { +fn test_tokenize149() { let comp = vec!["Meet", " ", "me", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ", "December", " ", "3", "rd", ",", " ", "2003", " ", "at", " ", "the", " ", "AM", "/", "PM", " ", "on", " ", "Sunset"]; tokenize_assert("Meet me at 3:00 AM on December 3rd, 2003 at the AM/PM on Sunset", comp); } #[test] -fn test_tokenize147() { +fn test_tokenize150() { let comp = vec!["Jan", " ", "29", ",", " ", "1945", " ", "14", ":", "45", " ", "AM", " ", "I", " ", "going", " ", "to", " ", "see", " ", "you", " ", "there", "?"]; tokenize_assert("Jan 29, 1945 14:45 AM I going to see you there?", comp); } #[test] -fn test_tokenize148() { +fn test_tokenize151() { let comp = vec!["2017", "-", "07", "-", "17", " ", "06", ":", "15", ":"]; tokenize_assert("2017-07-17 06:15:", comp); } diff --git a/src/weekday.rs b/src/weekday.rs index 3c6c36e..6a2e436 100644 --- a/src/weekday.rs +++ b/src/weekday.rs @@ -15,14 +15,14 @@ pub enum DayOfWeek { impl DayOfWeek { pub fn to_numeral(&self) -> u32 { - match self { - &DayOfWeek::Sunday => 0, - &DayOfWeek::Monday => 1, - &DayOfWeek::Tuesday => 2, - &DayOfWeek::Wednesday => 3, - &DayOfWeek::Thursday => 4, - &DayOfWeek::Friday => 5, - &DayOfWeek::Saturday => 6, + 
match *self { + DayOfWeek::Sunday => 0, + DayOfWeek::Monday => 1, + DayOfWeek::Tuesday => 2, + DayOfWeek::Wednesday => 3, + DayOfWeek::Thursday => 4, + DayOfWeek::Friday => 5, + DayOfWeek::Saturday => 6, } } @@ -40,7 +40,7 @@ impl DayOfWeek { } /// Given the current day of the week, how many days until the next day? - pub fn difference(&self, other: DayOfWeek) -> u32 { + pub fn difference(&self, other: &DayOfWeek) -> u32 { // Have to use i32 because of wraparound issues let s_num = self.to_numeral() as i32; let o_num = other.to_numeral() as i32; @@ -115,18 +115,18 @@ mod test { #[test] fn weekday_difference() { - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Sunday), 0); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Monday), 1); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Tuesday), 2); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Wednesday), 3); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Thursday), 4); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Friday), 5); - assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Saturday), 6); - assert_eq!(DayOfWeek::Monday.difference(DayOfWeek::Sunday), 6); - assert_eq!(DayOfWeek::Tuesday.difference(DayOfWeek::Sunday), 5); - assert_eq!(DayOfWeek::Wednesday.difference(DayOfWeek::Sunday), 4); - assert_eq!(DayOfWeek::Thursday.difference(DayOfWeek::Sunday), 3); - assert_eq!(DayOfWeek::Friday.difference(DayOfWeek::Sunday), 2); - assert_eq!(DayOfWeek::Saturday.difference(DayOfWeek::Sunday), 1); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Sunday), 0); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Monday), 1); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Tuesday), 2); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Wednesday), 3); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Thursday), 4); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Friday), 5); + assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Saturday), 6); + 
assert_eq!(DayOfWeek::Monday.difference(&DayOfWeek::Sunday), 6); + assert_eq!(DayOfWeek::Tuesday.difference(&DayOfWeek::Sunday), 5); + assert_eq!(DayOfWeek::Wednesday.difference(&DayOfWeek::Sunday), 4); + assert_eq!(DayOfWeek::Thursday.difference(&DayOfWeek::Sunday), 3); + assert_eq!(DayOfWeek::Friday.difference(&DayOfWeek::Sunday), 2); + assert_eq!(DayOfWeek::Saturday.difference(&DayOfWeek::Sunday), 1); } } \ No newline at end of file