mirror of https://github.com/bspeice/dtparse
synced 2025-07-03 06:45:01 -04:00

Compare commits (10 commits):

  ef3ea38834
  741afa3451
  4d7c5dd995
  afb7747cdf
  22e0300275
  0ef35527d9
  b5fa1d89ef
  246b389ac9
  4d48885f4b
  48705339e6
.gitignore (vendored) | 2

@@ -4,3 +4,5 @@
 Cargo.lock
 .vscode
 *.pyc
+.idea/
+*.swp
.travis.yml | 23

@@ -5,6 +5,8 @@ dist: trusty
 language: rust
 services: docker
 sudo: required
+addons:
+  chrome: stable

 env:
   global:
@@ -74,6 +76,27 @@ matrix:
       os: osx
       rust: beta
+
+    # Historical Rust versions
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.21.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.22.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.23.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.24.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.25.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.26.0
+    - env: TARGET=x86_64-unknown-linux-gnu
+      rust: 1.27.0
+
+    # WASM support
+    - env: TARGET=asmjs-unknown-emscripten USE_CARGO_WEB=true
+      rust: nightly
+

 before_install:
 - set -e
 - rustup self update
CHANGELOG.md (new file) | 34

@@ -0,0 +1,34 @@
+Version 1.0.3 (2018-09-18)
+==========================
+
+Misc
+----
+
+- Changed the default `parse` function to use a static parser
+
+Version 1.0.2 (2018-08-14)
+==========================
+
+Misc
+----
+
+- Add tests for WASM
+
+Version 1.0.1 (2018-08-11)
+==========================
+
+Bugfixes
+--------
+
+- Fixed an issue with "GMT+3" not being handled correctly
+
+Misc
+----
+
+- Upgrade `lazy_static` and `rust_decimal` dependencies
+
+Version 1.0.0 (2018-08-03)
+==========================
+
+Initial release. Passes all relevant unit tests from Python's
+`dateutil` project.
Contributors list:

@@ -4,3 +4,4 @@ This project benefits from the Rust and open source communities, but most specif

 - [@messense](https://github.com/messense)
 - [@mjmeehan](https://github.com/mjmeehan)
+- [@neosilky](https://github.com/neosilky)
Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "dtparse"
-version = "1.0.0"
+version = "1.0.3"
 authors = ["Bradlee Speice <bradlee@speice.io>"]
 description = "A dateutil-compatible timestamp parser for Rust"
 repository = "https://github.com/bspeice/dtparse.git"
@@ -18,6 +18,6 @@ name = "dtparse"

 [dependencies]
 chrono = "0.4"
-lazy_static = "1.0"
+lazy_static = "1.1"
 num-traits = "0.2"
-rust_decimal = "0.9"
+rust_decimal = "^0.10.1"
README.md:

@@ -65,5 +65,11 @@ assert_eq!(

 Further examples can be found in the [examples](examples) directory on international usage.

+# Usage
+
+`dtparse` requires a minimum Rust version of 1.21 to build, but is tested on Windows, OSX,
+BSD, Linux, and WASM. The build is also compiled against the iOS and Android SDK's, but is not
+tested against them.
+
 [dateutil]: https://github.com/dateutil/dateutil
 [examples]: https://github.com/bspeice/dtparse/tree/master/examples
build_pycompat.py (Normal file → Executable file) | 10

@@ -1,4 +1,6 @@
+#!/usr/bin/python3
 from dateutil.parser import parse
+from dateutil.tz import tzutc
 from datetime import datetime

 tests = {
@@ -48,7 +50,9 @@ tests = {
     'test_parse_offset': [
         'Thu, 25 Sep 2003 10:49:41 -0300', '2003-09-25T10:49:41.5-03:00',
         '2003-09-25T10:49:41-03:00', '20030925T104941.5-0300',
-        '20030925T104941-0300'
+        '20030925T104941-0300',
+        # dtparse-specific
+        "2018-08-10 10:00:00 UTC+3", "2018-08-10 03:36:47 PM GMT-4", "2018-08-10 04:15:00 AM Z-02:00"
     ],
     'test_parse_dayfirst': [
         '10-09-2003', '10.09.2003', '10/09/2003', '10 09 2003',

(The Rust `fn parse_and_assert` / `fn parse_fuzzy_and_assert` hunks below sit in the test-code templates this script generates.)

@@ -229,7 +233,7 @@ fn parse_and_assert(
     tzinfos: &HashMap<String, i32>,
 ) {
-    let mut parser = Parser::new(info);
+    let parser = Parser::new(info);
     let rs_parsed = parser.parse(
         s,
         dayfirst,
@@ -279,7 +283,7 @@ fn parse_fuzzy_and_assert(
     tzinfos: &HashMap<String, i32>,
 ) {
-    let mut parser = Parser::new(info);
+    let parser = Parser::new(info);
     let rs_parsed = parser.parse(
         s,
         dayfirst,
build_pycompat_tokenizer.py (Normal file → Executable file) | 1

@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 from dateutil.parser import _timelex

 from build_pycompat import tests
ci/script.sh | 20

@@ -14,7 +14,27 @@ main() {
     cross test --target $TARGET --release
 }

+main_web() {
+    CARGO_WEB_RELEASE="$(curl -L -s -H 'Accept: application/json' https://github.com/koute/cargo-web/releases/latest)"
+    CARGO_WEB_VERSION="$(echo $CARGO_WEB_RELEASE | sed -e 's/.*"tag_name":"\([^"]*\)".*/\1/')"
+    CARGO_WEB_URL="https://github.com/koute/cargo-web/releases/download/$CARGO_WEB_VERSION/cargo-web-x86_64-unknown-linux-gnu.gz"
+
+    echo "Downloading cargo-web from: $CARGO_WEB_URL"
+    curl -L "$CARGO_WEB_URL" | gzip -d > cargo-web
+    chmod +x cargo-web
+
+    mkdir -p ~/.cargo/bin
+    mv cargo-web ~/.cargo/bin
+
+    cargo web build --target $TARGET
+    cargo web test --target $TARGET --release
+}
+
 # we don't run the "test phase" when doing deploys
 if [ -z $TRAVIS_TAG ]; then
+    if [ -z "$USE_CARGO_WEB" ]; then
         main
+    else
+        main_web
+    fi
 fi
src/lib.rs | 99

@@ -10,7 +10,7 @@
 //! sense of the dates and times they contain. A couple of quick examples
 //! from the test cases should give some context:
 //!
-//! ```rust
+//! ```rust,ignore (tests-dont-compile-on-old-rust)
 //! # extern crate chrono;
 //! # extern crate dtparse;
 //! use chrono::prelude::*;
@@ -34,7 +34,7 @@
 //! And we can even handle fuzzy strings where dates/times aren't the
 //! only content if we dig into the implementation a bit!
 //!
-//! ```rust
+//! ```rust,ignore (tests-dont-compile-on-old-rust)
 //! # extern crate chrono;
 //! # extern crate dtparse;
 //! use chrono::prelude::*;
@@ -61,6 +61,12 @@
 //!
 //! Further examples can be found in the `examples` directory on international usage.
 //!
+//! # Usage
+//!
+//! `dtparse` requires a minimum Rust version of 1.21 to build, but is tested on Windows, OSX,
+//! BSD, Linux, and WASM. The build is also compiled against the iOS and Android SDK's, but is not
+//! tested against them.
+//!
 //! [dateutil]: https://github.com/dateutil/dateutil

 #[macro_use]
@@ -102,6 +108,7 @@ lazy_static! {
     static ref ONE: Decimal = Decimal::new(1, 0);
     static ref TWENTY_FOUR: Decimal = Decimal::new(24, 0);
     static ref SIXTY: Decimal = Decimal::new(60, 0);
+    static ref DEFAULT_PARSER: Parser = Parser::default();
 }

 impl From<DecimalError> for ParseError {
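The new `DEFAULT_PARSER` static is the heart of the 1.0.3 "static parser" change: the top-level `parse` function (see the `@@ -1276` hunk further down) now reuses one lazily built `Parser` instead of constructing one per call. A minimal sketch of the `lazy_static` pattern, with a stand-in `Parser` type rather than dtparse's real one:

```rust
#[macro_use]
extern crate lazy_static;

// Stand-in for dtparse's real Parser; only the pattern matters here.
#[derive(Default)]
struct Parser;

impl Parser {
    fn parse(&self, s: &str) -> String {
        format!("parsed: {}", s)
    }
}

lazy_static! {
    // Built once, on first access, then shared by every caller.
    static ref DEFAULT_PARSER: Parser = Parser::default();
}

fn main() {
    // No Parser is constructed per call; both calls use the same static.
    println!("{}", DEFAULT_PARSER.parse("2018-09-18"));
    println!("{}", DEFAULT_PARSER.parse("Sep 19 2018"));
}
```

Sharing a `&'static Parser` is also why `Parser::parse` had to drop `&mut self` (see the `@@ -660` hunk below).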
@@ -147,6 +154,7 @@ pub(crate) fn tokenize(parse_string: &str) -> Vec<String> {

 /// Utility function for `ParserInfo` that helps in constructing
 /// the attributes that make up the `ParserInfo` container
+#[cfg_attr(feature = "cargo-clippy", allow(get_unwrap))] // Recommended suggestion of &vec[0] doesn't compile
 pub fn parse_info(vec: Vec<Vec<&str>>) -> HashMap<String, usize> {
     let mut m = HashMap::new();

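This is the first of several `#[cfg_attr(feature = "cargo-clippy", allow(...))]` attributes added in these commits. The conditional form only activates the lint exception when clippy itself drives the build (the 2018-era `cargo-clippy` feature), so ordinary compilers never see an unknown lint name. A small illustrative sketch, not taken from the crate:

```rust
// The allow() only takes effect under `cargo clippy`, which used to
// set the `cargo-clippy` feature; plain `cargo build` ignores it.
#[cfg_attr(feature = "cargo-clippy", allow(get_unwrap))]
fn first(v: &Vec<u32>) -> u32 {
    // Clippy's get_unwrap lint would suggest `v[0]` here; the comment
    // in the real diff notes why that suggestion is declined.
    *v.get(0).unwrap()
}

fn main() {
    println!("{}", first(&vec![7, 8, 9]));
}
```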
@@ -156,7 +164,7 @@ pub fn parse_info(vec: Vec<Vec<&str>>) -> HashMap<String, usize> {
         }
     } else {
         for (i, val_vec) in vec.into_iter().enumerate() {
-            for val in val_vec.into_iter() {
+            for val in val_vec {
                 m.insert(val.to_lowercase(), i);
             }
         }
@@ -260,8 +268,8 @@ impl Default for ParserInfo {
             tzoffset: parse_info(vec![vec![]]),
             dayfirst: false,
             yearfirst: false,
-            year: year,
-            century: century,
+            year,
+            century,
         }
     }
 }
@@ -272,7 +280,7 @@ impl ParserInfo {
     }

     fn weekday_index(&self, name: &str) -> Option<usize> {
-        self.weekday.get(&name.to_lowercase()).map(|i| *i)
+        self.weekday.get(&name.to_lowercase()).cloned()
     }

     fn month_index(&self, name: &str) -> Option<usize> {
@@ -280,7 +288,7 @@ impl ParserInfo {
     }

     fn hms_index(&self, name: &str) -> Option<usize> {
-        self.hms.get(&name.to_lowercase()).map(|i| *i)
+        self.hms.get(&name.to_lowercase()).cloned()
     }

     fn ampm_index(&self, name: &str) -> Option<bool> {
@@ -477,7 +485,7 @@ impl YMD {
             YMDLabel::Day
         };

-        let strids_vals: Vec<usize> = strids.values().map(|u| u.clone()).collect();
+        let strids_vals: Vec<usize> = strids.values().cloned().collect();
         let missing_val = if !strids_vals.contains(&0) {
             0
         } else if !strids_vals.contains(&1) {
@@ -506,6 +514,7 @@ impl YMD {
         ))
     }

+    #[cfg_attr(feature = "cargo-clippy", allow(needless_return))]
     fn resolve_ymd(
         &mut self,
         yearfirst: bool,
@@ -515,14 +524,14 @@ impl YMD {

         let mut strids: HashMap<YMDLabel, usize> = HashMap::new();
         self.ystridx
-            .map(|u| strids.insert(YMDLabel::Year, u.clone()));
+            .map(|u| strids.insert(YMDLabel::Year, u));
         self.mstridx
-            .map(|u| strids.insert(YMDLabel::Month, u.clone()));
+            .map(|u| strids.insert(YMDLabel::Month, u));
         self.dstridx
-            .map(|u| strids.insert(YMDLabel::Day, u.clone()));
+            .map(|u| strids.insert(YMDLabel::Day, u));

         // TODO: More Rustiomatic way of doing this?
-        if len_ymd == strids.len() && strids.len() > 0
+        if len_ymd == strids.len() && !strids.is_empty()
             || (len_ymd == 3 && strids.len() == 2)
         {
             return self.resolve_from_stridxs(&mut strids);
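The changes above are clippy idioms with identical behavior: `.map(|i| *i)` on an `Option<&T>` becomes `.cloned()`, the `u.clone()` calls on plain `usize` values drop away, and `strids.len() > 0` becomes `!strids.is_empty()`. A quick equivalence check:

```rust
use std::collections::HashMap;

fn main() {
    let mut m: HashMap<String, usize> = HashMap::new();
    m.insert("monday".to_string(), 0);

    // Two ways to copy a value out of an Option<&usize>:
    let explicit: Option<usize> = m.get("monday").map(|i| *i);
    let idiomatic: Option<usize> = m.get("monday").cloned();
    assert_eq!(explicit, idiomatic);

    // Emptiness: same answer, clippy prefers the second form.
    assert_eq!(m.len() > 0, !m.is_empty());
    println!("equivalent");
}
```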
@@ -660,8 +669,9 @@ impl Parser {
     /// timezone name support (i.e. "EST", "BRST") is not available by default
     /// at the moment, they must be added through `tzinfos` at the moment in
     /// order to be resolved.
+    #[cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] // Need to release a 2.0 for changing public API
     pub fn parse(
-        &mut self,
+        &self,
         timestr: &str,
         dayfirst: Option<bool>,
         yearfirst: Option<bool>,
@@ -688,8 +698,9 @@ impl Parser {
         }
     }

+    #[cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))] // Imitating Python API is priority
     fn parse_with_tokens(
-        &mut self,
+        &self,
         timestr: &str,
         dayfirst: Option<bool>,
         yearfirst: Option<bool>,
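Dropping `&mut self` from `parse` and `parse_with_tokens` is what allows a `Parser` to live behind `lazy_static` and be shared; callers no longer need a `mut` binding either. A hedged usage sketch against the 1.0.3 API, where the roles of the boolean arguments are inferred from the test calls elsewhere in this diff rather than from documented parameter names:

```rust
extern crate chrono;
extern crate dtparse;

use chrono::NaiveDate;
use dtparse::Parser;
use std::collections::HashMap;

fn main() {
    // Note: no `mut` needed now that parse() takes &self.
    let parser = Parser::default();
    let default = NaiveDate::from_ymd(2018, 1, 1).and_hms(0, 0, 0);

    let result = parser.parse(
        "10:00 AM",      // timestr
        None,            // dayfirst
        None,            // yearfirst
        false,           // (inferred) fuzzy
        false,           // (inferred) fuzzy_with_tokens
        Some(&default),  // default date/time to fill in missing fields
        false,           // (inferred) ignoretz
        &HashMap::new(), // tzinfos
    );
    println!("{:?}", result);
}
```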
@@ -747,7 +758,7 @@ impl Parser {
                 && self.info.pertain_index(&l[i + 2])
             {
                 // Jan of 01
-                if let Some(value) = l[i + 4].parse::<i32>().ok() {
+                if let Ok(value) = l[i + 4].parse::<i32>() {
                     let year = self.info.convertyear(value, false);
                     ymd.append(year, &l[i + 4], Some(YMDLabel::Year))?;
                 }
@@ -764,7 +775,7 @@ impl Parser {
             } else if fuzzy {
                 skipped_idxs.push(i);
             }
-        } else if self.could_be_tzname(res.hour, res.tzname.clone(), res.tzoffset, &l[i]) {
+        } else if self.could_be_tzname(res.hour, &res.tzname, res.tzoffset, &l[i]) {
             res.tzname = Some(l[i].clone());

             let tzname = res.tzname.clone().unwrap();
@@ -779,10 +790,9 @@ impl Parser {
                 let item = if l[i + 1] == "+" {
                     "-".to_owned()
                 } else {
-                    "-".to_owned()
+                    "+".to_owned()
                 };
-                l.remove(i + 1);
-                l.insert(i + 1, item);
+                l[i+1] = item;

                 res.tzoffset = None;

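The one-character change above is the "GMT+3" fix named in the 1.0.1 changelog: the sign following a named zone such as "UTC+3" is inverted, POSIX style, before the offset tokens are re-parsed, and previously both branches produced "-", so one direction got the wrong sign. The new tests test_parse_offset5 through test_parse_offset7 further down pin the behavior; a hedged sketch against the public API:

```rust
extern crate dtparse;
use dtparse::parse;

fn main() {
    // Expectations below come from test_parse_offset5/6 in this diff:
    // the wall-clock sign after a named zone is inverted, POSIX style.

    // "UTC+3" resolves to an offset of -10800 seconds (-3 hours)...
    let (dt, offset) = parse("2018-08-10 10:00:00 UTC+3").unwrap();
    println!("{} {:?}", dt, offset);

    // ...and "GMT-4" to +14400 seconds (+4 hours).
    let (dt, offset) = parse("2018-08-10 03:36:47 PM GMT-4").unwrap();
    println!("{} {:?}", dt, offset);
}
```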
@@ -809,7 +819,8 @@ impl Parser {
                     i += 2;
                 } else if len_li <= 2 {
                     // -[0]3
-                    hour_offset = Some(l[i + 1][..2].parse::<i32>().unwrap());
+                    let range_len = min(l[i + 1].len(), 2);
+                    hour_offset = Some(l[i + 1][..range_len].parse::<i32>().unwrap());
                     min_offset = Some(0);
                 }

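The old line sliced two characters unconditionally, which panics on a one-digit offset (the "3" of "UTC+3" arrives as a single-character token); clamping with `min` keeps the slice in bounds. The behavior in isolation:

```rust
use std::cmp::min;

fn hour_from(tok: &str) -> Option<i32> {
    // Old: &tok[..2] panics whenever tok.len() < 2.
    // New: clamp the slice length to the token first.
    let range_len = min(tok.len(), 2);
    tok[..range_len].parse::<i32>().ok()
}

fn main() {
    assert_eq!(hour_from("03"), Some(3)); // two-digit offset
    assert_eq!(hour_from("3"), Some(3));  // one-digit offset, no panic now
    println!("ok");
}
```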
@@ -819,7 +830,7 @@ impl Parser {
             let tzname = res.tzname.clone();
             if i + 5 < len_l && self.info.jump_index(&l[i + 2]) && l[i + 3] == "("
                 && l[i + 5] == ")" && 3 <= l[i + 4].len()
-                && self.could_be_tzname(res.hour, tzname, None, &l[i + 4])
+                && self.could_be_tzname(res.hour, &tzname, None, &l[i + 4])
             {
                 // (GMT)
                 res.tzname = Some(l[i + 4].clone());
@@ -856,23 +867,20 @@ impl Parser {
     fn could_be_tzname(
         &self,
         hour: Option<i32>,
-        tzname: Option<String>,
+        tzname: &Option<String>,
         tzoffset: Option<i32>,
         token: &str,
     ) -> bool {
         let all_ascii_upper = token
             .chars()
             .all(|c| 65u8 as char <= c && c <= 90u8 as char);
-        return hour.is_some() && tzname.is_none() && tzoffset.is_none() && token.len() <= 5
-            && all_ascii_upper;
+
+        hour.is_some() && tzname.is_none() && tzoffset.is_none() && token.len() <= 5
+            && all_ascii_upper
     }

     fn ampm_valid(&self, hour: Option<i32>, ampm: Option<bool>, fuzzy: bool) -> ParseResult<bool> {
-        let mut val_is_ampm = true;
-
-        if fuzzy && ampm.is_some() {
-            val_is_ampm = false;
-        }
+        let mut val_is_ampm = !(fuzzy && ampm.is_some());

         if hour.is_none() {
             if fuzzy {
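`could_be_tzname` now borrows `&Option<String>` instead of consuming an `Option<String>`, so the call sites above stop cloning `res.tzname` just to answer a yes/no question. The shape of that refactor, reduced to a toy:

```rust
// Read-only checks only need a borrow of the Option.
fn tzname_unset(tzname: &Option<String>) -> bool {
    tzname.is_none()
}

fn main() {
    let tzname: Option<String> = Some("BRST".to_string());

    // Before: tzname_unset(tzname.clone()) forced an allocation per call.
    // After: pass a reference and keep ownership where it was.
    assert!(!tzname_unset(&tzname));
    assert!(tzname_unset(&None));
    println!("no clones needed");
}
```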
@@ -892,8 +900,8 @@ impl Parser {
     }

     fn build_naive(&self, res: &ParsingResult, default: &NaiveDateTime) -> ParseResult<NaiveDateTime> {
-        let y = res.year.unwrap_or(default.year());
-        let m = res.month.unwrap_or(default.month() as i32) as u32;
+        let y = res.year.unwrap_or_else(|| default.year());
+        let m = res.month.unwrap_or_else(|| default.month() as i32) as u32;

         let d_offset = if res.weekday.is_some() && res.day.is_none() {
             // TODO: Unwrap not justified
|
|||||||
// UNWRAP: We've already check res.weekday() is some
|
// UNWRAP: We've already check res.weekday() is some
|
||||||
let actual_weekday = (res.weekday.unwrap() + 1) % 7;
|
let actual_weekday = (res.weekday.unwrap() + 1) % 7;
|
||||||
let other = DayOfWeek::from_numeral(actual_weekday as u32);
|
let other = DayOfWeek::from_numeral(actual_weekday as u32);
|
||||||
Duration::days(dow.difference(other) as i64)
|
Duration::days(i64::from(dow.difference(&other)))
|
||||||
} else {
|
} else {
|
||||||
Duration::days(0)
|
Duration::days(0)
|
||||||
};
|
};
|
||||||
@ -954,7 +962,7 @@ impl Parser {
|
|||||||
Ok(None)
|
Ok(None)
|
||||||
} else if res.tzname.is_some() && tzinfos.contains_key(res.tzname.as_ref().unwrap()) {
|
} else if res.tzname.is_some() && tzinfos.contains_key(res.tzname.as_ref().unwrap()) {
|
||||||
Ok(Some(FixedOffset::east(
|
Ok(Some(FixedOffset::east(
|
||||||
tzinfos.get(res.tzname.as_ref().unwrap()).unwrap().clone(),
|
*tzinfos.get(res.tzname.as_ref().unwrap()).unwrap(),
|
||||||
)))
|
)))
|
||||||
} else if res.tzname.is_some() {
|
} else if res.tzname.is_some() {
|
||||||
// TODO: Dateutil issues a warning/deprecation notice here. Should we force the issue?
|
// TODO: Dateutil issues a warning/deprecation notice here. Should we force the issue?
|
||||||
@ -967,7 +975,7 @@ impl Parser {
|
|||||||
|
|
||||||
fn parse_numeric_token(
|
fn parse_numeric_token(
|
||||||
&self,
|
&self,
|
||||||
tokens: &Vec<String>,
|
tokens: &[String],
|
||||||
idx: usize,
|
idx: usize,
|
||||||
info: &ParserInfo,
|
info: &ParserInfo,
|
||||||
ymd: &mut YMD,
|
ymd: &mut YMD,
|
||||||
@ -994,11 +1002,11 @@ impl Parser {
|
|||||||
if len_li == 4 {
|
if len_li == 4 {
|
||||||
res.minute = Some(s[2..4].parse::<i32>()?)
|
res.minute = Some(s[2..4].parse::<i32>()?)
|
||||||
}
|
}
|
||||||
} else if len_li == 6 || (len_li > 6 && tokens[idx].find(".") == Some(6)) {
|
} else if len_li == 6 || (len_li > 6 && tokens[idx].find('.') == Some(6)) {
|
||||||
// YYMMDD or HHMMSS[.ss]
|
// YYMMDD or HHMMSS[.ss]
|
||||||
let s = &tokens[idx];
|
let s = &tokens[idx];
|
||||||
|
|
||||||
if ymd.len() == 0 && tokens[idx].find(".") == None {
|
if ymd.len() == 0 && tokens[idx].find('.') == None {
|
||||||
ymd.append(s[0..2].parse::<i32>().unwrap(), &s[0..2], None)?;
|
ymd.append(s[0..2].parse::<i32>().unwrap(), &s[0..2], None)?;
|
||||||
ymd.append(s[2..4].parse::<i32>().unwrap(), &s[2..4], None)?;
|
ymd.append(s[2..4].parse::<i32>().unwrap(), &s[2..4], None)?;
|
||||||
ymd.append(s[4..6].parse::<i32>().unwrap(), &s[4..6], None)?;
|
ymd.append(s[4..6].parse::<i32>().unwrap(), &s[4..6], None)?;
|
||||||
@ -1070,13 +1078,11 @@ impl Parser {
|
|||||||
if idx + 3 < len_l && &tokens[idx + 3] == sep {
|
if idx + 3 < len_l && &tokens[idx + 3] == sep {
|
||||||
if let Some(value) = info.month_index(&tokens[idx + 4]) {
|
if let Some(value) = info.month_index(&tokens[idx + 4]) {
|
||||||
ymd.append(value as i32, &tokens[idx + 4], Some(YMDLabel::Month))?;
|
ymd.append(value as i32, &tokens[idx + 4], Some(YMDLabel::Month))?;
|
||||||
} else {
|
} else if let Ok(val) = tokens[idx + 4].parse::<i32>() {
|
||||||
if let Ok(val) = tokens[idx + 4].parse::<i32>() {
|
|
||||||
ymd.append(val, &tokens[idx + 4], None)?;
|
ymd.append(val, &tokens[idx + 4], None)?;
|
||||||
} else {
|
} else {
|
||||||
return Err(ParseError::UnrecognizedFormat);
|
return Err(ParseError::UnrecognizedFormat);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
idx += 2;
|
idx += 2;
|
||||||
}
|
}
|
||||||
@ -1123,8 +1129,8 @@ impl Parser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn parsems(&self, seconds_str: &str) -> ParseResult<(i32, i32)> {
|
fn parsems(&self, seconds_str: &str) -> ParseResult<(i32, i32)> {
|
||||||
if seconds_str.contains(".") {
|
if seconds_str.contains('.') {
|
||||||
let split: Vec<&str> = seconds_str.split(".").collect();
|
let split: Vec<&str> = seconds_str.split('.').collect();
|
||||||
let (i, f): (&str, &str) = (split[0], split[1]);
|
let (i, f): (&str, &str) = (split[0], split[1]);
|
||||||
|
|
||||||
let i_parse = i.parse::<i32>()?;
|
let i_parse = i.parse::<i32>()?;
|
||||||
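The `find(".")`, `contains(".")`, and `split(".")` calls above all switch from a one-character string pattern to a `char` pattern (clippy's `single_char_pattern`); results are identical and the match is marginally cheaper. For instance:

```rust
fn main() {
    let s = "104941.5";

    // &str and char patterns agree on every one of these:
    assert_eq!(s.find("."), s.find('.'));
    assert_eq!(s.contains("."), s.contains('.'));

    let by_str: Vec<&str> = s.split(".").collect();
    let by_char: Vec<&str> = s.split('.').collect();
    assert_eq!(by_str, by_char);
    println!("{:?}", by_char); // ["104941", "5"]
}
```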
@@ -1138,7 +1144,7 @@ impl Parser {
     fn find_hms_index(
         &self,
         idx: usize,
-        tokens: &Vec<String>,
+        tokens: &[String],
         info: &ParserInfo,
         allow_jump: bool,
     ) -> Option<usize> {
@@ -1180,7 +1186,7 @@ impl Parser {
     fn parse_hms(
         &self,
         idx: usize,
-        tokens: &Vec<String>,
+        tokens: &[String],
         info: &ParserInfo,
         hms_index: Option<usize>,
     ) -> (usize, Option<usize>) {
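Loosening `&Vec<String>` to `&[String]` (clippy's `ptr_arg`) breaks no caller, because `&Vec<String>` deref-coerces to `&[String]`, and it newly accepts slices and arrays. Sketch:

```rust
// A slice parameter: indexing, len(), and iteration all work as before.
fn first_token(tokens: &[String]) -> Option<&String> {
    tokens.first()
}

fn main() {
    let owned: Vec<String> = vec!["2003".to_string(), "-".to_string()];

    // &Vec<String> coerces to &[String] automatically...
    assert_eq!(first_token(&owned), Some(&"2003".to_string()));

    // ...and subslices work too, which the &Vec<String> signature rejected.
    assert_eq!(first_token(&owned[1..]), Some(&"-".to_string()));
    println!("ok");
}
```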
@@ -1236,6 +1242,7 @@ impl Parser {
         (minute, second)
     }

+    #[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))] // Need Vec type because of mutability in the function that calls us
     fn recombine_skipped(&self, skipped_idxs: Vec<usize>, tokens: Vec<String>) -> Vec<String> {
         let mut skipped_tokens: Vec<String> = vec![];

@@ -1246,10 +1253,10 @@ impl Parser {
             if i > 0 && idx - 1 == skipped_idxs[i - 1] {
                 // UNWRAP: Having an initial value and unconditional push at end guarantees value
                 let mut t = skipped_tokens.pop().unwrap();
-                t.push_str(tokens[idx.clone()].as_ref());
+                t.push_str(tokens[*idx].as_ref());
                 skipped_tokens.push(t);
             } else {
-                skipped_tokens.push(tokens[idx.clone()].to_owned());
+                skipped_tokens.push(tokens[*idx].to_owned());
             }
         }

@@ -1276,7 +1283,7 @@ fn ljust(s: &str, chars: usize, replace: char) -> String {
 /// The default implementation assumes English values for names of months,
 /// days of the week, etc. It is equivalent to Python's `dateutil.parser.parse()`
 pub fn parse(timestr: &str) -> ParseResult<(NaiveDateTime, Option<FixedOffset>)> {
-    let res = Parser::default().parse(
+    let res = DEFAULT_PARSER.parse(
         timestr,
         None,
         None,
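With `DEFAULT_PARSER` in place, the free `parse` function reuses one shared parser rather than building `Parser::default()` on every call, which is the whole of the 1.0.3 changelog entry. The public signature is unchanged; a hedged usage example:

```rust
extern crate dtparse;
use dtparse::parse;

fn main() {
    // Every call below hits the same lazily initialized DEFAULT_PARSER.
    for s in &["2018-09-18", "September 18, 2018", "18.09.2018"] {
        match parse(s) {
            Ok((dt, offset)) => println!("{} -> {} {:?}", s, dt, offset),
            Err(e) => println!("{} -> error: {:?}", s, e),
        }
    }
}
```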
Test suite (fuzzing):

@@ -16,7 +16,7 @@ fn test_fuzz() {
     // OverflowError: Python int too large to convert to C long
     // assert_eq!(parse("8888884444444888444444444881"), Err(ParseError::AmPmWithoutHour));
     let default = NaiveDate::from_ymd(2016, 6, 29).and_hms(0, 0, 0);
-    let mut p = Parser::default();
+    let p = Parser::default();
     let res = p.parse("\x0D\x31", None, None, false, false, Some(&default), false, &HashMap::new()).unwrap();
     assert_eq!(res.0, default);

Test suite (pycompat parser, generated by build_pycompat.py):

@@ -39,7 +39,7 @@ fn parse_and_assert(
     tzinfos: &HashMap<String, i32>,
 ) {
-    let mut parser = Parser::new(info);
+    let parser = Parser::new(info);
     let rs_parsed = parser.parse(
         s,
         dayfirst,
@@ -89,7 +89,7 @@ fn parse_fuzzy_and_assert(
     tzinfos: &HashMap<String, i32>,
 ) {
-    let mut parser = Parser::new(info);
+    let parser = Parser::new(info);
     let rs_parsed = parser.parse(
         s,
         dayfirst,
@@ -1434,6 +1434,42 @@ fn test_parse_offset4() {
                      None, false, &HashMap::new());
 }

+#[test]
+fn test_parse_offset5() {
+    let info = ParserInfo::default();
+    let pdt = PyDateTime {
+        year: 2018, month: 8, day: 10,
+        hour: 10, minute: 0, second: 0,
+        micros: 0, tzo: Some(-10800),
+    };
+    parse_and_assert(pdt, info, "2018-08-10 10:00:00 UTC+3", None, None, false, false,
+                     None, false, &HashMap::new());
+}
+
+#[test]
+fn test_parse_offset6() {
+    let info = ParserInfo::default();
+    let pdt = PyDateTime {
+        year: 2018, month: 8, day: 10,
+        hour: 15, minute: 36, second: 47,
+        micros: 0, tzo: Some(14400),
+    };
+    parse_and_assert(pdt, info, "2018-08-10 03:36:47 PM GMT-4", None, None, false, false,
+                     None, false, &HashMap::new());
+}
+
+#[test]
+fn test_parse_offset7() {
+    let info = ParserInfo::default();
+    let pdt = PyDateTime {
+        year: 2018, month: 8, day: 10,
+        hour: 4, minute: 15, second: 0,
+        micros: 0, tzo: Some(7200),
+    };
+    parse_and_assert(pdt, info, "2018-08-10 04:15:00 AM Z-02:00", None, None, false, false,
+                     None, false, &HashMap::new());
+}
+
 #[test]
 fn test_parse_dayfirst0() {
     let info = ParserInfo::default();
Test suite (pycompat tokenizer, generated by build_pycompat_tokenizer.py). The three new tokenizer tests are inserted as 116 through 118, and every later test is renumbered by three:

@@ -708,198 +708,216 @@ fn test_tokenize115() {

 #[test]
 fn test_tokenize116() {
+    let comp = vec!["2018", "-", "08", "-", "10", " ", "10", ":", "00", ":", "00", " ", "UTC", "+", "3"];
+    tokenize_assert("2018-08-10 10:00:00 UTC+3", comp);
+}
+
+#[test]
+fn test_tokenize117() {
+    let comp = vec!["2018", "-", "08", "-", "10", " ", "03", ":", "36", ":", "47", " ", "PM", " ", "GMT", "-", "4"];
+    tokenize_assert("2018-08-10 03:36:47 PM GMT-4", comp);
+}
+
+#[test]
+fn test_tokenize118() {
+    let comp = vec!["2018", "-", "08", "-", "10", " ", "04", ":", "15", ":", "00", " ", "AM", " ", "Z", "-", "02", ":", "00"];
+    tokenize_assert("2018-08-10 04:15:00 AM Z-02:00", comp);
+}
+
+#[test]
+fn test_tokenize119() {
     let comp = vec!["10", "-", "09", "-", "2003"];
     tokenize_assert("10-09-2003", comp);
 }

 #[test]
-fn test_tokenize117() {
+fn test_tokenize120() {
     let comp = vec!["10", ".", "09", ".", "2003"];
     tokenize_assert("10.09.2003", comp);
 }

 #[test]
-fn test_tokenize118() {
+fn test_tokenize121() {
     let comp = vec!["10", "/", "09", "/", "2003"];
     tokenize_assert("10/09/2003", comp);
 }

 #[test]
-fn test_tokenize119() {
+fn test_tokenize122() {
     let comp = vec!["10", " ", "09", " ", "2003"];
     tokenize_assert("10 09 2003", comp);
 }

 #[test]
-fn test_tokenize120() {
+fn test_tokenize123() {
     let comp = vec!["090107"];
     tokenize_assert("090107", comp);
 }

 #[test]
-fn test_tokenize121() {
+fn test_tokenize124() {
     let comp = vec!["2015", " ", "09", " ", "25"];
     tokenize_assert("2015 09 25", comp);
 }

 #[test]
-fn test_tokenize122() {
+fn test_tokenize125() {
     let comp = vec!["10", "-", "09", "-", "03"];
     tokenize_assert("10-09-03", comp);
 }

 #[test]
-fn test_tokenize123() {
+fn test_tokenize126() {
     let comp = vec!["10", ".", "09", ".", "03"];
     tokenize_assert("10.09.03", comp);
 }

 #[test]
-fn test_tokenize124() {
+fn test_tokenize127() {
     let comp = vec!["10", "/", "09", "/", "03"];
     tokenize_assert("10/09/03", comp);
 }

 #[test]
-fn test_tokenize125() {
+fn test_tokenize128() {
     let comp = vec!["10", " ", "09", " ", "03"];
     tokenize_assert("10 09 03", comp);
 }

-#[test]
-fn test_tokenize126() {
-    let comp = vec!["090107"];
-    tokenize_assert("090107", comp);
-}
-
-#[test]
-fn test_tokenize127() {
-    let comp = vec!["2015", " ", "09", " ", "25"];
-    tokenize_assert("2015 09 25", comp);
-}
-
-#[test]
-fn test_tokenize128() {
-    let comp = vec!["090107"];
-    tokenize_assert("090107", comp);
-}
-
 #[test]
 fn test_tokenize129() {
-    let comp = vec!["2015", " ", "09", " ", "25"];
-    tokenize_assert("2015 09 25", comp);
+    let comp = vec!["090107"];
+    tokenize_assert("090107", comp);
 }

 #[test]
 fn test_tokenize130() {
+    let comp = vec!["2015", " ", "09", " ", "25"];
+    tokenize_assert("2015 09 25", comp);
+}
+
+#[test]
+fn test_tokenize131() {
+    let comp = vec!["090107"];
+    tokenize_assert("090107", comp);
+}
+
+#[test]
+fn test_tokenize132() {
+    let comp = vec!["2015", " ", "09", " ", "25"];
+    tokenize_assert("2015 09 25", comp);
+}
+
+#[test]
+fn test_tokenize133() {
     let comp = vec!["April", " ", "2009"];
     tokenize_assert("April 2009", comp);
 }

 #[test]
-fn test_tokenize131() {
+fn test_tokenize134() {
     let comp = vec!["Feb", " ", "2007"];
     tokenize_assert("Feb 2007", comp);
 }

 #[test]
-fn test_tokenize132() {
+fn test_tokenize135() {
     let comp = vec!["Feb", " ", "2008"];
     tokenize_assert("Feb 2008", comp);
 }

 #[test]
-fn test_tokenize133() {
+fn test_tokenize136() {
     let comp = vec!["Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28", " ", "BRST", " ", "2003"];
     tokenize_assert("Thu Sep 25 10:36:28 BRST 2003", comp);
 }

 #[test]
-fn test_tokenize134() {
+fn test_tokenize137() {
     let comp = vec!["1996", ".", "07", ".", "10", " ", "AD", " ", "at", " ", "15", ":", "08", ":", "56", " ", "PDT"];
     tokenize_assert("1996.07.10 AD at 15:08:56 PDT", comp);
 }

 #[test]
-fn test_tokenize135() {
+fn test_tokenize138() {
     let comp = vec!["Tuesday", ",", " ", "April", " ", "12", ",", " ", "1952", " ", "AD", " ", "3", ":", "30", ":", "42", "pm", " ", "PST"];
     tokenize_assert("Tuesday, April 12, 1952 AD 3:30:42pm PST", comp);
 }

 #[test]
-fn test_tokenize136() {
+fn test_tokenize139() {
     let comp = vec!["November", " ", "5", ",", " ", "1994", ",", " ", "8", ":", "15", ":", "30", " ", "am", " ", "EST"];
     tokenize_assert("November 5, 1994, 8:15:30 am EST", comp);
 }

 #[test]
-fn test_tokenize137() {
+fn test_tokenize140() {
     let comp = vec!["1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "-", "05", ":", "00"];
     tokenize_assert("1994-11-05T08:15:30-05:00", comp);
 }

 #[test]
-fn test_tokenize138() {
+fn test_tokenize141() {
     let comp = vec!["1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "Z"];
     tokenize_assert("1994-11-05T08:15:30Z", comp);
 }

 #[test]
-fn test_tokenize139() {
+fn test_tokenize142() {
     let comp = vec!["1976", "-", "07", "-", "04", "T", "00", ":", "01", ":", "02", "Z"];
     tokenize_assert("1976-07-04T00:01:02Z", comp);
 }

 #[test]
-fn test_tokenize140() {
+fn test_tokenize143() {
     let comp = vec!["Tue", " ", "Apr", " ", "4", " ", "00", ":", "22", ":", "12", " ", "PDT", " ", "1995"];
     tokenize_assert("Tue Apr 4 00:22:12 PDT 1995", comp);
 }

 #[test]
-fn test_tokenize141() {
+fn test_tokenize144() {
     let comp = vec!["Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ", "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ", "with", " ", "timezone", " ", "-", "03", ":", "00", "."];
     tokenize_assert("Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.", comp);
 }

 #[test]
-fn test_tokenize142() {
+fn test_tokenize145() {
     let comp = vec!["Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ", "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ", "with", " ", "timezone", " ", "-", "03", ":", "00", "."];
     tokenize_assert("Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.", comp);
 }

 #[test]
-fn test_tokenize143() {
+fn test_tokenize146() {
     let comp = vec!["I", " ", "have", " ", "a", " ", "meeting", " ", "on", " ", "March", " ", "1", ",", " ", "1974"];
     tokenize_assert("I have a meeting on March 1, 1974", comp);
 }

 #[test]
-fn test_tokenize144() {
+fn test_tokenize147() {
     let comp = vec!["On", " ", "June", " ", "8", "th", ",", " ", "2020", ",", " ", "I", " ", "am", " ", "going", " ", "to", " ", "be", " ", "the", " ", "first", " ", "man", " ", "on", " ", "Mars"];
     tokenize_assert("On June 8th, 2020, I am going to be the first man on Mars", comp);
 }

 #[test]
-fn test_tokenize145() {
+fn test_tokenize148() {
     let comp = vec!["Meet", " ", "me", " ", "at", " ", "the", " ", "AM", "/", "PM", " ", "on", " ", "Sunset", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ", "December", " ", "3", "rd", ",", " ", "2003"];
     tokenize_assert("Meet me at the AM/PM on Sunset at 3:00 AM on December 3rd, 2003", comp);
 }

 #[test]
-fn test_tokenize146() {
+fn test_tokenize149() {
     let comp = vec!["Meet", " ", "me", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ", "December", " ", "3", "rd", ",", " ", "2003", " ", "at", " ", "the", " ", "AM", "/", "PM", " ", "on", " ", "Sunset"];
     tokenize_assert("Meet me at 3:00 AM on December 3rd, 2003 at the AM/PM on Sunset", comp);
 }

 #[test]
-fn test_tokenize147() {
+fn test_tokenize150() {
     let comp = vec!["Jan", " ", "29", ",", " ", "1945", " ", "14", ":", "45", " ", "AM", " ", "I", " ", "going", " ", "to", " ", "see", " ", "you", " ", "there", "?"];
     tokenize_assert("Jan 29, 1945 14:45 AM I going to see you there?", comp);
 }

 #[test]
-fn test_tokenize148() {
+fn test_tokenize151() {
     let comp = vec!["2017", "-", "07", "-", "17", " ", "06", ":", "15", ":"];
     tokenize_assert("2017-07-17 06:15:", comp);
 }
Weekday support (DayOfWeek):

@@ -15,7 +15,7 @@ pub enum DayOfWeek {
 impl DayOfWeek {

     pub fn to_numeral(&self) -> u32 {
-        match self {
+        match *self {
             DayOfWeek::Sunday => 0,
             DayOfWeek::Monday => 1,
             DayOfWeek::Tuesday => 2,
@@ -40,7 +40,7 @@ impl DayOfWeek {
     }

     /// Given the current day of the week, how many days until the next day?
-    pub fn difference(&self, other: DayOfWeek) -> u32 {
+    pub fn difference(&self, other: &DayOfWeek) -> u32 {
         // Have to use i32 because of wraparound issues
         let s_num = self.to_numeral() as i32;
         let o_num = other.to_numeral() as i32;
@@ -115,18 +115,18 @@ mod test {
     #[test]
     fn weekday_difference() {

-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Sunday), 0);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Monday), 1);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Tuesday), 2);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Wednesday), 3);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Thursday), 4);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Friday), 5);
-        assert_eq!(DayOfWeek::Sunday.difference(DayOfWeek::Saturday), 6);
-        assert_eq!(DayOfWeek::Monday.difference(DayOfWeek::Sunday), 6);
-        assert_eq!(DayOfWeek::Tuesday.difference(DayOfWeek::Sunday), 5);
-        assert_eq!(DayOfWeek::Wednesday.difference(DayOfWeek::Sunday), 4);
-        assert_eq!(DayOfWeek::Thursday.difference(DayOfWeek::Sunday), 3);
-        assert_eq!(DayOfWeek::Friday.difference(DayOfWeek::Sunday), 2);
-        assert_eq!(DayOfWeek::Saturday.difference(DayOfWeek::Sunday), 1);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Sunday), 0);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Monday), 1);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Tuesday), 2);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Wednesday), 3);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Thursday), 4);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Friday), 5);
+        assert_eq!(DayOfWeek::Sunday.difference(&DayOfWeek::Saturday), 6);
+        assert_eq!(DayOfWeek::Monday.difference(&DayOfWeek::Sunday), 6);
+        assert_eq!(DayOfWeek::Tuesday.difference(&DayOfWeek::Sunday), 5);
+        assert_eq!(DayOfWeek::Wednesday.difference(&DayOfWeek::Sunday), 4);
+        assert_eq!(DayOfWeek::Thursday.difference(&DayOfWeek::Sunday), 3);
+        assert_eq!(DayOfWeek::Friday.difference(&DayOfWeek::Sunday), 2);
+        assert_eq!(DayOfWeek::Saturday.difference(&DayOfWeek::Sunday), 1);
     }
 }