mirror of https://github.com/bspeice/dtparse synced 2024-11-12 17:08:09 -05:00

Set up Python parse compat

Bradlee Speice 2018-05-23 23:01:00 -04:00
parent 6a2c0a6304
commit b1f8cd77ee
7 changed files with 90 additions and 43 deletions


@@ -4,5 +4,4 @@ version = "0.1.0"
authors = ["Bradlee Speice <bspeice@kcg.com>"]
[dependencies]
chrono = "0.4"
chrono-tz = "0.4"
chrono = "0.4"


@@ -1,5 +1,7 @@
#import dateutil.parser._timelex.split as time_split
from dateutil.parser import _timelex
from dateutil.parser import parse as duparse
import pytz
# The TEST_STRINGS list should be the only thing that actually needs changing
TEST_STRINGS = [
@@ -12,7 +14,7 @@ S4 = ' ' * 4
S8 = ' ' * 8
S12 = ' ' * 12
def test_string_to_rust(time_string):
def rust_tokenize(time_string):
    split_array = _timelex.split(time_string)
    def translate_token(token):
@@ -28,9 +30,9 @@ def test_string_to_rust(time_string):
    return [translate_token(t) for t in split_array]
def main():
    header = '''use super::Token;
use super::tokenize;
def build_split_string_tests():
    header = '''use ::Token;
use ::tokenize;
#[test]
fn test_python_compat() {\n'''
@@ -39,7 +41,7 @@ fn test_python_compat() {\n'''
    for test_string in TEST_STRINGS:
        token_string = '\n'.join(['{}{},'.format(S12, s)
                                  for s in test_string_to_rust(test_string)])
                                  for s in rust_tokenize(test_string)])
        tests.append('    assert_eq!(\n{}tokenize("{}"),\n{}vec![\n{}\n{}]\n{});'
                     .format(S8, test_string, S8, token_string, S8, S4))
@@ -47,10 +49,35 @@ fn test_python_compat() {\n'''
    footer = '\n}\n'
    with open('src/test_python_compat.rs', 'w') as handle:
        handle.write(header)
        handle.write(body)
        handle.write(footer)
    return header + body + footer
def test_parse(time_string):
    dt = duparse(time_string)
    # TODO: Don't make this dependent on New_York
    iso8601 = pytz.timezone('America/New_York').localize(dt).astimezone(pytz.utc)
    return 'assert_eq!(\n{}parse("{}".to_owned())\n{}.unwrap()\n{}.to_rfc3339_opts(SecondsFormat::Micros, false),\n{}"{}"\n{});'.format(
        S8, time_string, S12, S12, S8, iso8601, S4)
def build_parse_tests():
    header = '''use chrono::SecondsFormat;
use parse;
#[test]
fn test_python_compat() {\n'''
    asserts = ['    {}'.format(test_parse(a)) for a in TEST_STRINGS]
    body = '\n'.join(asserts)
    footer = '\n}\n'
    return header + body + footer
if __name__ == '__main__':
    main()
    split_string_test = build_split_string_tests()
    with open('src/tests/compat_split_string.rs', 'w+') as handle:
        handle.write(split_string_test)
    parse_test = build_parse_tests()
    with open('src/tests/compat_parse.rs', 'w+') as handle:
        handle.write(parse_test)
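
For orientation, a rough sketch of the skeleton that build_split_string_tests emits into src/tests/compat_split_string.rs: the header string shown above, one assert_eq! per TEST_STRINGS entry, and the closing brace from the footer. The vec! body is a placeholder, since the concrete Token values come from translate_token and are not part of this diff; treat this as illustrative rather than as the generated file itself.

use ::Token;
use ::tokenize;

#[test]
fn test_python_compat() {
    // One assertion like this is generated per TEST_STRINGS entry; the vec
    // holds whatever translate_token() produced for _timelex.split("2018.5.15").
    assert_eq!(
        tokenize("2018.5.15"),
        vec![
            // Token::... placeholders, one per Python token
        ]
    );
}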


@@ -9,8 +9,6 @@ use chrono::Utc;
use std::collections::HashMap;
use std::vec::Vec;
#[cfg(test)]
mod test_python_compat;
#[cfg(test)]
mod tests;
@@ -674,9 +672,7 @@ impl Default for Parser {
impl Parser {
    pub fn new(info: ParserInfo) -> Self {
        Parser {
            info: info
        }
        Parser { info: info }
    }
    pub fn parse(
@@ -692,34 +688,43 @@ impl Parser {
        let default_ts = NaiveDateTime::new(default_date, NaiveTime::from_hms(0, 0, 0));
        // TODO: What should be done with the tokens?
        let (res, tokens) = self.parse_with_tokens(
            timestr, self.info.dayfirst, self.info.yearfirst, true, true)?;
        let (res, tokens) =
            self.parse_with_tokens(timestr, self.info.dayfirst, self.info.yearfirst, true, true)?;
        let naive = self.build_naive(&res, default_ts);
        Ok(self.build_tzaware(naive, &res, default_ts))
    }
    fn parse_with_tokens(&self, timestr: String, dayfirst: bool, yearfirst: bool, fuzzy: bool,
        fuzzy_with_tokens: bool) -> Result<(ParsingResult, Vec<String>), ParseError> {
    fn parse_with_tokens(
        &self,
        timestr: String,
        dayfirst: bool,
        yearfirst: bool,
        fuzzy: bool,
        fuzzy_with_tokens: bool,
    ) -> Result<(ParsingResult, Vec<String>), ParseError> {
        Err(ParseError::InvalidMonth)
    }
    fn build_naive(&self, res: &ParsingResult, default: NaiveDateTime) -> NaiveDateTime {
        Local::now().naive_local()
    }
    fn build_tzaware(&self, dt: NaiveDateTime, res: &ParsingResult, default: NaiveDateTime) -> DateTime<Utc> {
    fn build_tzaware(
        &self,
        dt: NaiveDateTime,
        res: &ParsingResult,
        default: NaiveDateTime,
    ) -> DateTime<Utc> {
        Utc::now()
    }
}
fn parse(timestr: String, parserinfo: Option<ParserInfo>) -> Result<DateTime<Utc>, ParseError> {
    let parserinfo = parserinfo.unwrap_or(ParserInfo::default());
    let parser = Parser::new(parserinfo);
fn parse_with_info(timestr: String, info: ParserInfo) -> Result<DateTime<Utc>, ParseError> {
    let parser = Parser::new(info);
    parser.parse(timestr, None, false, vec![])
}
fn parse(timestr: String) -> Result<DateTime<Utc>, ParseError> {
    parse_with_info(timestr, ParserInfo::default())
}
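
To make the reshaped entry points above easier to follow, here is a minimal usage sketch. It assumes the code sits inside lib.rs, where ParseError, ParserInfo, DateTime, and Utc are already in scope; the function name example_usage is illustrative only, and at this commit parse_with_tokens is still a stub that returns Err(ParseError::InvalidMonth), so neither call yields a real date yet.

// Illustrative sketch, not part of the commit.
fn example_usage() {
    // Convenience path: parse() fills in ParserInfo::default() and delegates.
    let simple: Result<DateTime<Utc>, ParseError> = parse("2018.5.15".to_owned());

    // Explicit path: build the ParserInfo yourself and call parse_with_info(),
    // which constructs the Parser and forwards to Parser::parse.
    let info = ParserInfo::default();
    let custom: Result<DateTime<Utc>, ParseError> =
        parse_with_info("May 5, 2018".to_owned(), info);

    let _ = (simple, custom);
}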


@@ -1,11 +0,0 @@
use ParseError;
use days_in_month;
#[test]
fn test_num_days_in_month() {
    assert_eq!(days_in_month(2000, 12), Ok(31));
    assert_eq!(days_in_month(2000, 2), Ok(29));
    assert_eq!(days_in_month(2000, 4), Ok(30));
    assert_eq!(days_in_month(2001, 2), Ok(28));
    assert_eq!(days_in_month(2000, 13), Err(ParseError::InvalidMonth))
}

25
src/tests/compat_parse.rs Normal file

@@ -0,0 +1,25 @@
use chrono::SecondsFormat;
use parse;
#[test]
fn test_python_compat() {
    assert_eq!(
        parse("2018.5.15".to_owned())
            .unwrap()
            .to_rfc3339_opts(SecondsFormat::Micros, false),
        "2018-05-15 04:00:00+00:00"
    );
    assert_eq!(
        parse("May 5, 2018".to_owned())
            .unwrap()
            .to_rfc3339_opts(SecondsFormat::Micros, false),
        "2018-05-05 04:00:00+00:00"
    );
    assert_eq!(
        parse("Mar. 5, 2018".to_owned())
            .unwrap()
            .to_rfc3339_opts(SecondsFormat::Micros, false),
        "2018-03-05 05:00:00+00:00"
    );
}


@@ -1,5 +1,5 @@
use super::Token;
use super::tokenize;
use Token;
use tokenize;
#[test]
fn test_python_compat() {

2
src/tests/mod.rs Normal file

@@ -0,0 +1,2 @@
mod compat_parse;
mod compat_split_string;