1
0
mirror of https://github.com/bspeice/dtparse synced 2024-12-22 04:18:09 -05:00

Add notes on codegen, and regenerate

This commit is contained in:
Bradlee Speice 2018-08-03 21:56:32 -04:00
parent 50fe2c01d4
commit 0d3b646749
4 changed files with 16 additions and 0 deletions

View File

@ -189,6 +189,10 @@ def test_fuzzy_simple(i, s):
# Here lies all the ugly junk.
TEST_HEADER = '''
//! This code has been generated by running the `build_pycompat.py` script
//! in the repository root. Please do not edit it, as your edits will be destroyed
//! upon re-running code generation.
extern crate chrono;
use chrono::Datelike;

View File

@ -24,6 +24,10 @@ fn test_tokenize{i}() {{
TEST_HEADER = '''
//! This code has been generated by running the `build_pycompat_tokenizer.py` script
//! in the repository root. Please do not edit it, as your edits will be destroyed
//! upon re-running code generation.
use tokenize::Tokenizer;
fn tokenize_assert(test_str: &str, comparison: Vec<&str>) {

View File

@ -1,4 +1,8 @@
//! This code has been generated by running the `build_pycompat.py` script
//! in the repository root. Please do not edit it, as your edits will be destroyed
//! upon re-running code generation.
extern crate chrono;
use chrono::Datelike;

View File

@ -1,4 +1,8 @@
//! This code has been generated by running the `build_pycompat_tokenizer.py` script
//! in the repository root. Please do not edit it, as your edits will be destroyed
//! upon re-running code generation.
use tokenize::Tokenizer;
fn tokenize_assert(test_str: &str, comparison: Vec<&str>) {