From 0d3b64674966020d343c6dc6e9dde131c6ff683b Mon Sep 17 00:00:00 2001 From: Bradlee Speice Date: Fri, 3 Aug 2018 21:56:32 -0400 Subject: [PATCH] Add notes on codegen, and regenerate --- build_pycompat.py | 4 ++++ build_pycompat_tokenizer.py | 4 ++++ src/tests/pycompat_parser.rs | 4 ++++ src/tests/pycompat_tokenizer.rs | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/build_pycompat.py b/build_pycompat.py index 4ea3ecb..a5e79aa 100644 --- a/build_pycompat.py +++ b/build_pycompat.py @@ -189,6 +189,10 @@ def test_fuzzy_simple(i, s): # Here lies all the ugly junk. TEST_HEADER = ''' +//! This code has been generated by running the `build_pycompat.py` script +//! in the repository root. Please do not edit it, as your edits will be destroyed +//! upon re-running code generation. + extern crate chrono; use chrono::Datelike; diff --git a/build_pycompat_tokenizer.py b/build_pycompat_tokenizer.py index ea0ed8c..0832618 100644 --- a/build_pycompat_tokenizer.py +++ b/build_pycompat_tokenizer.py @@ -24,6 +24,10 @@ fn test_tokenize{i}() {{ TEST_HEADER = ''' +//! This code has been generated by running the `build_pycompat_tokenizer.py` script +//! in the repository root. Please do not edit it, as your edits will be destroyed +//! upon re-running code generation. + use tokenize::Tokenizer; fn tokenize_assert(test_str: &str, comparison: Vec<&str>) { diff --git a/src/tests/pycompat_parser.rs b/src/tests/pycompat_parser.rs index 485bfa1..ca32d51 100644 --- a/src/tests/pycompat_parser.rs +++ b/src/tests/pycompat_parser.rs @@ -1,4 +1,8 @@ +//! This code has been generated by running the `build_pycompat.py` script +//! in the repository root. Please do not edit it, as your edits will be destroyed +//! upon re-running code generation. + extern crate chrono; use chrono::Datelike; diff --git a/src/tests/pycompat_tokenizer.rs b/src/tests/pycompat_tokenizer.rs index fbf35c8..c2077d1 100644 --- a/src/tests/pycompat_tokenizer.rs +++ b/src/tests/pycompat_tokenizer.rs @@ -1,4 +1,8 @@ +//! This code has been generated by running the `build_pycompat_tokenizer.py` script +//! in the repository root. Please do not edit it, as your edits will be destroyed +//! upon re-running code generation. + use tokenize::Tokenizer; fn tokenize_assert(test_str: &str, comparison: Vec<&str>) {