mirror of
https://github.com/jrnl-org/jrnl.git
synced 2025-05-10 16:48:31 +02:00
Make sure testing cleans up after itself (#940)
This adds the ability to run commands in a cache directory without the test writer knowing where the cache directory is located. This will let us expand later if we want to start using system temp folders, without having to rewrite any of our tests. * clean up extra directories after running behave * clean up white space issues * move repeated code into function * clean up behave code for creating cache directories * Fix for windows shell parsing in our test suite Co-authored-by: Micah Jerome Ellison <micah.jerome.ellison@gmail.com>
This commit is contained in:
parent
c5f40f1d15
commit
ecb4562c29
5 changed files with 68 additions and 43 deletions
|
@ -3,6 +3,13 @@ import os
|
|||
import sys
|
||||
|
||||
|
||||
def clean_all_working_dirs():
    """Delete every per-test working directory left behind by a run.

    Removes features/configs, features/journals and features/cache so
    each scenario starts from a clean slate.
    """
    for subdir in ("configs", "journals", "cache"):
        path = os.path.join("features", subdir)
        if os.path.exists(path):
            shutil.rmtree(path)
|
||||
|
||||
|
||||
def before_feature(context, feature):
|
||||
# add "skip" tag
|
||||
# https://stackoverflow.com/a/42721605/4276230
|
||||
|
@ -18,10 +25,7 @@ def before_feature(context, feature):
|
|||
def before_scenario(context, scenario):
|
||||
"""Before each scenario, backup all config and journal test data."""
|
||||
# Clean up in case something went wrong
|
||||
for folder in ("configs", "journals"):
|
||||
working_dir = os.path.join("features", folder)
|
||||
if os.path.exists(working_dir):
|
||||
shutil.rmtree(working_dir)
|
||||
clean_all_working_dirs()
|
||||
|
||||
for folder in ("configs", "journals"):
|
||||
original = os.path.join("features", "data", folder)
|
||||
|
@ -48,7 +52,4 @@ def before_scenario(context, scenario):
|
|||
|
||||
def after_scenario(context, scenario):
|
||||
"""After each scenario, restore all test data and remove working_dirs."""
|
||||
for folder in ("configs", "journals"):
|
||||
working_dir = os.path.join("features", folder)
|
||||
if os.path.exists(working_dir):
|
||||
shutil.rmtree(working_dir)
|
||||
clean_all_working_dirs()
|
||||
|
|
|
@ -122,10 +122,16 @@ Feature: Exporting a Journal
|
|||
|
||||
Scenario: Export to yaml
|
||||
Given we use the config "tags.yaml"
|
||||
And we created a directory named "exported_journal"
|
||||
When we run "jrnl --export yaml -o exported_journal"
|
||||
Then "exported_journal" should contain the files ["2013-04-09_i-have-an-idea.md", "2013-06-10_i-met-with-dan.md"]
|
||||
And the content of exported yaml "exported_journal/2013-04-09_i-have-an-idea.md" should be
|
||||
And we create cache directory "exported_journal"
|
||||
When we run "jrnl --export yaml -o {cache_dir}" with cache directory "exported_journal"
|
||||
Then cache directory "exported_journal" should contain the files
|
||||
"""
|
||||
[
|
||||
"2013-04-09_i-have-an-idea.md",
|
||||
"2013-06-10_i-met-with-dan.md"
|
||||
]
|
||||
"""
|
||||
And the content of file "2013-04-09_i-have-an-idea.md" in cache directory "exported_journal" should be
|
||||
"""
|
||||
title: I have an @idea:
|
||||
date: 2013-04-09 15:39
|
||||
|
|
|
@ -148,10 +148,17 @@ Feature: Zapped bugs should stay dead.
|
|||
# See issues #768 and #881
|
||||
Scenario: Add a blank line to YAML export if there isn't one already
|
||||
Given we use the config "deletion.yaml"
|
||||
And we created a directory named "bug768"
|
||||
When we run "jrnl --export yaml -o bug768"
|
||||
Then "bug768" should contain the files ["2019-10-29_first-entry.md", "2019-10-29_second-entry.md", "2019-10-29_third-entry.md"]
|
||||
And the content of exported yaml "bug768/2019-10-29_third-entry.md" should be
|
||||
And we create cache directory "bug768"
|
||||
When we run "jrnl --export yaml -o {cache_dir}" with cache directory "bug768"
|
||||
Then cache directory "bug768" should contain the files
|
||||
"""
|
||||
[
|
||||
"2019-10-29_first-entry.md",
|
||||
"2019-10-29_second-entry.md",
|
||||
"2019-10-29_third-entry.md"
|
||||
]
|
||||
"""
|
||||
And the content of file "2019-10-29_third-entry.md" in cache directory "bug768" should be
|
||||
"""
|
||||
title: Third entry.
|
||||
date: 2019-10-29 11:13
|
||||
|
|
|
@ -45,9 +45,7 @@ keyring.set_keyring(TestKeyring())
|
|||
|
||||
|
||||
def ushlex(command):
    """Split a shell-style *command* string into an argument list.

    On Windows (``sys.platform`` contains ``win32``) we split in
    non-POSIX mode so backslash path separators are not treated as
    escape characters; everywhere else standard POSIX splitting is used.

    The diff fused the removed Python-2 compatibility branch (version
    check plus byte-decoding ``map``) with the added implementation;
    only the platform-aware split survives post-commit.
    """
    return shlex.split(command, posix="win32" not in sys.platform)
|
||||
|
||||
|
||||
def read_journal(journal_name="default"):
|
||||
|
@ -151,7 +149,12 @@ def run_with_input(context, command, inputs=""):
|
|||
|
||||
|
||||
@when('we run "{command}"')
|
||||
def run(context, command):
|
||||
@when('we run "{command}" with cache directory "{cache_dir}"')
|
||||
def run(context, command, cache_dir=None):
|
||||
if cache_dir is not None:
|
||||
cache_dir = os.path.join("features", "cache", cache_dir)
|
||||
command = command.format(cache_dir=cache_dir)
|
||||
|
||||
args = ushlex(command)[1:]
|
||||
try:
|
||||
cli.run(args or None)
|
||||
|
|
|
@ -85,17 +85,24 @@ def assert_xml_output_tags(context, expected_tags_json_list):
|
|||
assert actual_tags == set(expected_tags), [actual_tags, set(expected_tags)]
|
||||
|
||||
|
||||
@given('we create cache directory "{dir_name}"')
def create_directory(context, dir_name):
    """Create a fresh cache directory under features/cache for a scenario.

    The diff fused the removed step (which made ``dir_name`` directly in
    the current directory with ``os.mkdir``) with the added step; running
    the fused text would try to create the directory twice in two places.
    Post-commit behavior: the directory lives under features/cache so the
    test writer never needs to know the cache location, any stale copy is
    removed first, and ``os.makedirs`` creates intermediate directories.
    """
    working_dir = os.path.join("features", "cache", dir_name)
    if os.path.exists(working_dir):
        shutil.rmtree(working_dir)  # start from a clean directory
    os.makedirs(working_dir)
|
||||
|
||||
|
||||
@then('"{dir_name}" should contain the files {expected_files_json_list}')
|
||||
def assert_dir_contains_files(context, dir_name, expected_files_json_list):
|
||||
actual_files = os.listdir(dir_name)
|
||||
expected_files = json.loads(expected_files_json_list)
|
||||
@then('cache directory "{dir_name}" should contain the files')
|
||||
@then(
|
||||
'cache directory "{dir_name}" should contain the files {expected_files_json_list}'
|
||||
)
|
||||
def assert_dir_contains_files(context, dir_name, expected_files_json_list="[]"):
|
||||
working_dir = os.path.join("features", "cache", dir_name)
|
||||
actual_files = os.listdir(working_dir)
|
||||
|
||||
expected_files = context.text or expected_files_json_list
|
||||
expected_files = json.loads(expected_files)
|
||||
|
||||
# sort to deal with inconsistent default file ordering on different OS's
|
||||
actual_files.sort()
|
||||
|
@ -104,11 +111,12 @@ def assert_dir_contains_files(context, dir_name, expected_files_json_list):
|
|||
assert actual_files == expected_files, [actual_files, expected_files]
|
||||
|
||||
|
||||
@then('the content of exported yaml "{file_path}" should be')
|
||||
def assert_exported_yaml_file_content(context, file_path):
|
||||
@then('the content of file "{file_path}" in cache directory "{cache_dir}" should be')
|
||||
def assert_exported_yaml_file_content(context, file_path, cache_dir):
|
||||
expected_content = context.text.strip().splitlines()
|
||||
full_file_path = os.path.join("features", "cache", cache_dir, file_path)
|
||||
|
||||
with open(file_path, "r") as f:
|
||||
with open(full_file_path, "r") as f:
|
||||
actual_content = f.read().strip().splitlines()
|
||||
|
||||
for actual_line, expected_line in zip(actual_content, expected_content):
|
||||
|
|
Loading…
Add table
Reference in a new issue