Make sure testing cleans up after itself (#940)

This adds the ability to run commands in a cache directory without the
test writer knowing where the cache directory is located. This will let
us expand later if we want to start using system temp folders, without
having to rewrite any of our tests.
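
The idea is simple enough to sketch here (illustration only, not the code in this diff; the names CACHE_ROOT and resolve_command are made up for the example): a test step refers to a cache directory only by name, and the harness expands a {cache_dir} placeholder to whatever real path it has chosen before the command is split and run.

    import os
    import shlex

    # All cache dirs live under one root that only the harness knows about;
    # swapping this for a system temp dir later would not touch any tests.
    CACHE_ROOT = os.path.join("features", "cache")

    def resolve_command(command, cache_dir_name=None):
        """Expand the {cache_dir} placeholder and split the command."""
        if cache_dir_name is not None:
            cache_dir = os.path.join(CACHE_ROOT, cache_dir_name)
            command = command.format(cache_dir=cache_dir)
        return shlex.split(command)

    # resolve_command('jrnl --export yaml -o {cache_dir}', "exported_journal")
    # -> ['jrnl', '--export', 'yaml', '-o', 'features/cache/exported_journal']  (on a POSIX system)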

* clean up extra directories after running behave
* clean up white space issues
* move repeated code into function
* clean up behave code for creating cache directories
* fix Windows shell parsing in our test suite

Co-authored-by: Micah Jerome Ellison <micah.jerome.ellison@gmail.com>
Jonathan Wren 2020-05-06 18:13:36 -07:00 committed by GitHub
parent c5f40f1d15
commit ecb4562c29
5 changed files with 68 additions and 43 deletions

View file

@@ -3,6 +3,13 @@ import os
 import sys
 
 
+def clean_all_working_dirs():
+    for folder in ("configs", "journals", "cache"):
+        working_dir = os.path.join("features", folder)
+        if os.path.exists(working_dir):
+            shutil.rmtree(working_dir)
+
+
 def before_feature(context, feature):
     # add "skip" tag
     # https://stackoverflow.com/a/42721605/4276230
@@ -18,10 +25,7 @@ def before_feature(context, feature):
 def before_scenario(context, scenario):
     """Before each scenario, backup all config and journal test data."""
     # Clean up in case something went wrong
-    for folder in ("configs", "journals"):
-        working_dir = os.path.join("features", folder)
-        if os.path.exists(working_dir):
-            shutil.rmtree(working_dir)
+    clean_all_working_dirs()
 
     for folder in ("configs", "journals"):
         original = os.path.join("features", "data", folder)
@@ -48,7 +52,4 @@ def before_scenario(context, scenario):
 def after_scenario(context, scenario):
     """After each scenario, restore all test data and remove working_dirs."""
-    for folder in ("configs", "journals"):
-        working_dir = os.path.join("features", folder)
-        if os.path.exists(working_dir):
-            shutil.rmtree(working_dir)
+    clean_all_working_dirs()

View file

@@ -122,10 +122,16 @@ Feature: Exporting a Journal
 
     Scenario: Export to yaml
         Given we use the config "tags.yaml"
-        And we created a directory named "exported_journal"
-        When we run "jrnl --export yaml -o exported_journal"
-        Then "exported_journal" should contain the files ["2013-04-09_i-have-an-idea.md", "2013-06-10_i-met-with-dan.md"]
-        And the content of exported yaml "exported_journal/2013-04-09_i-have-an-idea.md" should be
+        And we create cache directory "exported_journal"
+        When we run "jrnl --export yaml -o {cache_dir}" with cache directory "exported_journal"
+        Then cache directory "exported_journal" should contain the files
+        """
+        [
+        "2013-04-09_i-have-an-idea.md",
+        "2013-06-10_i-met-with-dan.md"
+        ]
+        """
+        And the content of file "2013-04-09_i-have-an-idea.md" in cache directory "exported_journal" should be
         """
         title: I have an @idea:
         date: 2013-04-09 15:39

View file

@@ -76,18 +76,18 @@ Feature: Zapped bugs should stay dead.
         Then the output should not contain "But I'm better."
 
     Scenario: Create entry using day of the week as entry date.
         Given we use the config "basic.yaml"
         When we run "jrnl monday: This is an entry on a Monday."
         Then we should see the message "Entry added"
         When we run "jrnl -1"
         Then the output should contain "monday at 9am" in the local time
         Then the output should contain "This is an entry on a Monday."
 
     Scenario: Create entry using day of the week abbreviations as entry date.
         Given we use the config "basic.yaml"
         When we run "jrnl fri: This is an entry on a Friday."
         Then we should see the message "Entry added"
         When we run "jrnl -1"
         Then the output should contain "friday at 9am" in the local time
 
     Scenario: Displaying entries using -on today should display entries created today.
@@ -148,10 +148,17 @@ Feature: Zapped bugs should stay dead.
     # See issues #768 and #881
     Scenario: Add a blank line to YAML export is there isn't one already
         Given we use the config "deletion.yaml"
-        And we created a directory named "bug768"
-        When we run "jrnl --export yaml -o bug768"
-        Then "bug768" should contain the files ["2019-10-29_first-entry.md", "2019-10-29_second-entry.md", "2019-10-29_third-entry.md"]
-        And the content of exported yaml "bug768/2019-10-29_third-entry.md" should be
+        And we create cache directory "bug768"
+        When we run "jrnl --export yaml -o {cache_dir}" with cache directory "bug768"
+        Then cache directory "bug768" should contain the files
+        """
+        [
+        "2019-10-29_first-entry.md",
+        "2019-10-29_second-entry.md",
+        "2019-10-29_third-entry.md"
+        ]
+        """
+        And the content of file "2019-10-29_third-entry.md" in cache directory "bug768" should be
         """
         title: Third entry.
         date: 2019-10-29 11:13

View file

@@ -45,9 +45,7 @@ keyring.set_keyring(TestKeyring())
 def ushlex(command):
-    if sys.version_info[0] == 3:
-        return shlex.split(command)
-    return map(lambda s: s.decode("UTF8"), shlex.split(command.encode("utf8")))
+    return shlex.split(command, posix="win32" not in sys.platform)
 
 
 def read_journal(journal_name="default"):
@@ -151,7 +149,12 @@ def run_with_input(context, command, inputs=""):
 @when('we run "{command}"')
-def run(context, command):
+@when('we run "{command}" with cache directory "{cache_dir}"')
+def run(context, command, cache_dir=None):
+    if cache_dir is not None:
+        cache_dir = os.path.join("features", "cache", cache_dir)
+        command = command.format(cache_dir=cache_dir)
+
     args = ushlex(command)[1:]
     try:
         cli.run(args or None)
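
For reference, the posix flag in the new ushlex above matters because POSIX-style splitting treats backslashes as escape characters, which mangles Windows paths. A quick illustration with plain shlex (not part of this diff):

    import shlex

    cmd = r'jrnl --export yaml -o features\cache\bug768'

    # POSIX mode: backslashes are escapes, so the Windows-style path is mangled.
    print(shlex.split(cmd, posix=True))
    # ['jrnl', '--export', 'yaml', '-o', 'featurescachebug768']

    # Non-POSIX mode (what the suite now uses when sys.platform is "win32"):
    # backslashes survive intact.
    print(shlex.split(cmd, posix=False))
    # ['jrnl', '--export', 'yaml', '-o', 'features\\cache\\bug768']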

View file

@@ -85,17 +85,24 @@ def assert_xml_output_tags(context, expected_tags_json_list):
     assert actual_tags == set(expected_tags), [actual_tags, set(expected_tags)]
 
 
-@given('we created a directory named "{dir_name}"')
+@given('we create cache directory "{dir_name}"')
 def create_directory(context, dir_name):
-    if os.path.exists(dir_name):
-        shutil.rmtree(dir_name)
-    os.mkdir(dir_name)
+    working_dir = os.path.join("features", "cache", dir_name)
+    if os.path.exists(working_dir):
+        shutil.rmtree(working_dir)
+    os.makedirs(working_dir)
 
 
-@then('"{dir_name}" should contain the files {expected_files_json_list}')
-def assert_dir_contains_files(context, dir_name, expected_files_json_list):
-    actual_files = os.listdir(dir_name)
-    expected_files = json.loads(expected_files_json_list)
+@then('cache directory "{dir_name}" should contain the files')
+@then(
+    'cache directory "{dir_name}" should contain the files {expected_files_json_list}'
+)
+def assert_dir_contains_files(context, dir_name, expected_files_json_list="[]"):
+    working_dir = os.path.join("features", "cache", dir_name)
+    actual_files = os.listdir(working_dir)
+    expected_files = context.text or expected_files_json_list
+    expected_files = json.loads(expected_files)
 
     # sort to deal with inconsistent default file ordering on different OS's
     actual_files.sort()
@@ -104,11 +111,12 @@ def assert_dir_contains_files(context, dir_name, expected_files_json_list):
     assert actual_files == expected_files, [actual_files, expected_files]
 
 
-@then('the content of exported yaml "{file_path}" should be')
-def assert_exported_yaml_file_content(context, file_path):
+@then('the content of file "{file_path}" in cache directory "{cache_dir}" should be')
+def assert_exported_yaml_file_content(context, file_path, cache_dir):
     expected_content = context.text.strip().splitlines()
+    full_file_path = os.path.join("features", "cache", cache_dir, file_path)
 
-    with open(file_path, "r") as f:
+    with open(full_file_path, "r") as f:
         actual_content = f.read().strip().splitlines()
 
     for actual_line, expected_line in zip(actual_content, expected_content):