Resolve all remaining ruff line length errors

This commit is contained in:
Micah Jerome Ellison 2023-07-03 11:09:01 -07:00
parent ece44c79a3
commit 71e1b38aff
18 changed files with 83 additions and 56 deletions

View file

@@ -439,7 +439,7 @@ def parse_args(args: list[str] = []) -> argparse.Namespace:
 \t jrnl --config-file /home/user1/work_config.yaml
 \t - Use a personal config file stored on a thumb drive: \n
 \t jrnl --config-file /media/user1/my-thumb-drive/personal_config.yaml
-""", # noqa E501
+""", # noqa: E501
 )
 alternate_config.add_argument(
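
The first fix above is purely syntactic: `# noqa: E501` (with the colon) is the documented spelling for suppressing a specific rule on one line, which is presumably why the commit adds the colon to the old `# noqa E501` comment. A minimal, hypothetical sketch of the pattern for a line that cannot reasonably be wrapped:

```python
# Hypothetical module illustrating a rule-specific suppression: the URL line
# cannot be wrapped without breaking it, so only E501 is silenced on that line.
DOCS_URL = "https://example.com/a/deliberately/long/link/that/pushes/this/assignment/well/past/the/line/limit"  # noqa: E501

print(DOCS_URL)
```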

View file

@@ -142,7 +142,7 @@ def postconfig_encrypt(
 def postconfig_decrypt(
 args: argparse.Namespace, config: dict, original_config: dict
 ) -> int:
-"""Decrypts into new file. If filename is not set, we encrypt the journal file itself."""
+"""Decrypts to file. If filename is not set, we encrypt the journal file itself."""
 from jrnl.config import update_config
 from jrnl.install import save_config
 from jrnl.journals import open_journal

View file

@@ -37,9 +37,10 @@ def make_yaml_valid_dict(input: list) -> dict:
 The dict is created through the yaml loader, with the assumption that
 "input[0]: input[1]" is valid yaml.
-:param input: list of configuration keys in dot-notation and their respective values.
+:param input: list of configuration keys in dot-notation and their respective values
 :type input: list
-:return: A single level dict of the configuration keys in dot-notation and their respective desired values
+:return: A single level dict of the configuration keys in dot-notation and their
+respective desired values
 :rtype: dict
 """

View file

@@ -34,9 +34,9 @@ if TYPE_CHECKING:
 def run(args: "Namespace"):
 """
 Flow:
-1. Run standalone command if it doesn't require config (help, version, etc), then exit
+1. Run standalone command if it doesn't need config (help, version, etc), then exit
 2. Load config
-3. Run standalone command if it does require config (encrypt, decrypt, etc), then exit
+3. Run standalone command if it does need config (encrypt, decrypt, etc), then exit
 4. Load specified journal
 5. Start append mode, or search mode
 6. Perform actions with results from search mode (if needed)
@@ -181,7 +181,9 @@ def append_mode(args: "Namespace", config: dict, journal: "Journal", **kwargs) -
 def _get_template(args, config) -> str:
 # Read template file and pass as raw text into the composer
 logging.debug(
-f"Get template:\n--template: {args.template}\nfrom config: {config.get('template')}"
+"Get template:\n"
+f"--template: {args.template}\n"
+f"from config: {config.get('template')}"
 )
 template_path = args.template or config.get("template")
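
Most of the wrapping in this commit relies on Python's implicit concatenation of adjacent string literals: inside a parenthesized call, consecutive literals (plain strings and f-strings can be mixed) are joined into a single string at compile time, so a long log message can be split across short source lines without changing its output. A small self-contained sketch of the pattern; the variables below are stand-ins, not jrnl's actual `args`/`config` objects:

```python
import logging

logging.basicConfig(level=logging.DEBUG)

# Stand-in values for what jrnl would read from --template and the config file.
cli_template = "default.md"
config_template = "work.md"

# The three adjacent literals are concatenated into one string before the call,
# so a single debug record is emitted even though each source line stays short.
logging.debug(
    "Get template:\n"
    f"--template: {cli_template}\n"
    f"from config: {config_template}"
)
```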

View file

@@ -81,7 +81,8 @@ def get_template_path(template_path: str, jrnl_template_dir: str) -> str:
 actual_template_path = os.path.join(jrnl_template_dir, template_path)
 if not os.path.exists(actual_template_path):
 logging.debug(
-f"Couldn't open {actual_template_path}. Treating template path like a local / abs path."
+f"Couldn't open {actual_template_path}. "
+"Treating template path like a local / abs path."
 )
 actual_template_path = absolute_path(template_path)

View file

@@ -31,11 +31,11 @@ from jrnl.upgrade import is_old_version
 def upgrade_config(config_data: dict, alt_config_path: str | None = None) -> None:
-"""Checks if there are keys missing in a given config dict, and if so, updates the config file accordingly.
-This essentially automatically ports jrnl installations if new config parameters are introduced in later
-versions.
-Also checks for existence of and difference in version number between config dict and current jrnl version,
-and if so, update the config file accordingly.
+"""Checks if there are keys missing in a given config dict, and if so, updates the
+config file accordingly. This essentially automatically ports jrnl installations
+if new config parameters are introduced in later versions. Also checks for
+existence of and difference in version number between config dict
+and current jrnl version, and if so, update the config file accordingly.
 Supply alt_config_path if using an alternate config through --config-file."""
 default_config = get_default_config()
 missing_keys = set(default_config).difference(config_data)
@@ -167,7 +167,7 @@ def install() -> dict:
 def _initialize_autocomplete() -> None:
-# readline is not included in Windows Active Python and perhaps some other distributions
+# readline is not included in Windows Active Python and perhaps some other distss
 if sys.modules.get("readline"):
 import readline

View file

@@ -89,7 +89,7 @@ class Entry:
 }
 def __str__(self):
-"""Returns a string representation of the entry to be written into a journal file."""
+"""Returns string representation of the entry to be written to journal file."""
 date_str = self.date.strftime(self.journal.config["timeformat"])
 title = "[{}] {}".format(date_str, self.title.rstrip("\n "))
 if self.starred:
@@ -233,7 +233,7 @@ SENTENCE_SPLITTER = re.compile(
 \s+ # AND a sequence of required spaces.
 )
 |[\uFF01\uFF0E\uFF1F\uFF61\u3002] # CJK full/half width terminals usually do not have following spaces.
-""",
+""", # noqa: E501
 re.VERBOSE,
 )

View file

@@ -122,7 +122,8 @@ class Folder(Journal):
 @staticmethod
 def _get_files(journal_path: str) -> list[str]:
-"""Searches through sub directories starting with journal_path and find all text files that look like entries"""
+"""Searches through sub directories starting with journal_path and find all text
+files that look like entries"""
 for year_folder in Folder._get_year_folders(pathlib.Path(journal_path)):
 for month_folder in Folder._get_month_folders(year_folder):
 yield from Folder._get_day_files(month_folder)

View file

@@ -102,7 +102,7 @@ class Journal:
 return self.encryption_method.encrypt(text)
 def open(self, filename: str | None = None) -> "Journal":
-"""Opens the journal file defined in the config and parses it into a list of Entries.
+"""Opens the journal file and parses it into a list of Entries
 Entries have the form (date, title, body)."""
 filename = filename or self.config["journal"]
 dirname = os.path.dirname(filename)
@@ -144,7 +144,7 @@ class Journal:
 self._store(filename, text)
 def validate_parsing(self) -> bool:
-"""Confirms that the jrnl is still parsed correctly after being dumped to text."""
+"""Confirms that the jrnl is still parsed correctly after conversion to text."""
 new_entries = self._parse(self._to_text())
 return all(entry == new_entries[i] for i, entry in enumerate(self.entries))
@@ -225,8 +225,9 @@ class Journal:
 @property
 def tags(self) -> list[Tag]:
 """Returns a set of tuples (count, tag) for all tags present in the journal."""
-# Astute reader: should the following line leave you as puzzled as me the first time
-# I came across this construction, worry not and embrace the ensuing moment of enlightment.
+# Astute reader: should the following line leave you as puzzled as me the first
+# time I came across this construction, worry not and embrace the ensuing moment
+# of enlightment.
 tags = [tag for entry in self.entries for tag in set(entry.tags)]
 # To be read: [for entry in journal.entries: for tag in set(entry.tags): tag]
 tag_counts = {(tags.count(tag), tag) for tag in tags}
@@ -343,7 +344,8 @@ class Journal:
 def new_entry(self, raw: str, date=None, sort: bool = True) -> Entry:
 """Constructs a new entry from some raw text input.
-If a date is given, it will parse and use this, otherwise scan for a date in the input first.
+If a date is given, it will parse and use this, otherwise scan for a date in
+the input first.
 """
 raw = raw.replace("\\n ", "\n").replace("\\n", "\n")

View file

@@ -43,8 +43,8 @@ class MsgText(Enum):
 Do you want to encrypt your journal? (You can always change this later)
 """
 UseColorsQuestion = """
-Do you want jrnl to use colors when displaying entries? (You can always change this later)
-"""
+Do you want jrnl to use colors to display entries? (You can always change this later)
+""" # noqa: E501 - the line is still under 88 when dedented
 YesOrNoPromptDefaultYes = "[Y/n]"
 YesOrNoPromptDefaultNo = "[y/N]"
 ContinueUpgrade = "Continue upgrading jrnl?"
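
The `UseColorsQuestion` change shows a case where suppression is justified rather than avoided: the source line only exceeds the limit because of the indentation baked into the triple-quoted string, and the comment after `# noqa: E501` records that reasoning. A rough, hypothetical re-creation of the idea outside the real enum:

```python
from textwrap import dedent

# Hypothetical constant mimicking the MsgText member: the raw line is long only
# because of the indentation inside the triple-quoted string, so E501 is
# suppressed and the reason is noted right next to the directive.
USE_COLORS_QUESTION = """
        Do you want jrnl to use colors to display entries? (You can always change this later)
        """  # noqa: E501 - the line is still under 88 when dedented

print(dedent(USE_COLORS_QUESTION).strip())
```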

View file

@@ -56,7 +56,8 @@ def _recursively_apply(tree: dict, nodes: list, override_value) -> dict:
 Args:
 config (dict): Configuration to modify
-nodes (list): Vector of override keys; the length of the vector indicates tree depth
+nodes (list): Vector of override keys; the length of the vector indicates tree
+depth
 override_value (str): Runtime override passed from the command-line
 """
 key = nodes[0]

View file

@@ -18,7 +18,7 @@ if TYPE_CHECKING:
 class FancyExporter(TextExporter):
-"""This Exporter can convert entries and journals into text with unicode box drawing characters."""
+"""This Exporter converts entries and journals into text with unicode boxes."""
 names = ["fancy", "boxed"]
 extension = "txt"

View file

@@ -12,7 +12,7 @@ if TYPE_CHECKING:
 class TagExporter(TextExporter):
-"""This Exporter can lists the tags for entries and journals, exported as a plain text file."""
+"""This Exporter lists the tags for entries and journals."""
 names = ["tags"]
 extension = "tags"

View file

@@ -10,7 +10,8 @@ if TYPE_CHECKING:
 def get_tags_count(journal: "Journal") -> set[tuple[int, str]]:
 """Returns a set of tuples (count, tag) for all tags present in the journal."""
 # Astute reader: should the following line leave you as puzzled as me the first time
-# I came across this construction, worry not and embrace the ensuing moment of enlightment.
+# I came across this construction, worry not and embrace the ensuing moment of
+# enlightment.
 tags = [tag for entry in journal.entries for tag in set(entry.tags)]
 # To be read: [for entry in journal.entries: for tag in set(entry.tags): tag]
 tag_counts = {(tags.count(tag), tag) for tag in tags}

View file

@@ -18,14 +18,15 @@ if TYPE_CHECKING:
 class YAMLExporter(TextExporter):
-"""This Exporter can convert entries and journals into Markdown formatted text with YAML front matter."""
+"""This Exporter converts entries and journals into Markdown formatted text with
+YAML front matter."""
 names = ["yaml"]
 extension = "md"
 @classmethod
 def export_entry(cls, entry: "Entry", to_multifile: bool = True) -> str:
-"""Returns a markdown representation of a single entry, with YAML front matter."""
+"""Returns a markdown representation of an entry, with YAML front matter."""
 if to_multifile is False:
 raise JrnlException(Message(MsgText.YamlMustBeDirectory, MsgStyle.ERROR))
@@ -117,7 +118,14 @@ class YAMLExporter(TextExporter):
 # source directory is entry.journal.config['journal']
 # output directory is...?
-return "{start}\ntitle: {title}\ndate: {date}\nstarred: {starred}\ntags: {tags}\n{dayone}body: |{body}{end}".format(
+return (
+"{start}\n"
+"title: {title}\n"
+"date: {date}\n"
+"starred: {starred}\n"
+"tags: {tags}\n"
+"{dayone}body: |{body}{end}"
+).format(
 start="---",
 date=date_str,
 title=entry.title,
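
The exporter change applies the same adjacent-literal trick to a template string that is formatted immediately: the parenthesized pieces collapse into one format string, and `.format()` chains onto the closing parenthesis, so the rendered front matter is identical to what the old one-liner produced. A trimmed-down sketch with made-up field values:

```python
# Reduced version of the YAML front-matter template; the placeholder values are
# illustrative and not jrnl's real entry fields.
front_matter = (
    "{start}\n"
    "title: {title}\n"
    "date: {date}\n"
    "starred: {starred}\n"
    "{end}"
).format(
    start="---",
    title="An example entry",
    date="2023-07-03 11:09",
    starred=False,
    end="---",
)

print(front_matter)
```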

View file

@@ -34,8 +34,8 @@ def parse(
 elif isinstance(date_str, datetime.datetime):
 return date_str
-# Don't try to parse anything with 6 or fewer characters and was parsed from the existing journal.
-# It's probably a markdown footnote
+# Don't try to parse anything with 6 or fewer characters and was parsed from the
+# existing journal. It's probably a markdown footnote
 if len(date_str) <= 6 and bracketed:
 return None
@@ -82,9 +82,9 @@ def parse(
 else:
 date = datetime.datetime(*date[:6])
-# Ugly heuristic: if the date is more than 4 weeks in the future, we got the year wrong.
-# Rather than this, we would like to see parsedatetime patched so we can tell it to prefer
-# past dates
+# Ugly heuristic: if the date is more than 4 weeks in the future, we got the year
+# wrong. Rather than this, we would like to see parsedatetime patched so we can
+# tell it to prefer past dates
 dt = datetime.datetime.now() - date
 if dt.days < -28 and not year_present:
 date = date.replace(date.year - 1)

View file

@@ -38,38 +38,41 @@ def output_should_match(regex, cli_run):
 assert matches, f"\nRegex didn't match:\n{regex}\n{str(out)}\n{str(matches)}"
-@then(parse("the output {it_should:Should} contain\n{expected_output}", SHOULD_DICT))
-@then(parse('the output {it_should:Should} contain "{expected_output}"', SHOULD_DICT))
+@then(parse("the output {it_should:Should} contain\n{expected}", SHOULD_DICT))
+@then(parse('the output {it_should:Should} contain "{expected}"', SHOULD_DICT))
 @then(
 parse(
-"the {which_output_stream} output {it_should:Should} contain\n{expected_output}",
+"the {which_output_stream} output {it_should:Should} contain\n{expected}",
 SHOULD_DICT,
 )
 )
 @then(
 parse(
-'the {which_output_stream} output {it_should:Should} contain "{expected_output}"',
+'the {which_output_stream} output {it_should:Should} contain "{expected}"',
 SHOULD_DICT,
 )
 )
-def output_should_contain(expected_output, which_output_stream, cli_run, it_should):
-output_str = f"\nEXPECTED:\n{expected_output}\n\nACTUAL STDOUT:\n{cli_run['stdout']}\n\nACTUAL STDERR:\n{cli_run['stderr']}"
-assert expected_output
+def output_should_contain(expected, which_output_stream, cli_run, it_should):
+output_str = (
+f"\nEXPECTED:\n{expected}\n\n"
+f"ACTUAL STDOUT:\n{cli_run['stdout']}\n\n"
+f"ACTUAL STDERR:\n{cli_run['stderr']}"
+)
+assert expected
 if which_output_stream is None:
-assert ((expected_output in cli_run["stdout"]) == it_should) or (
-(expected_output in cli_run["stderr"]) == it_should
+assert ((expected in cli_run["stdout"]) == it_should) or (
+(expected in cli_run["stderr"]) == it_should
 ), output_str
 elif which_output_stream == "standard":
-assert (expected_output in cli_run["stdout"]) == it_should, output_str
+assert (expected in cli_run["stdout"]) == it_should, output_str
 elif which_output_stream == "error":
-assert (expected_output in cli_run["stderr"]) == it_should, output_str
+assert (expected in cli_run["stderr"]) == it_should, output_str
 else:
-assert (
-expected_output in cli_run[which_output_stream]
-) == it_should, output_str
+assert (expected in cli_run[which_output_stream]) == it_should, output_str
 @then(parse("the output should not contain\n{expected_output}"))
@@ -119,7 +122,8 @@ def output_should_be_columns_wide(cli_run, width):
 @then(
 parse(
-'the default journal "{journal_file}" should be in the "{journal_dir}" directory'
+'the default journal "{journal_file}" '
+'should be in the "{journal_dir}" directory'
 )
 )
 def default_journal_location(journal_file, journal_dir, config_on_disk, temp_dir):
@@ -135,13 +139,15 @@ def default_journal_location(journal_file, journal_dir, config_on_disk, temp_dir
 @then(
 parse(
-'the config for journal "{journal_name}" {it_should:Should} contain "{some_yaml}"',
+'the config for journal "{journal_name}" '
+'{it_should:Should} contain "{some_yaml}"',
 SHOULD_DICT,
 )
 )
 @then(
 parse(
-'the config for journal "{journal_name}" {it_should:Should} contain\n{some_yaml}',
+'the config for journal "{journal_name}" '
+"{it_should:Should} contain\n{some_yaml}",
 SHOULD_DICT,
 )
 )
@@ -164,13 +170,15 @@ def config_var_on_disk(config_on_disk, journal_name, it_should, some_yaml):
 @then(
 parse(
-'the config in memory for journal "{journal_name}" {it_should:Should} contain "{some_yaml}"',
+'the config in memory for journal "{journal_name}" '
+'{it_should:Should} contain "{some_yaml}"',
 SHOULD_DICT,
 )
 )
 @then(
 parse(
-'the config in memory for journal "{journal_name}" {it_should:Should} contain\n{some_yaml}',
+'the config in memory for journal "{journal_name}" '
+"{it_should:Should} contain\n{some_yaml}",
 SHOULD_DICT,
 )
 )
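
The test-step changes combine two tactics: the capture name `expected_output` becomes the shorter `expected` (the placeholder in the step pattern and the step function's parameter must be renamed together, since pytest-bdd passes captures by name), and long `parse()` patterns are split into adjacent string literals that concatenate back into the original pattern. A hedged sketch with a made-up step, omitting jrnl's `SHOULD_DICT` extra types:

```python
from pytest_bdd import then
from pytest_bdd.parsers import parse


# The two adjacent literals join into one step pattern; the capture name
# "expected" must match the function parameter below. This step is purely
# illustrative and does not exist in jrnl's test suite.
@then(
    parse(
        'the example output should contain "{expected}" '
        'on the "{stream}" stream'
    )
)
def example_output_should_contain(expected, stream):
    assert expected and stream in ("standard", "error")
```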

View file

@@ -242,7 +242,9 @@ def test_color_override():
 def test_multiple_overrides():
 parsed_args = cli_as_dict(
-'--config-override colors.title green --config-override editor "nano" --config-override journal.scratchpad "/tmp/scratchpad"'
+"--config-override colors.title green "
+'--config-override editor "nano" '
+'--config-override journal.scratchpad "/tmp/scratchpad"'
 )
 assert parsed_args == expected_args(
 config_override=[