(#770) run black formatter on codebase for standardization

Jonathan Wren 2019-12-21 11:47:02 -08:00
parent 9664924096
commit 46c4c88231
24 changed files with 850 additions and 427 deletions
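The change itself is mechanical: black normalizes string quoting, line wrapping, and trailing commas without altering behavior. As a rough sketch of how such a pass is usually run and verified (the exact invocation used for this commit isn't recorded here, and the target paths are an assumption):

    python -m black .            # rewrite files in place
    python -m black --check .    # confirm nothing is left to reformat

Where black's output would hurt readability, the code opts out explicitly with # fmt: off / # fmt: on markers, as the run_with_input test step below does (it links to psf/black issue 557).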

View file

@@ -5,8 +5,11 @@ from jrnl import cli, install, Journal, util, plugins
 from jrnl import __version__
 from dateutil import parser as date_parser
 from collections import defaultdict
-try: import parsedatetime.parsedatetime_consts as pdt
-except ImportError: import parsedatetime as pdt
+try:
+    import parsedatetime.parsedatetime_consts as pdt
+except ImportError:
+    import parsedatetime as pdt
 import time
 import os
 import json
@@ -17,7 +20,7 @@ import shlex
 import sys
 consts = pdt.Constants(usePyICU=False)
 consts.DOWParseStyle = -1 # Prefers past weekdays
 CALENDAR = pdt.Calendar(consts)
@@ -44,23 +47,25 @@ keyring.set_keyring(TestKeyring())
 def ushlex(command):
     if sys.version_info[0] == 3:
         return shlex.split(command)
-    return map(lambda s: s.decode('UTF8'), shlex.split(command.encode('utf8')))
+    return map(lambda s: s.decode("UTF8"), shlex.split(command.encode("utf8")))
 def read_journal(journal_name="default"):
     config = util.load_config(install.CONFIG_FILE_PATH)
-    with open(config['journals'][journal_name]) as journal_file:
+    with open(config["journals"][journal_name]) as journal_file:
         journal = journal_file.read()
     return journal
 def open_journal(journal_name="default"):
     config = util.load_config(install.CONFIG_FILE_PATH)
-    journal_conf = config['journals'][journal_name]
-    if type(journal_conf) is dict: # We can override the default config on a by-journal basis
+    journal_conf = config["journals"][journal_name]
+    if (
+        type(journal_conf) is dict
+    ): # We can override the default config on a by-journal basis
         config.update(journal_conf)
     else: # But also just give them a string to point to the journal file
-        config['journal'] = journal_conf
+        config["journal"] = journal_conf
     return Journal.open_journal(journal_name, config)
@@ -70,14 +75,15 @@ def set_config(context, config_file):
     install.CONFIG_FILE_PATH = os.path.abspath(full_path)
     if config_file.endswith("yaml"):
         # Add jrnl version to file for 2.x journals
-        with open(install.CONFIG_FILE_PATH, 'a') as cf:
+        with open(install.CONFIG_FILE_PATH, "a") as cf:
             cf.write("version: {}".format(__version__))
 @when('we open the editor and enter ""')
 @when('we open the editor and enter "{text}"')
 def open_editor_and_enter(context, text=""):
-    text = (text or context.text)
+    text = text or context.text
     def _mock_editor_function(command):
         tmpfile = command[-1]
         with open(tmpfile, "w+") as f:
@@ -88,7 +94,7 @@ def open_editor_and_enter(context, text=""):
         return tmpfile
-    with patch('subprocess.call', side_effect=_mock_editor_function):
+    with patch("subprocess.call", side_effect=_mock_editor_function):
         run(context, "jrnl")
@@ -96,6 +102,7 @@ def _mock_getpass(inputs):
     def prompt_return(prompt="Password: "):
         print(prompt)
         return next(inputs)
     return prompt_return
@@ -104,6 +111,7 @@ def _mock_input(inputs):
         val = next(inputs)
         print(prompt, val)
         return val
     return prompt_return
@@ -119,24 +127,28 @@ def run_with_input(context, command, inputs=""):
         text = iter([inputs])
     args = ushlex(command)[1:]
-    with patch("builtins.input", side_effect=_mock_input(text)) as mock_input,\
-            patch("getpass.getpass", side_effect=_mock_getpass(text)) as mock_getpass,\
-            patch("sys.stdin.read", side_effect=text) as mock_read:
-        try:
-            cli.run(args or [])
-            context.exit_status = 0
-        except SystemExit as e:
-            context.exit_status = e.code
-        # at least one of the mocked input methods got called
-        assert mock_input.called or mock_getpass.called or mock_read.called
-        # all inputs were used
-        try:
-            next(text)
-            assert False, "Not all inputs were consumed"
-        except StopIteration:
-            pass
+    # fmt: off
+    # see: https://github.com/psf/black/issues/557
+    with patch("builtins.input", side_effect=_mock_input(text)) as mock_input, \
+        patch("getpass.getpass", side_effect=_mock_getpass(text)) as mock_getpass, \
+        patch("sys.stdin.read", side_effect=text) as mock_read:
+        try:
+            cli.run(args or [])
+            context.exit_status = 0
+        except SystemExit as e:
+            context.exit_status = e.code
+        # at least one of the mocked input methods got called
+        assert mock_input.called or mock_getpass.called or mock_read.called
+        # all inputs were used
+        try:
+            next(text)
+            assert False, "Not all inputs were consumed"
+        except StopIteration:
+            pass
+    # fmt: on
 @when('we run "{command}"')
@@ -158,20 +170,20 @@ def load_template(context, filename):
 @when('we set the keychain password of "{journal}" to "{password}"')
 def set_keychain(context, journal, password):
-    keyring.set_password('jrnl', journal, password)
+    keyring.set_password("jrnl", journal, password)
-@then('we should get an error')
+@then("we should get an error")
 def has_error(context):
     assert context.exit_status != 0, context.exit_status
-@then('we should get no error')
+@then("we should get no error")
 def no_error(context):
     assert context.exit_status == 0, context.exit_status
-@then('the output should be parsable as json')
+@then("the output should be parsable as json")
 def check_output_json(context):
     out = context.stdout_capture.getvalue()
     assert json.loads(out), out
@@ -210,7 +222,7 @@ def check_json_output_path(context, path, value):
     out = context.stdout_capture.getvalue()
     struct = json.loads(out)
-    for node in path.split('.'):
+    for node in path.split("."):
         try:
             struct = struct[int(node)]
         except ValueError:
@@ -218,14 +230,19 @@ def check_json_output_path(context, path, value):
     assert struct == value, struct
-@then('the output should be')
+@then("the output should be")
 @then('the output should be "{text}"')
 def check_output(context, text=None):
     text = (text or context.text).strip().splitlines()
     out = context.stdout_capture.getvalue().strip().splitlines()
-    assert len(text) == len(out), "Output has {} lines (expected: {})".format(len(out), len(text))
+    assert len(text) == len(out), "Output has {} lines (expected: {})".format(
+        len(out), len(text)
+    )
     for line_text, line_out in zip(text, out):
-        assert line_text.strip() == line_out.strip(), [line_text.strip(), line_out.strip()]
+        assert line_text.strip() == line_out.strip(), [
+            line_text.strip(),
+            line_out.strip(),
+        ]
 @then('the output should contain "{text}" in the local time')
@@ -233,11 +250,11 @@ def check_output_time_inline(context, text):
     out = context.stdout_capture.getvalue()
     local_tz = tzlocal.get_localzone()
     date, flag = CALENDAR.parse(text)
-    output_date = time.strftime("%Y-%m-%d %H:%M",date)
+    output_date = time.strftime("%Y-%m-%d %H:%M", date)
     assert output_date in out, output_date
-@then('the output should contain')
+@then("the output should contain")
 @then('the output should contain "{text}"')
 def check_output_inline(context, text=None):
     text = text or context.text
@@ -274,7 +291,7 @@ def check_journal_content(context, text, journal_name="default"):
 def journal_doesnt_exist(context, journal_name="default"):
     with open(install.CONFIG_FILE_PATH) as config_file:
         config = yaml.load(config_file, Loader=yaml.FullLoader)
-    journal_path = config['journals'][journal_name]
+    journal_path = config["journals"][journal_name]
     assert not os.path.exists(journal_path)
@@ -282,11 +299,7 @@ def journal_doesnt_exist(context, journal_name="default"):
 @then('the config for journal "{journal}" should have "{key}" set to "{value}"')
 def config_var(context, key, value, journal=None):
     t, value = value.split(":")
-    value = {
-        "bool": lambda v: v.lower() == "true",
-        "int": int,
-        "str": str
-    }[t](value)
+    value = {"bool": lambda v: v.lower() == "true", "int": int, "str": str}[t](value)
     config = util.load_config(install.CONFIG_FILE_PATH)
     if journal:
         config = config["journals"][journal]
@@ -294,8 +307,8 @@ def config_var(context, key, value, journal=None):
     assert config[key] == value
-@then('the journal should have {number:d} entries')
-@then('the journal should have {number:d} entry')
+@then("the journal should have {number:d} entries")
+@then("the journal should have {number:d} entry")
 @then('journal "{journal_name}" should have {number:d} entries')
 @then('journal "{journal_name}" should have {number:d} entry')
 def check_journal_entries(context, number, journal_name="default"):
@@ -303,6 +316,6 @@ def check_journal_entries(context, number, journal_name="default"):
     assert len(journal.entries) == number
-@then('fail')
+@then("fail")
 def debug_fail(context):
     assert False

View file

@@ -19,7 +19,11 @@ class DayOne(Journal.Journal):
     """A special Journal handling DayOne files"""
     # InvalidFileException was added to plistlib in Python3.4
-    PLIST_EXCEPTIONS = (ExpatError, plistlib.InvalidFileException) if hasattr(plistlib, "InvalidFileException") else ExpatError
+    PLIST_EXCEPTIONS = (
+        (ExpatError, plistlib.InvalidFileException)
+        if hasattr(plistlib, "InvalidFileException")
+        else ExpatError
+    )
     def __init__(self, **kwargs):
         self.entries = []
@@ -27,28 +31,39 @@ class DayOne(Journal.Journal):
         super().__init__(**kwargs)
     def open(self):
-        filenames = [os.path.join(self.config['journal'], "entries", f) for f in os.listdir(os.path.join(self.config['journal'], "entries"))]
+        filenames = [
+            os.path.join(self.config["journal"], "entries", f)
+            for f in os.listdir(os.path.join(self.config["journal"], "entries"))
+        ]
         filenames = []
-        for root, dirnames, f in os.walk(self.config['journal']):
-            for filename in fnmatch.filter(f, '*.doentry'):
+        for root, dirnames, f in os.walk(self.config["journal"]):
+            for filename in fnmatch.filter(f, "*.doentry"):
                 filenames.append(os.path.join(root, filename))
         self.entries = []
         for filename in filenames:
-            with open(filename, 'rb') as plist_entry:
+            with open(filename, "rb") as plist_entry:
                 try:
                     dict_entry = plistlib.readPlist(plist_entry)
                 except self.PLIST_EXCEPTIONS:
                     pass
                 else:
                     try:
-                        timezone = pytz.timezone(dict_entry['Time Zone'])
+                        timezone = pytz.timezone(dict_entry["Time Zone"])
                     except (KeyError, pytz.exceptions.UnknownTimeZoneError):
                         timezone = tzlocal.get_localzone()
-                    date = dict_entry['Creation Date']
+                    date = dict_entry["Creation Date"]
                     date = date + timezone.utcoffset(date, is_dst=False)
-                    entry = Entry.Entry(self, date, text=dict_entry['Entry Text'], starred=dict_entry["Starred"])
+                    entry = Entry.Entry(
+                        self,
+                        date,
+                        text=dict_entry["Entry Text"],
+                        starred=dict_entry["Starred"],
+                    )
                     entry.uuid = dict_entry["UUID"]
-                    entry._tags = [self.config['tagsymbols'][0] + tag.lower() for tag in dict_entry.get("Tags", [])]
+                    entry._tags = [
+                        self.config["tagsymbols"][0] + tag.lower()
+                        for tag in dict_entry.get("Tags", [])
+                    ]
                     self.entries.append(entry)
         self.sort()
@@ -58,24 +73,33 @@ class DayOne(Journal.Journal):
        """Writes only the entries that have been modified into plist files."""
        for entry in self.entries:
            if entry.modified:
-                utc_time = datetime.utcfromtimestamp(time.mktime(entry.date.timetuple()))
+                utc_time = datetime.utcfromtimestamp(
+                    time.mktime(entry.date.timetuple())
+                )
                if not hasattr(entry, "uuid"):
                    entry.uuid = uuid.uuid1().hex
-                filename = os.path.join(self.config['journal'], "entries", entry.uuid.upper() + ".doentry")
+                filename = os.path.join(
+                    self.config["journal"], "entries", entry.uuid.upper() + ".doentry"
+                )
                entry_plist = {
-                    'Creation Date': utc_time,
-                    'Starred': entry.starred if hasattr(entry, 'starred') else False,
-                    'Entry Text': entry.title + "\n" + entry.body,
-                    'Time Zone': str(tzlocal.get_localzone()),
-                    'UUID': entry.uuid.upper(),
-                    'Tags': [tag.strip(self.config['tagsymbols']).replace("_", " ") for tag in entry.tags]
+                    "Creation Date": utc_time,
+                    "Starred": entry.starred if hasattr(entry, "starred") else False,
+                    "Entry Text": entry.title + "\n" + entry.body,
+                    "Time Zone": str(tzlocal.get_localzone()),
+                    "UUID": entry.uuid.upper(),
+                    "Tags": [
+                        tag.strip(self.config["tagsymbols"]).replace("_", " ")
+                        for tag in entry.tags
+                    ],
                }
                plistlib.writePlist(entry_plist, filename)
        for entry in self._deleted_entries:
-            filename = os.path.join(self.config['journal'], "entries", entry.uuid + ".doentry")
+            filename = os.path.join(
+                self.config["journal"], "entries", entry.uuid + ".doentry"
+            )
            os.remove(filename)
    def editable_str(self):
@@ -113,7 +137,7 @@ class DayOne(Journal.Journal):
            if line.endswith("*"):
                current_entry.starred = True
                line = line[:-1]
-            current_entry.title = line[len(date_blob) - 1:]
+            current_entry.title = line[len(date_blob) - 1 :]
            current_entry.date = new_date
        elif current_entry:
            current_entry.body += line + "\n"

View file

@@ -22,29 +22,32 @@ def make_key(password):
         algorithm=hashes.SHA256(),
         length=32,
         # Salt is hard-coded
-        salt=b'\xf2\xd5q\x0e\xc1\x8d.\xde\xdc\x8e6t\x89\x04\xce\xf8',
+        salt=b"\xf2\xd5q\x0e\xc1\x8d.\xde\xdc\x8e6t\x89\x04\xce\xf8",
         iterations=100000,
-        backend=default_backend()
+        backend=default_backend(),
     )
     key = kdf.derive(password)
     return base64.urlsafe_b64encode(key)
 class EncryptedJournal(Journal):
-    def __init__(self, name='default', **kwargs):
+    def __init__(self, name="default", **kwargs):
         super().__init__(name, **kwargs)
-        self.config['encrypt'] = True
+        self.config["encrypt"] = True
         self.password = None
     def open(self, filename=None):
         """Opens the journal file defined in the config and parses it into a list of Entries.
         Entries have the form (date, title, body)."""
-        filename = filename or self.config['journal']
+        filename = filename or self.config["journal"]
         if not os.path.exists(filename):
             self.create_file(filename)
             self.password = util.create_password(self.name)
-            print(f"Encrypted journal '{self.name}' created at {filename}", file=sys.stderr)
+            print(
+                f"Encrypted journal '{self.name}' created at {filename}",
+                file=sys.stderr,
+            )
         text = self._load(filename)
         self.entries = self._parse(text)
@@ -58,13 +61,13 @@ class EncryptedJournal(Journal):
         and otherwise ask the user to enter a password up to three times.
         If the password is provided but wrong (or corrupt), this will simply
         return None."""
-        with open(filename, 'rb') as f:
+        with open(filename, "rb") as f:
             journal_encrypted = f.read()
         def decrypt_journal(password):
             key = make_key(password)
             try:
-                plain = Fernet(key).decrypt(journal_encrypted).decode('utf-8')
+                plain = Fernet(key).decrypt(journal_encrypted).decode("utf-8")
                 self.password = password
                 return plain
             except (InvalidToken, IndexError):
@@ -77,45 +80,53 @@ class EncryptedJournal(Journal):
     def _store(self, filename, text):
         key = make_key(self.password)
-        journal = Fernet(key).encrypt(text.encode('utf-8'))
-        with open(filename, 'wb') as f:
+        journal = Fernet(key).encrypt(text.encode("utf-8"))
+        with open(filename, "wb") as f:
             f.write(journal)
     @classmethod
     def from_journal(cls, other: Journal):
         new_journal = super().from_journal(other)
-        new_journal.password = other.password if hasattr(other, "password") else util.create_password(other.name)
+        new_journal.password = (
+            other.password
+            if hasattr(other, "password")
+            else util.create_password(other.name)
+        )
         return new_journal
 class LegacyEncryptedJournal(LegacyJournal):
     """Legacy class to support opening journals encrypted with the jrnl 1.x
     standard. You'll not be able to save these journals anymore."""
-    def __init__(self, name='default', **kwargs):
+    def __init__(self, name="default", **kwargs):
         super().__init__(name, **kwargs)
-        self.config['encrypt'] = True
+        self.config["encrypt"] = True
         self.password = None
     def _load(self, filename):
-        with open(filename, 'rb') as f:
+        with open(filename, "rb") as f:
             journal_encrypted = f.read()
         iv, cipher = journal_encrypted[:16], journal_encrypted[16:]
         def decrypt_journal(password):
-            decryption_key = hashlib.sha256(password.encode('utf-8')).digest()
-            decryptor = Cipher(algorithms.AES(decryption_key), modes.CBC(iv), default_backend()).decryptor()
+            decryption_key = hashlib.sha256(password.encode("utf-8")).digest()
+            decryptor = Cipher(
+                algorithms.AES(decryption_key), modes.CBC(iv), default_backend()
+            ).decryptor()
             try:
                 plain_padded = decryptor.update(cipher) + decryptor.finalize()
                 self.password = password
                 if plain_padded[-1] in (" ", 32):
                     # Ancient versions of jrnl. Do not judge me.
-                    return plain_padded.decode('utf-8').rstrip(" ")
+                    return plain_padded.decode("utf-8").rstrip(" ")
                 else:
                     unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
                     plain = unpadder.update(plain_padded) + unpadder.finalize()
-                    return plain.decode('utf-8')
+                    return plain.decode("utf-8")
             except ValueError:
                 return None
         if self.password:
             return decrypt_journal(self.password)
         return util.decrypt_content(keychain=self.name, decrypt_func=decrypt_journal)

View file

@@ -49,50 +49,60 @@ class Entry:
     @staticmethod
     def tag_regex(tagsymbols):
-        pattern = fr'(?u)(?:^|\s)([{tagsymbols}][-+*#/\w]+)'
+        pattern = fr"(?u)(?:^|\s)([{tagsymbols}][-+*#/\w]+)"
         return re.compile(pattern)
     def _parse_tags(self):
-        tagsymbols = self.journal.config['tagsymbols']
-        return {tag.lower() for tag in re.findall(Entry.tag_regex(tagsymbols), self.text)}
+        tagsymbols = self.journal.config["tagsymbols"]
+        return {
+            tag.lower() for tag in re.findall(Entry.tag_regex(tagsymbols), self.text)
+        }
     def __str__(self):
         """Returns a string representation of the entry to be written into a journal file."""
-        date_str = self.date.strftime(self.journal.config['timeformat'])
+        date_str = self.date.strftime(self.journal.config["timeformat"])
         title = "[{}] {}".format(date_str, self.title.rstrip("\n "))
         if self.starred:
             title += " *"
         return "{title}{sep}{body}\n".format(
             title=title,
             sep="\n" if self.body.rstrip("\n ") else "",
-            body=self.body.rstrip("\n ")
+            body=self.body.rstrip("\n "),
         )
     def pprint(self, short=False):
         """Returns a pretty-printed version of the entry.
         If short is true, only print the title."""
-        date_str = self.date.strftime(self.journal.config['timeformat'])
-        if self.journal.config['indent_character']:
-            indent = self.journal.config['indent_character'].rstrip() + " "
+        date_str = self.date.strftime(self.journal.config["timeformat"])
+        if self.journal.config["indent_character"]:
+            indent = self.journal.config["indent_character"].rstrip() + " "
         else:
             indent = ""
-        if not short and self.journal.config['linewrap']:
-            title = textwrap.fill(date_str + " " + self.title, self.journal.config['linewrap'])
-            body = "\n".join([
-                textwrap.fill(
-                    line,
-                    self.journal.config['linewrap'],
-                    initial_indent=indent,
-                    subsequent_indent=indent,
-                    drop_whitespace=True) or indent
-                for line in self.body.rstrip(" \n").splitlines()
-            ])
+        if not short and self.journal.config["linewrap"]:
+            title = textwrap.fill(
+                date_str + " " + self.title, self.journal.config["linewrap"]
+            )
+            body = "\n".join(
+                [
+                    textwrap.fill(
+                        line,
+                        self.journal.config["linewrap"],
+                        initial_indent=indent,
+                        subsequent_indent=indent,
+                        drop_whitespace=True,
+                    )
+                    or indent
+                    for line in self.body.rstrip(" \n").splitlines()
+                ]
+            )
         else:
             title = date_str + " " + self.title.rstrip("\n ")
             body = self.body.rstrip("\n ")
         # Suppress bodies that are just blanks and new lines.
-        has_body = len(self.body) > 20 or not all(char in (" ", "\n") for char in self.body)
+        has_body = len(self.body) > 20 or not all(
+            char in (" ", "\n") for char in self.body
+        )
         if short:
             return title
@@ -104,17 +114,21 @@ class Entry:
         )
     def __repr__(self):
-        return "<Entry '{}' on {}>".format(self.title.strip(), self.date.strftime("%Y-%m-%d %H:%M"))
+        return "<Entry '{}' on {}>".format(
+            self.title.strip(), self.date.strftime("%Y-%m-%d %H:%M")
+        )
     def __hash__(self):
         return hash(self.__repr__())
     def __eq__(self, other):
-        if not isinstance(other, Entry) \
-                or self.title.strip() != other.title.strip() \
-                or self.body.rstrip() != other.body.rstrip() \
-                or self.date != other.date \
-                or self.starred != other.starred:
+        if (
+            not isinstance(other, Entry)
+            or self.title.strip() != other.title.strip()
+            or self.body.rstrip() != other.body.rstrip()
+            or self.date != other.date
+            or self.starred != other.starred
+        ):
             return False
         return True

View file

@@ -25,17 +25,17 @@ class Tag:
 class Journal:
-    def __init__(self, name='default', **kwargs):
+    def __init__(self, name="default", **kwargs):
         self.config = {
-            'journal': "journal.txt",
-            'encrypt': False,
-            'default_hour': 9,
-            'default_minute': 0,
-            'timeformat': "%Y-%m-%d %H:%M",
-            'tagsymbols': '@',
-            'highlight': True,
-            'linewrap': 80,
-            'indent_character': '|',
+            "journal": "journal.txt",
+            "encrypt": False,
+            "default_hour": 9,
+            "default_minute": 0,
+            "timeformat": "%Y-%m-%d %H:%M",
+            "tagsymbols": "@",
+            "highlight": True,
+            "linewrap": 80,
+            "indent_character": "|",
         }
         self.config.update(kwargs)
         # Set up date parser
@@ -57,17 +57,24 @@ class Journal:
         another journal object"""
         new_journal = cls(other.name, **other.config)
         new_journal.entries = other.entries
-        log.debug("Imported %d entries from %s to %s", len(new_journal), other.__class__.__name__, cls.__name__)
+        log.debug(
+            "Imported %d entries from %s to %s",
+            len(new_journal),
+            other.__class__.__name__,
+            cls.__name__,
+        )
         return new_journal
     def import_(self, other_journal_txt):
-        self.entries = list(frozenset(self.entries) | frozenset(self._parse(other_journal_txt)))
+        self.entries = list(
+            frozenset(self.entries) | frozenset(self._parse(other_journal_txt))
+        )
         self.sort()
     def open(self, filename=None):
         """Opens the journal file defined in the config and parses it into a list of Entries.
         Entries have the form (date, title, body)."""
-        filename = filename or self.config['journal']
+        filename = filename or self.config["journal"]
         if not os.path.exists(filename):
             self.create_file(filename)
@@ -81,7 +88,7 @@ class Journal:
     def write(self, filename=None):
         """Dumps the journal into the config file, overwriting it"""
-        filename = filename or self.config['journal']
+        filename = filename or self.config["journal"]
         text = self._to_text()
         self._store(filename, text)
@@ -129,7 +136,7 @@ class Journal:
             if new_date:
                 if entries:
-                    entries[-1].text = journal_txt[last_entry_pos:match.start()]
+                    entries[-1].text = journal_txt[last_entry_pos : match.start()]
                 last_entry_pos = match.end()
                 entries.append(Entry.Entry(self, date=new_date))
@@ -148,18 +155,16 @@ class Journal:
         """Prettyprints the journal's entries"""
         sep = "\n"
         pp = sep.join([e.pprint(short=short) for e in self.entries])
-        if self.config['highlight']: # highlight tags
+        if self.config["highlight"]: # highlight tags
             if self.search_tags:
                 for tag in self.search_tags:
                     tagre = re.compile(re.escape(tag), re.IGNORECASE)
-                    pp = re.sub(tagre,
-                                lambda match: util.colorize(match.group(0)),
-                                pp)
+                    pp = re.sub(tagre, lambda match: util.colorize(match.group(0)), pp)
             else:
                 pp = re.sub(
-                    Entry.Entry.tag_regex(self.config['tagsymbols']),
+                    Entry.Entry.tag_regex(self.config["tagsymbols"]),
                     lambda match: util.colorize(match.group(0)),
-                    pp
+                    pp,
                 )
         return pp
@@ -183,14 +188,22 @@ class Journal:
         """Returns a set of tuples (count, tag) for all tags present in the journal."""
         # Astute reader: should the following line leave you as puzzled as me the first time
         # I came across this construction, worry not and embrace the ensuing moment of enlightment.
-        tags = [tag
-                for entry in self.entries
-                for tag in set(entry.tags)]
+        tags = [tag for entry in self.entries for tag in set(entry.tags)]
         # To be read: [for entry in journal.entries: for tag in set(entry.tags): tag]
         tag_counts = {(tags.count(tag), tag) for tag in tags}
         return [Tag(tag, count=count) for count, tag in sorted(tag_counts)]
-    def filter(self, tags=[], start_date=None, end_date=None, starred=False, strict=False, short=False, contains=None, exclude=[]):
+    def filter(
+        self,
+        tags=[],
+        start_date=None,
+        end_date=None,
+        starred=False,
+        strict=False,
+        short=False,
+        contains=None,
+        exclude=[],
+    ):
         """Removes all entries from the journal that don't match the filter.
         tags is a list of tags, each being a string that starts with one of the
@@ -216,13 +229,20 @@ class Journal:
         contains_lower = contains.casefold()
         result = [
-            entry for entry in self.entries
+            entry
+            for entry in self.entries
             if (not tags or tagged(entry.tags))
             and (not starred or entry.starred)
             and (not start_date or entry.date >= start_date)
             and (not end_date or entry.date <= end_date)
            and (not exclude or not excluded(entry.tags))
-            and (not contains or (contains_lower in entry.title.casefold() or contains_lower in entry.body.casefold()))
+            and (
+                not contains
+                or (
+                    contains_lower in entry.title.casefold()
+                    or contains_lower in entry.body.casefold()
+                )
+            )
         ]
         self.entries = result
@@ -231,11 +251,11 @@ class Journal:
         """Constructs a new entry from some raw text input.
         If a date is given, it will parse and use this, otherwise scan for a date in the input first."""
-        raw = raw.replace('\\n ', '\n').replace('\\n', '\n')
+        raw = raw.replace("\\n ", "\n").replace("\\n", "\n")
         starred = False
         # Split raw text into title and body
         sep = re.search(r"\n|[?!.]+ +\n?", raw)
-        first_line = raw[:sep.end()].strip() if sep else raw
+        first_line = raw[: sep.end()].strip() if sep else raw
         starred = False
         if not date:
@@ -243,12 +263,12 @@ class Journal:
             if colon_pos > 0:
                 date = time.parse(
                     raw[:colon_pos],
-                    default_hour=self.config['default_hour'],
-                    default_minute=self.config['default_minute']
+                    default_hour=self.config["default_hour"],
+                    default_minute=self.config["default_minute"],
                 )
                 if date: # Parsed successfully, strip that from the raw text
                     starred = raw[:colon_pos].strip().endswith("*")
-                    raw = raw[colon_pos + 1:].strip()
+                    raw = raw[colon_pos + 1 :].strip()
         starred = starred or first_line.startswith("*") or first_line.endswith("*")
         if not date: # Still nothing? Meh, just live in the moment.
             date = time.parse("now")
@@ -281,7 +301,7 @@ class PlainJournal(Journal):
             return f.read()
     def _store(self, filename, text):
-        with open(filename, 'w', encoding="utf-8") as f:
+        with open(filename, "w", encoding="utf-8") as f:
             f.write(text)
@@ -289,6 +309,7 @@ class LegacyJournal(Journal):
     """Legacy class to support opening journals formatted with the jrnl 1.x
     standard. Main difference here is that in 1.x, timestamps were not cuddled
     by square brackets. You'll not be able to save these journals anymore."""
     def _load(self, filename):
         with open(filename, "r", encoding="utf-8") as f:
             return f.read()
@@ -297,17 +318,19 @@ class LegacyJournal(Journal):
         """Parses a journal that's stored in a string and returns a list of entries"""
         # Entries start with a line that looks like 'date title' - let's figure out how
         # long the date will be by constructing one
-        date_length = len(datetime.today().strftime(self.config['timeformat']))
+        date_length = len(datetime.today().strftime(self.config["timeformat"]))
         # Initialise our current entry
         entries = []
         current_entry = None
-        new_date_format_regex = re.compile(r'(^\[[^\]]+\].*?$)')
+        new_date_format_regex = re.compile(r"(^\[[^\]]+\].*?$)")
         for line in journal_txt.splitlines():
             line = line.rstrip()
             try:
                 # try to parse line as date => new entry begins
-                new_date = datetime.strptime(line[:date_length], self.config['timeformat'])
+                new_date = datetime.strptime(
+                    line[:date_length], self.config["timeformat"]
+                )
                 # parsing successful => save old entry and create new one
                 if new_date and current_entry:
@@ -319,12 +342,14 @@ class LegacyJournal(Journal):
                 else:
                     starred = False
-                current_entry = Entry.Entry(self, date=new_date, text=line[date_length + 1:], starred=starred)
+                current_entry = Entry.Entry(
+                    self, date=new_date, text=line[date_length + 1 :], starred=starred
+                )
             except ValueError:
                 # Happens when we can't parse the start of the line as an date.
                 # In this case, just append line to our body (after some
                 # escaping for the new format).
-                line = new_date_format_regex.sub(r' \1', line)
+                line = new_date_format_regex.sub(r" \1", line)
                 if current_entry:
                     current_entry.text += line + "\n"
@@ -343,26 +368,30 @@ def open_journal(name, config, legacy=False):
     backwards compatibility with jrnl 1.x
     """
     config = config.copy()
-    config['journal'] = os.path.expanduser(os.path.expandvars(config['journal']))
-    if os.path.isdir(config['journal']):
-        if config['journal'].strip("/").endswith(".dayone") or "entries" in os.listdir(config['journal']):
+    config["journal"] = os.path.expanduser(os.path.expandvars(config["journal"]))
+    if os.path.isdir(config["journal"]):
+        if config["journal"].strip("/").endswith(".dayone") or "entries" in os.listdir(
+            config["journal"]
+        ):
             from . import DayOneJournal
             return DayOneJournal.DayOne(**config).open()
         else:
             print(
                 f"[Error: {config['journal']} is a directory, but doesn't seem to be a DayOne journal either.",
-                file=sys.stderr
+                file=sys.stderr,
             )
             sys.exit(1)
-    if not config['encrypt']:
+    if not config["encrypt"]:
         if legacy:
             return LegacyJournal(name, **config).open()
         return PlainJournal(name, **config).open()
     else:
         from . import EncryptedJournal
         if legacy:
             return EncryptedJournal.LegacyEncryptedJournal(name, **config).open()
         return EncryptedJournal.EncryptedJournal(name, **config).open()

View file

@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 import os
 try:
     from .__version__ import __version__
 except ImportError:

View file

@@ -23,33 +23,155 @@ logging.getLogger("keyring.backend").setLevel(logging.ERROR)
 def parse_args(args=None):
     parser = argparse.ArgumentParser()
-    parser.add_argument('-v', '--version', dest='version', action="store_true", help="prints version information and exits")
-    parser.add_argument('-ls', dest='ls', action="store_true", help="displays accessible journals")
-    parser.add_argument('-d', '--debug', dest='debug', action='store_true', help='execute in debug mode')
+    parser.add_argument(
+        "-v",
+        "--version",
+        dest="version",
+        action="store_true",
+        help="prints version information and exits",
+    )
+    parser.add_argument(
+        "-ls", dest="ls", action="store_true", help="displays accessible journals"
+    )
+    parser.add_argument(
+        "-d", "--debug", dest="debug", action="store_true", help="execute in debug mode"
+    )
-    composing = parser.add_argument_group('Composing', 'To write an entry simply write it on the command line, e.g. "jrnl yesterday at 1pm: Went to the gym."')
-    composing.add_argument('text', metavar='', nargs="*")
+    composing = parser.add_argument_group(
+        "Composing",
+        'To write an entry simply write it on the command line, e.g. "jrnl yesterday at 1pm: Went to the gym."',
+    )
+    composing.add_argument("text", metavar="", nargs="*")
-    reading = parser.add_argument_group('Reading', 'Specifying either of these parameters will display posts of your journal')
-    reading.add_argument('-from', dest='start_date', metavar="DATE", help='View entries after this date')
-    reading.add_argument('-until', '-to', dest='end_date', metavar="DATE", help='View entries before this date')
-    reading.add_argument('-contains', dest='contains', help='View entries containing a specific string')
-    reading.add_argument('-on', dest='on_date', metavar="DATE", help='View entries on this date')
-    reading.add_argument('-and', dest='strict', action="store_true", help='Filter by tags using AND (default: OR)')
-    reading.add_argument('-starred', dest='starred', action="store_true", help='Show only starred entries')
-    reading.add_argument('-n', dest='limit', default=None, metavar="N", help="Shows the last n entries matching the filter. '-n 3' and '-3' have the same effect.", nargs="?", type=int)
-    reading.add_argument('-not', dest='excluded', nargs='+', default=[], metavar="E", help="Exclude entries with these tags")
+    reading = parser.add_argument_group(
+        "Reading",
+        "Specifying either of these parameters will display posts of your journal",
+    )
+    reading.add_argument(
+        "-from", dest="start_date", metavar="DATE", help="View entries after this date"
+    )
+    reading.add_argument(
+        "-until",
+        "-to",
+        dest="end_date",
+        metavar="DATE",
+        help="View entries before this date",
+    )
+    reading.add_argument(
+        "-contains", dest="contains", help="View entries containing a specific string"
+    )
+    reading.add_argument(
+        "-on", dest="on_date", metavar="DATE", help="View entries on this date"
+    )
+    reading.add_argument(
+        "-and",
+        dest="strict",
+        action="store_true",
+        help="Filter by tags using AND (default: OR)",
+    )
+    reading.add_argument(
+        "-starred",
+        dest="starred",
+        action="store_true",
+        help="Show only starred entries",
+    )
+    reading.add_argument(
+        "-n",
+        dest="limit",
+        default=None,
+        metavar="N",
+        help="Shows the last n entries matching the filter. '-n 3' and '-3' have the same effect.",
+        nargs="?",
+        type=int,
+    )
+    reading.add_argument(
+        "-not",
+        dest="excluded",
+        nargs="+",
+        default=[],
+        metavar="E",
+        help="Exclude entries with these tags",
+    )
-    exporting = parser.add_argument_group('Export / Import', 'Options for transmogrifying your journal')
-    exporting.add_argument('-s', '--short', dest='short', action="store_true", help='Show only titles or line containing the search tags')
-    exporting.add_argument('--tags', dest='tags', action="store_true", help='Returns a list of all tags and number of occurences')
-    exporting.add_argument('--export', metavar='TYPE', dest='export', choices=plugins.EXPORT_FORMATS, help='Export your journal. TYPE can be {}.'.format(plugins.util.oxford_list(plugins.EXPORT_FORMATS)), default=False, const=None)
-    exporting.add_argument('-o', metavar='OUTPUT', dest='output', help='Optionally specifies output file when using --export. If OUTPUT is a directory, exports each entry into an individual file instead.', default=False, const=None)
-    exporting.add_argument('--import', metavar='TYPE', dest='import_', choices=plugins.IMPORT_FORMATS, help='Import entries into your journal. TYPE can be {}, and it defaults to jrnl if nothing else is specified.'.format(plugins.util.oxford_list(plugins.IMPORT_FORMATS)), default=False, const='jrnl', nargs='?')
-    exporting.add_argument('-i', metavar='INPUT', dest='input', help='Optionally specifies input file when using --import.', default=False, const=None)
-    exporting.add_argument('--encrypt', metavar='FILENAME', dest='encrypt', help='Encrypts your existing journal with a new password', nargs='?', default=False, const=None)
-    exporting.add_argument('--decrypt', metavar='FILENAME', dest='decrypt', help='Decrypts your journal and stores it in plain text', nargs='?', default=False, const=None)
-    exporting.add_argument('--edit', dest='edit', help='Opens your editor to edit the selected entries.', action="store_true")
+    exporting = parser.add_argument_group(
+        "Export / Import", "Options for transmogrifying your journal"
+    )
+    exporting.add_argument(
+        "-s",
+        "--short",
+        dest="short",
+        action="store_true",
+        help="Show only titles or line containing the search tags",
+    )
+    exporting.add_argument(
+        "--tags",
+        dest="tags",
+        action="store_true",
+        help="Returns a list of all tags and number of occurences",
+    )
+    exporting.add_argument(
+        "--export",
+        metavar="TYPE",
+        dest="export",
+        choices=plugins.EXPORT_FORMATS,
+        help="Export your journal. TYPE can be {}.".format(
+            plugins.util.oxford_list(plugins.EXPORT_FORMATS)
+        ),
+        default=False,
+        const=None,
+    )
+    exporting.add_argument(
+        "-o",
+        metavar="OUTPUT",
+        dest="output",
+        help="Optionally specifies output file when using --export. If OUTPUT is a directory, exports each entry into an individual file instead.",
+        default=False,
+        const=None,
+    )
+    exporting.add_argument(
+        "--import",
+        metavar="TYPE",
+        dest="import_",
+        choices=plugins.IMPORT_FORMATS,
+        help="Import entries into your journal. TYPE can be {}, and it defaults to jrnl if nothing else is specified.".format(
+            plugins.util.oxford_list(plugins.IMPORT_FORMATS)
+        ),
+        default=False,
+        const="jrnl",
+        nargs="?",
+    )
+    exporting.add_argument(
+        "-i",
+        metavar="INPUT",
+        dest="input",
+        help="Optionally specifies input file when using --import.",
+        default=False,
+        const=None,
+    )
+    exporting.add_argument(
+        "--encrypt",
+        metavar="FILENAME",
+        dest="encrypt",
+        help="Encrypts your existing journal with a new password",
+        nargs="?",
+        default=False,
+        const=None,
+    )
+    exporting.add_argument(
+        "--decrypt",
+        metavar="FILENAME",
+        dest="decrypt",
+        help="Decrypts your journal and stores it in plain text",
+        nargs="?",
+        default=False,
+        const=None,
+    )
+    exporting.add_argument(
+        "--edit",
+        dest="edit",
+        help="Opens your editor to edit the selected entries.",
+        action="store_true",
+    )
     return parser.parse_args(args)
@@ -63,13 +185,30 @@ def guess_mode(args, config):
         compose = False
         export = False
         import_ = True
-    elif args.decrypt is not False or args.encrypt is not False or args.export is not False or any((args.short, args.tags, args.edit)):
+    elif (
+        args.decrypt is not False
+        or args.encrypt is not False
+        or args.export is not False
+        or any((args.short, args.tags, args.edit))
+    ):
         compose = False
         export = True
-    elif any((args.start_date, args.end_date, args.on_date, args.limit, args.strict, args.starred, args.contains)):
+    elif any(
+        (
+            args.start_date,
+            args.end_date,
+            args.on_date,
+            args.limit,
+            args.strict,
+            args.starred,
+            args.contains,
+        )
+    ):
         # Any sign of displaying stuff?
         compose = False
-    elif args.text and all(word[0] in config['tagsymbols'] for word in " ".join(args.text).split()):
+    elif args.text and all(
+        word[0] in config["tagsymbols"] for word in " ".join(args.text).split()
+    ):
         # No date and only tags?
         compose = False
@@ -78,29 +217,37 @@ def guess_mode(args, config):
 def encrypt(journal, filename=None):
     """ Encrypt into new file. If filename is not set, we encrypt the journal file itself. """
-    journal.config['encrypt'] = True
+    journal.config["encrypt"] = True
     new_journal = EncryptedJournal.from_journal(journal)
     new_journal.write(filename)
-    print("Journal encrypted to {}.".format(filename or new_journal.config['journal']), file=sys.stderr)
+    print(
+        "Journal encrypted to {}.".format(filename or new_journal.config["journal"]),
+        file=sys.stderr,
+    )
 def decrypt(journal, filename=None):
     """ Decrypts into new file. If filename is not set, we encrypt the journal file itself. """
-    journal.config['encrypt'] = False
+    journal.config["encrypt"] = False
     new_journal = PlainJournal.from_journal(journal)
     new_journal.write(filename)
-    print("Journal decrypted to {}.".format(filename or new_journal.config['journal']), file=sys.stderr)
+    print(
+        "Journal decrypted to {}.".format(filename or new_journal.config["journal"]),
+        file=sys.stderr,
+    )
 def list_journals(config):
     """List the journals specified in the configuration file"""
     result = f"Journals defined in {install.CONFIG_FILE_PATH}\n"
-    ml = min(max(len(k) for k in config['journals']), 20)
-    for journal, cfg in config['journals'].items():
-        result += " * {:{}} -> {}\n".format(journal, ml, cfg['journal'] if isinstance(cfg, dict) else cfg)
+    ml = min(max(len(k) for k in config["journals"]), 20)
+    for journal, cfg in config["journals"].items():
+        result += " * {:{}} -> {}\n".format(
+            journal, ml, cfg["journal"] if isinstance(cfg, dict) else cfg
+        )
     return result
@@ -108,11 +255,11 @@ def update_config(config, new_config, scope, force_local=False):
     """Updates a config dict with new values - either global if scope is None
     or config['journals'][scope] is just a string pointing to a journal file,
     or within the scope"""
-    if scope and type(config['journals'][scope]) is dict: # Update to journal specific
-        config['journals'][scope].update(new_config)
+    if scope and type(config["journals"][scope]) is dict: # Update to journal specific
+        config["journals"][scope].update(new_config)
     elif scope and force_local: # Convert to dict
-        config['journals'][scope] = {"journal": config['journals'][scope]}
-        config['journals'][scope].update(new_config)
+        config["journals"][scope] = {"journal": config["journals"][scope]}
+        config["journals"][scope].update(new_config)
     else:
         config.update(new_config)
@@ -120,9 +267,11 @@ def update_config(config, new_config, scope, force_local=False):
 def configure_logger(debug=False):
     logging.basicConfig(
         level=logging.DEBUG if debug else logging.INFO,
-        format='%(levelname)-8s %(name)-12s %(message)s'
+        format="%(levelname)-8s %(name)-12s %(message)s",
     )
-    logging.getLogger('parsedatetime').setLevel(logging.INFO) # disable parsedatetime debug logging
+    logging.getLogger("parsedatetime").setLevel(
+        logging.INFO
+    ) # disable parsedatetime debug logging
 def run(manual_args=None):
@@ -150,10 +299,10 @@ def run(manual_args=None):
     # use this!
     journal_name = install.DEFAULT_JOURNAL_KEY
-    if args.text and args.text[0] in config['journals']:
+    if args.text and args.text[0] in config["journals"]:
         journal_name = args.text[0]
         args.text = args.text[1:]
-    elif install.DEFAULT_JOURNAL_KEY not in config['journals']:
+    elif install.DEFAULT_JOURNAL_KEY not in config["journals"]:
         print("No default journal configured.", file=sys.stderr)
         print(list_journals(config), file=sys.stderr)
         sys.exit(1)
@@ -181,18 +330,24 @@ def run(manual_args=None):
         if not sys.stdin.isatty():
             # Piping data into jrnl
             raw = sys.stdin.read()
-        elif config['editor']:
+        elif config["editor"]:
             template = ""
-            if config['template']:
+            if config["template"]:
                 try:
-                    template = open(config['template']).read()
+                    template = open(config["template"]).read()
                 except OSError:
-                    print(f"[Could not read template at '{config['template']}']", file=sys.stderr)
+                    print(
+                        f"[Could not read template at '{config['template']}']",
+                        file=sys.stderr,
+                    )
                     sys.exit(1)
             raw = util.get_text_from_editor(config, template)
         else:
             try:
-                print("[Compose Entry; " + _exit_multiline_code + " to finish writing]\n", file=sys.stderr)
+                print(
+                    "[Compose Entry; " + _exit_multiline_code + " to finish writing]\n",
+                    file=sys.stderr,
+                )
                 raw = sys.stdin.read()
             except KeyboardInterrupt:
                 print("[Entry NOT saved to journal.]", file=sys.stderr)
@@ -225,13 +380,16 @@ def run(manual_args=None):
         old_entries = journal.entries
         if args.on_date:
             args.start_date = args.end_date = args.on_date
-        journal.filter(tags=args.text,
-                       start_date=args.start_date, end_date=args.end_date,
-                       strict=args.strict,
-                       short=args.short,
-                       starred=args.starred,
-                       exclude=args.excluded,
-                       contains=args.contains)
+        journal.filter(
+            tags=args.text,
+            start_date=args.start_date,
+            end_date=args.end_date,
+            strict=args.strict,
+            short=args.short,
+            starred=args.starred,
+            exclude=args.excluded,
+            contains=args.contains,
+        )
         journal.limit(args.limit)
     # Reading mode
@@ -253,20 +411,28 @@ def run(manual_args=None):
         encrypt(journal, filename=args.encrypt)
         # Not encrypting to a separate file: update config!
         if not args.encrypt:
-            update_config(original_config, {"encrypt": True}, journal_name, force_local=True)
+            update_config(
+                original_config, {"encrypt": True}, journal_name, force_local=True
+            )
             install.save_config(original_config)
     elif args.decrypt is not False:
         decrypt(journal, filename=args.decrypt)
         # Not decrypting to a separate file: update config!
         if not args.decrypt:
-            update_config(original_config, {"encrypt": False}, journal_name, force_local=True)
+            update_config(
+                original_config, {"encrypt": False}, journal_name, force_local=True
+            )
             install.save_config(original_config)
     elif args.edit:
-        if not config['editor']:
-            print("[{1}ERROR{2}: You need to specify an editor in {0} to use the --edit function.]"
-                  .format(install.CONFIG_FILE_PATH, ERROR_COLOR, RESET_COLOR), file=sys.stderr)
+        if not config["editor"]:
+            print(
+                "[{1}ERROR{2}: You need to specify an editor in {0} to use the --edit function.]".format(
+                    install.CONFIG_FILE_PATH, ERROR_COLOR, RESET_COLOR
+                ),
+                file=sys.stderr,
+            )
             sys.exit(1)
         other_entries = [e for e in old_entries if e not in journal.entries]
         # Edit
@@ -277,9 +443,17 @@ def run(manual_args=None):
         num_edited = len([e for e in journal.entries if e.modified])
         prompts = []
         if num_deleted:
-            prompts.append("{} {} deleted".format(num_deleted, "entry" if num_deleted == 1 else "entries"))
+            prompts.append(
+                "{} {} deleted".format(
+                    num_deleted, "entry" if num_deleted == 1 else "entries"
+                )
+            )
         if num_edited:
-            prompts.append("{} {} modified".format(num_edited, "entry" if num_deleted == 1 else "entries"))
+            prompts.append(
+                "{} {} modified".format(
+                    num_edited, "entry" if num_deleted == 1 else "entries"
+                )
+            )
         if prompts:
             print("[{}]".format(", ".join(prompts).capitalize()), file=sys.stderr)
         journal.entries += other_entries

View file

@ -8,6 +8,7 @@ import os
class Exporter: class Exporter:
"""This Exporter can convert entries and journals into text files.""" """This Exporter can convert entries and journals into text files."""
def __init__(self, format): def __init__(self, format):
with open("jrnl/templates/" + format + ".template") as f: with open("jrnl/templates/" + format + ".template") as f:
front_matter, body = f.read().strip("-\n").split("---", 2) front_matter, body = f.read().strip("-\n").split("---", 2)
@ -18,11 +19,7 @@ class Exporter:
return str(entry) return str(entry)
def _get_vars(self, journal): def _get_vars(self, journal):
return { return {"journal": journal, "entries": journal.entries, "tags": journal.tags}
'journal': journal,
'entries': journal.entries,
'tags': journal.tags
}
def export_journal(self, journal): def export_journal(self, journal):
"""Returns a string representation of an entire journal.""" """Returns a string representation of an entire journal."""
@@ -38,7 +35,9 @@ class Exporter:
return f"[{ERROR_COLOR}ERROR{RESET_COLOR}: {e.filename} {e.strerror}]" return f"[{ERROR_COLOR}ERROR{RESET_COLOR}: {e.filename} {e.strerror}]"
def make_filename(self, entry): def make_filename(self, entry):
return entry.date.strftime("%Y-%m-%d_{}.{}".format(slugify(entry.title), self.extension)) return entry.date.strftime(
"%Y-%m-%d_{}.{}".format(slugify(entry.title), self.extension)
)
def write_files(self, journal, path): def write_files(self, journal, path):
"""Exports a journal into individual files for each entry.""" """Exports a journal into individual files for each entry."""
@@ -57,7 +56,7 @@ class Exporter:
representation as string if output is None.""" representation as string if output is None."""
if output and os.path.isdir(output): # multiple files if output and os.path.isdir(output): # multiple files
return self.write_files(journal, output) return self.write_files(journal, output)
elif output: # single file elif output: # single file
return self.write_file(journal, output) return self.write_file(journal, output)
else: else:
return self.export_journal(journal) return self.export_journal(journal)
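Note for readers: the loader in this Exporter strips surrounding dashes and splits on "---", so the bundled jrnl/templates/<format>.template files are expected to carry a small front-matter block followed by the template body. The exact front-matter keys are not visible in this diff, so the sketch below is an assumption, not the recorded file contents.

    # Hypothetical template file layout (keys assumed):
    #
    #   ---
    #   extension: txt
    #   ---
    #   {{ entry }}
    #
    # Minimal sketch of the split performed in __init__ above:
    raw = "---\nextension: txt\n---\n{{ entry }}\n"
    front_matter, body = raw.strip("-\n").split("---", 2)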

@@ -13,16 +13,17 @@ from .util import UserAbort
import yaml import yaml
import logging import logging
import sys import sys
if "win32" not in sys.platform: if "win32" not in sys.platform:
# readline is not included in Windows Active Python # readline is not included in Windows Active Python
import readline import readline
DEFAULT_CONFIG_NAME = 'jrnl.yaml' DEFAULT_CONFIG_NAME = "jrnl.yaml"
DEFAULT_JOURNAL_NAME = 'journal.txt' DEFAULT_JOURNAL_NAME = "journal.txt"
DEFAULT_JOURNAL_KEY = 'default' DEFAULT_JOURNAL_KEY = "default"
XDG_RESOURCE = 'jrnl' XDG_RESOURCE = "jrnl"
USER_HOME = os.path.expanduser('~') USER_HOME = os.path.expanduser("~")
CONFIG_PATH = xdg.BaseDirectory.save_config_path(XDG_RESOURCE) or USER_HOME CONFIG_PATH = xdg.BaseDirectory.save_config_path(XDG_RESOURCE) or USER_HOME
CONFIG_FILE_PATH = os.path.join(CONFIG_PATH, DEFAULT_CONFIG_NAME) CONFIG_FILE_PATH = os.path.join(CONFIG_PATH, DEFAULT_CONFIG_NAME)
@@ -43,21 +44,20 @@ def module_exists(module_name):
else: else:
return True return True
default_config = { default_config = {
'version': __version__, "version": __version__,
'journals': { "journals": {DEFAULT_JOURNAL_KEY: JOURNAL_FILE_PATH},
DEFAULT_JOURNAL_KEY: JOURNAL_FILE_PATH "editor": os.getenv("VISUAL") or os.getenv("EDITOR") or "",
}, "encrypt": False,
'editor': os.getenv('VISUAL') or os.getenv('EDITOR') or "", "template": False,
'encrypt': False, "default_hour": 9,
'template': False, "default_minute": 0,
'default_hour': 9, "timeformat": "%Y-%m-%d %H:%M",
'default_minute': 0, "tagsymbols": "@",
'timeformat': "%Y-%m-%d %H:%M", "highlight": True,
'tagsymbols': '@', "linewrap": 79,
'highlight': True, "indent_character": "|",
'linewrap': 79,
'indent_character': '|',
} }
@@ -70,13 +70,18 @@ def upgrade_config(config):
for key in missing_keys: for key in missing_keys:
config[key] = default_config[key] config[key] = default_config[key]
save_config(config) save_config(config)
print(f"[Configuration updated to newest version at {CONFIG_FILE_PATH}]", file=sys.stderr) print(
f"[Configuration updated to newest version at {CONFIG_FILE_PATH}]",
file=sys.stderr,
)
def save_config(config): def save_config(config):
config['version'] = __version__ config["version"] = __version__
with open(CONFIG_FILE_PATH, 'w') as f: with open(CONFIG_FILE_PATH, "w") as f:
yaml.safe_dump(config, f, encoding='utf-8', allow_unicode=True, default_flow_style=False) yaml.safe_dump(
config, f, encoding="utf-8", allow_unicode=True, default_flow_style=False
)
def load_or_install_jrnl(): def load_or_install_jrnl():
@@ -84,17 +89,27 @@ def load_or_install_jrnl():
If jrnl is already installed, loads and returns a config object. If jrnl is already installed, loads and returns a config object.
Else, perform various prompts to install jrnl. Else, perform various prompts to install jrnl.
""" """
config_path = CONFIG_FILE_PATH if os.path.exists(CONFIG_FILE_PATH) else CONFIG_FILE_PATH_FALLBACK config_path = (
CONFIG_FILE_PATH
if os.path.exists(CONFIG_FILE_PATH)
else CONFIG_FILE_PATH_FALLBACK
)
if os.path.exists(config_path): if os.path.exists(config_path):
log.debug('Reading configuration from file %s', config_path) log.debug("Reading configuration from file %s", config_path)
config = util.load_config(config_path) config = util.load_config(config_path)
try: try:
upgrade.upgrade_jrnl_if_necessary(config_path) upgrade.upgrade_jrnl_if_necessary(config_path)
except upgrade.UpgradeValidationException: except upgrade.UpgradeValidationException:
print("Aborting upgrade.", file=sys.stderr) print("Aborting upgrade.", file=sys.stderr)
print("Please tell us about this problem at the following URL:", file=sys.stderr) print(
print("https://github.com/jrnl-org/jrnl/issues/new?title=UpgradeValidationException", file=sys.stderr) "Please tell us about this problem at the following URL:",
file=sys.stderr,
)
print(
"https://github.com/jrnl-org/jrnl/issues/new?title=UpgradeValidationException",
file=sys.stderr,
)
print("Exiting.", file=sys.stderr) print("Exiting.", file=sys.stderr)
sys.exit(1) sys.exit(1)
@@ -102,7 +117,7 @@ def load_or_install_jrnl():
return config return config
else: else:
log.debug('Configuration file not found, installing jrnl...') log.debug("Configuration file not found, installing jrnl...")
try: try:
config = install() config = install()
except KeyboardInterrupt: except KeyboardInterrupt:
@@ -112,25 +127,32 @@ def load_or_install_jrnl():
def install(): def install():
if "win32" not in sys.platform: if "win32" not in sys.platform:
readline.set_completer_delims(' \t\n;') readline.set_completer_delims(" \t\n;")
readline.parse_and_bind("tab: complete") readline.parse_and_bind("tab: complete")
readline.set_completer(autocomplete) readline.set_completer(autocomplete)
# Where to create the journal? # Where to create the journal?
path_query = f'Path to your journal file (leave blank for {JOURNAL_FILE_PATH}): ' path_query = f"Path to your journal file (leave blank for {JOURNAL_FILE_PATH}): "
journal_path = input(path_query).strip() or JOURNAL_FILE_PATH journal_path = input(path_query).strip() or JOURNAL_FILE_PATH
default_config['journals'][DEFAULT_JOURNAL_KEY] = os.path.expanduser(os.path.expandvars(journal_path)) default_config["journals"][DEFAULT_JOURNAL_KEY] = os.path.expanduser(
os.path.expandvars(journal_path)
)
path = os.path.split(default_config['journals'][DEFAULT_JOURNAL_KEY])[0] # If the folder doesn't exist, create it path = os.path.split(default_config["journals"][DEFAULT_JOURNAL_KEY])[
0
] # If the folder doesn't exist, create it
try: try:
os.makedirs(path) os.makedirs(path)
except OSError: except OSError:
pass pass
# Encrypt it? # Encrypt it?
encrypt = util.yesno("Do you want to encrypt your journal? You can always change this later", default=False) encrypt = util.yesno(
"Do you want to encrypt your journal? You can always change this later",
default=False,
)
if encrypt: if encrypt:
default_config['encrypt'] = True default_config["encrypt"] = True
print("Journal will be encrypted.", file=sys.stderr) print("Journal will be encrypted.", file=sys.stderr)
save_config(default_config) save_config(default_config)
@@ -138,7 +160,7 @@ def install():
def autocomplete(text, state): def autocomplete(text, state):
expansions = glob.glob(os.path.expanduser(os.path.expandvars(text)) + '*') expansions = glob.glob(os.path.expanduser(os.path.expandvars(text)) + "*")
expansions = [e + "/" if os.path.isdir(e) else e for e in expansions] expansions = [e + "/" if os.path.isdir(e) else e for e in expansions]
expansions.append(None) expansions.append(None)
return expansions[state] return expansions[state]

@@ -11,8 +11,16 @@ from .yaml_exporter import YAMLExporter
from .template_exporter import __all__ as template_exporters from .template_exporter import __all__ as template_exporters
from .fancy_exporter import FancyExporter from .fancy_exporter import FancyExporter
__exporters =[JSONExporter, MarkdownExporter, TagExporter, TextExporter, XMLExporter, YAMLExporter, FancyExporter] + template_exporters __exporters = [
__importers =[JRNLImporter] JSONExporter,
MarkdownExporter,
TagExporter,
TextExporter,
XMLExporter,
YAMLExporter,
FancyExporter,
] + template_exporters
__importers = [JRNLImporter]
__exporter_types = {name: plugin for plugin in __exporters for name in plugin.names} __exporter_types = {name: plugin for plugin in __exporters for name in plugin.names}
__importer_types = {name: plugin for plugin in __importers for name in plugin.names} __importer_types = {name: plugin for plugin in __importers for name in plugin.names}
@@ -20,6 +28,7 @@ __importer_types = {name: plugin for plugin in __importers for name in plugin.na
EXPORT_FORMATS = sorted(__exporter_types.keys()) EXPORT_FORMATS = sorted(__exporter_types.keys())
IMPORT_FORMATS = sorted(__importer_types.keys()) IMPORT_FORMATS = sorted(__importer_types.keys())
def get_exporter(format): def get_exporter(format):
for exporter in __exporters: for exporter in __exporters:
if hasattr(exporter, "names") and format in exporter.names: if hasattr(exporter, "names") and format in exporter.names:
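Apart from the reflowed list, the plugin registry behaves as before. A minimal usage sketch, assuming the jrnl.plugins import path used elsewhere in this commit and an already-loaded journal object:

    from jrnl import plugins

    print(plugins.EXPORT_FORMATS)            # sorted names, e.g. "boxed", "fancy", "json", "md", ...
    exporter = plugins.get_exporter("json")  # looks the format up in the exporter list above
    # TextExporter.export(journal, output) (shown further down in this commit) returns a string,
    # writes a single file, or writes one file per entry depending on `output`.
    text = exporter.export(journal, None)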

@@ -8,46 +8,64 @@ from textwrap import TextWrapper
class FancyExporter(TextExporter): class FancyExporter(TextExporter):
"""This Exporter can convert entries and journals into text with unicode box drawing characters.""" """This Exporter can convert entries and journals into text with unicode box drawing characters."""
names = ["fancy", "boxed"] names = ["fancy", "boxed"]
extension = "txt" extension = "txt"
border_a="" border_a = ""
border_b="" border_b = ""
border_c="" border_c = ""
border_d="" border_d = ""
border_e="" border_e = ""
border_f="" border_f = ""
border_g="" border_g = ""
border_h="" border_h = ""
border_i="" border_i = ""
border_j="" border_j = ""
border_k="" border_k = ""
border_l="" border_l = ""
border_m="" border_m = ""
@classmethod @classmethod
def export_entry(cls, entry): def export_entry(cls, entry):
"""Returns a fancy unicode representation of a single entry.""" """Returns a fancy unicode representation of a single entry."""
date_str = entry.date.strftime(entry.journal.config['timeformat']) date_str = entry.date.strftime(entry.journal.config["timeformat"])
linewrap = entry.journal.config['linewrap'] or 78 linewrap = entry.journal.config["linewrap"] or 78
initial_linewrap = linewrap - len(date_str) - 2 initial_linewrap = linewrap - len(date_str) - 2
body_linewrap = linewrap - 2 body_linewrap = linewrap - 2
card = [cls.border_a + cls.border_b*(initial_linewrap) + cls.border_c + date_str] card = [
w = TextWrapper(width=initial_linewrap, initial_indent=cls.border_g+' ', subsequent_indent=cls.border_g+' ') cls.border_a + cls.border_b * (initial_linewrap) + cls.border_c + date_str
]
w = TextWrapper(
width=initial_linewrap,
initial_indent=cls.border_g + " ",
subsequent_indent=cls.border_g + " ",
)
title_lines = w.wrap(entry.title) title_lines = w.wrap(entry.title)
card.append(title_lines[0].ljust(initial_linewrap+1) + cls.border_d + cls.border_e*(len(date_str)-1) + cls.border_f) card.append(
title_lines[0].ljust(initial_linewrap + 1)
+ cls.border_d
+ cls.border_e * (len(date_str) - 1)
+ cls.border_f
)
w.width = body_linewrap w.width = body_linewrap
if len(title_lines) > 1: if len(title_lines) > 1:
for line in w.wrap(' '.join([title_line[len(w.subsequent_indent):] for line in w.wrap(
for title_line in title_lines[1:]])): " ".join(
card.append(line.ljust(body_linewrap+1) + cls.border_h) [
title_line[len(w.subsequent_indent) :]
for title_line in title_lines[1:]
]
)
):
card.append(line.ljust(body_linewrap + 1) + cls.border_h)
if entry.body: if entry.body:
card.append(cls.border_i + cls.border_j*body_linewrap + cls.border_k) card.append(cls.border_i + cls.border_j * body_linewrap + cls.border_k)
for line in entry.body.splitlines(): for line in entry.body.splitlines():
body_lines = w.wrap(line) or [cls.border_g] body_lines = w.wrap(line) or [cls.border_g]
for body_line in body_lines: for body_line in body_lines:
card.append(body_line.ljust(body_linewrap+1) + cls.border_h) card.append(body_line.ljust(body_linewrap + 1) + cls.border_h)
card.append(cls.border_l + cls.border_b*body_linewrap + cls.border_m) card.append(cls.border_l + cls.border_b * body_linewrap + cls.border_m)
return "\n".join(card) return "\n".join(card)
@classmethod @classmethod

@@ -4,8 +4,10 @@
import sys import sys
from .. import util from .. import util
class JRNLImporter: class JRNLImporter:
"""This plugin imports entries from other jrnl files.""" """This plugin imports entries from other jrnl files."""
names = ["jrnl"] names = ["jrnl"]
@staticmethod @staticmethod
@@ -25,5 +27,8 @@ class JRNLImporter:
sys.exit(0) sys.exit(0)
journal.import_(other_journal_txt) journal.import_(other_journal_txt)
new_cnt = len(journal.entries) new_cnt = len(journal.entries)
print("[{} imported to {} journal]".format(new_cnt - old_cnt, journal.name), file=sys.stderr) print(
"[{} imported to {} journal]".format(new_cnt - old_cnt, journal.name),
file=sys.stderr,
)
journal.write() journal.write()

@@ -8,20 +8,21 @@ from .util import get_tags_count
class JSONExporter(TextExporter): class JSONExporter(TextExporter):
"""This Exporter can convert entries and journals into json.""" """This Exporter can convert entries and journals into json."""
names = ["json"] names = ["json"]
extension = "json" extension = "json"
@classmethod @classmethod
def entry_to_dict(cls, entry): def entry_to_dict(cls, entry):
entry_dict = { entry_dict = {
'title': entry.title, "title": entry.title,
'body': entry.body, "body": entry.body,
'date': entry.date.strftime("%Y-%m-%d"), "date": entry.date.strftime("%Y-%m-%d"),
'time': entry.date.strftime("%H:%M"), "time": entry.date.strftime("%H:%M"),
'starred': entry.starred "starred": entry.starred,
} }
if hasattr(entry, "uuid"): if hasattr(entry, "uuid"):
entry_dict['uuid'] = entry.uuid entry_dict["uuid"] = entry.uuid
return entry_dict return entry_dict
@classmethod @classmethod
@@ -35,6 +36,6 @@ class JSONExporter(TextExporter):
tags = get_tags_count(journal) tags = get_tags_count(journal)
result = { result = {
"tags": {tag: count for count, tag in tags}, "tags": {tag: count for count, tag in tags},
"entries": [cls.entry_to_dict(e) for e in journal.entries] "entries": [cls.entry_to_dict(e) for e in journal.entries],
} }
return json.dumps(result, indent=2) return json.dumps(result, indent=2)
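For reference, the dictionary assembled by entry_to_dict/export_journal above serializes to JSON shaped roughly like this (values are illustrative; "uuid" appears only when the entry has one):

    result = {
        "tags": {"@work": 3, "@idea": 1},   # tag -> occurrence count
        "entries": [
            {
                "title": "Ran black over the codebase",
                "body": "...",
                "date": "2019-12-21",
                "time": "11:47",
                "starred": False,
            }
        ],
    }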

@@ -10,24 +10,25 @@ from ..util import WARNING_COLOR, RESET_COLOR
class MarkdownExporter(TextExporter): class MarkdownExporter(TextExporter):
"""This Exporter can convert entries and journals into Markdown.""" """This Exporter can convert entries and journals into Markdown."""
names = ["md", "markdown"] names = ["md", "markdown"]
extension = "md" extension = "md"
@classmethod @classmethod
def export_entry(cls, entry, to_multifile=True): def export_entry(cls, entry, to_multifile=True):
"""Returns a markdown representation of a single entry.""" """Returns a markdown representation of a single entry."""
date_str = entry.date.strftime(entry.journal.config['timeformat']) date_str = entry.date.strftime(entry.journal.config["timeformat"])
body_wrapper = "\n" if entry.body else "" body_wrapper = "\n" if entry.body else ""
body = body_wrapper + entry.body body = body_wrapper + entry.body
if to_multifile is True: if to_multifile is True:
heading = '#' heading = "#"
else: else:
heading = '###' heading = "###"
'''Increase heading levels in body text''' """Increase heading levels in body text"""
newbody = '' newbody = ""
previous_line = '' previous_line = ""
warn_on_heading_level = False warn_on_heading_level = False
for line in body.splitlines(True): for line in body.splitlines(True):
if re.match(r"^#+ ", line): if re.match(r"^#+ ", line):
@@ -35,24 +36,30 @@ class MarkdownExporter(TextExporter):
newbody = newbody + previous_line + heading + line newbody = newbody + previous_line + heading + line
if re.match(r"^#######+ ", heading + line): if re.match(r"^#######+ ", heading + line):
warn_on_heading_level = True warn_on_heading_level = True
line = '' line = ""
elif re.match(r"^=+$", line.rstrip()) and not re.match(r"^$", previous_line.strip()): elif re.match(r"^=+$", line.rstrip()) and not re.match(
r"^$", previous_line.strip()
):
"""Setext style H1""" """Setext style H1"""
newbody = newbody + heading + "# " + previous_line newbody = newbody + heading + "# " + previous_line
line = '' line = ""
elif re.match(r"^-+$", line.rstrip()) and not re.match(r"^$", previous_line.strip()): elif re.match(r"^-+$", line.rstrip()) and not re.match(
r"^$", previous_line.strip()
):
"""Setext style H2""" """Setext style H2"""
newbody = newbody + heading + "## " + previous_line newbody = newbody + heading + "## " + previous_line
line = '' line = ""
else: else:
newbody = newbody + previous_line newbody = newbody + previous_line
previous_line = line previous_line = line
newbody = newbody + previous_line # add very last line newbody = newbody + previous_line # add very last line
if warn_on_heading_level is True: if warn_on_heading_level is True:
print(f"{WARNING_COLOR}WARNING{RESET_COLOR}: " print(
f"Headings increased past H6 on export - {date_str} {entry.title}", f"{WARNING_COLOR}WARNING{RESET_COLOR}: "
file=sys.stderr) f"Headings increased past H6 on export - {date_str} {entry.title}",
file=sys.stderr,
)
return f"{heading} {date_str} {entry.title}\n{newbody} " return f"{heading} {date_str} {entry.title}\n{newbody} "
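The setext-heading branches above were only rewrapped; the conversion itself is unchanged. As a quick reminder of what the loop does (example input/output is illustrative):

    # With to_multifile=True the entry itself is rendered as "# <date> <title>", and headings
    # inside the body are pushed one level down:
    #
    #   My day          ->   ## My day      (setext "=" underline becomes an ATX heading)
    #   ======
    #   ## Details      ->   ### Details    (existing ATX headings gain one "#")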

@@ -7,6 +7,7 @@ from .util import get_tags_count
class TagExporter(TextExporter): class TagExporter(TextExporter):
"""This Exporter can lists the tags for entries and journals, exported as a plain text file.""" """This Exporter can lists the tags for entries and journals, exported as a plain text file."""
names = ["tags"] names = ["tags"]
extension = "tags" extension = "tags"
@@ -21,9 +22,11 @@ class TagExporter(TextExporter):
tag_counts = get_tags_count(journal) tag_counts = get_tags_count(journal)
result = "" result = ""
if not tag_counts: if not tag_counts:
return '[No tags found in journal.]' return "[No tags found in journal.]"
elif min(tag_counts)[0] == 0: elif min(tag_counts)[0] == 0:
tag_counts = filter(lambda x: x[0] > 1, tag_counts) tag_counts = filter(lambda x: x[0] > 1, tag_counts)
result += '[Removed tags that appear only once.]\n' result += "[Removed tags that appear only once.]\n"
result += "\n".join("{:20} : {}".format(tag, n) for n, tag in sorted(tag_counts, reverse=True)) result += "\n".join(
"{:20} : {}".format(tag, n) for n, tag in sorted(tag_counts, reverse=True)
)
return result return result

@@ -6,7 +6,9 @@ EXPRESSION_RE = r"[\[\]():.a-zA-Z0-9_]*"
PRINT_RE = r"{{ *(.+?) *}}" PRINT_RE = r"{{ *(.+?) *}}"
START_BLOCK_RE = r"{% *(if|for) +(.+?) *%}" START_BLOCK_RE = r"{% *(if|for) +(.+?) *%}"
END_BLOCK_RE = r"{% *end(for|if) *%}" END_BLOCK_RE = r"{% *end(for|if) *%}"
FOR_RE = r"{{% *for +({varname}) +in +([^%]+) *%}}".format(varname=VAR_RE, expression=EXPRESSION_RE) FOR_RE = r"{{% *for +({varname}) +in +([^%]+) *%}}".format(
varname=VAR_RE, expression=EXPRESSION_RE
)
IF_RE = r"{% *if +(.+?) *%}" IF_RE = r"{% *if +(.+?) *%}"
BLOCK_RE = r"{% *block +(.+?) *%}((?:.|\n)+?){% *endblock *%}" BLOCK_RE = r"{% *block +(.+?) *%}((?:.|\n)+?){% *endblock *%}"
INCLUDE_RE = r"{% *include +(.+?) *%}" INCLUDE_RE = r"{% *include +(.+?) *%}"
@@ -39,9 +41,10 @@ class Template:
def _eval_context(self, vars): def _eval_context(self, vars):
import asteval import asteval
e = asteval.Interpreter(use_numpy=False, writer=None) e = asteval.Interpreter(use_numpy=False, writer=None)
e.symtable.update(vars) e.symtable.update(vars)
e.symtable['__last_iteration'] = vars.get("__last_iteration", False) e.symtable["__last_iteration"] = vars.get("__last_iteration", False)
return e return e
def _get_blocks(self): def _get_blocks(self):
@@ -49,12 +52,19 @@ class Template:
name, contents = match.groups() name, contents = match.groups()
self.blocks[name] = self._strip_single_nl(contents) self.blocks[name] = self._strip_single_nl(contents)
return "" return ""
self.clean_template = re.sub(BLOCK_RE, s, self.template, flags=re.MULTILINE) self.clean_template = re.sub(BLOCK_RE, s, self.template, flags=re.MULTILINE)
def _expand(self, template, **vars): def _expand(self, template, **vars):
stack = sorted( stack = sorted(
[(m.start(), 1, m.groups()[0]) for m in re.finditer(START_BLOCK_RE, template)] + [
[(m.end(), -1, m.groups()[0]) for m in re.finditer(END_BLOCK_RE, template)] (m.start(), 1, m.groups()[0])
for m in re.finditer(START_BLOCK_RE, template)
]
+ [
(m.end(), -1, m.groups()[0])
for m in re.finditer(END_BLOCK_RE, template)
]
) )
last_nesting, nesting = 0, 0 last_nesting, nesting = 0, 0
@@ -80,19 +90,23 @@ class Template:
start = pos start = pos
last_nesting = nesting last_nesting = nesting
result += self._expand_vars(template[stack[-1][0]:], **vars) result += self._expand_vars(template[stack[-1][0] :], **vars)
return result return result
def _expand_vars(self, template, **vars): def _expand_vars(self, template, **vars):
safe_eval = self._eval_context(vars) safe_eval = self._eval_context(vars)
expanded = re.sub(INCLUDE_RE, lambda m: self.render_block(m.groups()[0], **vars), template) expanded = re.sub(
INCLUDE_RE, lambda m: self.render_block(m.groups()[0], **vars), template
)
return re.sub(PRINT_RE, lambda m: str(safe_eval(m.groups()[0])), expanded) return re.sub(PRINT_RE, lambda m: str(safe_eval(m.groups()[0])), expanded)
def _expand_cond(self, template, **vars): def _expand_cond(self, template, **vars):
start_block = re.search(IF_RE, template, re.M) start_block = re.search(IF_RE, template, re.M)
end_block = list(re.finditer(END_BLOCK_RE, template, re.M))[-1] end_block = list(re.finditer(END_BLOCK_RE, template, re.M))[-1]
expression = start_block.groups()[0] expression = start_block.groups()[0]
sub_template = self._strip_single_nl(template[start_block.end():end_block.start()]) sub_template = self._strip_single_nl(
template[start_block.end() : end_block.start()]
)
safe_eval = self._eval_context(vars) safe_eval = self._eval_context(vars)
if safe_eval(expression): if safe_eval(expression):
@@ -110,15 +124,17 @@ class Template:
start_block = re.search(FOR_RE, template, re.M) start_block = re.search(FOR_RE, template, re.M)
end_block = list(re.finditer(END_BLOCK_RE, template, re.M))[-1] end_block = list(re.finditer(END_BLOCK_RE, template, re.M))[-1]
var_name, iterator = start_block.groups() var_name, iterator = start_block.groups()
sub_template = self._strip_single_nl(template[start_block.end():end_block.start()], strip_r=False) sub_template = self._strip_single_nl(
template[start_block.end() : end_block.start()], strip_r=False
)
safe_eval = self._eval_context(vars) safe_eval = self._eval_context(vars)
result = '' result = ""
items = safe_eval(iterator) items = safe_eval(iterator)
for idx, var in enumerate(items): for idx, var in enumerate(items):
vars[var_name] = var vars[var_name] = var
vars['__last_iteration'] = idx == len(items) - 1 vars["__last_iteration"] = idx == len(items) - 1
result += self._expand(sub_template, **vars) result += self._expand(sub_template, **vars)
del vars[var_name] del vars[var_name]
return self._strip_single_nl(result) return self._strip_single_nl(result)
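Only the regex constants and slice expressions were reflowed in this file; the template mini-language itself is unchanged. A small sketch of the constructs those patterns match follows. The surrounding .template file and the exact construction path (beyond Template.from_file, which appears later in this commit) are assumptions, not part of this hunk:

    # Constructs matched by PRINT_RE, START_BLOCK_RE/END_BLOCK_RE, BLOCK_RE and FOR_RE above:
    snippet = (
        "{% block entry %}"
        "{{ entry.title }} "
        "{% for tag in tags %}{{ tag }} {% endfor %}"
        "{% if entry.starred %}*{% endif %}"
        "{% endblock %}"
    )
    # render_block("entry", entry=..., tags=...) expands a named block with the given variables,
    # with expressions evaluated through the asteval interpreter set up in _eval_context().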

@@ -13,20 +13,13 @@ class GenericTemplateExporter(TextExporter):
@classmethod @classmethod
def export_entry(cls, entry): def export_entry(cls, entry):
"""Returns a string representation of a single entry.""" """Returns a string representation of a single entry."""
vars = { vars = {"entry": entry, "tags": entry.tags}
'entry': entry,
'tags': entry.tags
}
return cls.template.render_block("entry", **vars) return cls.template.render_block("entry", **vars)
@classmethod @classmethod
def export_journal(cls, journal): def export_journal(cls, journal):
"""Returns a string representation of an entire journal.""" """Returns a string representation of an entire journal."""
vars = { vars = {"journal": journal, "entries": journal.entries, "tags": journal.tags}
'journal': journal,
'entries': journal.entries,
'tags': journal.tags
}
return cls.template.render_block("journal", **vars) return cls.template.render_block("journal", **vars)
@@ -34,11 +27,12 @@ def __exporter_from_file(template_file):
"""Create a template class from a file""" """Create a template class from a file"""
name = os.path.basename(template_file).replace(".template", "") name = os.path.basename(template_file).replace(".template", "")
template = Template.from_file(template_file) template = Template.from_file(template_file)
return type(str(f"{name.title()}Exporter"), (GenericTemplateExporter, ), { return type(
"names": [name], str(f"{name.title()}Exporter"),
"extension": template.extension, (GenericTemplateExporter,),
"template": template {"names": [name], "extension": template.extension, "template": template},
}) )
__all__ = [] __all__ = []

@@ -8,6 +8,7 @@ from ..util import ERROR_COLOR, RESET_COLOR
class TextExporter: class TextExporter:
"""This Exporter can convert entries and journals into text files.""" """This Exporter can convert entries and journals into text files."""
names = ["text", "txt"] names = ["text", "txt"]
extension = "txt" extension = "txt"
@@ -33,7 +34,9 @@ class TextExporter:
@classmethod @classmethod
def make_filename(cls, entry): def make_filename(cls, entry):
return entry.date.strftime("%Y-%m-%d_{}.{}".format(slugify(str(entry.title)), cls.extension)) return entry.date.strftime(
"%Y-%m-%d_{}.{}".format(slugify(str(entry.title)), cls.extension)
)
@classmethod @classmethod
def write_files(cls, journal, path): def write_files(cls, journal, path):
@@ -44,7 +47,9 @@ class TextExporter:
with open(full_path, "w", encoding="utf-8") as f: with open(full_path, "w", encoding="utf-8") as f:
f.write(cls.export_entry(entry)) f.write(cls.export_entry(entry))
except IOError as e: except IOError as e:
return "[{2}ERROR{3}: {0} {1}]".format(e.filename, e.strerror, ERROR_COLOR, RESET_COLOR) return "[{2}ERROR{3}: {0} {1}]".format(
e.filename, e.strerror, ERROR_COLOR, RESET_COLOR
)
return "[Journal exported to {}]".format(path) return "[Journal exported to {}]".format(path)
@classmethod @classmethod
@@ -54,7 +59,7 @@ class TextExporter:
representation as string if output is None.""" representation as string if output is None."""
if output and os.path.isdir(output): # multiple files if output and os.path.isdir(output): # multiple files
return cls.write_files(journal, output) return cls.write_files(journal, output)
elif output: # single file elif output: # single file
return cls.write_file(journal, output) return cls.write_file(journal, output)
else: else:
return cls.export_journal(journal) return cls.export_journal(journal)

@@ -6,9 +6,7 @@ def get_tags_count(journal):
"""Returns a set of tuples (count, tag) for all tags present in the journal.""" """Returns a set of tuples (count, tag) for all tags present in the journal."""
# Astute reader: should the following line leave you as puzzled as me the first time # Astute reader: should the following line leave you as puzzled as me the first time
# I came across this construction, worry not and embrace the ensuing moment of enlightment. # I came across this construction, worry not and embrace the ensuing moment of enlightment.
tags = [tag tags = [tag for entry in journal.entries for tag in set(entry.tags)]
for entry in journal.entries
for tag in set(entry.tags)]
# To be read: [for entry in journal.entries: for tag in set(entry.tags): tag] # To be read: [for entry in journal.entries: for tag in set(entry.tags): tag]
tag_counts = {(tags.count(tag), tag) for tag in tags} tag_counts = {(tags.count(tag), tag) for tag in tags}
return tag_counts return tag_counts
@@ -24,4 +22,4 @@ def oxford_list(lst):
elif len(lst) == 2: elif len(lst) == 2:
return lst[0] + " or " + lst[1] return lst[0] + " or " + lst[1]
else: else:
return ', '.join(lst[:-1]) + ", or " + lst[-1] return ", ".join(lst[:-1]) + ", or " + lst[-1]
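The compacted comprehension and quote changes above leave these helpers' behaviour untouched; for orientation:

    # get_tags_count(journal) returns a set of (count, tag) tuples, e.g. {(3, "@work"), (1, "@idea")}
    # oxford_list(["json", "md", "xml"]) -> "json, md, or xml"
    # oxford_list(["json", "md"])        -> "json or md"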

@@ -8,6 +8,7 @@ from xml.dom import minidom
class XMLExporter(JSONExporter): class XMLExporter(JSONExporter):
"""This Exporter can convert entries and journals into XML.""" """This Exporter can convert entries and journals into XML."""
names = ["xml"] names = ["xml"]
extension = "xml" extension = "xml"
@@ -15,7 +16,7 @@ class XMLExporter(JSONExporter):
def export_entry(cls, entry, doc=None): def export_entry(cls, entry, doc=None):
"""Returns an XML representation of a single entry.""" """Returns an XML representation of a single entry."""
doc_el = doc or minidom.Document() doc_el = doc or minidom.Document()
entry_el = doc_el.createElement('entry') entry_el = doc_el.createElement("entry")
for key, value in cls.entry_to_dict(entry).items(): for key, value in cls.entry_to_dict(entry).items():
elem = doc_el.createElement(key) elem = doc_el.createElement(key)
elem.appendChild(doc_el.createTextNode(value)) elem.appendChild(doc_el.createTextNode(value))
@@ -28,11 +29,11 @@ class XMLExporter(JSONExporter):
@classmethod @classmethod
def entry_to_xml(cls, entry, doc): def entry_to_xml(cls, entry, doc):
entry_el = doc.createElement('entry') entry_el = doc.createElement("entry")
entry_el.setAttribute('date', entry.date.isoformat()) entry_el.setAttribute("date", entry.date.isoformat())
if hasattr(entry, "uuid"): if hasattr(entry, "uuid"):
entry_el.setAttribute('uuid', entry.uuid) entry_el.setAttribute("uuid", entry.uuid)
entry_el.setAttribute('starred', entry.starred) entry_el.setAttribute("starred", entry.starred)
entry_el.appendChild(doc.createTextNode(entry.fulltext)) entry_el.appendChild(doc.createTextNode(entry.fulltext))
return entry_el return entry_el
@@ -41,12 +42,12 @@ class XMLExporter(JSONExporter):
"""Returns an XML representation of an entire journal.""" """Returns an XML representation of an entire journal."""
tags = get_tags_count(journal) tags = get_tags_count(journal)
doc = minidom.Document() doc = minidom.Document()
xml = doc.createElement('journal') xml = doc.createElement("journal")
tags_el = doc.createElement('tags') tags_el = doc.createElement("tags")
entries_el = doc.createElement('entries') entries_el = doc.createElement("entries")
for count, tag in tags: for count, tag in tags:
tag_el = doc.createElement('tag') tag_el = doc.createElement("tag")
tag_el.setAttribute('name', tag) tag_el.setAttribute("name", tag)
count_node = doc.createTextNode(str(count)) count_node = doc.createTextNode(str(count))
tag_el.appendChild(count_node) tag_el.appendChild(count_node)
tags_el.appendChild(tag_el) tags_el.appendChild(tag_el)

@@ -10,6 +10,7 @@ from ..util import WARNING_COLOR, ERROR_COLOR, RESET_COLOR
class YAMLExporter(TextExporter): class YAMLExporter(TextExporter):
"""This Exporter can convert entries and journals into Markdown formatted text with YAML front matter.""" """This Exporter can convert entries and journals into Markdown formatted text with YAML front matter."""
names = ["yaml"] names = ["yaml"]
extension = "md" extension = "md"
@@ -17,22 +18,29 @@ class YAMLExporter(TextExporter):
def export_entry(cls, entry, to_multifile=True): def export_entry(cls, entry, to_multifile=True):
"""Returns a markdown representation of a single entry, with YAML front matter.""" """Returns a markdown representation of a single entry, with YAML front matter."""
if to_multifile is False: if to_multifile is False:
print("{}ERROR{}: YAML export must be to individual files. " print(
"Please specify a directory to export to.".format("\033[31m", "\033[0m"), file=sys.stderr) "{}ERROR{}: YAML export must be to individual files. "
"Please specify a directory to export to.".format(
"\033[31m", "\033[0m"
),
file=sys.stderr,
)
return return
date_str = entry.date.strftime(entry.journal.config['timeformat']) date_str = entry.date.strftime(entry.journal.config["timeformat"])
body_wrapper = "\n" if entry.body else "" body_wrapper = "\n" if entry.body else ""
body = body_wrapper + entry.body body = body_wrapper + entry.body
tagsymbols = entry.journal.config['tagsymbols'] tagsymbols = entry.journal.config["tagsymbols"]
# see also Entry.Entry.rag_regex # see also Entry.Entry.rag_regex
multi_tag_regex = re.compile(r'(?u)^\s*([{tags}][-+*#/\w]+\s*)+$'.format(tags=tagsymbols)) multi_tag_regex = re.compile(
r"(?u)^\s*([{tags}][-+*#/\w]+\s*)+$".format(tags=tagsymbols)
)
'''Increase heading levels in body text''' """Increase heading levels in body text"""
newbody = '' newbody = ""
heading = '#' heading = "#"
previous_line = '' previous_line = ""
warn_on_heading_level = False warn_on_heading_level = False
for line in entry.body.splitlines(True): for line in entry.body.splitlines(True):
if re.match(r"^#+ ", line): if re.match(r"^#+ ", line):
@@ -40,45 +48,59 @@ class YAMLExporter(TextExporter):
newbody = newbody + previous_line + heading + line newbody = newbody + previous_line + heading + line
if re.match(r"^#######+ ", heading + line): if re.match(r"^#######+ ", heading + line):
warn_on_heading_level = True warn_on_heading_level = True
line = '' line = ""
elif re.match(r"^=+$", line.rstrip()) and not re.match(r"^$", previous_line.strip()): elif re.match(r"^=+$", line.rstrip()) and not re.match(
r"^$", previous_line.strip()
):
"""Setext style H1""" """Setext style H1"""
newbody = newbody + heading + "# " + previous_line newbody = newbody + heading + "# " + previous_line
line = '' line = ""
elif re.match(r"^-+$", line.rstrip()) and not re.match(r"^$", previous_line.strip()): elif re.match(r"^-+$", line.rstrip()) and not re.match(
r"^$", previous_line.strip()
):
"""Setext style H2""" """Setext style H2"""
newbody = newbody + heading + "## " + previous_line newbody = newbody + heading + "## " + previous_line
line = '' line = ""
elif multi_tag_regex.match(line): elif multi_tag_regex.match(line):
"""Tag only lines""" """Tag only lines"""
line = '' line = ""
else: else:
newbody = newbody + previous_line newbody = newbody + previous_line
previous_line = line previous_line = line
newbody = newbody + previous_line # add very last line newbody = newbody + previous_line # add very last line
if warn_on_heading_level is True: if warn_on_heading_level is True:
print("{}WARNING{}: Headings increased past H6 on export - {} {}".format(WARNING_COLOR, RESET_COLOR, date_str, entry.title), file=sys.stderr) print(
"{}WARNING{}: Headings increased past H6 on export - {} {}".format(
WARNING_COLOR, RESET_COLOR, date_str, entry.title
),
file=sys.stderr,
)
dayone_attributes = '' dayone_attributes = ""
if hasattr(entry, "uuid"): if hasattr(entry, "uuid"):
dayone_attributes += 'uuid: ' + entry.uuid + '\n' dayone_attributes += "uuid: " + entry.uuid + "\n"
# TODO: copy over pictures, if present # TODO: copy over pictures, if present
# source directory is entry.journal.config['journal'] # source directory is entry.journal.config['journal']
# output directory is...? # output directory is...?
return "title: {title}\ndate: {date}\nstared: {stared}\ntags: {tags}\n{dayone} {body} {space}".format( return "title: {title}\ndate: {date}\nstared: {stared}\ntags: {tags}\n{dayone} {body} {space}".format(
date = date_str, date=date_str,
title = entry.title, title=entry.title,
stared = entry.starred, stared=entry.starred,
tags = ', '.join([tag[1:] for tag in entry.tags]), tags=", ".join([tag[1:] for tag in entry.tags]),
dayone = dayone_attributes, dayone=dayone_attributes,
body = newbody, body=newbody,
space="" space="",
) )
@classmethod @classmethod
def export_journal(cls, journal): def export_journal(cls, journal):
"""Returns an error, as YAML export requires a directory as a target.""" """Returns an error, as YAML export requires a directory as a target."""
print("{}ERROR{}: YAML export must be to individual files. Please specify a directory to export to.".format(ERROR_COLOR, RESET_COLOR), file=sys.stderr) print(
"{}ERROR{}: YAML export must be to individual files. Please specify a directory to export to.".format(
ERROR_COLOR, RESET_COLOR
),
file=sys.stderr,
)
return return
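The return statement above (note the pre-existing "stared" spelling, which predates this commit and is untouched by the formatter) produces front matter along these lines for each exported entry (values are illustrative):

    # title: Ran black over the codebase
    # date: 2019-12-21 11:47
    # stared: False
    # tags: work, idea
    # uuid: ...              <- only for entries that carry a uuid attribute
    #
    # <entry body follows, with headings pushed down as in the markdown exporter>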

@@ -1,7 +1,10 @@
from datetime import datetime from datetime import datetime
from dateutil.parser import parse as dateparse from dateutil.parser import parse as dateparse
try: import parsedatetime.parsedatetime_consts as pdt
except ImportError: import parsedatetime as pdt try:
import parsedatetime.parsedatetime_consts as pdt
except ImportError:
import parsedatetime as pdt
FAKE_YEAR = 9999 FAKE_YEAR = 9999
DEFAULT_FUTURE = datetime(FAKE_YEAR, 12, 31, 23, 59, 59) DEFAULT_FUTURE = datetime(FAKE_YEAR, 12, 31, 23, 59, 59)
@@ -12,7 +15,9 @@ consts.DOWParseStyle = -1 # "Monday" will be either today or the last Monday
CALENDAR = pdt.Calendar(consts) CALENDAR = pdt.Calendar(consts)
def parse(date_str, inclusive=False, default_hour=None, default_minute=None, bracketed=False): def parse(
date_str, inclusive=False, default_hour=None, default_minute=None, bracketed=False
):
"""Parses a string containing a fuzzy date and returns a datetime.datetime object""" """Parses a string containing a fuzzy date and returns a datetime.datetime object"""
if not date_str: if not date_str:
return None return None
@@ -37,7 +42,7 @@ def parse(date_str, inclusive=False, default_hour=None, default_minute=None, bra
flag = 1 if date.hour == date.minute == 0 else 2 flag = 1 if date.hour == date.minute == 0 else 2
date = date.timetuple() date = date.timetuple()
except Exception as e: except Exception as e:
if e.args[0] == 'day is out of range for month': if e.args[0] == "day is out of range for month":
y, m, d, H, M, S = default_date.timetuple()[:6] y, m, d, H, M, S = default_date.timetuple()[:6]
default_date = datetime(y, m, d - 1, H, M, S) default_date = datetime(y, m, d - 1, H, M, S)
else: else:
@@ -53,10 +58,12 @@ def parse(date_str, inclusive=False, default_hour=None, default_minute=None, bra
return None return None
if flag is 1: # Date found, but no time. Use the default time. if flag is 1: # Date found, but no time. Use the default time.
date = datetime(*date[:3], date = datetime(
hour=23 if inclusive else default_hour or 0, *date[:3],
minute=59 if inclusive else default_minute or 0, hour=23 if inclusive else default_hour or 0,
second=59 if inclusive else 0) minute=59 if inclusive else default_minute or 0,
second=59 if inclusive else 0
)
else: else:
date = datetime(*date[:6]) date = datetime(*date[:6])
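Beyond the def-line reflow, parse() behaves as before. A rough usage sketch; the concrete results depend on parsedatetime's locale handling and the current date, so treat them as illustrative only:

    # parse("2019-12-21")                   -> datetime(2019, 12, 21, default_hour or 0, default_minute or 0)
    # parse("2019-12-21", inclusive=True)   -> datetime(2019, 12, 21, 23, 59, 59)   (useful for end-of-range filters)
    # parse("last friday", default_hour=9)  -> the most recent Friday at 09:00 (DOWParseStyle = -1 prefers the past)
    # parse("")                             -> None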

@@ -11,9 +11,9 @@ import os
def backup(filename, binary=False): def backup(filename, binary=False):
print(f" Created a backup at {filename}.backup", file=sys.stderr) print(f" Created a backup at {filename}.backup", file=sys.stderr)
filename = os.path.expanduser(os.path.expandvars(filename)) filename = os.path.expanduser(os.path.expandvars(filename))
with open(filename, 'rb' if binary else 'r') as original: with open(filename, "rb" if binary else "r") as original:
contents = original.read() contents = original.read()
with open(filename + ".backup", 'wb' if binary else 'w') as backup: with open(filename + ".backup", "wb" if binary else "w") as backup:
backup.write(contents) backup.write(contents)
@@ -25,7 +25,8 @@ def upgrade_jrnl_if_necessary(config_path):
config = util.load_config(config_path) config = util.load_config(config_path)
print("""Welcome to jrnl {}. print(
"""Welcome to jrnl {}.
It looks like you've been using an older version of jrnl until now. That's It looks like you've been using an older version of jrnl until now. That's
okay - jrnl will now upgrade your configuration and journal files. Afterwards okay - jrnl will now upgrade your configuration and journal files. Afterwards
@@ -39,18 +40,21 @@ you can enjoy all of the great new features that come with jrnl 2:
Please note that jrnl 1.x is NOT forward compatible with this version of jrnl. Please note that jrnl 1.x is NOT forward compatible with this version of jrnl.
If you choose to proceed, you will not be able to use your journals with If you choose to proceed, you will not be able to use your journals with
older versions of jrnl anymore. older versions of jrnl anymore.
""".format(__version__)) """.format(
__version__
)
)
encrypted_journals = {} encrypted_journals = {}
plain_journals = {} plain_journals = {}
other_journals = {} other_journals = {}
all_journals = [] all_journals = []
for journal_name, journal_conf in config['journals'].items(): for journal_name, journal_conf in config["journals"].items():
if isinstance(journal_conf, dict): if isinstance(journal_conf, dict):
path = journal_conf.get("journal") path = journal_conf.get("journal")
encrypt = journal_conf.get("encrypt") encrypt = journal_conf.get("encrypt")
else: else:
encrypt = config.get('encrypt') encrypt = config.get("encrypt")
path = journal_conf path = journal_conf
path = os.path.expanduser(path) path = os.path.expanduser(path)
@@ -62,21 +66,36 @@ older versions of jrnl anymore.
else: else:
plain_journals[journal_name] = path plain_journals[journal_name] = path
longest_journal_name = max([len(journal) for journal in config['journals']]) longest_journal_name = max([len(journal) for journal in config["journals"]])
if encrypted_journals: if encrypted_journals:
print(f"\nFollowing encrypted journals will be upgraded to jrnl {__version__}:", file=sys.stderr) print(
f"\nFollowing encrypted journals will be upgraded to jrnl {__version__}:",
file=sys.stderr,
)
for journal, path in encrypted_journals.items(): for journal, path in encrypted_journals.items():
print(" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name), file=sys.stderr) print(
" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name),
file=sys.stderr,
)
if plain_journals: if plain_journals:
print(f"\nFollowing plain text journals will upgraded to jrnl {__version__}:", file=sys.stderr) print(
f"\nFollowing plain text journals will upgraded to jrnl {__version__}:",
file=sys.stderr,
)
for journal, path in plain_journals.items(): for journal, path in plain_journals.items():
print(" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name), file=sys.stderr) print(
" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name),
file=sys.stderr,
)
if other_journals: if other_journals:
print("\nFollowing journals will be not be touched:", file=sys.stderr) print("\nFollowing journals will be not be touched:", file=sys.stderr)
for journal, path in other_journals.items(): for journal, path in other_journals.items():
print(" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name), file=sys.stderr) print(
" {:{pad}} -> {}".format(journal, path, pad=longest_journal_name),
file=sys.stderr,
)
try: try:
cont = util.yesno("\nContinue upgrading jrnl?", default=False) cont = util.yesno("\nContinue upgrading jrnl?", default=False)
@@ -86,24 +105,37 @@ older versions of jrnl anymore.
raise UserAbort("jrnl NOT upgraded, exiting.") raise UserAbort("jrnl NOT upgraded, exiting.")
for journal_name, path in encrypted_journals.items(): for journal_name, path in encrypted_journals.items():
print(f"\nUpgrading encrypted '{journal_name}' journal stored in {path}...", file=sys.stderr) print(
f"\nUpgrading encrypted '{journal_name}' journal stored in {path}...",
file=sys.stderr,
)
backup(path, binary=True) backup(path, binary=True)
old_journal = Journal.open_journal(journal_name, util.scope_config(config, journal_name), legacy=True) old_journal = Journal.open_journal(
journal_name, util.scope_config(config, journal_name), legacy=True
)
all_journals.append(EncryptedJournal.from_journal(old_journal)) all_journals.append(EncryptedJournal.from_journal(old_journal))
for journal_name, path in plain_journals.items(): for journal_name, path in plain_journals.items():
print(f"\nUpgrading plain text '{journal_name}' journal stored in {path}...", file=sys.stderr) print(
f"\nUpgrading plain text '{journal_name}' journal stored in {path}...",
file=sys.stderr,
)
backup(path) backup(path)
old_journal = Journal.open_journal(journal_name, util.scope_config(config, journal_name), legacy=True) old_journal = Journal.open_journal(
journal_name, util.scope_config(config, journal_name), legacy=True
)
all_journals.append(Journal.PlainJournal.from_journal(old_journal)) all_journals.append(Journal.PlainJournal.from_journal(old_journal))
# loop through lists to validate # loop through lists to validate
failed_journals = [j for j in all_journals if not j.validate_parsing()] failed_journals = [j for j in all_journals if not j.validate_parsing()]
if len(failed_journals) > 0: if len(failed_journals) > 0:
print("\nThe following journal{} failed to upgrade:\n{}".format( print(
's' if len(failed_journals) > 1 else '', "\n".join(j.name for j in failed_journals)), "\nThe following journal{} failed to upgrade:\n{}".format(
file=sys.stderr "s" if len(failed_journals) > 1 else "",
"\n".join(j.name for j in failed_journals),
),
file=sys.stderr,
) )
raise UpgradeValidationException raise UpgradeValidationException
@@ -120,4 +152,5 @@ older versions of jrnl anymore.
class UpgradeValidationException(Exception): class UpgradeValidationException(Exception):
"""Raised when the contents of an upgraded journal do not match the old journal""" """Raised when the contents of an upgraded journal do not match the old journal"""
pass pass
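The string and quote changes above do not alter the upgrade flow itself. In short, for each journal selected for upgrade the code shown here does roughly the following:

    # backup("~/journal.txt")          -> copies the file to ~/journal.txt.backup (binary=True for encrypted journals)
    # Journal.open_journal(name, util.scope_config(config, name), legacy=True)
    #                                   -> loads the journal with the jrnl 1.x parser
    # EncryptedJournal.from_journal(old) / Journal.PlainJournal.from_journal(old)
    #                                   -> rebuilds it in the 2.x format; validate_parsing() is checked
    #                                      before anything is written, else UpgradeValidationException is raised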

@@ -25,7 +25,8 @@ RESET_COLOR = "\033[0m"
# Based on Segtok by Florian Leitner # Based on Segtok by Florian Leitner
# https://github.com/fnl/segtok # https://github.com/fnl/segtok
SENTENCE_SPLITTER = re.compile(r""" SENTENCE_SPLITTER = re.compile(
r"""
( # A sentence ends at one of two sequences: ( # A sentence ends at one of two sequences:
[.!?\u203C\u203D\u2047\u2048\u2049\u3002\uFE52\uFE57\uFF01\uFF0E\uFF1F\uFF61] # Either, a sequence starting with a sentence terminal, [.!?\u203C\u203D\u2047\u2048\u2049\u3002\uFE52\uFE57\uFF01\uFF0E\uFF1F\uFF61] # Either, a sequence starting with a sentence terminal,
[\'\u2019\"\u201D]? # an optional right quote, [\'\u2019\"\u201D]? # an optional right quote,
@@ -33,14 +34,18 @@ SENTENCE_SPLITTER = re.compile(r"""
\s+ # a sequence of required spaces. \s+ # a sequence of required spaces.
| # Otherwise, | # Otherwise,
\n # a sentence also terminates newlines. \n # a sentence also terminates newlines.
)""", re.VERBOSE) )""",
re.VERBOSE,
)
class UserAbort(Exception): class UserAbort(Exception):
pass pass
def create_password(journal_name: str, prompt: str = "Enter password for new journal: ") -> str: def create_password(
journal_name: str, prompt: str = "Enter password for new journal: "
) -> str:
while True: while True:
pw = gp.getpass(prompt) pw = gp.getpass(prompt)
if not pw: if not pw:
@@ -59,7 +64,11 @@ def create_password(journal_name: str, prompt: str = "Enter password for new jou
return pw return pw
def decrypt_content(decrypt_func: Callable[[str], Optional[str]], keychain: str = None, max_attempts: int = 3) -> str: def decrypt_content(
decrypt_func: Callable[[str], Optional[str]],
keychain: str = None,
max_attempts: int = 3,
) -> str:
pwd_from_keychain = keychain and get_keychain(keychain) pwd_from_keychain = keychain and get_keychain(keychain)
password = pwd_from_keychain or gp.getpass() password = pwd_from_keychain or gp.getpass()
result = decrypt_func(password) result = decrypt_func(password)
@@ -81,21 +90,23 @@ def decrypt_content(decrypt_func: Callable[[str], Optional[str]], keychain: str
def get_keychain(journal_name): def get_keychain(journal_name):
import keyring import keyring
try: try:
return keyring.get_password('jrnl', journal_name) return keyring.get_password("jrnl", journal_name)
except RuntimeError: except RuntimeError:
return "" return ""
def set_keychain(journal_name, password): def set_keychain(journal_name, password):
import keyring import keyring
if password is None: if password is None:
try: try:
keyring.delete_password('jrnl', journal_name) keyring.delete_password("jrnl", journal_name)
except RuntimeError: except RuntimeError:
pass pass
else: else:
keyring.set_password('jrnl', journal_name, password) keyring.set_password("jrnl", journal_name, password)
def yesno(prompt, default=True): def yesno(prompt, default=True):
@@ -112,34 +123,40 @@ def load_config(config_path):
def scope_config(config, journal_name): def scope_config(config, journal_name):
if journal_name not in config['journals']: if journal_name not in config["journals"]:
return config return config
config = config.copy() config = config.copy()
journal_conf = config['journals'].get(journal_name) journal_conf = config["journals"].get(journal_name)
if type(journal_conf) is dict: # We can override the default config on a by-journal basis if (
log.debug('Updating configuration with specific journal overrides %s', journal_conf) type(journal_conf) is dict
): # We can override the default config on a by-journal basis
log.debug(
"Updating configuration with specific journal overrides %s", journal_conf
)
config.update(journal_conf) config.update(journal_conf)
else: # But also just give them a string to point to the journal file else: # But also just give them a string to point to the journal file
config['journal'] = journal_conf config["journal"] = journal_conf
config.pop('journals') config.pop("journals")
return config return config
def get_text_from_editor(config, template=""): def get_text_from_editor(config, template=""):
filehandle, tmpfile = tempfile.mkstemp(prefix="jrnl", text=True, suffix=".txt") filehandle, tmpfile = tempfile.mkstemp(prefix="jrnl", text=True, suffix=".txt")
with open(tmpfile, 'w', encoding="utf-8") as f: with open(tmpfile, "w", encoding="utf-8") as f:
if template: if template:
f.write(template) f.write(template)
try: try:
subprocess.call(shlex.split(config['editor'], posix="win" not in sys.platform) + [tmpfile]) subprocess.call(
shlex.split(config["editor"], posix="win" not in sys.platform) + [tmpfile]
)
except AttributeError: except AttributeError:
subprocess.call(config['editor'] + [tmpfile]) subprocess.call(config["editor"] + [tmpfile])
with open(tmpfile, "r", encoding="utf-8") as f: with open(tmpfile, "r", encoding="utf-8") as f:
raw = f.read() raw = f.read()
os.close(filehandle) os.close(filehandle)
os.remove(tmpfile) os.remove(tmpfile)
if not raw: if not raw:
print('[Nothing saved to file]', file=sys.stderr) print("[Nothing saved to file]", file=sys.stderr)
return raw return raw
@@ -152,9 +169,9 @@ def slugify(string):
"""Slugifies a string. """Slugifies a string.
Based on public domain code from https://github.com/zacharyvoase/slugify Based on public domain code from https://github.com/zacharyvoase/slugify
""" """
normalized_string = str(unicodedata.normalize('NFKD', string)) normalized_string = str(unicodedata.normalize("NFKD", string))
no_punctuation = re.sub(r'[^\w\s-]', '', normalized_string).strip().lower() no_punctuation = re.sub(r"[^\w\s-]", "", normalized_string).strip().lower()
slug = re.sub(r'[-\s]+', '-', no_punctuation) slug = re.sub(r"[-\s]+", "-", no_punctuation)
return slug return slug
@@ -163,4 +180,4 @@ def split_title(text):
punkt = SENTENCE_SPLITTER.search(text) punkt = SENTENCE_SPLITTER.search(text)
if not punkt: if not punkt:
return text, "" return text, ""
return text[:punkt.end()].strip(), text[punkt.end():].strip() return text[: punkt.end()].strip(), text[punkt.end() :].strip()
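Finally, the slicing and quote changes in slugify() and split_title() are behaviour-neutral; for reference, with illustrative inputs:

    # slugify("Black Formatter, round #2!")          -> "black-formatter-round-2"
    # split_title("Ran black. Everything still passes.")
    #                                                -> ("Ran black.", "Everything still passes.")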