Mirror of https://github.com/jrnl-org/jrnl.git, synced 2025-05-10 08:38:32 +02:00
Add extended metadata support for DayOne Classic (#928)
* Updating changelog [ci skip]
* Incrementing version to v2.4 [ci skip]
* [DayOne] Remove extra spaces from the titles of edited DayOne entries; otherwise, a leading space was being introduced
* [DayOne] Maintain existing tags stored in DayOne metadata
* [DayOne] Bring back extended DayOne attributes
* [DayOne] Maintain metadata on edited entries (fixes #358; see also #159)
* [DayOne exporter] Apply black formatting
* [JSON exporter] Add support for extended DayOne metadata
* [DayOne] [Tests] Test that extended DayOne metadata is added to new entries

Co-authored-by: Jrnl Bot <jrnl.bot@gmail.com>
Parent: 759c69c497
Commit: 404760876f

6 changed files with 160 additions and 8 deletions
.gitignore (vendored): 1 addition

@@ -43,6 +43,7 @@ obj
 env/
 env*/
 venv*/
+.venv*/
 
 # PyCharm Project files
 .idea/

CHANGELOG.md: 19 additions

@@ -1,5 +1,24 @@
 # Changelog
 
+## [v2.4](https://pypi.org/project/jrnl/v2.4/) (2020-04-25)
+
+[Full Changelog](https://github.com/jrnl-org/jrnl/compare/v2.4-beta...v2.4)
+
+**Implemented enhancements:**
+
+- Upgrade license to GPLv3 [\#918](https://github.com/jrnl-org/jrnl/pull/918) ([wren](https://github.com/wren))
+
+**Build:**
+
+- Update makefile to match pipeline better [\#919](https://github.com/jrnl-org/jrnl/pull/919) ([wren](https://github.com/wren))
+
+**Updated documentation:**
+
+- Clean up readme file [\#924](https://github.com/jrnl-org/jrnl/pull/924) ([wren](https://github.com/wren))
+- Clarify that editing config isn't always destructive [\#923](https://github.com/jrnl-org/jrnl/pull/923) ([Epskampie](https://github.com/Epskampie))
+
+# Changelog
+
 ## [Unreleased](https://github.com/jrnl-org/jrnl/)
 
 [Full Changelog](https://github.com/jrnl-org/jrnl/compare/v2.4.2...HEAD)

@@ -63,3 +63,15 @@ Feature: Dayone specific implementation details.
         Then we should get no error
         and the output should be parsable as json
         and the json output should contain entries.0.uuid = "4BB1F46946AD439996C9B59DE7C4DDC1"
+
+    Scenario: Writing into Dayone adds extended metadata
+        Given we use the config "dayone.yaml"
+        When we run "jrnl 01 may 1979: Being born hurts."
+        and we run "jrnl --export json"
+        Then "entries" in the json output should have 5 elements
+        and the json output should contain entries.0.creator.software_agent
+        and the json output should contain entries.0.creator.os_agent
+        and the json output should contain entries.0.creator.host_name
+        and the json output should contain entries.0.creator.generation_date
+        and the json output should contain entries.0.creator.device_agent
+        and "entries.0.creator.software_agent" in the json output should contain "jrnl"

@@ -32,13 +32,21 @@ def check_output_field_not_key(context, field, key):
 @then('"{field}" in the json output should contain "{key}"')
 def check_output_field_key(context, field, key):
     out = context.stdout_capture.getvalue()
-    out_json = json.loads(out)
-    assert field in out_json
-    assert key in out_json[field]
+    struct = json.loads(out)
+
+    for node in field.split("."):
+        try:
+            struct = struct[int(node)]
+        except ValueError:
+            assert node in struct
+            struct = struct[node]
+
+    assert key in struct
 
 
+@then("the json output should contain {path}")
 @then('the json output should contain {path} = "{value}"')
-def check_json_output_path(context, path, value):
+def check_json_output_path(context, path, value=None):
     """ E.g.
     the json output should contain entries.0.title = "hello"
     """

@@ -50,7 +58,11 @@ def check_json_output_path(context, path, value):
         try:
             struct = struct[int(node)]
         except ValueError:
             struct = struct[node]
-    assert struct == value, struct
+
+    if value is not None:
+        assert struct == value, struct
+    else:
+        assert struct is not None
 
 
 @then("the output should be a valid XML string")

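Note: both step changes above rely on the same traversal rule: split the dotted path on ".", treat segments that parse as integers as list indices, and treat everything else as dictionary keys. A minimal standalone sketch of that rule (the sample payload and the helper name resolve are illustrative, not part of this commit):

import json

def resolve(struct, path):
    # Numeric segments index into lists, all other segments index into dicts,
    # mirroring the behave steps above.
    for node in path.split("."):
        try:
            struct = struct[int(node)]
        except ValueError:
            struct = struct[node]
    return struct

out = json.loads('{"entries": [{"creator": {"software_agent": "jrnl/v2.4"}}]}')
assert "jrnl" in resolve(out, "entries.0.creator.software_agent")
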
@@ -9,11 +9,13 @@ import re
 import time
 import uuid
 from xml.parsers.expat import ExpatError
+import socket
+import platform
 
 import pytz
 import tzlocal
 
-from . import Entry, Journal
+from . import __title__, __version__, Entry, Journal
 from . import time as jrnl_time
 
 
@@ -71,6 +73,41 @@ class DayOne(Journal.Journal):
                         for tag in dict_entry.get("Tags", [])
                     ]
 
+                    """Extended DayOne attributes"""
+                    try:
+                        entry.creator_device_agent = dict_entry["Creator"][
+                            "Device Agent"
+                        ]
+                    except:
+                        pass
+                    try:
+                        entry.creator_generation_date = dict_entry["Creator"][
+                            "Generation Date"
+                        ]
+                    except:
+                        entry.creator_generation_date = date
+                    try:
+                        entry.creator_host_name = dict_entry["Creator"]["Host Name"]
+                    except:
+                        pass
+                    try:
+                        entry.creator_os_agent = dict_entry["Creator"]["OS Agent"]
+                    except:
+                        pass
+                    try:
+                        entry.creator_software_agent = dict_entry["Creator"][
+                            "Software Agent"
+                        ]
+                    except:
+                        pass
+                    try:
+                        entry.location = dict_entry["Location"]
+                    except:
+                        pass
+                    try:
+                        entry.weather = dict_entry["Weather"]
+                    except:
+                        pass
                     self.entries.append(entry)
         self.sort()
         return self

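For context, a standalone sketch of reading these fields back out of a DayOne Classic .doentry plist with plistlib; the journal path is hypothetical, and dict.get() is used here for brevity instead of the bare try/except pattern in the diff:

import plistlib

# Hypothetical entry file inside a DayOne Classic journal directory; jrnl
# builds this path from self.config["journal"] + "entries" + the entry UUID.
with open("entries/4BB1F46946AD439996C9B59DE7C4DDC1.doentry", "rb") as f:
    dict_entry = plistlib.load(f, fmt=plistlib.FMT_XML)

creator = dict_entry.get("Creator", {})
print(creator.get("Software Agent"))  # e.g. "jrnl/v2.4"
print(creator.get("Host Name"))
print(dict_entry.get("Location"))     # optional; absent on many entries
print(dict_entry.get("Weather"))      # optional; absent on many entries
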
@@ -85,6 +122,20 @@ class DayOne(Journal.Journal):
 
                 if not hasattr(entry, "uuid"):
                     entry.uuid = uuid.uuid1().hex
+                if not hasattr(entry, "creator_device_agent"):
+                    entry.creator_device_agent = ""  # iPhone/iPhone5,3
+                if not hasattr(entry, "creator_generation_date"):
+                    entry.creator_generation_date = utc_time
+                if not hasattr(entry, "creator_host_name"):
+                    entry.creator_host_name = socket.gethostname()
+                if not hasattr(entry, "creator_os_agent"):
+                    entry.creator_os_agent = "{}/{}".format(
+                        platform.system(), platform.release()
+                    )
+                if not hasattr(entry, "creator_software_agent"):
+                    entry.creator_software_agent = "{}/{}".format(
+                        __title__, __version__
+                    )
 
                 fn = (
                     Path(self.config["journal"])

@@ -102,10 +153,23 @@ class DayOne(Journal.Journal):
                         tag.strip(self.config["tagsymbols"]).replace("_", " ")
                         for tag in entry.tags
                     ],
+                    "Creator": {
+                        "Device Agent": entry.creator_device_agent,
+                        "Generation Date": entry.creator_generation_date,
+                        "Host Name": entry.creator_host_name,
+                        "OS Agent": entry.creator_os_agent,
+                        "Software Agent": entry.creator_software_agent,
+                    },
                 }
+                if hasattr(entry, "location"):
+                    entry_plist["Location"] = entry.location
+                if hasattr(entry, "weather"):
+                    entry_plist["Weather"] = entry.weather
 
                 # plistlib expects a binary object
                 with fn.open(mode="wb") as f:
                     plistlib.dump(entry_plist, f, fmt=plistlib.FMT_XML, sort_keys=False)
 
         for entry in self._deleted_entries:
             filename = os.path.join(
                 self.config["journal"], "entries", entry.uuid + ".doentry"

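A minimal sketch of the plist this write path now produces for a new entry, with illustrative values; in the real code the Creator fields come from the entry itself or from the socket/platform/__title__/__version__ defaults shown above, and several other DayOne keys are omitted here for brevity:

import plistlib
from datetime import datetime

entry_plist = {
    "UUID": "4BB1F46946AD439996C9B59DE7C4DDC1",
    "Entry Text": "Being born hurts.",
    "Tags": ["memories"],
    "Creator": {
        "Device Agent": "",
        "Generation Date": datetime.utcnow(),
        "Host Name": "example-host",
        "OS Agent": "Linux/5.4.0",
        "Software Agent": "jrnl/v2.4",
    },
}

# plistlib expects a binary file object; sort_keys=False keeps DayOne's key order.
with open("4BB1F46946AD439996C9B59DE7C4DDC1.doentry", "wb") as f:
    plistlib.dump(entry_plist, f, fmt=plistlib.FMT_XML, sort_keys=False)
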
@@ -147,7 +211,7 @@ class DayOne(Journal.Journal):
                 if line.endswith("*"):
                     current_entry.starred = True
                     line = line[:-1]
-                current_entry.title = line[len(date_blob) - 1 :]
+                current_entry.title = line[len(date_blob) - 1 :].strip()
                 current_entry.date = new_date
             elif current_entry:
                 current_entry.body += line + "\n"

@@ -159,10 +223,33 @@ class DayOne(Journal.Journal):
         # Now, update our current entries if they changed
         for entry in entries:
             entry._parse_text()
-            matched_entries = [e for e in self.entries if e.uuid.lower() == entry.uuid]
+            matched_entries = [
+                e for e in self.entries if e.uuid.lower() == entry.uuid.lower()
+            ]
+            # tags in entry body
             if matched_entries:
                 # This entry is an existing entry
                 match = matched_entries[0]
+
+                # merge existing tags with tags pulled from the entry body
+                entry.tags = list(set(entry.tags + match.tags))
+
+                # extended Dayone metadata
+                if hasattr(match, "creator_device_agent"):
+                    entry.creator_device_agent = match.creator_device_agent
+                if hasattr(match, "creator_generation_date"):
+                    entry.creator_generation_date = match.creator_generation_date
+                if hasattr(match, "creator_host_name"):
+                    entry.creator_host_name = match.creator_host_name
+                if hasattr(match, "creator_os_agent"):
+                    entry.creator_os_agent = match.creator_os_agent
+                if hasattr(match, "creator_software_agent"):
+                    entry.creator_software_agent = match.creator_software_agent
+                if hasattr(match, "location"):
+                    entry.location = match.location
+                if hasattr(match, "weather"):
+                    entry.weather = match.weather
+
                 if match != entry:
                     self.entries.remove(match)
                     entry.modified = True

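A quick illustration of the tag-merge line above: the set union deduplicates tags parsed from the edited entry body against tags already stored in DayOne metadata, but does not preserve their order (tag values are illustrative):

body_tags = ["@work", "@ideas"]
stored_tags = ["@ideas", "@dayone"]

# Duplicates collapse; ordering is whatever set() happens to produce.
merged = list(set(body_tags + stored_tags))
print(sorted(merged))  # ['@dayone', '@ideas', '@work']
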
@@ -24,6 +24,27 @@ class JSONExporter(TextExporter):
         }
         if hasattr(entry, "uuid"):
             entry_dict["uuid"] = entry.uuid
+        if (
+            hasattr(entry, "creator_device_agent")
+            or hasattr(entry, "creator_generation_date")
+            or hasattr(entry, "creator_host_name")
+            or hasattr(entry, "creator_os_agent")
+            or hasattr(entry, "creator_software_agent")
+        ):
+            entry_dict["creator"] = {}
+            if hasattr(entry, "creator_device_agent"):
+                entry_dict["creator"]["device_agent"] = entry.creator_device_agent
+            if hasattr(entry, "creator_generation_date"):
+                entry_dict["creator"]["generation_date"] = str(
+                    entry.creator_generation_date
+                )
+            if hasattr(entry, "creator_host_name"):
+                entry_dict["creator"]["host_name"] = entry.creator_host_name
+            if hasattr(entry, "creator_os_agent"):
+                entry_dict["creator"]["os_agent"] = entry.creator_os_agent
+            if hasattr(entry, "creator_software_agent"):
+                entry_dict["creator"]["software_agent"] = entry.creator_software_agent
+
         return entry_dict
 
     @classmethod

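With the exporter change above, a DayOne entry that carries the extended attributes serializes roughly like the dictionary below; the concrete values are illustrative, and the non-creator fields are simply the ones the JSON exporter already emitted:

import json

entry_dict = {
    "title": "Being born hurts.",
    "body": "",
    "date": "1979-05-01",
    "time": "09:00",
    "starred": False,
    "uuid": "4BB1F46946AD439996C9B59DE7C4DDC1",
    "creator": {
        "device_agent": "",
        "generation_date": "1979-05-01 09:00:00",
        "host_name": "example-host",
        "os_agent": "Linux/5.4.0",
        "software_agent": "jrnl/v2.4",
    },
}
print(json.dumps(entry_dict, indent=2))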