20250117.1737136826

fz0x1 2025-01-17 19:00:26 +01:00
parent 0fb210e8cb
commit 0de118b032
2 changed files with 49 additions and 15 deletions

@@ -49,7 +49,7 @@ ex ()
 DIARY="default"
-dis ()
+diary_string ()
 {
     "${HOME:?}/scripts/bin/diary.py" insert "${DIARY:?}" single "$*"
 }
@@ -125,9 +125,9 @@ alias relmacs="doom sync"
 ## diary
 # alias di="diary.sh"
 alias m2jrnl="diary.py export ${DIARY:?} diaryf"
 alias di="diary.py insert"
 alias dib="diary.py insert ${DIARY:?} bulk"
-alias dis=" dis"
+alias dis=" diary_string"
 alias dit="diary.py doctor ${DIARY:?}"
 # bindkeys
 ## autosuggest
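Since diary_string quotes "$*", the whole entry reaches diary.py as a single argument. A minimal Python sketch (not part of the diff) of the argv layout the `dis` alias produces, assuming DIARY keeps its default value and a made-up entry text:

# Hypothetical call: dis bought coffee at the station
argv = ["diary.py", "insert", "default", "single", "bought coffee at the station"]
subcommand, diary_name, mode, text = argv[1], argv[2], argv[3], argv[4]
assert (subcommand, diary_name, mode) == ("insert", "default", "single")
print(text)  # arrives as one argument because "$*" is quoted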

diary.py

@@ -3,6 +3,7 @@
 import base64
 import hashlib
 import json
+import logging
 import os
 import re
 import shlex
@@ -36,6 +37,17 @@ class Config:
 Config.validate()
+def make_logger():
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s - %(levelname)s - %(message)s",
+        handlers=[
+            logging.FileHandler(Path.home() / ".diary_requests.log", mode="a"),
+        ],
+    )
+    return logging.getLogger("urllib_logger")
 def get_diary_path_by_name(name: str):
     result = subprocess.run(["jrnl", "--list"], capture_output=True, text=True)
     if result.stderr:
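make_logger() is called at the top of both HTTP helpers below; logging.basicConfig() does nothing once the root logger already has handlers, and logging.getLogger("urllib_logger") always returns the same named logger, so the repeated calls are harmless. A self-contained sketch (not part of the diff; the message text is illustrative):

import logging
from pathlib import Path

def make_logger():
    # same configuration as the function added above
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
        handlers=[logging.FileHandler(Path.home() / ".diary_requests.log", mode="a")],
    )
    return logging.getLogger("urllib_logger")

log_a = make_logger()
log_b = make_logger()                 # second basicConfig() call is a no-op
assert log_a is log_b                 # same named logger either way
log_a.info("illustrative message")    # appended to ~/.diary_requests.log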
@@ -74,23 +86,38 @@ def convert_diary_date(date_str):
 def fetch_data(url, headers, data=None):
+    logit = make_logger()
     method = "POST" if data else "GET"
     encoded_data = urllib.parse.urlencode(data).encode("utf-8") if data else None
     req = urllib.request.Request(url, headers=headers, data=encoded_data, method=method)
     req.add_header("Content-Type", "application/x-www-form-urlencoded")
-    with urllib.request.urlopen(req) as response:
-        if response.status != 200:
-            sys.exit(f"HTTP error {response.status}")
-        return json.loads(response.read().decode("utf-8"))
+    try:
+        with urllib.request.urlopen(req) as response:
+            if response.status != 200:
+                logit.error(response.read())
+                sys.exit(f"HTTP error {response.status}")
+            data = response.read()
+            logit.info(data.decode("utf-8"))
+            return json.loads(data.decode("utf-8"))
+    except Exception as e:
+        logit.error(str(e))
+        raise
 def delete_entity(url, headers):
+    logit = make_logger()
     req = urllib.request.Request(url, headers=headers, method="DELETE")
-    with urllib.request.urlopen(req) as response:
-        if response.status != 200:
-            sys.exit(f"HTTP delete error {response.status}")
+    try:
+        with urllib.request.urlopen(req) as response:
+            if response.status != 200:
+                logit.error(response.read())
+                sys.exit(f"HTTP delete error {response.status}")
+            logit.info(response.read().decode("utf-8"))
+    except Exception as e:
+        logit.error(str(e))
+        raise
 def db_connection(diary_path: Path):
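Both helpers now share the same log-and-reraise shape around urllib. A self-contained sketch of that pattern (not part of the diff; fetch_text and its URL argument are placeholders):

import logging
import urllib.error
import urllib.request
from pathlib import Path

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    handlers=[logging.FileHandler(Path.home() / ".diary_requests.log", mode="a")],
)
log = logging.getLogger("urllib_logger")

def fetch_text(url):
    # Placeholder helper mirroring the pattern above, not a diary.py function.
    req = urllib.request.Request(url, method="GET")
    try:
        with urllib.request.urlopen(req) as response:
            body = response.read()
            log.info(body.decode("utf-8"))
            return body
    except (urllib.error.HTTPError, urllib.error.URLError) as e:
        # urlopen() raises for network failures and non-2xx statuses, so errors
        # land in ~/.diary_requests.log before propagating to the caller.
        log.error(str(e))
        raise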
@@ -230,7 +257,10 @@ def fetch_weather(
 def doctor():
-    diary_name = sys.argv[1]
+    if len(sys.argv) < 3:
+        sys.exit("Usage: script.py doctor <diary_name>")
+    diary_name = sys.argv[2]
     diary_path = get_diary_path_by_name(diary_name).parent
     conn = db_connection(diary_path)
@@ -246,7 +276,10 @@ def doctor():
             "SELECT * FROM location WHERE metadata_id = ?", (m[0],)
         ).fetchall()
-        print(weather, location)
+        if not weather:
+            print(f"There is no weather info about {m[0]} - {m[1]}")
+        if not location:
+            print(f"There is no location info about {m[0]} - {m[1]}")
 def make_hash(file: Path):
@@ -399,11 +432,12 @@ def insert():
 if __name__ == "__main__":
-    # doctor()
-    if "export" in sys.argv:
+    if sys.argv[1] == "export":
         export()
-    elif "insert" in sys.argv:
+    elif sys.argv[1] == "insert":
         insert()
+    elif sys.argv[1] == "doctor":
+        doctor()
     else:
         print("Unknown command")
         sys.exit(1)
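Keying the dispatch on sys.argv[1] means the subcommand must be the first argument, so a word like "insert" appearing later in the command line no longer selects a branch. A small sketch (not part of the diff) of how the aliases from the first file map onto the new routing, with "default" standing in for ${DIARY:?}:

def route(argv):
    # Mirrors the __main__ block above; returns the branch that would run.
    if argv[1] == "export":
        return "export"
    elif argv[1] == "insert":
        return "insert"
    elif argv[1] == "doctor":
        return "doctor"
    return "unknown"

assert route(["diary.py", "export", "default", "diaryf"]) == "export"   # m2jrnl
assert route(["diary.py", "insert", "default", "bulk"]) == "insert"     # dib
assert route(["diary.py", "doctor", "default"]) == "doctor"             # dit
assert route(["diary.py", "frobnicate"]) == "unknown"                   # prints "Unknown command", exits 1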