From 0fb210e8cbe11ed724a1d13399d43ebefafc4a7b Mon Sep 17 00:00:00 2001
From: fz0x1
Date: Fri, 17 Jan 2025 18:34:26 +0100
Subject: [PATCH] 20250117.1737135266

---
 global/.zshrc               |   1 +
 global/scripts/bin/diary.py | 201 ++++++++++++++++++++++--------------
 2 files changed, 125 insertions(+), 77 deletions(-)

diff --git a/global/.zshrc b/global/.zshrc
index d142449..c0f9099 100644
--- a/global/.zshrc
+++ b/global/.zshrc
@@ -127,6 +127,7 @@ alias relmacs="doom sync"
 alias m2jrnl="diary.py export ${DIARY:?} diaryf"
 alias di="diary.py insert"
 alias dib="diary.py insert ${DIARY:?} bulk"
+alias dis=" dis"
 
 # bindkeys
 ## autosuggest
diff --git a/global/scripts/bin/diary.py b/global/scripts/bin/diary.py
index 30fa068..ad08c88 100755
--- a/global/scripts/bin/diary.py
+++ b/global/scripts/bin/diary.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 import base64
+import hashlib
 import json
 import os
 import re
@@ -12,18 +13,12 @@ import tempfile
 import urllib.parse
 import urllib.request
 from datetime import datetime, timedelta, timezone
-from enum import IntEnum
 from pathlib import Path
 
 DB_NAME = Path("metadata.db")
 TZ = 1
 
 
-class MetadataType(IntEnum):
-    WEATHER = 1
-    LOCATION = 2
-
-
 class Config:
     memo_token = os.getenv("MEMOS_TOKEN")
     memo_url = os.getenv("MEMOS_URL")
@@ -110,8 +105,7 @@ def initialize_db(conn: sqlite3.Connection):
         """
         CREATE TABLE IF NOT EXISTS metadata (
             id INTEGER PRIMARY KEY,
-            unixtime INTEGER NOT NULL,
-            type INTEGER NOT NULL
+            unixtime INTEGER NOT NULL
         );
         CREATE TABLE IF NOT EXISTS weather (
             id INTEGER PRIMARY KEY,
@@ -137,56 +131,73 @@ def initialize_db(conn: sqlite3.Connection):
 def insert_metadata(
-    conn: sqlite3.Connection, unixtime: int, metadata_type: MetadataType
+    conn: sqlite3.Connection,
+    unixtime: int,
 ):
     cursor = conn.cursor()
-    cursor.execute(
-        "INSERT INTO metadata(unixtime, type) VALUES(?, ?)", [unixtime, metadata_type]
-    )
+    cursor.execute("INSERT INTO metadata(unixtime) VALUES(?)", [unixtime])
     conn.commit()
     return cursor.lastrowid
 
 
+def remove_metadata(conn: sqlite3.Connection, metadata_id: int):
+    cursor = conn.cursor()
+    cursor.execute("DELETE FROM metadata WHERE id=?", (metadata_id,))
+    conn.commit()
+
+
 def insert_weather(weather: dict, conn: sqlite3.Connection, metadata_id: int):
     if isinstance(weather, list):
         weather = weather[0]
 
     cursor = conn.cursor()
-    cursor.execute(
-        """
-        INSERT INTO weather(temp, temp_like, sunrise, sunset, icon, metadata_id)
-        VALUES(?, ?, ?, ?, ?, ?)
-        """,
-        [
-            weather["temp"],
-            weather["feels_like"],
-            weather["sunrise"],
-            weather["sunset"],
-            weather["weather"][0]["icon"],
-            metadata_id,
-        ],
-    )
+    try:
+        cursor.execute(
+            """
+            INSERT INTO weather(temp, temp_like, sunrise, sunset, icon, metadata_id)
+            VALUES(?, ?, ?, ?, ?, ?)
+            """,
+            [
+                weather["temp"],
+                weather["feels_like"],
+                weather["sunrise"],
+                weather["sunset"],
+                weather["weather"][0]["icon"],
+                metadata_id,
+            ],
+        )
+    except Exception as e:
+        remove_metadata(conn, metadata_id)
+        conn.rollback()
+        print(e)
+        raise
     conn.commit()
 
 
 def insert_location(location: dict, conn: sqlite3.Connection, metadata_id: int):
     cursor = conn.cursor()
-    cursor.execute(
-        """
-        INSERT INTO location(city, lon, lat, tz, metadata_id)
-        VALUES(?, ?, ?, ?, ?)
-        """,
-        [
-            location["locality"],
-            location["lon"],
-            location["lat"],
-            location["tzname"],
-            metadata_id,
-        ],
-    )
+    try:
+        cursor.execute(
+            """
+            INSERT INTO location(city, lon, lat, tz, metadata_id)
+            VALUES(?, ?, ?, ?, ?)
+            """,
+            [
+                location["locality"],
+                location["lon"],
+                location["lat"],
+                location["tzname"],
+                metadata_id,
+            ],
+        )
+    except Exception as e:
+        remove_metadata(conn, metadata_id)
+        conn.rollback()
+        print(e)
+        raise
     conn.commit()
 
 
-def fetch_geo(create_time_timestamp: int, conn: sqlite3.Connection):
+def fetch_geo(metadata_id: int, create_time_timestamp: int, conn: sqlite3.Connection):
     geo_url = f"{Config.owntracks_url}/api/0/locations"
     geo_headers = {
         "Authorization": f"Basic {base64.b64encode(Config.owntracks_creds).decode()}"
     }
@@ -204,20 +215,49 @@
     closest_entry = find_closest_entry(
         geo_response.get("data", []), create_time_timestamp
     )
-    metadata_id = insert_metadata(conn, create_time_timestamp, MetadataType.LOCATION)
     insert_location(closest_entry, conn, metadata_id)
     return closest_entry
 
 
-def fetch_weather(closest_entry: dict, unixtime: int, conn: sqlite3.Connection):
+def fetch_weather(
+    metadata_id: int, closest_entry: dict, unixtime: int, conn: sqlite3.Connection
+):
     weather_response = fetch_data(
         f"https://api.openweathermap.org/data/3.0/onecall/timemachine?lat={closest_entry['lat']}&lon={closest_entry['lon']}&dt={unixtime}&appid={Config.openweathermap_api_key}&units=metric",
         headers={},
     )
-    metadata_id = insert_metadata(conn, unixtime, MetadataType.WEATHER)
     insert_weather(weather_response["data"], conn, metadata_id)
 
 
+def doctor():
+    diary_name = sys.argv[1]
+    diary_path = get_diary_path_by_name(diary_name).parent
+
+    conn = db_connection(diary_path)
+    initialize_db(conn)
+
+    cursor = conn.cursor()
+    metadata = cursor.execute("SELECT * FROM metadata").fetchall()
+    for m in metadata:
+        weather = cursor.execute(
+            "SELECT * FROM weather WHERE metadata_id = ?", (m[0],)
+        ).fetchall()
+        location = cursor.execute(
+            "SELECT * FROM location WHERE metadata_id = ?", (m[0],)
+        ).fetchall()
+
+        print(weather, location)
+
+
+def make_hash(file: Path):
+    sha256_hash = hashlib.sha256()
+
+    with open(file, "rb") as filed:
+        sha256_hash.update(filed.read())
+
+    return sha256_hash.hexdigest()
+
+
 def export():
     if len(sys.argv) < 4 or sys.argv[1] != "export":
         sys.exit("Usage: script.py export ")
@@ -251,9 +291,15 @@ def export():
             create_time = memo["createTime"]
             content = shlex.quote(memo["content"].replace(f"#{tag}", "").strip())
 
+            metadata_id = insert_metadata(conn, make_tz_unixtime(create_time))
+            closest_entry = fetch_geo(metadata_id, make_tz_unixtime(create_time), conn)
+            fetch_weather(
+                metadata_id, closest_entry, make_tz_unixtime(create_time), conn
+            )
+
             try:
-                result = subprocess.run(
-                    f'printf "%s %s" "{convert_diary_date(create_time)}" {content} | jrnl',
+                subprocess.run(
+                    f'printf "%s %s" "{convert_diary_date(create_time)}" {content} | jrnl {diary_name}',
                     shell=True,
                     capture_output=True,
                     text=True,
@@ -263,13 +309,11 @@ def export():
                 print(f"Error writing to journal: {e.stderr}")
                 continue
 
-            closest_entry = fetch_geo(make_tz_unixtime(create_time), conn)
-            fetch_weather(closest_entry, make_tz_unixtime(create_time), conn)
-
             delete_entity(f"{Config.memo_url}/api/v1/{memo['name']}", headers)
 
     except Exception as e:
         print(f"An error occurred: {e}")
+        raise
 
 
 def insert():
@@ -292,14 +336,26 @@ def insert():
         conn = db_connection(diary_path)
         initialize_db(conn)
 
+        datenow = datetime.now(timezone.utc)
+        datenow_timestamp = datenow.strftime("%Y-%m-%dT%H:%M:%SZ")
+        metadata_id = insert_metadata(conn, make_tz_unixtime(datenow_timestamp))
+
+        closest_entry = fetch_geo(
+            metadata_id, make_tz_unixtime(datenow_timestamp), conn
+        )
+        fetch_weather(
+            metadata_id, closest_entry, make_tz_unixtime(datenow_timestamp), conn
+        )
+
         if insert_type == "single":
-            content = sys.argv[4]
+            content = shlex.quote(sys.argv[4])
             if not content:
                 print("There is no text")
                 sys.exit(1)
             try:
                 subprocess.run(
-                    ["jrnl", diary_name, content],
+                    f'printf "%s %s" "{convert_diary_date(datenow_timestamp)}" {content} | jrnl {diary_name}',
+                    shell=True,
                     capture_output=True,
                     text=True,
                     check=True,
@@ -308,39 +364,29 @@ def insert():
                 print(f"Error inserting single entry: {e.stderr}")
                 raise
 
-            datenow = datetime.now(timezone.utc)
-            datenow_timestamp = datenow.strftime("%Y-%m-%dT%H:%M:%SZ")
-            closest_entry = fetch_geo(make_tz_unixtime(datenow_timestamp), conn)
-            fetch_weather(closest_entry, make_tz_unixtime(datenow_timestamp), conn)
-
         elif insert_type == "bulk":
             fd, temp_file_path = tempfile.mkstemp()
             os.close(fd)
 
-            try:
-                subprocess.run(["nvim", temp_file_path], text=True, check=True)
-                with open(temp_file_path, "r") as tmp_file:
-                    content = tmp_file.read()
+            hash = make_hash(Path(temp_file_path))
 
-                try:
-                    subprocess.run(
-                        f"cat {temp_file_path} | jrnl {diary_name} --import",
-                        shell=True,
-                        capture_output=True,
-                        text=True,
-                        check=True,
-                    )
-                except subprocess.CalledProcessError as e:
-                    print(f"Error during bulk import: {e.stderr}")
-                    raise
+            subprocess.run(["nvim", temp_file_path], text=True, check=True)
+            with open(temp_file_path, "r") as file:
+                content = shlex.quote(file.read())
+            if hash != make_hash(temp_file_path):
+                try:
+                    subprocess.run(
+                        f'printf "%s %s" "{convert_diary_date(datenow_timestamp)}" {content} | jrnl {diary_name}',
+                        shell=True,
+                        capture_output=True,
+                        text=True,
+                        check=True,
+                    )
+                except subprocess.CalledProcessError as e:
+                    print(f"Error during bulk import: {e.stderr}")
+                    raise
 
-                datenow = datetime.now(timezone.utc)
-                datenow_timestamp = datenow.strftime("%Y-%m-%dT%H:%M:%SZ")
-                closest_entry = fetch_geo(make_tz_unixtime(datenow_timestamp), conn)
-                fetch_weather(closest_entry, make_tz_unixtime(datenow_timestamp), conn)
-
-            finally:
-                os.remove(temp_file_path)
+            os.remove(temp_file_path)
 
     except Exception as e:
         print(f"An error occurred: {e}")
@@ -353,6 +399,7 @@
 
 
 if __name__ == "__main__":
+    # doctor()
     if "export" in sys.argv:
         export()
     elif "insert" in sys.argv: