about summary refs log tree commit diff stats homepage
diff options
context:
space:
mode:
-rwxr-xr-x  later  151
1 files changed, 94 insertions, 57 deletions
diff --git a/later b/later
index 70e45e9..59220af 100755
--- a/later
+++ b/later
@@ -6,6 +6,7 @@ import json
import os
import re
import sys
+from dataclasses import dataclass
from datetime import datetime as dt
@@ -35,6 +36,92 @@ later_cache_dir = os.path.join(xdg_cache_home, "later")
title_map_file = os.path.join(later_cache_dir, "titles.json")
+@dataclass
+class WatchLaterEntry:
+ name: str
+ path: str
+ mtime: dt
+
+ def __repr__(self):
+ def format_time(time):
+ now = dt.now()
+ if time < now - datetime.timedelta(days=7):
+ return time.strftime("%b %d %Y")
+ return time.strftime("%b %d %H:%M")
+
+ output = [format_time(self.mtime), self.name]
+
+ if title := title_map.get(self.name):
+ output.append(f"# {title}")
+
+ return "\t".join(output)
+
+
+class TitleMap:
+ def __init__(self, path, update=False):
+ self.map = {}
+ self.path = path
+ self.update = update
+ self.commit_to_disk = update
+ self.ytdl = None
+
+ try:
+ with open(path, "r") as handle:
+ self.map = json.load(handle)
+ except FileNotFoundError:
+ pass
+ except json.decoder.JSONDecodeError:
+ # Clobber the title cache if it was corrupted
+ self.commit_to_disk = True
+ except Exception as err:
+ sys.exit(f"later: {err}")
+
+ def get(self, key):
+ title = self.map.get(key)
+
+ if title or title == "":
+ return title
+
+ if not re.fullmatch(r"https?://.*", key):
+ return None
+
+ if self.update:
+ return self.extract(key)
+
+ def extract(self, key):
+ # Make painstakingly sure that we only do this when absolutely
+ # necessary: importing yt_dlp is noticeably slow :(
+ import yt_dlp
+
+ if not self.ytdl:
+ self.ytdl = yt_dlp.YoutubeDL({"logger": YTDLPLogger()})
+
+ try:
+ info = self.ytdl.extract_info(key, download=False)
+
+ # The generic extractor uses the filename part of the url as the
+ # title. Since we already display the URL, this adds no extra
+ # information.
+ if info["extractor"] == "generic":
+ self.map[key] = ""
+ else:
+ self.map[key] = info["title"]
+
+ return self.map.get(key)
+
+ except yt_dlp.utils.DownloadError:
+ pass
+
+ def maybe_commit(self):
+ if not self.commit_to_disk:
+ return
+
+ os.makedirs(later_cache_dir, exist_ok=True)
+
+ with open(self.path, "w") as handle:
+ json.dump(self.map, handle)
+
+
def entries():
def get_mtime(entry):
return entry.stat().st_mtime
@@ -56,7 +143,7 @@ def entries():
if name == "redirect entry":
continue
- yield entry.path, name, mtime
+ yield WatchLaterEntry(name=name, path=entry.path, mtime=mtime)
parser = argparse.ArgumentParser(
@@ -70,59 +157,9 @@ parser.add_argument(
)
args = parser.parse_args()
-if args.update_titles:
- import yt_dlp
-
- yt_dlp_opts = {
- "logger": YTDLPLogger(),
- }
- ytdl = yt_dlp.YoutubeDL(yt_dlp_opts)
-
-write_title_map = args.update_titles
-
-try:
- with open(title_map_file, "r") as handle:
- title_map = json.load(handle)
-except FileNotFoundError:
- title_map = {}
-except json.decoder.JSONDecodeError:
- title_map = {}
- write_title_map = True
-except Exception as err:
- sys.exit(f"later: {err}")
-
-for path, name, mtime in entries():
- if args.update_titles and name not in title_map:
- if re.fullmatch(r"https?://.*", name):
- try:
- info = ytdl.extract_info(name, download=False)
-
- # The generic extractor uses the filename part of the url as the
- # title. Since we already display the URL, this adds no extra
- # information.
- if info["extractor"] == "generic":
- title_map[name] = ""
- else:
- title_map[name] = info["title"]
-
- except yt_dlp.utils.DownloadError:
- pass
-
- def format_time(time):
- now = dt.now()
- if time < now - datetime.timedelta(days=7):
- return time.strftime("%b %d %Y")
- return time.strftime("%b %d %H:%M")
-
- output = [format_time(mtime), name]
-
- if title := title_map.get(name):
- output.append("# " + title)
-
- print(*output, sep="\t")
-
-if write_title_map:
- os.makedirs(later_cache_dir, exist_ok=True)
-
- with open(title_map_file, "w") as handle:
- json.dump(title_map, handle)
+title_map = TitleMap(title_map_file, update=args.update_titles)
+
+for entry in entries():
+ print(entry)
+
+title_map.maybe_commit()