commit 0b9b617380
2026-03-25 05:58:37 +01:00
22 changed files with 2363 additions and 0 deletions
+13
@@ -0,0 +1,13 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv
.env
.claude
+1
@@ -0,0 +1 @@
3.13
+3
@@ -0,0 +1,3 @@
{
"python-envs.defaultEnvManager": "ms-python.python:system"
}
+1
@@ -0,0 +1 @@
__version__ = "0.1.0"
+4
@@ -0,0 +1,4 @@
from lrcfetch.cli import run
if __name__ == "__main__":
run()
+275
@@ -0,0 +1,275 @@
"""SQLite-based lyric cache with per-source storage and TTL expiration."""
import sqlite3
import hashlib
import time
from typing import Optional
from lrcfetch.config import DB_PATH
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from loguru import logger
def _generate_key(track: TrackMeta, source: str) -> str:
"""Generate a unique cache key from track metadata and source.
The key is scoped by source so that different fetchers can cache
independently for the same track (e.g. Spotify synced vs Netease unsynced).
"""
# Spotify tracks always use their track ID as the primary identifier
if track.trackid and source == "spotify":
return f"spotify:{track.trackid}"
parts = []
if track.artist:
parts.append(track.artist)
if track.title:
parts.append(track.title)
if track.album:
parts.append(track.album)
if track.length:
parts.append(str(track.length))
# Fall back to URL for local files
if not parts and track.url:
return f"{source}:url:{track.url}"
if not parts:
raise ValueError("Insufficient metadata to generate cache key")
raw = "|".join(parts)
digest = hashlib.sha256(raw.encode()).hexdigest()
return f"{source}:{digest}"
class CacheEngine:
def __init__(self, db_path: str = DB_PATH):
self.db_path = db_path
self._init_db()
def _init_db(self) -> None:
"""Create or migrate the cache table."""
with sqlite3.connect(self.db_path) as conn:
conn.execute("""
CREATE TABLE IF NOT EXISTS cache (
key TEXT PRIMARY KEY,
source TEXT NOT NULL,
status TEXT NOT NULL,
lyrics TEXT,
created_at INTEGER NOT NULL,
expires_at INTEGER,
artist TEXT,
title TEXT,
album TEXT
)
""")
conn.commit()
# ------------------------------------------------------------------
# Read
# ------------------------------------------------------------------
def get(self, track: TrackMeta, source: str) -> Optional[LyricResult]:
"""Look up a cached result for *track* from *source*.
Returns None on cache miss or expiration.
"""
try:
key = _generate_key(track, source)
except ValueError:
return None
with sqlite3.connect(self.db_path) as conn:
row = conn.execute(
"SELECT status, lyrics, source, expires_at FROM cache WHERE key = ?",
(key,),
).fetchone()
if not row:
logger.debug(f"Cache miss: {source} / {track.display_name()}")
return None
status_str, lyrics, src, expires_at = row
# Check TTL expiration
if expires_at and expires_at < int(time.time()):
logger.debug(f"Cache expired: {source} / {track.display_name()}")
conn.execute("DELETE FROM cache WHERE key = ?", (key,))
conn.commit()
return None
remaining = expires_at - int(time.time()) if expires_at else None
logger.debug(
f"Cache hit: {source} / {track.display_name()} "
f"[{status_str}, ttl={remaining}s]"
)
return LyricResult(
status=CacheStatus(status_str),
lyrics=lyrics,
source=src,
ttl=remaining,
)
def get_best(self, track: TrackMeta, sources: list[str]) -> Optional[LyricResult]:
"""Return the best cached result across *sources* (synced > unsynced).
Skips negative statuses (NOT_FOUND, NETWORK_ERROR) — those are only
consulted per-source to avoid redundant fetches.
"""
best: Optional[LyricResult] = None
for src in sources:
cached = self.get(track, src)
if not cached:
continue
if cached.status == CacheStatus.SUCCESS_SYNCED:
return cached # Can't do better
if cached.status == CacheStatus.SUCCESS_UNSYNCED and best is None:
best = cached
return best
# ------------------------------------------------------------------
# Write
# ------------------------------------------------------------------
def set(
self,
track: TrackMeta,
source: str,
result: LyricResult,
ttl_seconds: Optional[int] = None,
) -> None:
"""Store a lyric result in the cache."""
try:
key = _generate_key(track, source)
except ValueError:
logger.warning("Cannot cache: insufficient track metadata.")
return
now = int(time.time())
expires_at = now + ttl_seconds if ttl_seconds else None
with sqlite3.connect(self.db_path) as conn:
conn.execute(
"""INSERT OR REPLACE INTO cache
(key, source, status, lyrics, created_at, expires_at,
artist, title, album)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(
key,
source,
result.status.value,
result.lyrics,
now,
expires_at,
track.artist,
track.title,
track.album,
),
)
conn.commit()
logger.debug(
f"Cached: {source} / {track.display_name()} "
f"[{result.status.value}, ttl={ttl_seconds}s]"
)
# ------------------------------------------------------------------
# Delete
# ------------------------------------------------------------------
def clear_all(self) -> None:
"""Remove every entry from the cache."""
with sqlite3.connect(self.db_path) as conn:
conn.execute("DELETE FROM cache")
conn.commit()
logger.info("Cache cleared.")
def clear_track(self, track: TrackMeta) -> None:
"""Remove all cached entries (every source) for a single track."""
conditions, params = self._track_where(track)
if not conditions:
logger.info(f"No cache entries found for {track.display_name()}.")
return
where = " AND ".join(conditions)
with sqlite3.connect(self.db_path) as conn:
cur = conn.execute(f"DELETE FROM cache WHERE {where}", params)
conn.commit()
if cur.rowcount:
logger.info(f"Cleared {cur.rowcount} cache entries for {track.display_name()}.")
else:
logger.info(f"No cache entries found for {track.display_name()}.")
def prune(self) -> int:
"""Remove all expired entries. Returns the number of rows deleted."""
with sqlite3.connect(self.db_path) as conn:
cur = conn.execute(
"DELETE FROM cache WHERE expires_at IS NOT NULL AND expires_at < ?",
(int(time.time()),),
)
conn.commit()
count = cur.rowcount
logger.info(f"Pruned {count} expired cache entries.")
return count
@staticmethod
def _track_where(track: TrackMeta) -> tuple[list[str], list[str]]:
"""Build WHERE conditions to match a track across all sources."""
conditions: list[str] = []
params: list[str] = []
if track.artist:
conditions.append("artist = ?")
params.append(track.artist)
if track.title:
conditions.append("title = ?")
params.append(track.title)
if track.album:
conditions.append("album = ?")
params.append(track.album)
return conditions, params
# ------------------------------------------------------------------
# Query / inspect
# ------------------------------------------------------------------
def query_track(self, track: TrackMeta) -> list[dict]:
"""Return all cached rows for a given track (across all sources)."""
conditions, params = self._track_where(track)
if not conditions:
return []
where = " AND ".join(conditions)
with sqlite3.connect(self.db_path) as conn:
conn.row_factory = sqlite3.Row
return [dict(r) for r in conn.execute(
f"SELECT * FROM cache WHERE {where}", params
).fetchall()]
def query_all(self) -> list[dict]:
"""Return every row in the cache table."""
with sqlite3.connect(self.db_path) as conn:
conn.row_factory = sqlite3.Row
return [dict(r) for r in conn.execute("SELECT * FROM cache").fetchall()]
def stats(self) -> dict:
"""Return aggregate cache statistics."""
now = int(time.time())
with sqlite3.connect(self.db_path) as conn:
total = conn.execute("SELECT COUNT(*) FROM cache").fetchone()[0]
expired = conn.execute(
"SELECT COUNT(*) FROM cache WHERE expires_at IS NOT NULL AND expires_at < ?",
(now,),
).fetchone()[0]
by_status = dict(
conn.execute(
"SELECT status, COUNT(*) FROM cache GROUP BY status"
).fetchall()
)
by_source = dict(
conn.execute(
"SELECT source, COUNT(*) FROM cache GROUP BY source"
).fetchall()
)
return {
"total": total,
"expired": expired,
"active": total - expired,
"by_status": by_status,
"by_source": by_source,
}
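# Minimal usage sketch (editor illustration; `track` and `result` are
# assumed to be a TrackMeta and LyricResult built elsewhere):
#
#   engine = CacheEngine()
#   engine.set(track, "lrclib", result, ttl_seconds=86400)
#   hit = engine.get(track, "lrclib")                # LyricResult or None
#   best = engine.get_best(track, ["spotify", "lrclib", "netease"])
#   engine.prune()                                   # drop expired rows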
+318
@@ -0,0 +1,318 @@
"""CLI interface for lrcfetch."""
import typer
import time
from typing import Optional
from loguru import logger
import os
from lrcfetch.config import enable_debug
from lrcfetch.models import TrackMeta, CacheStatus
from lrcfetch.mpris import get_current_track
from lrcfetch.core import LrcManager
app = typer.Typer(
help="LRCFetch — Fetch lyrics for tracks.",
add_completion=False,
)
manager = LrcManager()
@app.callback()
def main(
debug: bool = typer.Option(False, "--debug", "-d", help="Enable debug logging."),
):
if debug:
enable_debug()
# ------------------------------------------------------------------
# fetch
# ------------------------------------------------------------------
@app.command()
def fetch(
method: Optional[str] = typer.Option(
None, "--method", help="Force a specific source (local, spotify, lrclib, lrclib-search, netease)."
),
player: Optional[str] = typer.Option(
None, "--player", "-p", help="Target a specific MPRIS player."
),
no_cache: bool = typer.Option(
False, "--no-cache", help="Bypass the cache for this request."
),
only_synced: bool = typer.Option(
False, "--only-synced", help="Only accept synced (timed) lyrics."
),
):
"""Fetch and print lyrics for the currently playing track."""
track = get_current_track(player)
if not track:
logger.error("No active playing track found.")
raise typer.Exit(1)
logger.info(f"Track: {track.display_name()}")
result = manager.fetch_for_track(
track, force_method=method, bypass_cache=no_cache
)
if not result or not result.lyrics:
logger.error("No lyrics found.")
raise typer.Exit(1)
if only_synced and result.status != CacheStatus.SUCCESS_SYNCED:
logger.error("Only unsynced lyrics available (--only-synced requested).")
raise typer.Exit(1)
print(result.lyrics)
# ------------------------------------------------------------------
# search
# ------------------------------------------------------------------
@app.command()
def search(
title: str = typer.Option(..., "--title", "-t", help="Track title."),
artist: Optional[str] = typer.Option(None, "--artist", "-a", help="Artist name."),
album: Optional[str] = typer.Option(None, "--album", help="Album name."),
trackid: Optional[str] = typer.Option(None, "--trackid", help="Spotify track ID."),
length: Optional[int] = typer.Option(None, "--length", "-l", help="Track duration in milliseconds."),
url: Optional[str] = typer.Option(None, "--url", help="Local file URL (file:///...)."),
method: Optional[str] = typer.Option(
None, "--method", help="Force a specific source."
),
no_cache: bool = typer.Option(
False, "--no-cache", help="Bypass the cache for this request."
),
only_synced: bool = typer.Option(
False, "--only-synced", help="Only accept synced (timed) lyrics."
),
):
"""Search for lyrics by metadata (bypasses MPRIS)."""
track = TrackMeta(
title=title,
artist=artist,
album=album,
trackid=trackid,
length=length,
url=url,
)
logger.info(f"Track: {track.display_name()}")
result = manager.fetch_for_track(
track, force_method=method, bypass_cache=no_cache
)
if not result or not result.lyrics:
logger.error("No lyrics found.")
raise typer.Exit(1)
if only_synced and result.status != CacheStatus.SUCCESS_SYNCED:
logger.error("Only unsynced lyrics available (--only-synced requested).")
raise typer.Exit(1)
print(result.lyrics)
# ------------------------------------------------------------------
# export
# ------------------------------------------------------------------
@app.command()
def export(
output: Optional[str] = typer.Option(
None, "--output", "-o", help="Output file path (default: <Artist> - <Title>.lrc)."
),
method: Optional[str] = typer.Option(
None, "--method", help="Force a specific source."
),
player: Optional[str] = typer.Option(
None, "--player", "-p", help="Target a specific MPRIS player."
),
no_cache: bool = typer.Option(False, "--no-cache", help="Bypass cache."),
overwrite: bool = typer.Option(
False, "--overwrite", "-f", help="Overwrite existing file."
),
):
"""Export lyrics of the current track to a .lrc file."""
track = get_current_track(player)
if not track:
logger.error("No active playing track found.")
raise typer.Exit(1)
result = manager.fetch_for_track(
track, force_method=method, bypass_cache=no_cache
)
if not result or not result.lyrics:
logger.error("No lyrics available to export.")
raise typer.Exit(1)
# Build default output path
if not output:
filename = (
f"{track.artist} - {track.title}.lrc"
if track.artist and track.title
else "lyrics.lrc"
)
# Sanitize filename
filename = "".join(
c for c in filename if c.isalpha() or c.isdigit() or c in " -_."
).rstrip()
output = os.path.join(os.getcwd(), filename)
if os.path.exists(output) and not overwrite:
logger.error(f"File exists: {output} (use -f to overwrite)")
raise typer.Exit(1)
try:
with open(output, "w", encoding="utf-8") as f:
f.write(result.lyrics)
logger.info(f"Exported lyrics to {output}")
except Exception as e:
logger.error(f"Failed to write file: {e}")
raise typer.Exit(1)
# ------------------------------------------------------------------
# cache
# ------------------------------------------------------------------
@app.command()
def cache(
clear: bool = typer.Option(False, "--clear", help="Clear the entire cache."),
clear_current: bool = typer.Option(
False, "--clear-current", help="Clear cache for the current track."
),
prune: bool = typer.Option(False, "--prune", help="Remove expired entries."),
stats: bool = typer.Option(False, "--stats", help="Show cache statistics."),
query: bool = typer.Option(
False, "--query", "-q", help="Show detailed cache info for the current track."
),
query_all: bool = typer.Option(
False, "--query-all", help="Dump all cache entries."
),
):
"""Manage the local SQLite cache."""
if clear:
manager.cache.clear_all()
return
if clear_current:
track = get_current_track()
if not track:
logger.error("No active playing track found.")
raise typer.Exit(1)
manager.cache.clear_track(track)
return
if prune:
manager.cache.prune()
return
if stats:
s = manager.cache.stats()
print("=== Cache Statistics ===")
print(f"Total entries : {s['total']}")
print(f"Active : {s['active']}")
print(f"Expired : {s['expired']}")
if s["by_status"]:
print("\nBy status:")
for status, count in s["by_status"].items():
print(f" {status}: {count}")
if s["by_source"]:
print("\nBy source:")
for source, count in s["by_source"].items():
print(f" {source}: {count}")
return
if query:
track = get_current_track()
if not track:
logger.error("No active playing track found.")
raise typer.Exit(1)
_print_track_cache(track)
return
if query_all:
rows = manager.cache.query_all()
if not rows:
print("Cache is empty.")
return
for row in rows:
_print_cache_row(row)
print()
return
logger.info(
"No action specified. Try --stats, --query, --query-all, "
"--prune, --clear, or --clear-current."
)
def _print_track_cache(track: TrackMeta) -> None:
"""Print all cached entries for a given track."""
print(f"Track: {track.display_name()}")
if track.album:
print(f"Album: {track.album}")
if track.length:
secs = track.length / 1000.0
print(f"Duration: {int(secs // 60)}:{secs % 60:05.2f}")
print()
rows = manager.cache.query_track(track)
if not rows:
print(" (no cache entries)")
return
for row in rows:
_print_cache_row(row, indent=" ")
def _print_cache_row(row: dict, indent: str = "") -> None:
"""Pretty-print a single cache row."""
now = int(time.time())
source = row.get("source", "?")
status = row.get("status", "?")
artist = row.get("artist", "")
title = row.get("title", "")
album = row.get("album", "")
created = row.get("created_at", 0)
expires = row.get("expires_at")
lyrics = row.get("lyrics", "")
name = f"{artist} - {title}" if artist and title else row.get("key", "?")
print(f"{indent}[{source}] {name}")
if album:
print(f"{indent} Album : {album}")
print(f"{indent} Status : {status}")
if created:
age = now - created
print(f"{indent} Cached : {age // 3600}h {(age % 3600) // 60}m ago")
if expires:
remaining = expires - now
if remaining > 0:
print(f"{indent} Expires : in {remaining // 3600}h {(remaining % 3600) // 60}m")
else:
print(f"{indent} Expires : EXPIRED")
else:
print(f"{indent} Expires : never")
if lyrics:
line_count = len(lyrics.splitlines())
print(f"{indent} Lyrics : {line_count} lines")
def run():
app()
if __name__ == "__main__":
run()
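# Example invocations (illustrative; assumes the `lrcfetch` entry point
# from pyproject.toml is installed):
#
#   lrcfetch fetch --only-synced
#   lrcfetch search -t "Nude" -a "Radiohead" --method lrclib-search
#   lrcfetch export -o lyrics.lrc -f
#   lrcfetch cache --stats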
+77
@@ -0,0 +1,77 @@
"""Global configuration constants and logger setup."""
import os
import sys
from pathlib import Path
from platformdirs import user_cache_dir, user_config_dir
from dotenv import load_dotenv
from loguru import logger
# ─── Application ─────────────────────────────────────────────────────
APP_NAME = "lrcfetch"
APP_AUTHOR = "Uyanide"
# ─── .env loading (XDG config dir first, then project-local) ─────────
_config_env = Path(user_config_dir(APP_NAME, APP_AUTHOR)) / ".env"
load_dotenv(_config_env) # ~/.config/lrcfetch/.env
load_dotenv() # .env in cwd (does NOT override existing vars)
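# A sample ~/.config/lrcfetch/.env (illustrative; the value below is a
# placeholder, not a real cookie):
#
#   SPOTIFY_SP_DC=AQB...your-sp_dc-cookie-value...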
# ─── HTTP ────────────────────────────────────────────────────────────
HTTP_TIMEOUT = 10.0
# ─── Cache TTLs (seconds) ───────────────────────────────────────────
TTL_SYNCED = 86400 * 30 # 30 days
TTL_UNSYNCED = 86400 # 1 day
TTL_NOT_FOUND = 86400 * 3 # 3 days
TTL_NETWORK_ERROR = 3600 # 1 hour
# ─── Search ──────────────────────────────────────────────────────────
DURATION_TOLERANCE_MS = 3000 # max duration mismatch for search matching
# ─── Spotify ─────────────────────────────────────────────────────────
SPOTIFY_TOKEN_URL = "https://open.spotify.com/api/token"
SPOTIFY_LYRICS_URL = "https://spclient.wg.spotify.com/color-lyrics/v2/track/"
SPOTIFY_SERVER_TIME_URL = "https://open.spotify.com/api/server-time"
SPOTIFY_SECRET_URL = (
"https://raw.githubusercontent.com/xyloflake/spot-secrets-go"
"/refs/heads/main/secrets/secrets.json"
)
SPOTIFY_SP_DC = os.environ.get("SPOTIFY_SP_DC", "")
# ─── Netease ─────────────────────────────────────────────────────────
NETEASE_SEARCH_URL = "https://music.163.com/api/cloudsearch/pc"
NETEASE_LYRIC_URL = "https://interface3.music.163.com/api/song/lyric"
# ─── LRCLIB ──────────────────────────────────────────────────────────
LRCLIB_API_URL = "https://lrclib.net/api/get"
LRCLIB_SEARCH_URL = "https://lrclib.net/api/search"
# ─── User-Agents ─────────────────────────────────────────────────────
UA_BROWSER = (
"Mozilla/5.0 (X11; Linux x86_64; rv:148.0) "
"Gecko/20100101 Firefox/148.0"
)
UA_LRCFETCH = "LRCFetch (https://github.com/Uyanide/lrcfetch)"
# ─── Paths ───────────────────────────────────────────────────────────
CACHE_DIR = user_cache_dir(APP_NAME, APP_AUTHOR)
DB_PATH = os.path.join(CACHE_DIR, "cache.db")
os.makedirs(CACHE_DIR, exist_ok=True)
# ─── Logger ──────────────────────────────────────────────────────────
_LOG_FORMAT = (
"<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
"<level>{level: <8}</level> | "
"<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
"<level>{message}</level>"
)
logger.remove()
logger.add(sys.stderr, format=_LOG_FORMAT, level="INFO")
def enable_debug() -> None:
"""Switch logger to DEBUG level."""
logger.remove()
logger.add(sys.stderr, format=_LOG_FORMAT, level="DEBUG")
+187
@@ -0,0 +1,187 @@
"""Core orchestrator — coordinates fetchers with cache-aware fallback.
Fetch pipeline:
1. Check cache for each source in the fallback sequence
2. For sources without a valid cache hit, call the fetcher
3. Cache every result (success, not-found, or error) per source
4. Return the best result (synced > unsynced > None)
"""
import re
from typing import Optional
from loguru import logger
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.config import TTL_SYNCED, TTL_UNSYNCED, TTL_NOT_FOUND, TTL_NETWORK_ERROR
from lrcfetch.cache import CacheEngine
from lrcfetch.fetchers.base import BaseFetcher
from lrcfetch.fetchers.local import LocalFetcher
from lrcfetch.fetchers.spotify import SpotifyFetcher
from lrcfetch.fetchers.lrclib import LrclibFetcher
from lrcfetch.fetchers.lrclib_search import LrclibSearchFetcher
from lrcfetch.fetchers.netease import NeteaseFetcher
# Matches any LRC time tag at the start of a line: [mm:ss.cc] or [mm:ss.ccc]
_LRC_LINE_RE = re.compile(r"^\[(\d{2}:\d{2}\.\d{2,3})\]", re.MULTILINE)
def _normalize_unsynced(lyrics: str) -> str:
"""Normalize unsynced lyrics so every line has a [00:00.00] tag.
- Lines that already have time tags: replace with [00:00.00]
- Lines without time tags: prepend [00:00.00]
- Blank lines are kept as-is
"""
out: list[str] = []
for line in lyrics.splitlines():
stripped = line.strip()
if not stripped:
out.append("")
continue
# Strip existing time tag(s) from the beginning
cleaned = _LRC_LINE_RE.sub("", stripped)
# Could have multiple tags like [00:12.34][00:56.78]text
while _LRC_LINE_RE.match(cleaned):
cleaned = _LRC_LINE_RE.sub("", cleaned)
out.append(f"[00:00.00]{cleaned}")
return "\n".join(out)
# Maps CacheStatus to the default TTL used when storing results
_STATUS_TTL: dict[CacheStatus, int] = {
CacheStatus.SUCCESS_SYNCED: TTL_SYNCED,
CacheStatus.SUCCESS_UNSYNCED: TTL_UNSYNCED,
CacheStatus.NOT_FOUND: TTL_NOT_FOUND,
CacheStatus.NETWORK_ERROR: TTL_NETWORK_ERROR,
}
class LrcManager:
"""Main entry point for fetching lyrics with caching."""
def __init__(self) -> None:
self.cache = CacheEngine()
self.fetchers: dict[str, BaseFetcher] = {
"local": LocalFetcher(),
"spotify": SpotifyFetcher(),
"lrclib": LrclibFetcher(),
"lrclib-search": LrclibSearchFetcher(),
"netease": NeteaseFetcher(),
}
def _build_sequence(
self, track: TrackMeta, force_method: Optional[str] = None
) -> list[BaseFetcher]:
"""Determine the ordered list of fetchers to try."""
if force_method:
if force_method not in self.fetchers:
logger.error(f"Unknown method: {force_method}")
return []
return [self.fetchers[force_method]]
sequence: list[BaseFetcher] = []
if track.is_local:
sequence.append(self.fetchers["local"])
if track.trackid:
sequence.append(self.fetchers["spotify"])
if track.is_complete:
sequence.append(self.fetchers["lrclib"])
if track.title:
sequence.append(self.fetchers["lrclib-search"])
sequence.append(self.fetchers["netease"])
logger.debug(
f"Fallback sequence: {[f.source_name for f in sequence]}"
)
return sequence
def fetch_for_track(
self,
track: TrackMeta,
force_method: Optional[str] = None,
bypass_cache: bool = False,
) -> Optional[LyricResult]:
"""Fetch lyrics for *track* using the fallback pipeline.
Each source is checked against the cache independently:
- Cache hit with synced lyrics → return immediately
- Cache hit with negative status (NOT_FOUND / NETWORK_ERROR) → skip source
- Cache miss or unsynced → call fetcher, then cache the result
After all sources are tried, returns the best result found
(synced > unsynced > None).
"""
logger.info(f"Fetching lyrics for: {track.display_name()}")
sequence = self._build_sequence(track, force_method)
if not sequence:
return None
# Best result seen so far (synced wins over unsynced)
best_result: Optional[LyricResult] = None
for fetcher in sequence:
source = fetcher.source_name
# Cache check
if not bypass_cache:
cached = self.cache.get(track, source)
if cached:
if cached.status == CacheStatus.SUCCESS_SYNCED:
logger.info(f"[{source}] cache hit: synced lyrics")
return cached
elif cached.status == CacheStatus.SUCCESS_UNSYNCED:
logger.debug(f"[{source}] cache hit: unsynced lyrics (continuing)")
if best_result is None:
best_result = cached
continue # Try next source for synced
elif cached.status in (CacheStatus.NOT_FOUND, CacheStatus.NETWORK_ERROR):
logger.debug(f"[{source}] cache hit: {cached.status.value}, skipping")
continue
else:
logger.debug(f"[{source}] cache bypassed")
# Fetch
logger.debug(f"[{source}] calling fetcher...")
result = fetcher.fetch(track)
if not result:
logger.debug(f"[{source}] returned None (no result)")
continue
# Cache the result
ttl = result.ttl or _STATUS_TTL.get(result.status, TTL_NOT_FOUND)
self.cache.set(track, source, result, ttl_seconds=ttl)
# Evaluate result
if result.status == CacheStatus.SUCCESS_SYNCED:
logger.info(f"[{source}] got synced lyrics")
return result
if result.status == CacheStatus.SUCCESS_UNSYNCED:
logger.debug(f"[{source}] got unsynced lyrics (continuing)")
if best_result is None:
best_result = result
# NOT_FOUND / NETWORK_ERROR: already cached, try next
# Return best available
if best_result:
# Normalize unsynced lyrics: set all timestamps to [00:00.00]
if (
best_result.status == CacheStatus.SUCCESS_UNSYNCED
and best_result.lyrics
):
best_result = LyricResult(
status=best_result.status,
lyrics=_normalize_unsynced(best_result.lyrics),
source=best_result.source,
ttl=best_result.ttl,
)
logger.info(
f"Returning unsynced lyrics from {best_result.source} "
f"(no synced source found)"
)
else:
logger.info(f"No lyrics found for {track.display_name()}")
return best_result
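# Minimal end-to-end sketch (editor illustration; the TrackMeta fields
# are made-up examples):
#
#   manager = LrcManager()
#   track = TrackMeta(title="Nude", artist="Radiohead", length=255000)
#   result = manager.fetch_for_track(track)
#   if result and result.lyrics:
#       print(result.source, result.status.value)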
+16
@@ -0,0 +1,16 @@
from abc import ABC, abstractmethod
from typing import Optional
from lrcfetch.models import TrackMeta, LyricResult
class BaseFetcher(ABC):
@property
@abstractmethod
def source_name(self) -> str:
"""Name of the fetcher source."""
pass
@abstractmethod
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Fetch lyrics for the given track. Returns None if unable to fetch."""
pass
+99
@@ -0,0 +1,99 @@
"""Local fetcher — reads lyrics from .lrc sidecar files or embedded audio metadata.
Priority:
1. Same-directory .lrc file (e.g. /path/to/track.lrc)
2. Embedded lyrics in audio metadata (FLAC, MP3 USLT/SYLT tags)
"""
import re
import os
from typing import Optional
from urllib.parse import unquote, urlparse
from loguru import logger
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.fetchers.base import BaseFetcher
from mutagen._file import File
from mutagen.flac import FLAC
# Matches LRC time tags like [00:12.34] or [01:23.456]
_LRC_TIME_TAG_RE = re.compile(r"\[\d{2}:\d{2}\.\d{2,3}\]")
# Matches time tags that are all zeros
_ZERO_TIME_TAG_RE = re.compile(r"^\[00:00\.0{2,3}\]$")
def _detect_sync_status(text: str) -> CacheStatus:
"""Determine whether lyrics text contains meaningful LRC time tags.
Returns UNSYNCED if no tags exist or all tags are [00:00.00].
"""
tags = _LRC_TIME_TAG_RE.findall(text)
if not tags:
return CacheStatus.SUCCESS_UNSYNCED
for tag in tags:
if not _ZERO_TIME_TAG_RE.match(tag):
return CacheStatus.SUCCESS_SYNCED
return CacheStatus.SUCCESS_UNSYNCED
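# Illustrative (editor note): "[00:00.00]a\n[00:00.00]b" is reported as
# SUCCESS_UNSYNCED, while "[00:00.00]a\n[01:02.03]b" is SUCCESS_SYNCED.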
class LocalFetcher(BaseFetcher):
@property
def source_name(self) -> str:
return "local"
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Attempt to read lyrics from local filesystem."""
if not track.is_local or not track.url:
return None
# Convert the file:// URL to a filesystem path; unquote() restores
# percent-encoded characters such as spaces (%20).
file_path = unquote(urlparse(track.url).path)
if not os.path.exists(file_path):
logger.debug(f"Local: file does not exist: {file_path}")
return None
logger.info(f"Local: checking for lyrics near {file_path}")
# Sidecar .lrc file
lrc_path = os.path.splitext(file_path)[0] + ".lrc"
if os.path.exists(lrc_path):
try:
with open(lrc_path, "r", encoding="utf-8") as f:
content = f.read().strip()
if content:
status = _detect_sync_status(content)
logger.info(f"Local: found .lrc sidecar ({status.value})")
return LyricResult(
status=status, lyrics=content, source=self.source_name
)
except Exception as e:
logger.error(f"Local: error reading {lrc_path}: {e}")
# Embedded metadata
try:
audio = File(file_path)
if audio is not None:
lyrics = None
if isinstance(audio, FLAC):
# FLAC stores lyrics in vorbis comment tags
lyrics = (audio.get("lyrics") or audio.get("unsynclyrics") or [None])[0]
elif hasattr(audio, "tags") and audio.tags:
# MP3 / other: look for USLT or SYLT ID3 frames
for key in audio.tags.keys():
if key.startswith("USLT") or key.startswith("SYLT"):
lyrics = str(audio.tags[key])
break
if lyrics:
status = _detect_sync_status(lyrics)
logger.info(f"Local: found embedded lyrics ({status.value})")
return LyricResult(
status=status,
lyrics=lyrics.strip(),
source=f"{self.source_name} (embedded)",
)
else:
logger.debug("Local: no embedded lyrics found")
except Exception as e:
logger.error(f"Local: error reading metadata for {file_path}: {e}")
logger.debug(f"Local: no lyrics found for {file_path}")
return None
+94
@@ -0,0 +1,94 @@
"""LRCLIB fetcher — queries lrclib.net for synced/plain lyrics.
Requires complete track metadata (artist, title, album, duration).
"""
import httpx
from typing import Optional
from loguru import logger
from urllib.parse import urlencode
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.fetchers.base import BaseFetcher
from lrcfetch.config import (
HTTP_TIMEOUT,
TTL_UNSYNCED,
TTL_NOT_FOUND,
TTL_NETWORK_ERROR,
LRCLIB_API_URL,
UA_LRCFETCH,
)
class LrclibFetcher(BaseFetcher):
@property
def source_name(self) -> str:
return "lrclib"
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Fetch lyrics from LRCLIB. Requires complete metadata."""
if not track.is_complete:
logger.debug("LRCLIB: skipped — incomplete metadata")
return None
params = {
"track_name": track.title,
"artist_name": track.artist,
"album_name": track.album,
"duration": track.length / 1000.0 if track.length else 0,
}
url = f"{LRCLIB_API_URL}?{urlencode(params)}"
logger.info(f"LRCLIB: fetching lyrics for {track.display_name()}")
try:
with httpx.Client(timeout=HTTP_TIMEOUT) as client:
resp = client.get(url, headers={"User-Agent": UA_LRCFETCH})
if resp.status_code == 404:
logger.debug(f"LRCLIB: not found for {track.display_name()}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
if resp.status_code != 200:
logger.error(f"LRCLIB: API returned {resp.status_code}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
data = resp.json()
# Validate response
if not isinstance(data, dict):
logger.error(f"LRCLIB: unexpected response type: {type(data).__name__}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
synced = data.get("syncedLyrics")
unsynced = data.get("plainLyrics")
if isinstance(synced, str) and synced.strip():
logger.info(
f"LRCLIB: got synced lyrics ({len(synced.splitlines())} lines)"
)
return LyricResult(
status=CacheStatus.SUCCESS_SYNCED,
lyrics=synced.strip(),
source=self.source_name,
)
elif isinstance(unsynced, str) and unsynced.strip():
logger.info(
f"LRCLIB: got unsynced lyrics ({len(unsynced.splitlines())} lines)"
)
return LyricResult(
status=CacheStatus.SUCCESS_UNSYNCED,
lyrics=unsynced.strip(),
source=self.source_name,
ttl=TTL_UNSYNCED,
)
else:
logger.debug(f"LRCLIB: empty response for {track.display_name()}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
except httpx.HTTPError as e:
logger.error(f"LRCLIB: HTTP error: {e}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
except Exception as e:
logger.error(f"LRCLIB: unexpected error: {e}")
return None
+136
@@ -0,0 +1,136 @@
"""LRCLIB search fetcher — fuzzy search via lrclib.net /api/search.
Used when metadata is incomplete (no album or duration) but title is available.
Selects the best match by duration when track length is known.
"""
import httpx
from typing import Optional
from loguru import logger
from urllib.parse import urlencode
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.fetchers.base import BaseFetcher
from lrcfetch.config import (
HTTP_TIMEOUT,
TTL_UNSYNCED,
TTL_NOT_FOUND,
TTL_NETWORK_ERROR,
DURATION_TOLERANCE_MS,
LRCLIB_SEARCH_URL,
UA_LRCFETCH,
)
class LrclibSearchFetcher(BaseFetcher):
@property
def source_name(self) -> str:
return "lrclib-search"
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Search LRCLIB for lyrics. Requires at least a title."""
if not track.title:
logger.debug("LRCLIB-search: skipped — no title")
return None
params: dict[str, str] = {"track_name": track.title}
if track.artist:
params["artist_name"] = track.artist
if track.album:
params["album_name"] = track.album
url = f"{LRCLIB_SEARCH_URL}?{urlencode(params)}"
logger.info(f"LRCLIB-search: searching for {track.display_name()}")
try:
with httpx.Client(timeout=HTTP_TIMEOUT) as client:
resp = client.get(url, headers={"User-Agent": UA_LRCFETCH})
if resp.status_code != 200:
logger.error(f"LRCLIB-search: API returned {resp.status_code}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
data = resp.json()
if not isinstance(data, list) or len(data) == 0:
logger.debug(f"LRCLIB-search: no results for {track.display_name()}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
logger.debug(f"LRCLIB-search: got {len(data)} candidates")
# Select best match by duration
best = self._select_best(data, track)
if best is None:
logger.debug("LRCLIB-search: no valid candidate found")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
# Extract lyrics
synced = best.get("syncedLyrics")
unsynced = best.get("plainLyrics")
if isinstance(synced, str) and synced.strip():
logger.info(
f"LRCLIB-search: got synced lyrics ({len(synced.splitlines())} lines)"
)
return LyricResult(
status=CacheStatus.SUCCESS_SYNCED,
lyrics=synced.strip(),
source=self.source_name,
)
elif isinstance(unsynced, str) and unsynced.strip():
logger.info(
f"LRCLIB-search: got unsynced lyrics ({len(unsynced.splitlines())} lines)"
)
return LyricResult(
status=CacheStatus.SUCCESS_UNSYNCED,
lyrics=unsynced.strip(),
source=self.source_name,
ttl=TTL_UNSYNCED,
)
else:
logger.debug("LRCLIB-search: best candidate has empty lyrics")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
except httpx.HTTPError as e:
logger.error(f"LRCLIB-search: HTTP error: {e}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
except Exception as e:
logger.error(f"LRCLIB-search: unexpected error: {e}")
return None
@staticmethod
def _select_best(candidates: list[dict], track: TrackMeta) -> Optional[dict]:
"""Pick the best candidate, preferring synced lyrics and closest duration."""
if track.length is not None:
track_s = track.length / 1000.0
best: Optional[dict] = None
best_diff = float("inf")
for item in candidates:
if not isinstance(item, dict):
continue
duration = item.get("duration")
if not isinstance(duration, (int, float)):
continue
diff = abs(duration - track_s) * 1000 # compare in ms
if diff > DURATION_TOLERANCE_MS:
continue
# Prefer synced over unsynced at similar duration
has_synced = isinstance(item.get("syncedLyrics"), str) and item["syncedLyrics"].strip()
best_synced = best is not None and isinstance(best.get("syncedLyrics"), str) and best["syncedLyrics"].strip()
if diff < best_diff or (diff == best_diff and has_synced and not best_synced):
best_diff = diff
best = item
if best is not None:
logger.debug(f"LRCLIB-search: selected id={best.get('id')} (diff={best_diff:.0f}ms)")
return best
logger.debug(f"LRCLIB-search: no candidate within {DURATION_TOLERANCE_MS}ms")
return None
# No duration — pick first with synced lyrics, or just first
for item in candidates:
if isinstance(item, dict) and isinstance(item.get("syncedLyrics"), str) and item["syncedLyrics"].strip():
return item
return candidates[0] if isinstance(candidates[0], dict) else None
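# Illustrative (editor note): with track.length = 215000 ms and
# DURATION_TOLERANCE_MS = 3000, a candidate with duration 214.0 s
# (diff = 1000 ms) is eligible, while one with 220.0 s (diff = 5000 ms)
# is skipped even if it is the only result.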
+216
@@ -0,0 +1,216 @@
"""Netease Cloud Music fetcher.
Uses the public cloudsearch API for searching and the song/lyric API for
retrieving lyrics. No authentication required.
Search results are filtered by duration when the track has a known length
to avoid returning lyrics for the wrong version of a song.
"""
import re
import httpx
from typing import Optional
from loguru import logger
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.fetchers.base import BaseFetcher
from lrcfetch.config import (
HTTP_TIMEOUT,
TTL_NOT_FOUND,
TTL_NETWORK_ERROR,
DURATION_TOLERANCE_MS,
NETEASE_SEARCH_URL,
NETEASE_LYRIC_URL,
UA_BROWSER,
)
# Matches LRC time tags like [00:12.34] or [01:23.456]
_LRC_TIME_TAG_RE = re.compile(r"\[\d{2}:\d{2}\.\d{2,3}\]")
# Matches time tags that are all zeros: [00:00.00] or [00:00.000]
_ZERO_TIME_TAG_RE = re.compile(r"^\[00:00\.0{2,3}\]")
_HEADERS = {
"User-Agent": UA_BROWSER,
"Referer": "https://music.163.com/",
}
def _is_synced_lrc(text: str) -> bool:
"""Check whether *text* contains actual LRC time tags with non-zero times.
Returns False if:
- No time tags at all
- All time tags are [00:00.00] (unsynced disguised as synced)
"""
lines_with_tags = _LRC_TIME_TAG_RE.findall(text)
if not lines_with_tags:
return False
# Check if ALL tags are zero — if so, it's unsynced
for tag in lines_with_tags:
if not _ZERO_TIME_TAG_RE.match(tag):
return True # Found at least one non-zero tag
return False
class NeteaseFetcher(BaseFetcher):
@property
def source_name(self) -> str:
return "netease"
def _search(self, track: TrackMeta, limit: int = 10) -> Optional[int]:
"""Search Netease and return the best-matching song ID.
When ``track.length`` is available, candidates are ranked by duration
difference and only accepted if within ``DURATION_TOLERANCE_MS``.
"""
query = f"{track.artist or ''} {track.title or ''}".strip()
if not query:
return None
logger.debug(f"Netease: searching for '{query}' (limit={limit})")
try:
with httpx.Client(timeout=HTTP_TIMEOUT) as client:
resp = client.post(
NETEASE_SEARCH_URL,
headers=_HEADERS,
data={"s": query, "type": "1", "limit": str(limit), "offset": "0"},
)
resp.raise_for_status()
result = resp.json()
# Validate response
if not isinstance(result, dict):
logger.error(f"Netease: search returned non-dict: {type(result).__name__}")
return None
result_body = result.get("result")
if not isinstance(result_body, dict):
logger.debug(f"Netease: search 'result' field missing or invalid")
return None
songs = result_body.get("songs")
if not isinstance(songs, list) or len(songs) == 0:
logger.debug("Netease: search returned 0 results")
return None
logger.debug(f"Netease: search returned {len(songs)} candidates")
# Duration-based best-match selection
if track.length is not None:
track_ms = track.length
best_id: Optional[int] = None
best_diff = float("inf")
for song in songs:
if not isinstance(song, dict):
continue
sid = song.get("id")
name = song.get("name", "?")
duration = song.get("dt") # milliseconds
if not isinstance(duration, int):
logger.debug(f" candidate {sid} '{name}': no duration, skipped")
continue
diff = abs(duration - track_ms)
logger.debug(
f" candidate {sid} '{name}': "
f"duration={duration}ms, diff={diff}ms"
)
if diff < best_diff:
best_diff = diff
best_id = sid
if best_id is not None and best_diff <= DURATION_TOLERANCE_MS:
logger.debug(
f"Netease: selected id={best_id} (diff={best_diff}ms)"
)
return best_id
logger.debug(
f"Netease: no candidate within {DURATION_TOLERANCE_MS}ms "
f"(best diff={best_diff}ms)"
)
return None
# No duration info — take the first result
first = songs[0]
if not isinstance(first, dict) or "id" not in first:
logger.error("Netease: first search result has no 'id'")
return None
logger.debug(
f"Netease: no duration available, using first result "
f"id={first['id']} '{first.get('name', '?')}'"
)
return first["id"]
except Exception as e:
logger.error(f"Netease: search failed: {e}")
return None
def _get_lyric(self, song_id: int) -> Optional[LyricResult]:
"""Fetch lyrics for a given Netease song ID."""
logger.debug(f"Netease: fetching lyrics for song_id={song_id}")
try:
with httpx.Client(timeout=HTTP_TIMEOUT) as client:
resp = client.post(
NETEASE_LYRIC_URL,
headers=_HEADERS,
data={
"id": str(song_id),
"cp": "false",
"tv": "0",
"lv": "0",
"rv": "0",
"kv": "0",
"yv": "0",
"ytv": "0",
"yrv": "0",
},
)
resp.raise_for_status()
data = resp.json()
# Validate response
if not isinstance(data, dict):
logger.error(f"Netease: lyric response is not dict: {type(data).__name__}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
lrc_obj = data.get("lrc")
if not isinstance(lrc_obj, dict):
logger.debug(f"Netease: no 'lrc' object in response for song_id={song_id}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
lrc: str = lrc_obj.get("lyric", "")
if not isinstance(lrc, str) or not lrc.strip():
logger.debug(f"Netease: empty lyrics for song_id={song_id}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
# Determine sync status
synced = _is_synced_lrc(lrc)
status = CacheStatus.SUCCESS_SYNCED if synced else CacheStatus.SUCCESS_UNSYNCED
logger.info(
f"Netease: got {status.value} lyrics for song_id={song_id} "
f"({len(lrc.splitlines())} lines)"
)
return LyricResult(
status=status, lyrics=lrc.strip(), source=self.source_name
)
except Exception as e:
logger.error(f"Netease: lyric fetch failed for song_id={song_id}: {e}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Search for the track and fetch its lyrics."""
query = f"{track.artist or ''} {track.title or ''}".strip()
if not query:
logger.debug("Netease: skipped — insufficient metadata")
return None
logger.info(f"Netease: fetching lyrics for {track.display_name()}")
song_id = self._search(track)
if not song_id:
logger.debug(f"Netease: no match found for {track.display_name()}")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
return self._get_lyric(song_id)
+323
@@ -0,0 +1,323 @@
"""Spotify fetcher — obtains synced lyrics via Spotify's internal color-lyrics API.
Authentication flow (mirrors spotify-lyrics Go implementation):
1. Fetch server time from Spotify
2. Fetch TOTP secret from xyloflake/spot-secrets-go
3. Generate a TOTP code and exchange it (with SP_DC cookie) for an access token
4. Request lyrics using the access token
The secret and token are cached on the instance to avoid redundant network
calls within the same session.
Requires SPOTIFY_SP_DC environment variable to be set.
"""
import httpx
import time
import struct
import hmac
import hashlib
from typing import Optional, Tuple
from loguru import logger
from lrcfetch.models import TrackMeta, LyricResult, CacheStatus
from lrcfetch.fetchers.base import BaseFetcher
from lrcfetch.config import (
HTTP_TIMEOUT,
TTL_NOT_FOUND,
TTL_NETWORK_ERROR,
SPOTIFY_TOKEN_URL,
SPOTIFY_LYRICS_URL,
SPOTIFY_SERVER_TIME_URL,
SPOTIFY_SECRET_URL,
SPOTIFY_SP_DC,
UA_BROWSER,
)
class SpotifyFetcher(BaseFetcher):
def __init__(self) -> None:
# Session-level caches to avoid refetching within the same run
self._cached_secret: Optional[Tuple[str, int]] = None
self._cached_token: Optional[str] = None
self._token_expires_at: float = 0.0
@property
def source_name(self) -> str:
return "spotify"
# ─── Auth helpers ────────────────────────────────────────────────
def _get_server_time(self, client: httpx.Client) -> Optional[int]:
"""Fetch Spotify's server timestamp (seconds since epoch)."""
try:
res = client.get(SPOTIFY_SERVER_TIME_URL, timeout=HTTP_TIMEOUT)
res.raise_for_status()
data = res.json()
if not isinstance(data, dict) or "serverTime" not in data:
logger.error(f"Spotify: unexpected server-time response: {data}")
return None
server_time = data["serverTime"]
logger.debug(f"Spotify: server time = {server_time}")
return server_time
except Exception as e:
logger.error(f"Spotify: failed to fetch server time: {e}")
return None
def _get_secret(self, client: httpx.Client) -> Optional[Tuple[str, int]]:
"""Fetch and decode the TOTP secret. Cached after first success.
Response format: [{version: int, secret: str}, ...]
Each character in *secret* is XOR-decoded with ``(index % 33) + 9``.
"""
if self._cached_secret is not None:
logger.debug("Spotify: using cached TOTP secret")
return self._cached_secret
try:
res = client.get(SPOTIFY_SECRET_URL, timeout=HTTP_TIMEOUT)
res.raise_for_status()
data = res.json()
if not isinstance(data, list) or len(data) == 0:
logger.error(
f"Spotify: unexpected secrets response (type={type(data).__name__}, len={len(data) if isinstance(data, list) else '?'})")
return None
last = data[-1]
if "secret" not in last or "version" not in last:
logger.error(f"Spotify: malformed secret entry: {list(last.keys())}")
return None
secret_raw = last["secret"]
version = last["version"]
# XOR decode
parts = []
for i, char in enumerate(secret_raw):
parts.append(str(ord(char) ^ ((i % 33) + 9)))
secret = "".join(parts)
logger.debug(f"Spotify: decoded secret v{version} (len={len(secret)})")
self._cached_secret = (secret, version)
return self._cached_secret
except Exception as e:
logger.error(f"Spotify: failed to fetch secret: {e}")
return None
@staticmethod
def _generate_totp(server_time_s: int, secret: str) -> str:
"""Generate a 6-digit TOTP code compatible with Spotify's auth.
Uses HMAC-SHA1 with a 30-second period, matching the Go reference.
"""
counter = server_time_s // 30
counter_bytes = struct.pack(">Q", counter)
mac = hmac.new(secret.encode(), counter_bytes, hashlib.sha1).digest()
offset = mac[-1] & 0x0F
binary_code = (
(mac[offset] & 0x7F) << 24
| (mac[offset + 1] & 0xFF) << 16
| (mac[offset + 2] & 0xFF) << 8
| (mac[offset + 3] & 0xFF)
)
code = binary_code % (10**6)
return str(code).zfill(6)
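# Illustrative (editor note): this is the standard RFC 4226 dynamic
# truncation applied to an HMAC-SHA1 of the 30-second counter, so
# server_time_s = 59 yields counter = 1, and a given (secret, counter)
# pair always reproduces the same 6-digit code.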
def _get_token(self) -> Optional[str]:
"""Obtain a Spotify access token. Cached until expiry.
Requires SP_DC cookie (set via SPOTIFY_SP_DC env var).
"""
# Return cached token if still valid (with 30s safety margin)
if self._cached_token and time.time() < self._token_expires_at - 30:
logger.debug("Spotify: using cached access token")
return self._cached_token
if not SPOTIFY_SP_DC:
logger.error(
"Spotify: SPOTIFY_SP_DC env var not set — "
"cannot authenticate with Spotify"
)
return None
headers = {
"User-Agent": UA_BROWSER,
"Cookie": f"sp_dc={SPOTIFY_SP_DC}",
}
with httpx.Client(headers=headers) as client:
# Step 1: server time
server_time = self._get_server_time(client)
if server_time is None:
return None
# Step 2: secret
secret_data = self._get_secret(client)
if secret_data is None:
return None
secret, version = secret_data
# Step 3: TOTP
totp = self._generate_totp(server_time, secret)
logger.debug(f"Spotify: generated TOTP v{version}: {totp}")
# Step 4: exchange for token
params = {
"reason": "transport",
"productType": "web-player",
"totp": totp,
"totpVer": str(version),
"ts": str(int(time.time())),
}
try:
res = client.get(SPOTIFY_TOKEN_URL, params=params, timeout=HTTP_TIMEOUT)
if res.status_code != 200:
logger.error(
f"Spotify: token request returned {res.status_code}"
)
return None
body = res.json()
if not isinstance(body, dict) or "accessToken" not in body:
logger.error(
f"Spotify: unexpected token response keys: {list(body.keys()) if isinstance(body, dict) else type(body).__name__}")
return None
token = body["accessToken"]
is_anonymous = body.get("isAnonymous", False)
if is_anonymous:
logger.warning(
"Spotify: received anonymous token — SP_DC may be invalid"
)
# Cache with reported expiry
expires_ms = body.get("accessTokenExpirationTimestampMs", 0)
if expires_ms and expires_ms > int(time.time() * 1000):
self._token_expires_at = expires_ms / 1000.0
else:
logger.warning("Spotify: token expiry missing or invalid")
self._token_expires_at = time.time() + 3600
self._cached_token = token
logger.debug("Spotify: obtained access token")
return token
except Exception as e:
logger.error(f"Spotify: token request failed: {e}")
return None
# ─── Lyrics ──────────────────────────────────────────────────────
@staticmethod
def _format_lrc_line(start_ms: int, words: str) -> str:
"""Format a single lyric line as LRC ``[mm:ss.cc]text``."""
# Round to centiseconds first so a value like 59996 ms carries into the
# seconds field instead of yielding an invalid three-digit "[..:...100]".
total_cs = round(start_ms / 10.0)
minutes = total_cs // 6000
seconds = (total_cs // 100) % 60
centiseconds = total_cs % 100
return f"[{minutes:02d}:{seconds:02d}.{centiseconds:02d}]{words}"
@staticmethod
def _is_truly_synced(lines: list[dict]) -> bool:
"""Check if lyrics are actually synced (not all timestamps zero)."""
for line in lines:
try:
ms = int(line.get("startTimeMs", "0"))
if ms > 0:
return True
except (ValueError, TypeError):
continue
return False
def fetch(self, track: TrackMeta) -> Optional[LyricResult]:
"""Fetch lyrics for a Spotify track by its track ID."""
if not track.trackid:
logger.debug("Spotify: skipped — no trackid in metadata")
return None
logger.info(f"Spotify: fetching lyrics for trackid={track.trackid}")
token = self._get_token()
if not token:
logger.error("Spotify: cannot fetch lyrics without a token")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
url = f"{SPOTIFY_LYRICS_URL}{track.trackid}?format=json&market=from_token"
headers = {
"User-Agent": UA_BROWSER,
"Authorization": f"Bearer {token}",
"App-Platform": "WebPlayer",
}
try:
with httpx.Client(timeout=HTTP_TIMEOUT) as client:
res = client.get(url, headers=headers)
if res.status_code == 404:
logger.debug(f"Spotify: 404 for trackid={track.trackid}")
return LyricResult(
status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND
)
if res.status_code != 200:
logger.error(f"Spotify: lyrics API returned {res.status_code}")
return LyricResult(
status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR
)
data = res.json()
# Validate response structure
if not isinstance(data, dict) or "lyrics" not in data:
logger.error(f"Spotify: unexpected lyrics response structure")
return LyricResult(
status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR
)
lyrics_data = data["lyrics"]
sync_type = lyrics_data.get("syncType", "")
lines = lyrics_data.get("lines", [])
if not isinstance(lines, list) or len(lines) == 0:
logger.debug("Spotify: response contained no lyric lines")
return LyricResult(status=CacheStatus.NOT_FOUND, ttl=TTL_NOT_FOUND)
# Determine sync status
# syncType == "LINE_SYNCED" AND at least one non-zero timestamp
is_synced = sync_type == "LINE_SYNCED" and self._is_truly_synced(lines)
# Convert to LRC
lrc_lines: list[str] = []
for line in lines:
words = line.get("words", "")
if not isinstance(words, str):
continue
try:
ms = int(line.get("startTimeMs", "0"))
except (ValueError, TypeError):
ms = 0
if is_synced:
lrc_lines.append(self._format_lrc_line(ms, words))
else:
# Unsynced: emit with zero timestamps
lrc_lines.append(f"[00:00.00]{words}")
content = "\n".join(lrc_lines)
status = CacheStatus.SUCCESS_SYNCED if is_synced else CacheStatus.SUCCESS_UNSYNCED
logger.info(
f"Spotify: got {status.value} lyrics ({len(lrc_lines)} lines)"
)
return LyricResult(status=status, lyrics=content, source=self.source_name)
except Exception as e:
logger.error(f"Spotify: lyrics fetch failed: {e}")
return LyricResult(status=CacheStatus.NETWORK_ERROR, ttl=TTL_NETWORK_ERROR)
+54
@@ -0,0 +1,54 @@
"""Data models for lrcfetch."""
from pydantic import BaseModel, ConfigDict
from enum import Enum
from typing import Optional
class CacheStatus(str, Enum):
"""Status of a cached lyric entry."""
SUCCESS_SYNCED = "SUCCESS_SYNCED"
SUCCESS_UNSYNCED = "SUCCESS_UNSYNCED"
NOT_FOUND = "NOT_FOUND"
NETWORK_ERROR = "NETWORK_ERROR"
class TrackMeta(BaseModel):
"""Metadata describing a track obtained from MPRIS or manual input."""
model_config = ConfigDict(strict=True)
trackid: Optional[str] = None # Spotify track ID (without "spotify:track:" prefix)
length: Optional[int] = None # Duration in milliseconds
album: Optional[str] = None
artist: Optional[str] = None
title: Optional[str] = None
url: Optional[str] = None # Playback URL (file:// for local files)
@property
def is_local(self) -> bool:
"""True when the track is a local file (file:// URL)."""
return bool(self.url and self.url.startswith("file://"))
@property
def is_complete(self) -> bool:
"""True when all fields required by LRCLIB are present."""
return all([self.length, self.album, self.title, self.artist])
def display_name(self) -> str:
"""Human-readable representation for logging."""
parts = []
if self.artist:
parts.append(self.artist)
if self.title:
parts.append(self.title)
return " - ".join(parts) if parts else self.trackid or self.url or "(unknown)"
class LyricResult(BaseModel):
"""Result of a lyric fetch attempt, also used as cache record."""
model_config = ConfigDict(strict=True)
status: CacheStatus
lyrics: Optional[str] = None
source: Optional[str] = None # Which fetcher produced this result
ttl: Optional[int] = None # Hint for cache TTL (seconds)
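# Illustrative (editor note): a local-file track and its derived flags:
#
#   t = TrackMeta(title="Nude", artist="Radiohead", album="In Rainbows",
#                 length=255000, url="file:///music/nude.flac")
#   t.is_local        # True  (file:// URL)
#   t.is_complete     # True  (all LRCLIB-required fields present)
#   t.display_name()  # "Radiohead - Nude"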
+169
@@ -0,0 +1,169 @@
import asyncio
from dbus_next.aio.message_bus import MessageBus
from dbus_next.constants import BusType
from dbus_next.message import Message
from lrcfetch.models import TrackMeta
from loguru import logger
from typing import Optional, List, Any
import subprocess
async def _get_active_players(bus: MessageBus, specific_player: Optional[str] = None) -> List[str]:
try:
reply = await bus.call(
Message(
destination="org.freedesktop.DBus",
path="/org/freedesktop/DBus",
interface="org.freedesktop.DBus",
member="ListNames"
)
)
if not reply or not reply.body:
return []
names = reply.body[0]
players = [name for name in names if name.startswith("org.mpris.MediaPlayer2.")]
if specific_player:
players = [p for p in players if specific_player.lower() in p.lower()]
else:
# Sort so that spotify is preferred
players.sort(key=lambda x: 0 if "spotify" in x.lower() else 1)
return players
except Exception as e:
logger.error(f"Failed to list DBus names: {e}")
return []
async def _fetch_metadata_dbus(specific_player: Optional[str] = None) -> Optional[TrackMeta]:
bus = None
try:
bus = await MessageBus(bus_type=BusType.SESSION).connect()
except Exception as e:
logger.error(f"Failed to connect to DBus: {e}")
return None
try:
players = await _get_active_players(bus, specific_player)
if not players:
logger.debug(f"No active MPRIS players found via DBus{' for ' + specific_player if specific_player else ''}.")
return None
player_name = players[0]
logger.debug(f"Using player: {player_name}")
introspection = await bus.introspect(player_name, "/org/mpris/MediaPlayer2")
proxy = bus.get_proxy_object(player_name, "/org/mpris/MediaPlayer2", introspection)
props_iface = proxy.get_interface("org.freedesktop.DBus.Properties")
if not props_iface:
logger.error(f"Player {player_name} doesn't support Properties interface.")
return None
try:
metadata_var: Any = await getattr(props_iface, "call_get")("org.mpris.MediaPlayer2.Player", "Metadata")
if not metadata_var:
logger.error("Empty metadata received.")
return None
metadata = metadata_var.value
# Extract trackid — MPRIS returns either "spotify:track:ID"
# or a DBus object path like "/com/spotify/track/ID"
trackid = metadata.get("mpris:trackid", None)
if trackid:
trackid = trackid.value
if isinstance(trackid, str):
if trackid.startswith("spotify:track:"):
trackid = trackid.removeprefix("spotify:track:")
elif trackid.startswith("/com/spotify/track/"):
trackid = trackid.removeprefix("/com/spotify/track/")
# Extract length (usually microseconds)
length = metadata.get("mpris:length", None)
if length:
length = length.value // 1000 if isinstance(length.value, int) else None
album = metadata.get("xesam:album", None)
album = album.value if album else None
artist = metadata.get("xesam:artist", None)
artist = artist.value[0] if artist and isinstance(artist.value, list) and artist.value else None
title = metadata.get("xesam:title", None)
title = title.value if title else None
url = metadata.get("xesam:url", None)
url = url.value if url else None
return TrackMeta(
trackid=trackid,
length=length,
album=album,
artist=artist,
title=title,
url=url
)
except Exception as e:
logger.error(f"Failed to get properties from {player_name}: {e}")
return None
finally:
if bus:
bus.disconnect()
def _fetch_metadata_subprocess(specific_player: Optional[str] = None) -> Optional[TrackMeta]:
"""Fallback using playerctl if dbus-next fails or session bus is problematic."""
logger.debug("Attempting to use playerctl as fallback.")
try:
# Check if playerctl exists
subprocess.run(["playerctl", "--version"], capture_output=True, check=True)
base_cmd = ["playerctl"]
if specific_player:
base_cmd.extend(["-p", specific_player])
def _get_prop(prop: str) -> Optional[str]:
res = subprocess.run(base_cmd + ["metadata", prop], capture_output=True, text=True)
if res.returncode == 0 and res.stdout.strip():
return res.stdout.strip()
return None
trackid = _get_prop("mpris:trackid")
if trackid:
if trackid.startswith("spotify:track:"):
trackid = trackid.removeprefix("spotify:track:")
elif trackid.startswith("/com/spotify/track/"):
trackid = trackid.removeprefix("/com/spotify/track/")
length_str = _get_prop("mpris:length")
length = int(length_str) // 1000 if length_str and length_str.isdigit() else None
album = _get_prop("xesam:album")
artist = _get_prop("xesam:artist")
title = _get_prop("xesam:title")
url = _get_prop("xesam:url")
if not any([trackid, length, album, artist, title, url]):
return None
return TrackMeta(
trackid=trackid,
length=length,
album=album,
artist=artist,
title=title,
url=url
)
except Exception as e:
logger.debug(f"playerctl fallback failed: {e}")
return None
def get_current_track(player_name: Optional[str] = None) -> Optional[TrackMeta]:
try:
meta = asyncio.run(_fetch_metadata_dbus(player_name))
if meta:
return meta
except Exception as e:
logger.error(f"DBus async loop failed: {e}")
return _fetch_metadata_subprocess(player_name)
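# Illustrative (editor note): typical use from the CLI layer:
#
#   meta = get_current_track()        # any player, Spotify preferred
#   meta = get_current_track("vlc")   # substring match on the bus name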
+4
@@ -0,0 +1,4 @@
from lrcfetch.cli import run
if __name__ == "__main__":
run()
+23
@@ -0,0 +1,23 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "lrcfetch"
version = "0.1.0"
description = "Fetch lyrics for tracks."
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "dbus-next>=0.2.3",
    "httpx>=0.28.1",
    "loguru>=0.7.3",
    "mutagen>=1.47.0",
    "platformdirs>=4.9.4",
    "pydantic>=2.12.5",
    "python-dotenv>=1.2.2",
    "typer>=0.24.1",
]

[project.scripts]
lrcfetch = "lrcfetch.cli:run"
Generated
+350
View File
@@ -0,0 +1,350 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "annotated-doc"
version = "0.0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
]
[[package]]
name = "annotated-types"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
]
[[package]]
name = "anyio"
version = "4.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" },
]
[[package]]
name = "certifi"
version = "2026.2.25"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
]
[[package]]
name = "click"
version = "8.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
]
[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
[[package]]
name = "dbus-next"
version = "0.2.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ce/45/6a40fbe886d60a8c26f480e7d12535502b5ba123814b3b9a0b002ebca198/dbus_next-0.2.3.tar.gz", hash = "sha256:f4eae26909332ada528c0a3549dda8d4f088f9b365153952a408e28023a626a5", size = 71112, upload-time = "2021-07-25T22:11:28.398Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/fc/c0a3f4c4eaa5a22fbef91713474666e13d0ea2a69c84532579490a9f2cc8/dbus_next-0.2.3-py3-none-any.whl", hash = "sha256:58948f9aff9db08316734c0be2a120f6dc502124d9642f55e90ac82ffb16a18b", size = 57885, upload-time = "2021-07-25T22:11:25.466Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
]
[[package]]
name = "httpcore"
version = "1.0.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "loguru"
version = "0.7.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "win32-setctime", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
]
[[package]]
name = "lrcfetch"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "dbus-next" },
{ name = "httpx" },
{ name = "loguru" },
{ name = "mutagen" },
{ name = "platformdirs" },
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typer" },
]
[package.metadata]
requires-dist = [
{ name = "dbus-next", specifier = ">=0.2.3" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "loguru", specifier = ">=0.7.3" },
{ name = "mutagen", specifier = ">=1.47.0" },
{ name = "platformdirs", specifier = ">=4.9.4" },
{ name = "pydantic", specifier = ">=2.12.5" },
{ name = "python-dotenv", specifier = ">=1.2.2" },
{ name = "typer", specifier = ">=0.24.1" },
]
[[package]]
name = "markdown-it-py"
version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mdurl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
]
[[package]]
name = "mdurl"
version = "0.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
[[package]]
name = "mutagen"
version = "1.47.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/81/e6/64bc71b74eef4b68e61eb921dcf72dabd9e4ec4af1e11891bbd312ccbb77/mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99", size = 1274186, upload-time = "2023-09-03T16:33:33.411Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/7a/620f945b96be1f6ee357d211d5bf74ab1b7fe72a9f1525aafbfe3aee6875/mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719", size = 194391, upload-time = "2023-09-03T16:33:29.955Z" },
]
[[package]]
name = "platformdirs"
version = "4.9.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" },
]
[[package]]
name = "pydantic"
version = "2.12.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
{ name = "pydantic-core" },
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]
[[package]]
name = "pydantic-core"
version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
{ url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
{ url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
{ url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
{ url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
{ url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
{ url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
{ url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
{ url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
{ url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
{ url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
{ url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
{ url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
{ url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
{ url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
{ url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
{ url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
{ url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
{ url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
{ url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
{ url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
{ url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
{ url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
{ url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
{ url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
{ url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
{ url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
{ url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
{ url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
{ url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
{ url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
{ url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
{ url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
{ url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
{ url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
{ url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
{ url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
{ url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
[[package]]
name = "python-dotenv"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
[[package]]
name = "rich"
version = "14.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown-it-py" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" },
]
[[package]]
name = "shellingham"
version = "1.5.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
]
[[package]]
name = "typer"
version = "0.24.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-doc" },
{ name = "click" },
{ name = "rich" },
{ name = "shellingham" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
[[package]]
name = "typing-inspection"
version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]
name = "win32-setctime"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
]