Add logging

MMaker 2025-01-31 14:32:19 -05:00
parent fe4df5e9d7
commit 76193a2e84
Signed by: mmaker
GPG Key ID: CCE79B8FEDA40FB2

app.py

@@ -4,6 +4,10 @@ import requests
 from bs4 import BeautifulSoup
 from flask import Flask, Response
 from diskcache import Cache
+import logging
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
 
 app = Flask(__name__)
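
Note: basicConfig is called here without a format argument, so the new log lines use logging's default "LEVEL:name:message" layout. A minimal sketch of what the INFO messages added in this commit look like on stderr; the video ID is just an example:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

logger.info("Received request for video ID: sm9")
# Printed with the default format:
#   INFO:__main__:Received request for video ID: sm9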
@@ -15,7 +19,7 @@ cookie_jar = http.cookiejar.MozillaCookieJar('cookies.txt')
 try:
     cookie_jar.load(ignore_discard=True, ignore_expires=True)
 except FileNotFoundError:
-    print("cookies.txt not found, starting with empty cookie jar")
+    logger.warning("cookies.txt not found, starting with empty cookie jar")
 
 s = requests.Session()
 s.headers.update({
@@ -25,16 +29,21 @@ s.cookies = cookie_jar  # type: ignore
 @app.route("/watch/<video_id>")
 def proxy(video_id):
+    logger.info(f"Received request for video ID: {video_id}")
     cached_html = cache.get(video_id)
     if cached_html is not None:
+        logger.info(f"Using cached response for video ID: {video_id}")
         return Response(cached_html, mimetype="text/html")  # type: ignore
 
     # Not in cache or cache expired; fetch from nicovideo.jp
     real_url = f"https://www.nicovideo.jp/watch/{video_id}"
     try:
+        logger.info(f"Fetching content from URL: {real_url}")
         r = s.get(real_url, timeout=10)
     except requests.RequestException as e:
-        return Response(f"Error fetching the page: {e}", status=500)
+        logger.error(f"Error fetching the page for video ID '{video_id}': {e}")
+        return Response(status=500)
 
     soup = BeautifulSoup(r.text, "html.parser")
     thumbnail_url = None
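
The error path changes behavior as well as adding a log line: the exception text is now logged server-side and the client gets a bodyless 500 instead of the error message. A hedged test sketch, assuming app.py is importable with the module-level `s` and `app` shown above (the video ID is made up):

from unittest import mock

import requests

import app as proxy_app

client = proxy_app.app.test_client()
with mock.patch.object(proxy_app.s, "get", side_effect=requests.ConnectionError("boom")):
    resp = client.get("/watch/sm9")

assert resp.status_code == 500
assert resp.data == b""  # exception details no longer leak to the client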
@@ -49,7 +58,8 @@ def proxy(video_id):
             params["video"]["thumbnail"].get("middleUrl") or
             params["video"]["thumbnail"].get("url")
         )
-    except (KeyError, json.JSONDecodeError):
+    except (KeyError, json.JSONDecodeError) as e:
+        logger.warning(f"Failed to extract thumbnail info for video ID '{video_id}': {e}")
         pass
 
     og_tags = soup.find_all("meta", property=lambda x: x)  # type: ignore
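
For context on the last hunk: the thumbnail lookup reads `video.thumbnail.middleUrl`, falling back to `video.thumbnail.url`, and the commit now logs a warning instead of failing silently when those keys are missing or the JSON is malformed. A self-contained sketch of that extraction; the sample JSON shape is an assumption inferred from the keys the diff reads:

import json
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Shape inferred from the diff; the real page data may differ.
raw = '{"video": {"thumbnail": {"middleUrl": null, "url": "https://example.com/thumb.jpg"}}}'

thumbnail_url = None
try:
    params = json.loads(raw)
    thumbnail_url = (
        params["video"]["thumbnail"].get("middleUrl") or
        params["video"]["thumbnail"].get("url")
    )
except (KeyError, json.JSONDecodeError) as e:
    logger.warning(f"Failed to extract thumbnail info: {e}")

print(thumbnail_url)  # -> https://example.com/thumb.jpg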