Better log phrasing

This commit is contained in:
MMaker 2025-02-27 12:39:38 -05:00
parent c6d53e0c1c
commit 96326f543f
Signed by: mmaker
GPG Key ID: CCE79B8FEDA40FB2

44
app.py
View File

@@ -91,7 +91,7 @@ def download_and_upload_video(video_id, url, video_quality):
temp_path = temp_file.name temp_path = temp_file.name
try: try:
logger.info(f"Starting video download of {video_id}") logger.info(f"{video_id}: Starting download")
nndownload_args = [ nndownload_args = [
"--no-login", "--no-login",
"--user-agent", "Googlebot/2.1", "--user-agent", "Googlebot/2.1",
@@ -105,7 +105,7 @@ def download_and_upload_video(video_id, url, video_quality):
nndownload.execute(*nndownload_args) nndownload.execute(*nndownload_args)
if os.path.exists(temp_path) and s3_client and S3_BUCKET_NAME: if os.path.exists(temp_path) and s3_client and S3_BUCKET_NAME:
logger.info(f"Downloaded video {video_id}, uploading to CDN") logger.info(f"{video_id}: Downloaded, uploading to CDN")
try: try:
s3_key = f"niconico/{video_id}.mp4" s3_key = f"niconico/{video_id}.mp4"
s3_client.upload_file( s3_client.upload_file(
@@ -115,27 +115,27 @@ def download_and_upload_video(video_id, url, video_quality):
ExtraArgs={'ContentType': 'video/mp4', 'ACL': 'public-read'} ExtraArgs={'ContentType': 'video/mp4', 'ACL': 'public-read'}
) )
logger.info(f"Successfully uploaded video {video_id} to CDN") logger.info(f"{video_id}: Upload successful to CDN")
if cache is not None: if cache is not None:
cache.set(f"{video_id}_cdn", True, expire=CACHE_EXPIRATION_CDN) cache.set(f"{video_id}_cdn", True, expire=CACHE_EXPIRATION_CDN)
# Clear HTML cache for this video to ensure next view gets updated HTML # Clear HTML cache for this video to ensure next view gets updated HTML
cache.delete(f"{video_id}_html") cache.delete(f"{video_id}_html")
logger.debug(f"Cleared cache for video ID: {video_id}") logger.debug(f"{video_id}: Cleared HTML cache")
return True return True
except Exception as e: except Exception as e:
logger.error(f"Error uploading video {video_id} to CDN: {e}") logger.error(f"{video_id}: Error uploading to CDN: {e}")
return False return False
else: else:
logger.error(f"Failed to download video {video_id} or S3 client not configured") logger.error(f"{video_id}: Failed to download or S3 client not configured")
return False return False
finally: finally:
if os.path.exists(temp_path): if os.path.exists(temp_path):
os.unlink(temp_path) os.unlink(temp_path)
logger.debug(f"Removed temporary file: {temp_path}") logger.debug(f"Removed temporary file: {temp_path}")
except Exception as e: except Exception as e:
logger.error(f"Error in download process for video {video_id}: {e}") logger.error(f"{video_id}: Error in download process: {e}")
return False return False
finally: finally:
with download_lock: with download_lock:
@@ -169,7 +169,7 @@ worker_thread.start()
def is_video_in_cdn(video_id): def is_video_in_cdn(video_id):
"""Check if video exists in CDN""" """Check if video exists in CDN"""
if cache is not None and cache.get(f"{video_id}_cdn"): if cache is not None and cache.get(f"{video_id}_cdn"):
logger.debug(f"Video {video_id} is already uploaded to CDN (cached)") logger.debug(f"{video_id}: Already uploaded to CDN (cached)")
return True return True
if not s3_client or not S3_BUCKET_NAME: if not s3_client or not S3_BUCKET_NAME:
@@ -213,11 +213,11 @@ def get_video_quality(params, quality_level_threshold=3):
def get_data(video_id, real_url): def get_data(video_id, real_url):
"""Get the server response for a given video ID""" """Get the server response for a given video ID"""
try: try:
logger.debug(f"Fetching content from URL: {real_url}") logger.debug(f"{video_id}: Fetching content from URL: {real_url}")
r = s.get(real_url, timeout=10) r = s.get(real_url, timeout=10)
# r.raise_for_status() # r.raise_for_status()
except requests.RequestException as e: except requests.RequestException as e:
logger.error(f"Error fetching the page for video ID '{video_id}': {e}") logger.error(f"{video_id}: Error fetching the page ('{real_url}'): {e}")
return None, None return None, None
soup = BeautifulSoup(r.text, "html.parser") soup = BeautifulSoup(r.text, "html.parser")
@@ -227,7 +227,7 @@ def get_data(video_id, real_url):
params = json.loads(server_response["content"])["data"]["response"] # type: ignore params = json.loads(server_response["content"])["data"]["response"] # type: ignore
return params, soup return params, soup
except (KeyError, json.JSONDecodeError) as e: except (KeyError, json.JSONDecodeError) as e:
logger.warning(f"Failed to extract thumbnail info for video ID '{video_id}': {e}") logger.warning(f"{video_id}: Failed to extract thumbnail info: {e}")
pass pass
return None, soup return None, soup
@@ -281,28 +281,28 @@ def get_oembed_url(params):
@app.route("/watch/<video_id>") @app.route("/watch/<video_id>")
def proxy(video_id): def proxy(video_id):
logger.info(f"Received request for video ID: {video_id}") logger.info(f"{video_id}: Received request")
if cache is not None: if cache is not None:
logger.debug(f"Checking cache for video ID: {video_id}") logger.debug(f"{video_id}: Checking cache")
cached_html = cache.get(f"{video_id}_html") cached_html = cache.get(f"{video_id}_html")
if cached_html is not None: if cached_html is not None:
logger.info(f"Returning cached response for video ID: {video_id}") logger.info(f"{video_id}: Returning cached response")
return Response(cached_html, mimetype="text/html") # type: ignore return Response(cached_html, mimetype="text/html") # type: ignore
logger.debug(f"Cache miss for video ID: {video_id} - fetching") logger.debug(f"{video_id}: Cache miss - fetching")
# Not in cache or cache expired; fetch from nicovideo.jp # Not in cache or cache expired; fetch from nicovideo.jp
real_url = f"https://www.nicovideo.jp/watch/{video_id}" real_url = f"https://www.nicovideo.jp/watch/{video_id}"
params, soup = get_data(video_id, real_url) params, soup = get_data(video_id, real_url)
if not params or not soup: if not params or not soup:
logger.error(f"Failed to retrieve data for video ID '{video_id}'") logger.error(f"{video_id}: Failed to fetch data")
return Response("Video not found", status=404) return Response("Video not found", status=404)
reason_code = params.get('reasonCode', '').upper() reason_code = params.get('reasonCode', '').upper()
if reason_code in ['HIDDEN_VIDEO']: if reason_code in ['HIDDEN_VIDEO']:
logger.warning(f"Video ID '{video_id}' is hidden") logger.warning(f"{video_id}: Video is hidden - returning 404")
return Response("Video not found", status=404) return Response("Video not found", status=404)
thumbnail_url = ( thumbnail_url = (
@@ -317,7 +317,7 @@ def proxy(video_id):
download_allowed = allow_download(params) if params else False download_allowed = allow_download(params) if params else False
request_user_agent = request.headers.get('User-Agent', '').lower() request_user_agent = request.headers.get('User-Agent', '').lower()
if download_allowed and 'discordbot' not in request_user_agent: if download_allowed and 'discordbot' not in request_user_agent:
logger.info(f"Video download ignored for {video_id} due to user agent ({request_user_agent})") logger.info(f"{video_id}: Video download ignored due to user agent ({request_user_agent})")
download_allowed = False download_allowed = False
video_quality = get_video_quality(params) if params else None video_quality = get_video_quality(params) if params else None
if download_allowed and video_quality is not None: if download_allowed and video_quality is not None:
@@ -329,7 +329,7 @@ def proxy(video_id):
queue_video_ids = [item[0] for item in download_queue] queue_video_ids = [item[0] for item in download_queue]
if video_id not in queue_video_ids: if video_id not in queue_video_ids:
download_queue.append((video_id, real_url, video_quality)) download_queue.append((video_id, real_url, video_quality))
logger.info(f"Queued video ID {video_id} for download") logger.info(f"{video_id}: Queued for download")
cdn_video_url = get_cdn_url(video_id) cdn_video_url = get_cdn_url(video_id)
og_tags = soup.find_all("meta", attrs={"property": True}) og_tags = soup.find_all("meta", attrs={"property": True})
@@ -367,10 +367,10 @@ if you want to download videos, please consider using a tool like nndownload: ht
</head><body></body></html>""" </head><body></body></html>"""
if cache is not None: if cache is not None:
logger.info(f"Caching response for video ID: {video_id}") logger.info(f"{video_id}: Caching HTML response")
cache.set(f"{video_id}_html", html_response, expire=CACHE_EXPIRATION_HTML) cache.set(f"{video_id}_html", html_response, expire=CACHE_EXPIRATION_HTML)
logger.info(f"Returning response for video ID: {video_id}") logger.info(f"{video_id}: Returning response")
return Response(html_response, mimetype="text/html") return Response(html_response, mimetype="text/html")
@app.route("/owoembed") @app.route("/owoembed")
@@ -405,5 +405,5 @@ def owoembed():
"version": "1.0" "version": "1.0"
} }
logger.info(f"Returning oEmbed response for video ID: {video_id}") logger.info(f"{video_id}: Returning oEmbed response")
return jsonify(oembed_response) return jsonify(oembed_response)