Add diskcache
parent 07280587a9
commit fe4df5e9d7
app.py | 37
@@ -1,11 +1,16 @@
 import http.cookiejar
 import json
-from flask import Flask, Response
 import requests
 from bs4 import BeautifulSoup
+from flask import Flask, Response
+from diskcache import Cache
 
 app = Flask(__name__)
 
+CACHE_EXPIRATION_SECONDS = 3600  # 1 hour
+CACHE_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB
+cache = Cache("disk_cache", size_limit=CACHE_SIZE_LIMIT)
+
 cookie_jar = http.cookiejar.MozillaCookieJar('cookies.txt')
 try:
     cookie_jar.load(ignore_discard=True, ignore_expires=True)
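Not part of the diff, just a rough sketch of what the new Cache object does: entries are written to the disk_cache directory, get() returns None on a miss or once the expire window has lapsed, and diskcache culls old entries when size_limit is exceeded. The key and value below are placeholders.

from diskcache import Cache

cache = Cache("disk_cache", size_limit=100 * 1024 * 1024)
cache.set("sm9", "<html>cached page</html>", expire=3600)  # persisted on disk for an hour ("sm9" is a made-up key)
print(cache.get("sm9"))   # -> "<html>cached page</html>" while still fresh
print(cache.get("nope"))  # -> None on a miss or after expiry
cache.close()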
@@ -20,8 +25,12 @@ s.cookies = cookie_jar  # type: ignore
 
 @app.route("/watch/<video_id>")
 def proxy(video_id):
-    real_url = f"https://www.nicovideo.jp/watch/{video_id}"
+    cached_html = cache.get(video_id)
+    if cached_html is not None:
+        return Response(cached_html, mimetype="text/html")  # type: ignore
 
+    # Not in cache or cache expired; fetch from nicovideo.jp
+    real_url = f"https://www.nicovideo.jp/watch/{video_id}"
     try:
         r = s.get(real_url, timeout=10)
     except requests.RequestException as e:
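The route now follows a cache-aside pattern: look the video_id up in the disk cache, return immediately on a hit, otherwise fall through to the network fetch and store the rendered result (the store happens in the last hunk below). A condensed, purely illustrative sketch of that flow, with fetch_from_upstream as a hypothetical stand-in for the requests call:

def get_page(video_id):
    html = cache.get(video_id)  # None on a miss or after the TTL lapses
    if html is None:
        html = fetch_from_upstream(video_id)  # hypothetical stand-in for s.get(...) plus templating
        cache.set(video_id, html, expire=CACHE_EXPIRATION_SECONDS)
    return html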
@@ -30,21 +39,24 @@ def proxy(video_id):
     soup = BeautifulSoup(r.text, "html.parser")
     thumbnail_url = None
     try:
-        if soup.find("meta", {"name": "server-response"}):
-            params = json.loads(soup.find("meta", {"name": "server-response"})["content"])["data"]["response"]  # type: ignore
-            thumbnail_url = (  # Use highest quality thumbnail available
-                params["video"]["thumbnail"]["ogp"]
-                or params["video"]["thumbnail"]["player"]
-                or params["video"]["thumbnail"]["largeUrl"]
-                or params["video"]["thumbnail"]["middleUrl"]
-                or params["video"]["thumbnail"]["url"]
+        server_response = soup.find("meta", {"name": "server-response"})
+        if server_response:
+            params = json.loads(server_response["content"])["data"]["response"]  # type: ignore
+            thumbnail_url = (
+                params["video"]["thumbnail"].get("ogp") or
+                params["video"]["thumbnail"].get("player") or
+                params["video"]["thumbnail"].get("largeUrl") or
+                params["video"]["thumbnail"].get("middleUrl") or
+                params["video"]["thumbnail"].get("url")
             )
     except (KeyError, json.JSONDecodeError):
         pass
 
     og_tags = soup.find_all("meta", property=lambda x: x)  # type: ignore
     for tag in og_tags:
         if tag.get("property") == "og:image" and thumbnail_url:
             tag["content"] = thumbnail_url
 
     og_tags_str = "\n".join(str(tag) for tag in og_tags)
     html_response = f"""
 <!DOCTYPE html>
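The switch from bracket indexing to dict.get() changes the failure mode: a missing key now yields None and the or-chain simply tries the next candidate, whereas ["ogp"] raised KeyError and the surrounding except threw the whole lookup away. Illustrative only, with a made-up thumbnail payload:

thumbnail = {"player": "https://example.invalid/player.jpg", "url": "https://example.invalid/small.jpg"}

# thumbnail["ogp"] would raise KeyError, so no URL would be picked at all
picked = thumbnail.get("ogp") or thumbnail.get("player") or thumbnail.get("url")
print(picked)  # -> https://example.invalid/player.jpg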
@@ -58,7 +70,6 @@ def proxy(video_id):
 </html>
 """
 
-    return Response(html_response, mimetype="text/html")
+    cache.set(video_id, html_response, expire=CACHE_EXPIRATION_SECONDS)
 
-if __name__ == "__main__":
-    app.run(host="127.0.0.1", port=2525, debug=False)
+    return Response(html_response, mimetype="text/html")
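The rendered page is written to the cache just before it is returned, so repeat requests for the same video_id within the hour are served from disk. A quick way to peek at what has accumulated, assuming the app has run at least once (uses only documented diskcache calls):

from diskcache import Cache

with Cache("disk_cache") as store:
    print(len(store), "entries,", store.volume(), "bytes on disk")
    for key in store:
        print(key)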