webserver: Use an lru cache for 100 result pages.

This commit is contained in:
Andrew Hamilton 2021-11-18 13:11:14 +10:00
parent e6097931b2
commit ae7f53a1fc

View file

@@ -2,6 +2,7 @@
import asyncio
import functools
import gzip
import http.server
import os
@@ -47,6 +48,14 @@ def make_main_body():
</table>"""
@functools.lru_cache(maxsize=100)
def make_listing_page(url_path):
    """Build the HTML page for one tool's result, caching the 100 most recent pages.

    url_path is "<path>/<tool>"; the (path, tool) pair keys into the
    module-level result index.
    """
    path, tool = os.path.split(url_path)
    appearance = index[(path, tool)].appearance_min()
    return make_page(fill3.appearance_as_html(appearance), f"{tool} of {path}")
class Webserver(http.server.BaseHTTPRequestHandler):
def _set_headers(self):
@@ -61,10 +70,7 @@ class Webserver(http.server.BaseHTTPRequestHandler):
elif self.path == "/summary":
page = summary_page
elif "/" in self.path[1:]:
path, tool = os.path.split(self.path[1:])
result = index[(path, tool)]
body = fill3.appearance_as_html(result.appearance_min())
page = make_page(body, f"{tool} of {path}")
page = make_listing_page(self.path[1:])
else:
return
self.wfile.write(page.encode("utf-8"))