webserver: Use an lru cache for 100 result pages.
This commit is contained in:
parent
e6097931b2
commit
ae7f53a1fc
1 changed file with 10 additions and 4 deletions
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import gzip
|
||||
import http.server
|
||||
import os
|
||||
|
|
@ -47,6 +48,14 @@ def make_main_body():
|
|||
</table>"""
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=100)
def make_listing_page(url_path):
    """Build the HTML results page for *url_path*, memoizing recent pages.

    The final component of *url_path* names the tool and the remainder is
    the file path; the pair keys into the module-level ``index``.  The
    ``lru_cache`` decorator keeps the 100 most recently requested pages so
    repeat visits skip the HTML rendering step.
    """
    path, tool = os.path.split(url_path)
    # Render the stored result's minimal appearance into an HTML body.
    appearance = index[(path, tool)].appearance_min()
    return make_page(fill3.appearance_as_html(appearance), f"{tool} of {path}")
|
||||
|
||||
|
||||
class Webserver(http.server.BaseHTTPRequestHandler):
|
||||
|
||||
def _set_headers(self):
|
||||
|
|
@ -61,10 +70,7 @@ class Webserver(http.server.BaseHTTPRequestHandler):
|
|||
elif self.path == "/summary":
|
||||
page = summary_page
|
||||
elif "/" in self.path[1:]:
|
||||
path, tool = os.path.split(self.path[1:])
|
||||
result = index[(path, tool)]
|
||||
body = fill3.appearance_as_html(result.appearance_min())
|
||||
page = make_page(body, f"{tool} of {path}")
|
||||
page = make_listing_page(self.path[1:])
|
||||
else:
|
||||
return
|
||||
self.wfile.write(page.encode("utf-8"))
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue