Let cache compression type be controlled, including none.
- No compression is good if the filesystem is already compressed.
This commit is contained in:
parent
6b9dddcbd2
commit
b2e6ab2c3e
4 changed files with 45 additions and 16 deletions
|
|
@ -58,6 +58,8 @@ Options:
|
|||
the *edit command. It may contain options.
|
||||
-t THEME, --theme=THEME The pygment theme used for syntax
|
||||
highlighting. Defaults to "native".
|
||||
-c TYPE, --compression=TYPE The type of compression used in the cache:
|
||||
gzip, lzma, bz2, or none. Defaults to gzip.
|
||||
"""
|
||||
|
||||
|
||||
|
|
@ -655,10 +657,11 @@ class Screen:
|
|||
state["workers"] = None
|
||||
return state
|
||||
|
||||
def make_workers(self, worker_count, is_being_tested):
|
||||
def make_workers(self, worker_count, is_being_tested, compression):
|
||||
workers = []
|
||||
for index in range(worker_count):
|
||||
worker_ = worker.Worker(self._is_paused, is_being_tested)
|
||||
worker_ = worker.Worker(self._is_paused, is_being_tested,
|
||||
compression)
|
||||
workers.append(worker_)
|
||||
future = worker_.job_runner(self, self._summary, self._log,
|
||||
self._summary._jobs_added_event,
|
||||
|
|
@ -1069,12 +1072,14 @@ def load_state(pickle_path, jobs_added_event, appearance_changed_event,
|
|||
return summary, screen, log, is_first_run
|
||||
|
||||
|
||||
def main(root_path, loop, worker_count=None, editor_command=None, theme=None,
|
||||
def main(root_path, loop, worker_count=None, editor_command=None, theme=None, compression=None,
|
||||
is_being_tested=False):
|
||||
if worker_count is None:
|
||||
worker_count = max(multiprocessing.cpu_count() - 1, 1)
|
||||
if theme is None:
|
||||
theme = "native"
|
||||
if compression is None:
|
||||
compression = "gzip"
|
||||
os.environ["PYGMENT_STYLE"] = theme
|
||||
pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
|
||||
jobs_added_event = asyncio.Event()
|
||||
|
|
@ -1097,7 +1102,7 @@ def main(root_path, loop, worker_count=None, editor_command=None, theme=None,
|
|||
is_path_excluded)
|
||||
try:
|
||||
log.log_message(f"Starting workers ({worker_count}) …")
|
||||
screen.make_workers(worker_count, is_being_tested)
|
||||
screen.make_workers(worker_count, is_being_tested, compression)
|
||||
|
||||
def exit_loop():
|
||||
log.log_command("Exiting…")
|
||||
|
|
@ -1162,18 +1167,26 @@ def check_arguments():
|
|||
if arguments["--theme"] not in themes:
|
||||
print("--theme must be one of:", " ".join(themes))
|
||||
sys.exit(1)
|
||||
if arguments["--compression"] is not None:
|
||||
compressions = ["gzip", "lzma", "bz2", "none"]
|
||||
if arguments["--compression"] not in compressions:
|
||||
print("--compression must be one of:", " ".join(compressions))
|
||||
sys.exit(1)
|
||||
editor_command = arguments["--editor"] or os.environ.get("EDITOR", None)\
|
||||
or os.environ.get("VISUAL", None)
|
||||
return root_path, worker_count, editor_command, arguments["--theme"]
|
||||
return root_path, worker_count, editor_command, arguments["--theme"], \
|
||||
arguments["--compression"]
|
||||
|
||||
|
||||
def entry_point():
    """Command-line entry point for eris.

    Parses the command-line arguments (including the new ``--compression``
    option), sets the terminal title to the project's basename, prepares the
    cache directory, and runs the main event loop from the project root.
    """
    root_path, worker_count, editor_command, theme, compression = \
        check_arguments()
    with terminal.terminal_title("eris: " + os.path.basename(root_path)):
        manage_cache(root_path)
        with chdir(root_path):  # FIX: Don't change directory if possible.
            loop = asyncio.get_event_loop()
            main(root_path, loop, worker_count, editor_command, theme,
                 compression)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -529,6 +529,12 @@ def dump_pickle_safe(object_, path, protocol=pickle.HIGHEST_PROTOCOL,
|
|||
os.rename(tmp_path, path)
|
||||
|
||||
|
||||
@functools.lru_cache()
def compression_open_func(compression):
    """Return the ``open``-style callable for a compression type.

    ``compression`` is one of "gzip", "lzma", "bz2" or "none".  For "none"
    the builtin ``open`` is returned; otherwise the stdlib module of the
    same name is imported and its ``open`` function returned.  Results are
    memoized so repeated lookups avoid re-importing.
    """
    if compression == "none":
        return open
    return importlib.import_module(compression).open
|
||||
|
||||
|
||||
class Result:
|
||||
|
||||
COMPLETED_STATUSES = {
|
||||
|
|
@ -538,6 +544,7 @@ class Result:
|
|||
def __init__(self, path, tool):
    """Initialise a pending result for running *tool* against *path*.

    The pickled result widget is cached on disk under CACHE_PATH, keyed by
    the path and the tool's name.
    """
    self.path = path
    self.tool = tool
    # Compression type of the on-disk pickle ("gzip", "lzma", "bz2" or
    # "none").  None means the worker has not produced a result yet, so
    # the cache file cannot be read.
    self.compression = None
    self.pickle_path = os.path.join(CACHE_PATH, path + "-" + tool.__name__)
    self.scroll_position = (0, 0)
    self.status = Status.pending
|
||||
|
|
@ -546,10 +553,11 @@ class Result:
|
|||
@lru_cache_with_eviction(maxsize=50)
|
||||
def result(self):
|
||||
unknown_label = fill3.Text("?")
|
||||
if self.status == Status.pending:
|
||||
if self.status == Status.pending or self.compression is None:
|
||||
return unknown_label
|
||||
try:
|
||||
with gzip.open(self.pickle_path, "rb") as pickle_file:
|
||||
with compression_open_func(self.compression)(self.pickle_path, "rb") \
|
||||
as pickle_file:
|
||||
return pickle.load(pickle_file)
|
||||
except FileNotFoundError:
|
||||
return unknown_label
|
||||
|
|
@ -557,7 +565,8 @@ class Result:
|
|||
@result.setter
def result(self, value):
    """Persist *value* to the on-disk cache and invalidate the getter cache.

    The pickle is written atomically via dump_pickle_safe using the open
    function that matches ``self.compression``, then the memoized getter's
    cached entry for this instance is evicted so the next read sees the
    new value.
    """
    os.makedirs(os.path.dirname(self.pickle_path), exist_ok=True)
    dump_pickle_safe(value, self.pickle_path,
                     open=compression_open_func(self.compression))
    Result.result.fget.evict(self)
|
||||
|
||||
def set_status(self, status):
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@
|
|||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import asyncio
|
||||
import gzip
|
||||
import os
|
||||
import shutil
|
||||
import signal
|
||||
|
|
@ -19,9 +18,10 @@ class Worker:
|
|||
AUTOSAVE_MESSAGE = "Auto-saving…"
|
||||
unsaved_jobs_total = 0
|
||||
|
||||
def __init__(self, is_already_paused, is_being_tested, compression):
    """Initialise a worker process handle.

    is_already_paused -- whether the screen is paused at creation time.
    is_being_tested -- enables test-mode behaviour in the worker.
    compression -- cache compression type ("gzip", "lzma", "bz2" or
        "none") forwarded to the worker subprocess.
    """
    self.is_already_paused = is_already_paused
    self.is_being_tested = is_being_tested
    self.compression = compression
    # Populated later: the current Result being run, the subprocess
    # handle, and its process-group id (used to kill the whole group).
    self.result = None
    self.process = None
    self.child_pgid = None
|
||||
|
|
@ -44,6 +44,7 @@ class Worker:
|
|||
async def job_runner(self, screen, summary, log, jobs_added_event,
|
||||
appearance_changed_event):
|
||||
await self.create_process()
|
||||
self.process.stdin.write(f"{self.compression}\n".encode("utf-8"))
|
||||
while True:
|
||||
await jobs_added_event.wait()
|
||||
while True:
|
||||
|
|
@ -53,6 +54,7 @@ class Worker:
|
|||
self.result = None
|
||||
break
|
||||
await self.result.run(log, appearance_changed_event, self)
|
||||
self.result.compression = self.compression
|
||||
Worker.unsaved_jobs_total += 1
|
||||
if Worker.unsaved_jobs_total == 100:
|
||||
log.log_message(Worker.AUTOSAVE_MESSAGE)
|
||||
|
|
@ -83,25 +85,28 @@ class Worker:
|
|||
os.killpg(self.child_pgid, signal.SIGKILL)
|
||||
|
||||
|
||||
def make_result_widget(text, result, compression):
    """Build a display widget for a tool's output text.

    Small outputs become a plain fixed widget; outputs longer than
    ``page_size`` lines are paged to disk, compressed with the open
    function matching *compression* so pages match the cache's
    compression setting.
    """
    appearance = fill3.str_to_appearance(text)
    page_size = 500
    open_func = tools.compression_open_func(compression)
    if len(appearance) > page_size:
        appearance = eris.paged_list.PagedList(
            appearance, result.get_pages_dir(), page_size, cache_size=2,
            exist_ok=True, open_func=open_func)
    return fill3.Fixed(appearance)
|
||||
|
||||
|
||||
def main():
    """Worker subprocess loop: run tools on paths read from stdin.

    Protocol: first print our process-group id (so the parent can signal
    the whole group), then read the compression type as the first stdin
    line, then repeatedly read (tool_name, path) pairs, run the tool,
    store the result widget, and report the status on stdout.
    """
    print(os.getpgid(os.getpid()), flush=True)
    # The parent sends the cache compression type ("gzip", "lzma", "bz2"
    # or "none") as the first line.
    compression = input()
    try:
        while True:
            tool_name, path = input(), input()
            tool = getattr(tools, tool_name)
            result = tools.Result(path, tool)
            result.compression = compression
            status, text = tools.run_tool_no_error(path, tool)
            result.result = make_result_widget(text, result, compression)
            print(status.value, flush=True)
    except:  # noqa: E722 -- deliberately broad: log any failure,
        # including EOFError when the parent closes stdin at shutdown.
        tools.log_error()
|
||||
|
|
|
|||
|
|
@ -29,8 +29,10 @@ class WorkerTestCase(unittest.TestCase):
|
|||
|
||||
def test_run_job(self):
    """End-to-end check: a worker runs one tool job to Status.normal.

    Mirrors the parent's startup protocol: create the worker with a
    compression type, then send that type as the first stdin line before
    submitting a job.
    """
    loop = asyncio.get_event_loop()
    compression = "none"
    worker_ = worker.Worker(False, False, compression)
    loop.run_until_complete(worker_.create_process())
    # The worker subprocess expects the compression type as its first
    # line of input (see worker.main).
    worker_.process.stdin.write(f"{compression}\n".encode("utf-8"))
    future = worker_.run_tool("foo", tools.metadata)
    status = loop.run_until_complete(future)
    self.assertEqual(status, tools.Status.normal)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue