Run all tools in a filesystem sandbox
This commit is contained in:
parent
4d0a03cc16
commit
721cc28d03
11 changed files with 426 additions and 151 deletions
2
BUGS
2
BUGS
|
|
@ -7,7 +7,7 @@ Current
|
|||
- If a pending file is deleted, while not watching the filesystem,
|
||||
then tools fail when they can't find the file. Also, even if watching, there
|
||||
would be a race. Do what?
|
||||
- The scrollbars in the help screen don't work with the arrow keys.
|
||||
- Scrolling in the help screen doesn't work with the arrow keys.
|
||||
|
||||
|
||||
Current (tool related)
|
||||
|
|
|
|||
12
TODO
12
TODO
|
|
@ -8,15 +8,16 @@ Todo
|
|||
- Need to use conventional version numbers for pypi. See pep0440.
|
||||
- Add ESC as an alternative to 'q' for quit. If looking at Help, ESC should just
|
||||
exit the help screen.
|
||||
- Have a sandbox for unsafe (or all) tools.
|
||||
- Statuses' pretty names and variable names don't match.
|
||||
- Report on python doctests. (also coverage of)
|
||||
- Treat any compressed file as though it is uncompressed. But the metadata tool
|
||||
should still report on the original compressed file.
|
||||
- Cache tools._python_version.
|
||||
- Determine if perl files are perl5 or perl6.
|
||||
- Colourise mccabe.
|
||||
- Use perldoc on pod files.
|
||||
- Add rich LSCOLORS if the environment variable is not set.
|
||||
- Simplify coloring of mccabe. Just color functions with complexity greater than 10.
|
||||
- Add bandit tool for python.
|
||||
|
||||
|
||||
Done
|
||||
|
|
@ -152,6 +153,8 @@ Done
|
|||
- Have an option to turn off all automatic work.
|
||||
<- The 'working' switch does this
|
||||
- Add means to pause and unpause all current jobs.
|
||||
- Colourise mccabe.
|
||||
- Have a sandbox for unsafe (or all) tools.
|
||||
|
||||
A-syntax, B-tests, C-auto docs, D-lint, E-coverage, F-profile, G-tidy, H-import deps
|
||||
A B C D E F G H
|
||||
|
|
@ -226,8 +229,6 @@ Ideas
|
|||
- Integrate editor?
|
||||
- Open file in editor at the spot?
|
||||
- Use linguist as a generic tool.
|
||||
- POLA, only put in the sandbox what the tool needs. e.g. only the file.
|
||||
Make a chroot with one file?
|
||||
- Have a Cache widget with an update_appearance_min method
|
||||
- Colourise all directory listings. e.g. file listings of archives
|
||||
- Make sure there are no problems if it's run twice concurrently on the same
|
||||
|
|
@ -400,3 +401,6 @@ Shelved
|
|||
- chdir shouldn't be used with multi-threading. openat and fstatat can help.
|
||||
- Show all binary files with a binary viewer tool? Like a hex editor?
|
||||
- python-guacamole deals with 24bit color & conversions
|
||||
- POLA, only put in the sandbox what the tool needs. e.g. only the file.
|
||||
Make a chroot with one file?
|
||||
<- Not sure what the tool needs.
|
||||
|
|
|
|||
9
in-directory
Executable file
9
in-directory
Executable file
|
|
@ -0,0 +1,9 @@
|
|||
#!/bin/bash

# Run a command from inside a given directory:
#     in-directory DIR COMMAND [ARGS...]
# Used by sandbox_fs.py to enter the caller's working directory inside the
# sandbox before exec'ing a tool.

set -e

# Quote "$1" and "$@" so directories and arguments containing spaces or
# glob characters are passed through intact instead of being word-split.
cd "$1"
shift
exec "$@"
|
||||
89
sandbox_fs.py
Normal file
89
sandbox_fs.py
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
|
||||
|
||||
# Copyright (C) 2016 Andrew Hamilton. All rights reserved.
|
||||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
|
||||
class OverlayfsMount():
    """An overlayfs mount presenting lower_dir at mount_point.

    Writes made under mount_point land in a throw-away upper directory,
    leaving lower_dir untouched.  Mounting and cleanup shell out through
    sudo, so the caller needs sudo access.
    """

    def __init__(self, lower_dir, mount_point):
        self.lower_dir = lower_dir
        self.mount_point = mount_point
        # Scratch directories overlayfs requires; deleted again in umount().
        self.upper_dir = tempfile.mkdtemp()
        self.work_dir = tempfile.mkdtemp()
        options = "lowerdir=%s,upperdir=%s,workdir=%s" % (
            self.lower_dir, self.upper_dir, self.work_dir)
        subprocess.check_call(
            ["sudo", "mount", "-t", "overlayfs", "-o", options,
             "overlayfs", self.mount_point],
            stderr=subprocess.PIPE)

    def __repr__(self):
        return "<OverlayfsMount:%r over %r>" % (self.mount_point,
                                                self.lower_dir)

    def umount(self):
        """Lazily unmount, then delete the scratch directories."""
        # --lazy detaches immediately even if the mount is still busy.
        subprocess.check_call(["sudo", "umount", "--lazy", self.mount_point])
        # The upper/work dirs can contain root-owned files, hence sudo rm.
        subprocess.check_call(
            ["sudo", "rm", "-rf", self.upper_dir, self.work_dir])
|
||||
|
||||
|
||||
def _in_chroot(mount_point, command):
    """Wrap command so it runs chrooted into mount_point as the current user."""
    # chroot itself needs root, but --userspec drops privileges back to the
    # invoking user inside the chroot.
    prefix = ["sudo", "chroot", "--userspec=%s" % os.environ["USER"],
              mount_point]
    return prefix + command
|
||||
|
||||
|
||||
# Helper script shipped next to this module that cds into a directory before
# exec'ing the rest of its arguments.
_IN_DIRECTORY_SCRIPT = os.path.join(os.path.dirname(__file__), "in-directory")


def _in_directory(directory_path, command):
    """Wrap command so that it is executed from directory_path."""
    return [_IN_DIRECTORY_SCRIPT, directory_path, *command]
|
||||
|
||||
|
||||
def _parse_proc_mounts():
    """Yield each line of /proc/mounts split into its whitespace fields."""
    with open("/proc/mounts") as mounts_file:
        yield from (line.split() for line in mounts_file)
|
||||
|
||||
|
||||
class SandboxFs:
    """A sandboxed view of the filesystem built from overlayfs mounts.

    Each well-known top-level mount point is overlaid beneath mount_point,
    so commands run inside the sandbox (via chroot) see a writable copy of
    the real filesystem while the real one stays untouched.
    """

    def __init__(self, mount_point):
        self.mount_point = mount_point
        self.overlay_mounts = []

    def __repr__(self):
        return "<SandboxFs:%r mounts:%r>" % (self.mount_point,
                                             len(self.overlay_mounts))

    def _find_mounts(self):
        """Return the well-known top-level mount points currently mounted."""
        obvious_mount_points = {"/", "/usr", "/bin", "/etc", "/lib", "/dev",
                                "/proc", "/home", "/boot", "/opt", "/run",
                                "/sys", "/root", "/var", "/tmp"}
        mounted = set(fields[1] for fields in _parse_proc_mounts())
        return mounted & obvious_mount_points

    def mount(self):
        """Overlay every found mount point beneath self.mount_point."""
        self.overlay_mounts = [
            OverlayfsMount(mount_point, self.mount_point + mount_point)
            for mount_point in sorted(self._find_mounts())]

    def umount(self):
        """Tear the overlays down in reverse order of creation."""
        for overlay_mount in reversed(self.overlay_mounts):
            overlay_mount.umount()
        self.overlay_mounts = []

    def Popen(self, command, env=None):
        """Start command inside the sandbox, in the caller's cwd."""
        full_command = _in_chroot(self.mount_point,
                                  _in_directory(os.getcwd(), command))
        return subprocess.Popen(full_command, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, env=env)

    def run_command(self, command, env=None):
        """Run command to completion; return (stdout, stderr, returncode)."""
        process = self.Popen(command, env)
        stdout, stderr = process.communicate()
        return stdout, stderr, process.returncode
|
||||
42
sandbox_fs_test.py
Executable file
42
sandbox_fs_test.py
Executable file
|
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (C) 2016 Andrew Hamilton. All rights reserved.
|
||||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import sandbox_fs
|
||||
|
||||
|
||||
class SandboxFilesystemTestCase(unittest.TestCase):
    """Exercise SandboxFs against the real filesystem (requires sudo)."""

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.sandbox = sandbox_fs.SandboxFs(self.temp_dir)
        self.sandbox.mount()

    def tearDown(self):
        self.sandbox.umount()
        os.rmdir(self.temp_dir)

    def test_sandbox_minimal(self):
        # A file created inside the sandbox must not leak into the lower
        # (real) filesystem.
        upper_path = os.path.join(self.sandbox.mount_point, "foo")
        with open(upper_path, "w"):
            pass
        self.assertTrue(os.path.exists(upper_path))
        lower_path = os.path.join(
            self.sandbox.overlay_mounts[0].lower_dir, "foo")
        self.assertFalse(os.path.exists(lower_path))

    def test_home_directory_exists_in_the_sandbox(self):
        home_in_sandbox = self.sandbox.mount_point + os.environ["HOME"]
        self.assertTrue(os.path.exists(home_in_sandbox))

    def test_run_a_command_in_the_sandbox(self):
        # Commands run in the sandbox start in the caller's cwd.
        stdout, stderr, returncode = self.sandbox.run_command(["pwd"])
        self.assertEqual(stdout.strip().decode("utf-8"), os.environ["PWD"])


if __name__ == "__main__":
    unittest.main()
|
||||
4
test-all
4
test-all
|
|
@ -1,7 +1,9 @@
|
|||
#!/bin/bash
|
||||
|
||||
|
||||
for test in *_test.py; do
|
||||
CODEBASE_PATH=$(dirname $0)
|
||||
sudo -p "Some tests need sudo to run... [sudo] password for %u: " true
|
||||
for test in ${CODEBASE_PATH}/*_test.py; do
|
||||
echo "Testing $test ..."
|
||||
./${test} 2>&1
|
||||
echo
|
||||
|
|
|
|||
47
tools.py
47
tools.py
|
|
@ -4,6 +4,7 @@
|
|||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import ast
|
||||
import contextlib
|
||||
import dis
|
||||
import functools
|
||||
import hashlib
|
||||
|
|
@ -76,14 +77,12 @@ def fix_input(input_):
|
|||
return input_str.replace("\t", " " * 4)
|
||||
|
||||
|
||||
def _do_command(command):
|
||||
def _do_command(command, **kwargs):
|
||||
stdout, stderr = "", ""
|
||||
try:
|
||||
with contextlib.suppress(subprocess.CalledProcessError):
|
||||
process = subprocess.Popen(command, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
stderr=subprocess.PIPE, **kwargs)
|
||||
stdout, stderr = process.communicate()
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
return fix_input(stdout), fix_input(stderr), process.returncode
|
||||
|
||||
|
||||
|
|
@ -173,14 +172,14 @@ def metadata(path):
|
|||
stat_result.st_atime)]
|
||||
size = [_pretty_bytes(stat_result.st_size),
|
||||
_detail(stat_result.st_size, "bytes")]
|
||||
stdout, stderr, returncode = _do_command(
|
||||
stdout, *rest = _do_command(
|
||||
["file", "--dereference", "--brief", "--uncompress", "--mime", path])
|
||||
mime_type = stdout
|
||||
stdout, stderr, returncode = _do_command(
|
||||
stdout, *rest = _do_command(
|
||||
["file", "--dereference", "--brief", "--uncompress", path])
|
||||
file_type = stdout
|
||||
md5sum = md5(path)
|
||||
stdout, stderr, returncode = _do_command(["sha1sum", path])
|
||||
stdout, *rest = _do_command(["sha1sum", path])
|
||||
sha1sum = stdout.split()[0]
|
||||
permissions_value = [permissions,
|
||||
_detail(_permissions_in_octal(permissions), None)]
|
||||
|
|
@ -285,10 +284,9 @@ def python_coverage(path):
|
|||
coverage_path = os.path.join(temp_dir, "coverage")
|
||||
env = os.environ.copy()
|
||||
env["COVERAGE_FILE"] = coverage_path
|
||||
stdout, stderr, returncode = _do_command(
|
||||
["timeout", "20", python_exe, "run", test_path], env=env)
|
||||
assert returncode == 0, returncode
|
||||
stdout, stderr, returncode = _do_command(
|
||||
stdout, *rest = _do_command(
|
||||
["timeout", "60", python_exe, "run", test_path], env=env)
|
||||
stdout, *rest = _do_command(
|
||||
[python_exe, "annotate", "--directory", temp_dir,
|
||||
os.path.normpath(path)], env=env)
|
||||
with open(os.path.join(temp_dir, path + ",cover"), "r") as f:
|
||||
|
|
@ -301,9 +299,8 @@ python_coverage.dependencies = {"python-coverage", "python3-coverage"}
|
|||
|
||||
|
||||
def python_profile(path):
|
||||
stdout, stderr, returncode = _do_command(
|
||||
["timeout", "20", _python_version(path), "-m", "cProfile",
|
||||
"--sort=cumulative", path])
|
||||
stdout, *rest = _do_command(["timeout", "20", _python_version(path), "-m",
|
||||
"cProfile", "--sort=cumulative", path])
|
||||
return Status.info, fill3.Text(stdout)
|
||||
python_profile.dependencies = {"python", "python3"}
|
||||
|
||||
|
|
@ -352,14 +349,11 @@ def _colorize_mccabe(text, python_version, max_score):
|
|||
|
||||
def python_mccabe(path):
|
||||
python_version = _python_version(path)
|
||||
stdout, stderr, returncode = _do_command(
|
||||
[python_version, "-m", "mccabe", path])
|
||||
stdout, *rest = _do_command([python_version, "-m", "mccabe", path])
|
||||
max_score = 0
|
||||
try:
|
||||
with contextlib.suppress(ValueError): # When there are no lines
|
||||
max_score = max(_get_mccabe_line_score(line, python_version)
|
||||
for line in stdout.splitlines())
|
||||
except ValueError: # When there are no lines
|
||||
pass
|
||||
status = Status.failure if max_score > 10 else Status.success
|
||||
return status, fill3.Text(
|
||||
_colorize_mccabe(stdout, python_version, max_score))
|
||||
|
|
@ -367,8 +361,7 @@ python_mccabe.dependencies = {"python-mccabe", "python3-mccabe"}
|
|||
|
||||
|
||||
def python_tidy(path): # Deps: found on internet?
|
||||
stdout, stderr, returncode = _do_command(["python", "python-tidy.py",
|
||||
path])
|
||||
stdout, *rest = _do_command(["python", "python-tidy.py", path])
|
||||
return Status.info, _syntax_highlight_code(stdout, path)
|
||||
|
||||
|
||||
|
|
@ -394,7 +387,7 @@ perldoc.dependencies = {"perl-doc"}
|
|||
|
||||
|
||||
def perltidy(path):
|
||||
stdout, stderr, returncode = _do_command(["perltidy", "-st", path])
|
||||
stdout, *rest = _do_command(["perltidy", "-st", path])
|
||||
return Status.info, _syntax_highlight_code(stdout, path)
|
||||
perltidy.dependencies = {"perltidy"}
|
||||
|
||||
|
|
@ -405,7 +398,7 @@ perl6_syntax.dependencies = {"perl6"}
|
|||
|
||||
|
||||
def _jlint_tool(tool_type, path):
|
||||
stdout, stderr, returncode = _do_command([tool_type, path])
|
||||
stdout, *rest = _do_command([tool_type, path])
|
||||
status = (Status.success
|
||||
if b"Verification completed: 0 reported messages." in stdout
|
||||
else Status.failure)
|
||||
|
|
@ -435,7 +428,7 @@ objdump_headers.dependencies = {"binutils"}
|
|||
|
||||
|
||||
def objdump_disassemble(path):
|
||||
stdout, stderr, returncode = _do_command(
|
||||
stdout, *rest = _do_command(
|
||||
["objdump", "--disassemble", "--reloc", "--dynamic-reloc", path])
|
||||
import pygments.lexers.asm
|
||||
lexer = pygments.lexers.asm.ObjdumpLexer()
|
||||
|
|
@ -449,7 +442,7 @@ readelf.dependencies = {"binutils"}
|
|||
|
||||
|
||||
def mp3info(path):
|
||||
stdout, stderr, returncode = _do_command(["mp3info", "-x", path])
|
||||
stdout, *rest = _do_command(["mp3info", "-x", path])
|
||||
source_widget = fill3.Text(stdout)
|
||||
return Status.info, source_widget
|
||||
mp3info.dependencies = ["mp3info"]
|
||||
|
|
@ -501,7 +494,7 @@ html_syntax.dependencies = {"tidy"}
|
|||
|
||||
|
||||
def tidy(path):
|
||||
stdout, stderr, returncode = _do_command(["tidy", path])
|
||||
stdout, *rest = _do_command(["tidy", path])
|
||||
return Status.info, fill3.Text(stdout)
|
||||
tidy.dependencies = {"tidy"}
|
||||
|
||||
|
|
|
|||
155
vigil
155
vigil
|
|
@ -41,8 +41,8 @@ Keys:
|
|||
|
||||
import asyncio
|
||||
import collections
|
||||
import contextlib
|
||||
import functools
|
||||
import gc
|
||||
import gzip
|
||||
import importlib
|
||||
import multiprocessing
|
||||
|
|
@ -52,14 +52,15 @@ import shutil
|
|||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
|
||||
import psutil
|
||||
import pyinotify
|
||||
|
||||
import fill3
|
||||
import sandbox_fs
|
||||
import terminal
|
||||
import termstr
|
||||
import tools
|
||||
|
|
@ -134,13 +135,8 @@ class Result:
|
|||
path + "-" + tool.__name__)
|
||||
self.scroll_position = (0, 0)
|
||||
self.is_completed = False
|
||||
self.reset()
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
os.remove(self.pickle_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
self.is_placeholder = True
|
||||
self.status = tools.Status.empty
|
||||
|
||||
@property
|
||||
@lru_cache_with_eviction(maxsize=50)
|
||||
|
|
@ -160,22 +156,23 @@ class Result:
|
|||
dump_pickle_safe(value, self.pickle_path, open=self._open_func)
|
||||
Result.result.fget.evict(self)
|
||||
|
||||
def set_status(self, status, appearance_changed_event):
|
||||
def set_status(self, status):
|
||||
self.status = status
|
||||
appearance_changed_event.set()
|
||||
self.entry.appearance_cache = None
|
||||
|
||||
def run(self, log, appearance_changed_event, worker):
|
||||
self.is_placeholder = False
|
||||
tool_name = tools._tool_name_colored(self.tool, self.path)
|
||||
path_colored = tools._path_colored(self.path)
|
||||
log.log_message(["Running ", tool_name, " on ", path_colored, "."])
|
||||
self.set_status(tools.Status.running, appearance_changed_event)
|
||||
log.log_message(["Running ", tool_name, " on ", path_colored, "..."])
|
||||
self.set_status(tools.Status.running)
|
||||
appearance_changed_event.set()
|
||||
start_time = time.time()
|
||||
new_status = worker.run_tool(self.path, self.tool)
|
||||
Result.result.fget.evict(self)
|
||||
end_time = time.time()
|
||||
self.set_status(new_status, appearance_changed_event)
|
||||
self.set_status(new_status)
|
||||
appearance_changed_event.set()
|
||||
self.is_completed = True
|
||||
log.log_message(
|
||||
["Finished running ", tool_name, " on ", path_colored, ". ",
|
||||
|
|
@ -184,13 +181,16 @@ class Result:
|
|||
|
||||
def reset(self):
|
||||
self.is_placeholder = True
|
||||
self.status = tools.Status.empty
|
||||
self.set_status(tools.Status.empty)
|
||||
|
||||
def appearance_min(self):
|
||||
return [status_to_str(self.status,
|
||||
self.entry.summary.is_status_simple)]
|
||||
|
||||
|
||||
import worker # Avoid a circular import. worker.py needs the Result class.
|
||||
|
||||
|
||||
def reverse_style(style):
|
||||
return termstr.CharStyle(style.bg_color, style.fg_color, style.is_bold,
|
||||
style.is_underlined)
|
||||
|
|
@ -276,6 +276,7 @@ class Summary:
|
|||
self.is_directory_sort = True
|
||||
self._max_width = None
|
||||
self._max_path_length = None
|
||||
self._all_results = set()
|
||||
self.sync_with_filesystem()
|
||||
|
||||
@property
|
||||
|
|
@ -309,6 +310,7 @@ class Summary:
|
|||
new_cursor_position = (0, 0)
|
||||
row_index = 0
|
||||
result_total, completed_total = 0, 0
|
||||
all_results = set()
|
||||
for path in paths:
|
||||
full_path = os.path.join(self._root_path, path)
|
||||
try:
|
||||
|
|
@ -326,6 +328,7 @@ class Summary:
|
|||
else:
|
||||
result = Result(path, tool)
|
||||
jobs_added = True
|
||||
all_results.add(result)
|
||||
if result.is_completed:
|
||||
completed_total += 1
|
||||
new_cache[cache_key] = result
|
||||
|
|
@ -335,16 +338,17 @@ class Summary:
|
|||
result_total += len(row)
|
||||
max_width = max(len(row) for row in new_column)
|
||||
max_path_length = max(len(path) for path in paths) - len("./")
|
||||
deleted_results = self._all_results - all_results
|
||||
self._column, self._cache, self._cursor_position, self.result_total, \
|
||||
self.completed_total, self._max_width, self._max_path_length, \
|
||||
self.closest_placeholder_generator = (
|
||||
self.closest_placeholder_generator, self._all_results = (
|
||||
new_column, new_cache, new_cursor_position, result_total,
|
||||
completed_total, max_width, max_path_length, None)
|
||||
completed_total, max_width, max_path_length, None, all_results)
|
||||
if jobs_added:
|
||||
self._jobs_added_event.set()
|
||||
# Delete the stale results from the disk now, to avoid accidentally
|
||||
# deleting a future result with the same filename. See Result.__del__.
|
||||
gc.collect()
|
||||
for result in deleted_results:
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
os.remove(result.pickle_path)
|
||||
|
||||
def placeholder_spiral(self):
|
||||
x, y = self.cursor_position()
|
||||
|
|
@ -440,10 +444,8 @@ class Summary:
|
|||
yield result, (index_x, row_index)
|
||||
|
||||
def move_to_next_issue(self):
|
||||
try:
|
||||
with contextlib.suppress(StopIteration):
|
||||
issue, self._cursor_position = self._issue_generator().send(None)
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
def move_to_next_issue_of_tool(self):
|
||||
current_tool = self.get_selection().tool
|
||||
|
|
@ -707,7 +709,7 @@ class Screen:
|
|||
|
||||
def toggle_watch_filesystem(self):
|
||||
self._is_watching_filesystem = not self._is_watching_filesystem
|
||||
self._log.log_command("Watching the filesystem for changes."
|
||||
self._log.log_command("Watching the filesystem for changes..."
|
||||
if self._is_watching_filesystem else
|
||||
"Stopped watching the filesystem.")
|
||||
if self._is_watching_filesystem:
|
||||
|
|
@ -720,7 +722,7 @@ class Screen:
|
|||
def toggle_pause(self):
|
||||
self._is_paused = not self._is_paused
|
||||
self._log.log_command("Paused work." if self._is_paused else
|
||||
"Continuing work.")
|
||||
"Continuing work...")
|
||||
if self._is_paused:
|
||||
for runner in self.runners:
|
||||
runner.pause()
|
||||
|
|
@ -849,72 +851,29 @@ def regulate_temperature(log):
|
|||
log.log_message("The computer has cooled down. Continuing...")
|
||||
|
||||
|
||||
def make_process_nicest(pid):
|
||||
process = psutil.Process(pid)
|
||||
process.nice(19)
|
||||
process.ionice(psutil.IOPRIO_CLASS_IDLE)
|
||||
|
||||
|
||||
class _Result(Result):
|
||||
|
||||
def __del__(self):
|
||||
pass
|
||||
|
||||
|
||||
def work_loop(parent_connection):
|
||||
while True:
|
||||
tool, path = parent_connection.recv()
|
||||
result = _Result(path, tool)
|
||||
status, result.result = tools.run_tool_no_error(path, tool)
|
||||
parent_connection.send(status)
|
||||
|
||||
|
||||
class Worker:
|
||||
|
||||
def __init__(self):
|
||||
self.child_connection, parent_connection = multiprocessing.Pipe()
|
||||
self.process = multiprocessing.Process(
|
||||
target=work_loop, args=(parent_connection,), daemon=True)
|
||||
make_process_nicest(self.process.pid)
|
||||
self.process.start()
|
||||
|
||||
def run_tool(self, path, tool):
|
||||
self.child_connection.send([tool, path])
|
||||
return self.child_connection.recv()
|
||||
|
||||
def pause(self):
|
||||
os.kill(self.process.pid, signal.SIGSTOP)
|
||||
|
||||
def continue_(self):
|
||||
os.kill(self.process.pid, signal.SIGCONT)
|
||||
|
||||
def stop(self):
|
||||
os.kill(self.process.pid, signal.SIGKILL)
|
||||
|
||||
|
||||
class Runner:
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, sandbox, is_being_tested):
|
||||
self.result = None
|
||||
self.is_running = True
|
||||
self.worker = Worker()
|
||||
self.worker = worker.Worker(sandbox)
|
||||
self.is_being_tested = is_being_tested
|
||||
|
||||
def job_runner(self, summary, log, jobs_added_event,
|
||||
appearance_changed_event):
|
||||
while True:
|
||||
jobs_added_event.wait()
|
||||
while self.is_running:
|
||||
while True:
|
||||
# regulate_temperature(log) # My fan is broken
|
||||
try:
|
||||
self.result = summary.get_closest_placeholder()
|
||||
except StopIteration:
|
||||
log.log_message("All results are up to date.")
|
||||
break
|
||||
try:
|
||||
with contextlib.suppress(ValueError): # Process was terminated
|
||||
self.result.run(log, appearance_changed_event, self.worker)
|
||||
summary.completed_total += 1
|
||||
except EOFError: # Occurs if the process is terminated
|
||||
pass
|
||||
if self.is_being_tested:
|
||||
os.kill(os.getpid(), signal.SIGINT)
|
||||
jobs_added_event.clear()
|
||||
|
||||
def pause(self):
|
||||
|
|
@ -936,9 +895,8 @@ def update_screen(main_widget, appearance_changed_event):
|
|||
fill3.patch_screen(main_widget)
|
||||
|
||||
|
||||
def main(root_path):
|
||||
def main(root_path, is_being_tested=False):
|
||||
global _UPDATE_THREAD_STOPPED
|
||||
os.chdir(root_path) # FIX: Don't change directory if possible.
|
||||
loop = asyncio.get_event_loop()
|
||||
jobs_added_event = threading.Event()
|
||||
appearance_changed_event = threading.Event()
|
||||
|
|
@ -964,8 +922,20 @@ def main(root_path):
|
|||
summary.sync_with_filesystem()
|
||||
log.log_message("Program started.")
|
||||
jobs_added_event.set()
|
||||
runners = [Runner() for index in range(multiprocessing.cpu_count() * 2)]
|
||||
runners = []
|
||||
sandbox_temp_dir = tempfile.mkdtemp()
|
||||
sandbox = sandbox_fs.SandboxFs(sandbox_temp_dir)
|
||||
|
||||
def start_runners():
|
||||
log.log_message("Making filesystem sandbox...")
|
||||
sandbox.mount()
|
||||
log.log_message("Sandbox made.")
|
||||
log.log_message("Starting workers...")
|
||||
worker_total = multiprocessing.cpu_count() * 2
|
||||
for index in range(worker_total):
|
||||
runners.append(Runner(sandbox, is_being_tested))
|
||||
screen.runners = runners
|
||||
log.log_message("Workers started. (%s)" % worker_total)
|
||||
for runner in runners:
|
||||
args = (summary, log, jobs_added_event, appearance_changed_event)
|
||||
threading.Thread(target=runner.job_runner, args=args,
|
||||
|
|
@ -973,6 +943,8 @@ def main(root_path):
|
|||
if screen._is_paused:
|
||||
for runner in runners:
|
||||
runner.pause()
|
||||
try:
|
||||
threading.Thread(target=start_runners, daemon=True).start()
|
||||
|
||||
def on_window_resize(n, frame):
|
||||
appearance_changed_event.set()
|
||||
|
|
@ -982,20 +954,19 @@ def main(root_path):
|
|||
daemon=True)
|
||||
with terminal.hidden_cursor():
|
||||
with terminal.urwid_screen() as urwid_screen:
|
||||
signal.signal(signal.SIGWINCH, on_window_resize)
|
||||
update_display_thread.start()
|
||||
loop.add_reader(sys.stdin, screen.on_keypressed, urwid_screen)
|
||||
update_display_thread.start()
|
||||
signal.signal(signal.SIGWINCH, on_window_resize)
|
||||
try:
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt:
|
||||
log.log_message("Program stopped.")
|
||||
_UPDATE_THREAD_STOPPED = True
|
||||
appearance_changed_event.set()
|
||||
update_display_thread.join()
|
||||
log.log_message("Program stopped.")
|
||||
for runner in runners:
|
||||
runner.worker.stop()
|
||||
runner.is_running = False
|
||||
for runner in runners:
|
||||
runner.pause()
|
||||
if runner.result is not None:
|
||||
runner.result.reset()
|
||||
# Cannot pickle generators, locks, sockets or events.
|
||||
(summary.closest_placeholder_generator, summary._lock,
|
||||
|
|
@ -1004,6 +975,19 @@ def main(root_path):
|
|||
log._appearance_changed_event) = [None] * 8
|
||||
open_compressed = functools.partial(gzip.open, compresslevel=1)
|
||||
dump_pickle_safe(screen, pickle_path, open=open_compressed)
|
||||
finally:
|
||||
sandbox.umount()
|
||||
os.rmdir(sandbox_temp_dir)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def chdir(path):
|
||||
old_cwd = os.getcwd()
|
||||
os.chdir(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
os.chdir(old_cwd)
|
||||
|
||||
|
||||
def manage_cache(root_path):
|
||||
|
|
@ -1021,9 +1005,12 @@ def manage_cache(root_path):
|
|||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) == 2:
|
||||
subprocess.call(["sudo", "-p", "Vigil needs sudo to create the filesy"
|
||||
"stem sandbox... [sudo] password for %u: ", "true"])
|
||||
root_path = os.path.abspath(sys.argv[1])
|
||||
with terminal.console_title("vigil: " + os.path.basename(root_path)):
|
||||
manage_cache(root_path)
|
||||
with chdir(root_path): # FIX: Don't change directory if possible.
|
||||
main(root_path)
|
||||
else:
|
||||
usage = __doc__.replace("*", "")
|
||||
|
|
|
|||
|
|
@ -3,11 +3,12 @@
|
|||
# Copyright (C) 2015-2016 Andrew Hamilton. All rights reserved.
|
||||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import threading
|
||||
# import time
|
||||
import unittest
|
||||
|
||||
import fill3
|
||||
|
|
@ -52,7 +53,9 @@ def touch(path):
|
|||
|
||||
|
||||
def assert_widget_appearance(widget, golden_path, dimensions=_DIMENSIONS):
|
||||
golden.assertGolden(_widget_to_string(widget, dimensions), golden_path)
|
||||
golden_path_absolute = os.path.join(os.path.dirname(__file__), golden_path)
|
||||
golden.assertGolden(_widget_to_string(widget, dimensions),
|
||||
golden_path_absolute)
|
||||
|
||||
|
||||
class MockMainLoop:
|
||||
|
|
@ -61,7 +64,7 @@ class MockMainLoop:
|
|||
pass
|
||||
|
||||
|
||||
class MainTestCase(unittest.TestCase):
|
||||
class ScreenWidgetTestCase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.temp_dir = tempfile.mkdtemp()
|
||||
|
|
@ -220,5 +223,36 @@ class SummarySyncWithFilesystem(unittest.TestCase):
|
|||
# assert_widget_appearance(log, "golden-files/log-appearance")
|
||||
|
||||
|
||||
def _mount_total():
    """Count the mounts currently listed in /proc/mounts."""
    with open("/proc/mounts") as proc_mounts:
        return sum(1 for _ in proc_mounts)
|
||||
|
||||
|
||||
def _tmp_total():
    """Count the entries directly under /tmp."""
    return sum(1 for _ in os.listdir("/tmp"))
|
||||
|
||||
|
||||
class MainTestCase(unittest.TestCase):
    """End-to-end smoke test for vigil.main()."""

    def test_start_and_run_a_job_then_stop_with_no_leaks(self):
        # Record the mount and /tmp entry counts up front so we can assert
        # afterwards that main() cleaned up its sandbox mounts and temp
        # directories.
        temp_dir = tempfile.mkdtemp()
        try:
            mount_total = _mount_total()
            tmp_total = _tmp_total()
            foo_path = os.path.join(temp_dir, "foo")
            open(foo_path, "w").close()
            vigil.manage_cache(temp_dir)
            with vigil.chdir(temp_dir):
                with contextlib.redirect_stdout(io.StringIO()):
                    # is_being_tested makes the job runner send SIGINT to
                    # this process after its first job, so main() returns.
                    vigil.main(temp_dir, is_being_tested=True)
                # Relative paths, so these checks stay inside the chdir.
                self.assertTrue(os.path.exists(".vigil/.summary.pickle"))
                self.assertTrue(os.path.exists(".vigil/.creation-time"))
                self.assertTrue(os.path.exists(".vigil/foo-metadata"))
                # No leaked mounts or leftover temp directories.
                self.assertEqual(_mount_total(), mount_total)
                self.assertEqual(_tmp_total(), tmp_total)
        finally:
            shutil.rmtree(temp_dir)


if __name__ == "__main__":
    golden.main()
|
||||
|
|
|
|||
63
worker.py
Executable file
63
worker.py
Executable file
|
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (C) 2015-2016 Andrew Hamilton. All rights reserved.
|
||||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
|
||||
import psutil
|
||||
|
||||
import tools
|
||||
import vigil
|
||||
|
||||
|
||||
def make_process_nicest(pid):
    """Give pid the lowest scheduling priority for both CPU and disk I/O."""
    process = psutil.Process(pid)
    process.nice(19)  # 19 is the weakest CPU niceness on Linux.
    process.ionice(psutil.IOPRIO_CLASS_IDLE)  # Disk I/O only when idle.
|
||||
|
||||
|
||||
class Worker:
    """A child process that runs tools, optionally inside a SandboxFs.

    The child is this very file run as a script (see main() below);
    requests are written to its stdin and a status is read back from its
    stdout, one line each way per job.
    """

    def __init__(self, sandbox):
        self.sandbox = sandbox
        if sandbox is None:
            self.process = subprocess.Popen(
                [__file__], stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        else:
            # Bind-mount the result cache into the sandbox so results the
            # child writes land in the real cache directory.
            # NOTE(review): this bind mount is never explicitly unmounted
            # here — confirm sandbox teardown covers it.
            cache_path = os.path.join(os.getcwd(), vigil._CACHE_PATH)
            self.cache_mount = sandbox.mount_point + cache_path
            subprocess.check_call(["sudo", "mount", "--bind", cache_path,
                                   self.cache_mount])
            self.process = sandbox.Popen([__file__])
        # The first line the child prints is its pid.  When sandboxed, the
        # child sits behind sudo/chroot, so self.process.pid would be the
        # wrong process to signal.
        self.child_pid = int(self.process.stdout.readline())
        make_process_nicest(self.child_pid)

    def run_tool(self, path, tool):
        """Ask the child to run tool on path; return the resulting status."""
        request = "%s\n%s\n" % (tool.__qualname__, path)
        self.process.stdin.write(request.encode("utf-8"))
        self.process.stdin.flush()
        return int(self.process.stdout.readline())

    def pause(self):
        os.kill(self.child_pid, signal.SIGSTOP)

    def continue_(self):
        os.kill(self.child_pid, signal.SIGCONT)

    def stop(self):
        """Kill the child worker process.

        Restores the stop() method the previous in-process Worker had and
        which shutdown code calls on workers.  SIGKILL is used because the
        child may currently be stopped by SIGSTOP and SIGKILL cannot be
        blocked or deferred.
        """
        os.kill(self.child_pid, signal.SIGKILL)
|
||||
|
||||
|
||||
def main():
    """Child-process loop: serve (tool name, path) requests from stdin.

    Prints this process's pid first so the parent can signal it directly,
    then answers each two-line request with the tool's status on stdout.
    The loop ends when the parent closes stdin (input() then raises
    EOFError, terminating the process).
    """
    print(os.getpid(), flush=True)
    while True:
        tool_name, path = input(), input()
        tool = getattr(tools, tool_name)
        result = vigil.Result(path, tool)
        # Assigning result.result persists the tool's output to the result
        # cache (see vigil.Result); only the status goes back up the pipe.
        status, result.result = tools.run_tool_no_error(path, tool)
        print(status, flush=True)


if __name__ == "__main__":
    main()
|
||||
52
worker_test.py
Executable file
52
worker_test.py
Executable file
|
|
@ -0,0 +1,52 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (C) 2016 Andrew Hamilton. All rights reserved.
|
||||
# Licensed under the Artistic License 2.0.
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import sandbox_fs
|
||||
import tools
|
||||
import vigil
|
||||
import worker
|
||||
|
||||
|
||||
class WorkerTestCase(unittest.TestCase):
    """Run a real tool through worker.Worker, with and without a sandbox."""

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.original_working_dir = os.getcwd()
        os.chdir(self.temp_dir)
        os.mkdir(vigil._CACHE_PATH)
        # An empty file for the metadata tool to inspect.
        with open("foo", "w"):
            pass

    def tearDown(self):
        # Leave the directory before deleting it: removing the current
        # working directory fails on some platforms (the original removed
        # first and chdir'd back afterwards).
        os.chdir(self.original_working_dir)
        shutil.rmtree(self.temp_dir)

    def _test_worker(self, sandbox):
        """Run the metadata tool on "foo" and check its cached result."""
        status = worker.Worker(sandbox).run_tool("foo", tools.metadata)
        self.assertEqual(status, tools.Status.info)
        result_path = os.path.join(vigil._CACHE_PATH, "foo-metadata")
        self.assertTrue(os.path.exists(result_path))

    def test_run_job_without_sandbox(self):
        self._test_worker(None)

    def test_run_job_with_sandbox(self):
        temp_dir = tempfile.mkdtemp()
        sandbox = sandbox_fs.SandboxFs(temp_dir)
        sandbox.mount()
        try:
            self._test_worker(sandbox)
        finally:
            sandbox.umount()
            os.rmdir(temp_dir)


if __name__ == "__main__":
    unittest.main()
|
||||
Loading…
Add table
Add a link
Reference in a new issue