use Queue.get() with timeout, not Process.join() for timeout

this was because the threads spawned by multiprocessing.Queue seemed to be
making Process.join() believe the subprocess had not exited.
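
As a minimal, self-contained sketch of that idiom (not BitBot's code; the worker function, the 10-second deadline, and the TimeoutError are illustrative assumptions): block on the result queue with a timeout, rather than on Process.join(seconds).

import multiprocessing, queue

def _work(q):
    # hypothetical worker: put a single result on the queue and return
    q.put("done")

if __name__ == "__main__":
    q = multiprocessing.Queue()
    p = multiprocessing.Process(target=_work, args=(q,))
    p.start()

    try:
        # wait for the result itself; the deadline applies to the queue read,
        # not to join(), which this commit found unreliable here
        result = q.get(block=True, timeout=10)
    except queue.Empty:
        p.kill()  # Process.kill() requires Python 3.7+
        raise TimeoutError("subprocess missed its deadline")

    print(result)
    p.join()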
jesopo 2019-09-17 13:39:23 +01:00
parent f2f09bf0ca
commit d454f9b732


@@ -1,5 +1,5 @@
-import contextlib, datetime, decimal, enum, io, ipaddress, re, signal
-import threading, typing
+import contextlib, datetime, decimal, enum, io, ipaddress, multiprocessing
+import queue, re, signal, threading, typing
 from src.utils import cli, consts, irc, http, parse, security
 
 class Direction(enum.Enum):
@@ -390,18 +390,18 @@ def deadline_process(func: typing.Callable[[], None], seconds: int=10):
         try:
             q.put([True, func()])
         except Exception as e:
-            print(e)
             q.put([False, e])
+        q.close()
 
     p = multiprocessing.Process(target=_wrap, args=(func, q))
     p.start()
-    p.join(seconds)
-    if p.is_alive():
-        p.terminate()
+    try:
+        success, out = q.get(block=True, timeout=seconds)
+    except queue.Empty:
+        p.kill()
         _raise_deadline()
-    success, out = q.get(block=False)
     if success:
         return out
     else:
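
Pieced together from the hunk above, the helper after this change reads roughly as follows. deadline_process's signature, _wrap, and the call to _raise_deadline() come from the diff itself; the body given here for _raise_deadline() is a stand-in assumption, since the real implementation is not shown in this hunk.

import multiprocessing, queue, typing

def _raise_deadline():
    # stand-in: the real _raise_deadline() is not shown in this hunk
    raise TimeoutError("deadline exceeded")

def deadline_process(func: typing.Callable[[], None], seconds: int=10):
    q = multiprocessing.Queue()
    def _wrap(func, q):
        try:
            # run the user function and report [success, value]
            q.put([True, func()])
        except Exception as e:
            q.put([False, e])
        q.close()

    p = multiprocessing.Process(target=_wrap, args=(func, q))
    p.start()

    try:
        # wait for the result itself instead of waiting for the process to exit
        success, out = q.get(block=True, timeout=seconds)
    except queue.Empty:
        # no result within the deadline: assume func is stuck and kill the child
        p.kill()
        _raise_deadline()

    if success:
        return out
    else:
        raise out

Used as, say, deadline_process(lambda: slow_parse(text), seconds=5), it returns the function's value, re-raises any exception the function threw, and raises the deadline error if no result arrives within five seconds.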