# structio/tests/processes.py

import structio
import subprocess
import shlex
# In the interest of compatibility, structio.parallel
# tries to be compatible with the subprocess module. You
# can even pass the constants such as DEVNULL, PIPE, etc.
# to it and it'll work
# TODO: Implement a higher-level multiprocessing module that does not expose the
# internal Process object directly so that there's no chance of leaving dangling
# processes running in the background (unless explicitly desired, of course)
async def main(data: str):
    """Exercise the three ways of running a child process with structio.parallel.

    Spawns ``python -c 'print(input())'`` children, feeds them *data* on
    stdin and checks that it comes back unchanged on stdout, then drives a
    process pool to completion.

    :param data: the text to round-trip through the child processes
    """
    cmd = shlex.split("python -c 'print(input())'")
    to_send = data.encode(errors="ignore")
    # This will print data to stdout
    await structio.parallel.run(cmd, input=to_send)
    # Other option
    out = await structio.parallel.check_output(cmd, input=to_send)
    # Thanks to Linux, Mac OS X and Windows all using different line
    # endings, we have to normalize the trailing newline. rstrip() treats
    # its argument as a *set* of characters, so this single call strips
    # any combination of "\r" and "\n" from the end.
    out = out.decode().rstrip("\r\n")
    assert out == data
    # Other, other option :D
    process = structio.parallel.Process(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE
    )
    # Note that the process is spawned only after we run start()!
    await process.start()
    out, _ = await process.communicate(to_send)
    out = out.decode().rstrip("\r\n")
    assert out == data
    # We can also use process pools to spawn multiple processes
    # at the same time while limiting resource usage
    pool = structio.AsyncProcessPool(4)  # Max. 4 processes at a time
    for i in range(0, (4 * 5) + 1):  # We iterate 21 instead of 20 times: this is important to the test!
        # This will stop every 4 iterations
        await pool.submit(shlex.split(f"""python -c 'print({i}); __import__("time").sleep(1)'"""))
    # Since we're exiting the program right after the last iteration, we need
    # to wait for the pool to complete.
    await pool.close()
    await pool.wait()
# Guard the entry point so importing this module doesn't spawn subprocesses.
if __name__ == "__main__":
    structio.run(main, "owo")