# structio/tests/processes.py
import structio
import subprocess
import shlex
# In the interest of compatibility, structio.parallel
# tries to be compatible with the subprocess module. You
# can even pass the constants such as DEVNULL, PIPE, etc.
# to it and it'll work
async def main_simple(data: str):
    """Demonstrate the three ways to run a subprocess via structio.parallel.

    Each child runs ``python -c 'print(input())'``, receives *data* on its
    stdin and is expected to echo it back on stdout unchanged.

    :param data: the text to round-trip through the child processes
    """
    print("[main] Starting process test")
    cmd = shlex.split("python -c 'print(input())'")
    to_send = data.encode(errors="ignore")
    # This will print data to stdout
    await structio.parallel.run(cmd, input=to_send)
    # Other option
    out = await structio.parallel.check_output(cmd, input=to_send)
    # str.rstrip() treats its argument as a *set* of characters, so this
    # single call strips any trailing mix of "\r" and "\n" — covering the
    # Linux, macOS and Windows line endings in one go (the original chained
    # three redundant rstrip() calls)
    out = out.decode().rstrip("\r\n")
    assert out == data
    # Other, other option :D
    process = structio.parallel.Process(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE
    )
    # Note that the process is spawned only after we run start()!
    process.start()
    out, _ = await process.communicate(to_send)
    out = out.decode().rstrip("\r\n")
    assert out == data
    # Note how we never wait for the processes to complete: that is not by chance! The loop
    # will automatically wait for any worker processes spawned via structio.parallel to complete
    # unless they're explicitly detached from it by calling detach()
    structio.parallel.Process(
        shlex.split("""python -c '__import__("time").sleep(1); print(1)'""")
    ).start()
    # Here, we spawn a long-running process and detach it from the event loop: structio will not
    # wait for it to complete automatically
    proc = structio.parallel.Process(
        shlex.split("""python -c '__import__("time").sleep(10);'""")
    )
    proc.start()
    proc.detach()
    # If we change our mind, we can call proc.attach() to reattach the process to the event loop
    # and have it be waited for. This can be done at any time
    print("[main] Process test complete")
async def main_limiter():
    """Show how a ProcessLimiter caps how many processes run concurrently."""
    print("[main] Starting process limiter test")
    # A limiter lets us fan out many processes while constraining
    # resource usage
    pool = structio.ProcessLimiter(2)  # Max. 2 processes at a time
    # Iterate one more time than a multiple of the limit: this is on purpose
    total = pool.max_workers * 4 + 1
    for n in range(total):
        # submit() blocks whenever the pool is full, i.e. every 2 iterations
        command = shlex.split(
            f"""python -c 'print({n}); __import__("time").sleep(1)'"""
        )
        await pool.submit(command)
        print(f"Submitted {n + 1} processes")
def foo():
    """Target callable that runs inside the remote worker process."""
    print("Called in the remote process!")
async def main_python():
    """Spawn a Python worker process that executes a given target function."""
    print("[main] Starting python process test")
    # Spawns a new Python process and prepares it to
    # run the given target function
    p = structio.parallel.PythonProcess(target=foo)
    p.start()
    await p.wait_started()
    await p.wait()
    # Fixed typo in the completion message ("Pyhon" -> "Python")
    print("[main] Python process test complete")
if __name__ == "__main__":
    # Pick the demo to run; the other entry points are kept commented out
    # so they can be switched on easily
    # structio.run(main_simple, "owo")
    structio.run(main_limiter)
    # structio.run(main_python)