# runner.py
from torch.multiprocessing import Process  # NOTE(review): appears unused here — confirm before removing
from ilock import ILock
import logging
import tempfile
import os
import subprocess
import sys
import pickle

from restai.models.models import ImageModel


def generate(manager, worker, imageModel, options: dict = None, venv_python: str = None):
    """Run an image-generation worker in an isolated subprocess and return its image.

    The prompt/options (and optional input image) are pickled to a temporary
    file, a worker subprocess is launched under an inter-process lock so only
    one image job runs at a time, and the worker's result is unpickled from
    the same file.

    :param manager: multiprocessing manager providing ``.dict()`` shared state.
    :param worker: worker callable; only its ``__module__`` is passed to the
        subprocess entry point, which imports and runs it.
    :param imageModel: ImageModel carrying ``prompt`` and an optional ``image``.
    :param options: extra options forwarded to the worker (may be ``None``).
    :param venv_python: python executable for the subprocess; when ``None`` it
        is resolved from the worker module's ``get_python_executable()`` or
        falls back to ``sys.executable``.
    :returns: the ``image`` value produced by the worker.
    :raises subprocess.CalledProcessError: if the worker exits non-zero.
    :raises Exception: if the worker produced no image.
    """
    sharedmem = manager.dict()
    if hasattr(imageModel, 'image') and imageModel.image:
        sharedmem["input_image"] = imageModel.image

    sharedmem["prompt"] = imageModel.prompt
    sharedmem["options"] = options

    # Save sharedmem to a temp file for IPC. mkstemp + fdopen closes the
    # handle deterministically; the original NamedTemporaryFile(delete=False)
    # left its first file object open (leaked descriptor) and re-opened the
    # path by name, which is also unreliable on Windows.
    fd, sharedmem_path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, "wb") as f:
            pickle.dump(dict(sharedmem), f)

        # Serialize image jobs across processes; blocks up to 180s for the lock.
        with ILock('image', timeout=180):
            worker_module = worker.__module__
            if venv_python is None:
                import importlib
                module = importlib.import_module(worker_module)
                if hasattr(module, 'get_python_executable'):
                    venv_python = module.get_python_executable()
                else:
                    venv_python = sys.executable  # fallback to current python

            # Set PYTHONPATH so the worker subprocess can import the restai package.
            env = os.environ.copy()
            # Project root is 3 levels up from this file (restai/restai/image/runner.py)
            project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
            env["PYTHONPATH"] = project_root

            # Set CUDA_VISIBLE_DEVICES from settings if configured.
            from restai import config
            if config.GPU_WORKER_DEVICES:
                env["CUDA_VISIBLE_DEVICES"] = config.GPU_WORKER_DEVICES

            # List-form argv (shell=False): no shell injection via the prompt.
            result = subprocess.run(
                [
                    venv_python,
                    os.path.join(os.path.dirname(__file__), "worker_entry.py"),
                    worker_module,
                    sharedmem.get("prompt", ""),
                    sharedmem_path,
                ],
                capture_output=True,
                text=True,
                env=env,
            )
            if result.returncode != 0:
                # Lazy %-style args avoid building the message unless emitted.
                logging.error(
                    "Image worker %s failed (exit %s):\n%s",
                    worker_module, result.returncode, result.stderr,
                )
                raise subprocess.CalledProcessError(
                    result.returncode, result.args, result.stdout, result.stderr
                )

        # Load the worker's result back from the shared temp file.
        with open(sharedmem_path, "rb") as f:
            sharedmem_result = pickle.load(f)

        if "image" not in sharedmem_result or not sharedmem_result["image"]:
            raise Exception("An error occurred while processing the image. Please try again.")

        return sharedmem_result["image"]
    finally:
        # Best-effort cleanup; only swallow filesystem errors, not
        # KeyboardInterrupt/SystemExit as the original bare except did.
        try:
            os.unlink(sharedmem_path)
        except OSError:
            pass