|
| 1 | +""" |
| 2 | +HTTPTransformer: a self-sufficient, chainable distributed transformer. |
| 3 | +""" |
| 4 | + |
| 5 | +from collections.abc import Iterable |
| 6 | +from collections.abc import Iterator |
| 7 | +from concurrent.futures import FIRST_COMPLETED |
| 8 | +from concurrent.futures import ThreadPoolExecutor |
| 9 | +from concurrent.futures import wait |
| 10 | +import hashlib |
| 11 | +import itertools |
| 12 | +import pickle |
| 13 | + |
| 14 | +import requests |
| 15 | + |
| 16 | +from laygo import PipelineContext |
| 17 | +from laygo import Transformer |
| 18 | + |
| 19 | + |
class HTTPTransformer(Transformer):
  """A self-sufficient, chainable transformer that manages its own
  distributed execution and worker endpoint definition.

  Client side, ``__call__`` fans chunks of the input out to an HTTP
  worker endpoint; server side, ``get_route`` exposes the same logic as
  a view function that can be registered with a Flask-style app.
  """

  def __init__(self, base_url: str, endpoint: str | None = None, max_workers: int = 8):
    """
    Args:
      base_url: Root URL of the worker service (trailing slash stripped).
      endpoint: Optional explicit worker path. When omitted, a stable
        path is auto-generated from a hash of the transformer logic.
      max_workers: Maximum number of concurrent in-flight HTTP requests.
    """
    super().__init__()
    self.base_url = base_url.rstrip("/")
    self.endpoint = endpoint
    self.max_workers = max_workers
    self.session = requests.Session()
    # Resolved lazily by _finalize_config(); None until then.
    # BUG FIX: this attribute was previously only annotated, never
    # assigned, so _finalize_config() raised AttributeError on first use.
    self._worker_url: str | None = None

  def _finalize_config(self) -> None:
    """Determine the final worker URL once, generating an endpoint if needed."""
    if self._worker_url is not None:
      return

    if self.endpoint:
      path = self.endpoint
    else:
      # Serialize the function chain and hash it so that identical logic
      # always maps to the same auto-generated endpoint.
      # NOTE(review): pickle is used only to fingerprint our own logic;
      # nothing untrusted is ever unpickled here.
      serialized_logic = pickle.dumps(self.transformer)
      hash_id = hashlib.sha1(serialized_logic).hexdigest()[:16]
      path = f"/autogen/{hash_id}"

    self.endpoint = path.lstrip("/")
    self._worker_url = f"{self.base_url}/{self.endpoint}"

  def __call__(self, data: Iterable, context=None) -> Iterator:
    """CLIENT-SIDE: stream ``data`` through the remote worker in chunks.

    Keeps at most ``max_workers`` requests in flight and yields results
    as each chunk completes — i.e. in completion order, not input order.
    """
    self._finalize_config()

    def process_chunk(chunk: list) -> list:
      """Target for a thread: sends one chunk to the worker."""
      try:
        response = self.session.post(self._worker_url, json=chunk, timeout=300)
        response.raise_for_status()
        return response.json()
      except requests.RequestException as e:
        # Best-effort semantics: a failed chunk is reported and dropped
        # rather than aborting the whole stream.
        print(f"Error calling worker {self._worker_url}: {e}")
        return []

    with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
      chunk_iterator = self._chunk_generator(data)
      # Prime the pool with one chunk per worker slot.
      futures = {
        executor.submit(process_chunk, chunk)
        for chunk in itertools.islice(chunk_iterator, self.max_workers)
      }
      while futures:
        # wait() returns (done, not_done); rebind futures to the
        # still-pending set so the loop terminates when all complete.
        done, futures = wait(futures, return_when=FIRST_COMPLETED)
        for future in done:
          yield from future.result()
          # Backfill one new chunk per completed one so the pool stays
          # saturated until the input iterator is exhausted.
          try:
            futures.add(executor.submit(process_chunk, next(chunk_iterator)))
          except StopIteration:
            continue

  def get_route(self):
    """Return the route definition for the worker side.

    Used to register the worker in a Flask app or similar.

    Returns:
      A tuple ``(endpoint_path, view_func)`` where ``endpoint_path``
      starts with ``/`` and ``view_func(chunk, context)`` applies this
      transformer's logic to a single chunk.
    """
    self._finalize_config()

    def worker_view_func(chunk: list, context: PipelineContext):
      """The actual Flask view function for this transformer's logic."""
      return self.transformer(chunk, context)

    return (f"/{self.endpoint}", worker_view_func)
0 commit comments