|
22 | 22 | "from fastcore.meta import *\n",
|
23 | 23 | "from fastcore.xtras import *\n",
|
24 | 24 | "from functools import wraps\n",
|
25 |
| - "\n", |
| 25 | + "import multiprocessing.pool\n", |
26 | 26 | "import concurrent.futures,time\n",
|
27 | 27 | "from multiprocessing import Process,Queue,Manager,set_start_method,get_all_start_methods,get_context\n",
|
28 | 28 | "from threading import Thread\n",
|
|
207 | 207 | "text/markdown": [
|
208 | 208 | "---\n",
|
209 | 209 | "\n",
|
210 |
| - "### ThreadPoolExecutor\n", |
| 210 | + "[source](https://github.com/fastai/fastcore/blob/master/fastcore/parallel.py#L58){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", |
| 211 | + "\n", |
| 212 | + "#### ThreadPoolExecutor\n", |
211 | 213 | "\n",
|
212 |
| - "> ThreadPoolExecutor (max_workers=8, on_exc=<built-infunctionprint>,\n", |
| 214 | + "> ThreadPoolExecutor (max_workers=6, on_exc=<built-in function print>,\n", |
213 | 215 | "> pause=0, **kwargs)\n",
|
214 | 216 | "\n",
|
215 | 217 | "Same as Python's ThreadPoolExecutor, except can pass `max_workers==0` for serial execution"
|
216 | 218 | ],
|
217 | 219 | "text/plain": [
|
218 |
| - "<nbdev.showdoc.BasicMarkdownRenderer>" |
| 220 | + "---\n", |
| 221 | + "\n", |
| 222 | + "[source](https://github.com/fastai/fastcore/blob/master/fastcore/parallel.py#L58){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", |
| 223 | + "\n", |
| 224 | + "#### ThreadPoolExecutor\n", |
| 225 | + "\n", |
| 226 | + "> ThreadPoolExecutor (max_workers=6, on_exc=<built-in function print>,\n", |
| 227 | + "> pause=0, **kwargs)\n", |
| 228 | + "\n", |
| 229 | + "Same as Python's ThreadPoolExecutor, except can pass `max_workers==0` for serial execution" |
219 | 230 | ]
|
220 | 231 | },
|
221 | 232 | "execution_count": null,
|
|
267 | 278 | "text/markdown": [
|
268 | 279 | "---\n",
|
269 | 280 | "\n",
|
270 |
| - "### ProcessPoolExecutor\n", |
| 281 | + "[source](https://github.com/fastai/fastcore/blob/master/fastcore/parallel.py#L77){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", |
271 | 282 | "\n",
|
272 |
| - "> ProcessPoolExecutor (max_workers=8, on_exc=<built-infunctionprint>,\n", |
| 283 | + "#### ProcessPoolExecutor\n", |
| 284 | + "\n", |
| 285 | + "> ProcessPoolExecutor (max_workers=6, on_exc=<built-in function print>,\n", |
273 | 286 | "> pause=0, mp_context=None, initializer=None,\n",
|
274 | 287 | "> initargs=())\n",
|
275 | 288 | "\n",
|
276 | 289 | "Same as Python's ProcessPoolExecutor, except can pass `max_workers==0` for serial execution"
|
277 | 290 | ],
|
278 | 291 | "text/plain": [
|
279 |
| - "<nbdev.showdoc.BasicMarkdownRenderer>" |
| 292 | + "---\n", |
| 293 | + "\n", |
| 294 | + "[source](https://github.com/fastai/fastcore/blob/master/fastcore/parallel.py#L77){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", |
| 295 | + "\n", |
| 296 | + "#### ProcessPoolExecutor\n", |
| 297 | + "\n", |
| 298 | + "> ProcessPoolExecutor (max_workers=6, on_exc=<built-in function print>,\n", |
| 299 | + "> pause=0, mp_context=None, initializer=None,\n", |
| 300 | + "> initargs=())\n", |
| 301 | + "\n", |
| 302 | + "Same as Python's ProcessPoolExecutor, except can pass `max_workers==0` for serial execution" |
280 | 303 | ]
|
281 | 304 | },
|
282 | 305 | "execution_count": null,
|
|
288 | 311 | "show_doc(ProcessPoolExecutor, title_level=4)"
|
289 | 312 | ]
|
290 | 313 | },
|
| 314 | + { |
| 315 | + "cell_type": "code", |
| 316 | + "execution_count": null, |
| 317 | + "metadata": {}, |
| 318 | + "outputs": [], |
| 319 | + "source": [ |
| 320 | + "#|export\n", |
| 321 | + "class NoDaemonProcess(multiprocessing.Process):\n", |
| 322 | + " # See https://stackoverflow.com/questions/6974695/python-process-pool-non-daemonic\n", |
| 323 | + " @property\n", |
| 324 | + " def daemon(self):\n", |
| 325 | + " return False\n", |
| 326 | + " @daemon.setter\n", |
| 327 | + " def daemon(self, value):\n", |
| 328 | + " pass" |
| 329 | + ] |
| 330 | + }, |
| 331 | + { |
| 332 | + "cell_type": "code", |
| 333 | + "execution_count": null, |
| 334 | + "metadata": {}, |
| 335 | + "outputs": [], |
| 336 | + "source": [ |
| 337 | + "#|export\n", |
| 338 | + "@delegates()\n", |
| 339 | + "class ProcessPool(multiprocessing.pool.Pool):\n", |
| 340 | + " \"Same as Python's Pool, except can pass `max_workers==0` for serial execution\"\n", |
| 341 | + " def __init__(self, max_workers=defaults.cpus, on_exc=print, pause=0, daemonic=False, **kwargs):\n", |
| 342 | + " if max_workers is None: max_workers=defaults.cpus\n", |
| 343 | + " store_attr()\n", |
| 344 | + " self.not_parallel = max_workers==0\n", |
| 345 | + " if self.not_parallel: max_workers=1\n", |
| 346 | + " if not daemonic:\n", |
| 347 | + " class NoDaemonContext(type(kwargs.get('context', get_context()))):\n", |
| 348 | + " Process = NoDaemonProcess\n", |
| 349 | + " kwargs['context'] = NoDaemonContext()\n", |
| 350 | + " super().__init__(max_workers, **kwargs)\n", |
| 351 | + "\n", |
| 352 | + " def map(self, f, items, *args, timeout=None, chunksize=1, **kwargs):\n", |
| 353 | + " assert timeout is None, \"timeout is not supported by ProcessPool, use ProcessPoolExecutor instead\"\n", |
| 354 | + " if not parallelable('max_workers', self.max_workers, f): self.max_workers = 0\n", |
| 355 | + " self.not_parallel = self.max_workers==0\n", |
| 356 | + " if self.not_parallel: self.max_workers=1\n", |
| 357 | + "\n", |
| 358 | + " if self.not_parallel == False: self.lock = Manager().Lock()\n", |
| 359 | + " g = partial(f, *args, **kwargs)\n", |
| 360 | + " if self.not_parallel: return map(g, items)\n", |
| 361 | + " _g = partial(_call, self.lock, self.pause, self.max_workers, g)\n", |
| 362 | + " try: return super().map(_g, items, chunksize=chunksize)\n", |
| 363 | + " except Exception as e: self.on_exc(e)" |
| 364 | + ] |
| 365 | + }, |
| 366 | + { |
| 367 | + "cell_type": "code", |
| 368 | + "execution_count": null, |
| 369 | + "metadata": {}, |
| 370 | + "outputs": [], |
| 371 | + "source": [ |
| 372 | + "#|export\n", |
| 373 | + "@delegates()\n", |
| 374 | + "class ThreadPool():\n", |
| 375 | + " # If you have a need for a ThreadPool, please open an issue.\n", |
| 376 | + " def __init__(self, *args, **kwargs):\n", |
| 377 | + " raise NotImplementedError(\"`ThreadPool` is not implemented\")" |
| 378 | + ] |
| 379 | + }, |
291 | 380 | {
|
292 | 381 | "cell_type": "code",
|
293 | 382 | "execution_count": null,
|
|
307 | 396 | "source": [
|
308 | 397 | "#|export\n",
|
309 | 398 | "def parallel(f, items, *args, n_workers=defaults.cpus, total=None, progress=None, pause=0,\n",
|
310 |
| - " method=None, threadpool=False, timeout=None, chunksize=1, **kwargs):\n", |
| 399 | + " method=None, threadpool=False, timeout=None, chunksize=1,\n", |
| 400 | + " executor=True, maxtasksperchild=None, **kwargs):\n", |
311 | 401 | " \"Applies `f` in parallel to `items`, using `n_workers`\"\n",
|
312 | 402 | " kwpool = {}\n",
|
313 |
| - " if threadpool: pool = ThreadPoolExecutor\n", |
| 403 | + " if threadpool: pool = ThreadPoolExecutor if executor else ThreadPool\n", |
314 | 404 | " else:\n",
|
| 405 | + " pool = ProcessPoolExecutor if executor else ProcessPool\n", |
315 | 406 | " if not method and sys.platform == 'darwin': method='fork'\n",
|
316 |
| - " if method: kwpool['mp_context'] = get_context(method)\n", |
317 |
| - " pool = ProcessPoolExecutor\n", |
| 407 | + " if method:\n", |
| 408 | + " if executor: kwpool['mp_context'] = get_context(method)\n", |
| 409 | + " else: kwpool['context'] = get_context(method)\n", |
| 410 | + "\n", |
| 411 | + " if maxtasksperchild:\n", |
| 412 | + " assert pool==ProcessPool, \"`maxtasksperchild` is only supported by ProcessPool\"\n", |
| 413 | + " kwpool['maxtasksperchild'] = maxtasksperchild\n", |
318 | 414 | " with pool(n_workers, pause=pause, **kwpool) as ex:\n",
|
319 | 415 | " r = ex.map(f,items, *args, timeout=timeout, chunksize=chunksize, **kwargs)\n",
|
320 | 416 | " if progress and progress_bar:\n",
|
|
377 | 473 | "name": "stdout",
|
378 | 474 | "output_type": "stream",
|
379 | 475 | "text": [
|
380 |
| - "0 2022-08-07 05:10:05.999916\n", |
381 |
| - "1 2022-08-07 05:10:06.252031\n", |
382 |
| - "2 2022-08-07 05:10:06.503603\n", |
383 |
| - "3 2022-08-07 05:10:06.755216\n", |
384 |
| - "4 2022-08-07 05:10:07.006702\n" |
| 476 | + "0 2023-02-14 20:40:39.098928\n", |
| 477 | + "1 2023-02-14 20:40:39.350350\n", |
| 478 | + "2 2023-02-14 20:40:39.601602\n", |
| 479 | + "3 2023-02-14 20:40:39.851952\n", |
| 480 | + "4 2023-02-14 20:40:40.102687\n" |
385 | 481 | ]
|
386 | 482 | }
|
387 | 483 | ],
|
|
499 | 595 | "cell_type": "code",
|
500 | 596 | "execution_count": null,
|
501 | 597 | "metadata": {},
|
502 |
| - "outputs": [ |
503 |
| - { |
504 |
| - "data": { |
505 |
| - "text/html": [ |
506 |
| - "\n", |
507 |
| - "<style>\n", |
508 |
| - " /* Turns off some styling */\n", |
509 |
| - " progress {\n", |
510 |
| - " /* gets rid of default border in Firefox and Opera. */\n", |
511 |
| - " border: none;\n", |
512 |
| - " /* Needs to be in here for Safari polyfill so background images work as expected. */\n", |
513 |
| - " background-size: auto;\n", |
514 |
| - " }\n", |
515 |
| - " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n", |
516 |
| - " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n", |
517 |
| - " }\n", |
518 |
| - " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n", |
519 |
| - " background: #F44336;\n", |
520 |
| - " }\n", |
521 |
| - "</style>\n" |
522 |
| - ], |
523 |
| - "text/plain": [ |
524 |
| - "<IPython.core.display.HTML object>" |
525 |
| - ] |
526 |
| - }, |
527 |
| - "metadata": {}, |
528 |
| - "output_type": "display_data" |
529 |
| - }, |
530 |
| - { |
531 |
| - "data": { |
532 |
| - "text/html": [], |
533 |
| - "text/plain": [ |
534 |
| - "<IPython.core.display.HTML object>" |
535 |
| - ] |
536 |
| - }, |
537 |
| - "metadata": {}, |
538 |
| - "output_type": "display_data" |
539 |
| - } |
540 |
| - ], |
| 598 | + "outputs": [], |
541 | 599 | "source": [
|
542 | 600 | "class TestSleepyBatchFunc:\n",
|
543 | 601 | " \"For testing parallel processes that run at different speeds\"\n",
|
|
0 commit comments