Replace multiprocessing with multiprocess (#171)
* Use `multiprocess` instead of `multiprocessing`

* Add a warning about executing the example on macOS

* Apply suggestions from code review

Co-authored-by: Alvaro Bartolome <[email protected]>

---------

Co-authored-by: Alvaro Bartolome <[email protected]>
gabrielmbmb and alvarobartt authored Dec 19, 2023
1 parent f91706c commit 9a318e4
Showing 3 changed files with 7 additions and 1 deletion.
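
The motivation for the swap, as a minimal sketch I'm adding for context (not code from this commit): `multiprocess` is a fork of the stdlib `multiprocessing` that serializes with `dill` instead of `pickle`, so process targets such as lambdas and closures survive the "spawn" start method (the default on macOS and Windows), where the stdlib raises a pickling error.

```python
# Minimal sketch, assuming `multiprocess` is installed (pip install multiprocess).
import multiprocess as mp


def main() -> None:
    queue = mp.Queue()
    # A lambda as the process target: stdlib `multiprocessing` cannot pickle
    # this under the "spawn" start method, but `multiprocess` serializes it
    # with dill and the worker runs normally.
    worker = mp.Process(target=lambda q: q.put(sum(range(10))), args=(queue,))
    worker.start()
    worker.join()
    print(queue.get())  # -> 45


if __name__ == "__main__":
    main()
```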
4 changes: 4 additions & 0 deletions examples/pipeline-llamacpp-and-openai-process.py

```diff
@@ -12,6 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# WARNING: to run this example in Mac OS use:
+# no_proxy=* OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES python examples/pipeline-llamacpp-and-openai-process.py
+# Otherwise you will get an error when loading the llama.cpp model
+
 import os
 from typing import TYPE_CHECKING
```
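
Background I'm adding, not from the commit: macOS's Objective-C runtime aborts forked children that touch ObjC frameworks after `fork()`; `OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES` relaxes that check, and `no_proxy=*` keeps the system proxy lookup (which goes through those frameworks) out of the child process. A hedged in-script alternative is sketched below, though it can arrive too late if the runtime has already initialized, so the command-line prefix in the warning remains the reliable option.

```python
# Hedged sketch: in-script equivalent of the command-line prefix. These must be
# set before anything touches the Objective-C runtime, so this may not always
# take effect; prefer the command-line form from the warning above.
import os

os.environ.setdefault("no_proxy", "*")
os.environ.setdefault("OBJC_DISABLE_INITIALIZE_FORK_SAFETY", "YES")
```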
1 change: 1 addition & 0 deletions pyproject.toml

```diff
@@ -26,6 +26,7 @@ dependencies = [
     "rich >= 13.5.0",
     "tenacity >= 8",
     "importlib-resources >= 6.1.1; python_version < '3.9'",
+    "multiprocess",
 ]
 dynamic = ["version"]
```
3 changes: 2 additions & 1 deletion src/distilabel/llm/base.py

```diff
@@ -14,7 +14,6 @@
 
 from __future__ import annotations
 
-import multiprocessing as mp
 import queue
 import random
 import warnings
@@ -34,6 +33,8 @@
     Union,
 )
 
+import multiprocess as mp
+
 from distilabel.logger import get_logger
 from distilabel.tasks.prompt import Prompt
 from distilabel.utils.futures import when_all_complete
```
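
Because `multiprocess` mirrors the stdlib module's API, keeping the `mp` alias means no other line in `base.py` has to change. A hypothetical variant (my sketch, not what the commit does) could fall back to the stdlib when the dependency is missing:

```python
try:
    import multiprocess as mp  # dill-based fork of multiprocessing
except ImportError:
    # Fallback sketch only: lambdas/closures won't pickle under "spawn" here.
    import multiprocessing as mp
```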
