Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 607d217

Browse files
committed
Allow both .so and .dylib extensions for macos
1 parent c9e79c6 commit 607d217
Copy full SHA for 607d217

File tree

Expand file tree / Collapse file tree

1 file changed

+17
-12
lines changed
Filter options
Expand file tree / Collapse file tree

1 file changed

+17
-12
lines changed

llama_cpp/llama_cpp.py

Copy file name to clipboard / Expand all lines: llama_cpp/llama_cpp.py
+17 −12 — Lines changed: 17 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -15,29 +15,33 @@
1515
c_size_t,
1616
)
1717
import pathlib
18+
from typing import List
1819

1920

2021
# Load the library
2122
def _load_shared_library(lib_base_name: str):
23+
# Construct the paths to the possible shared library names
24+
_base_path = pathlib.Path(__file__).parent.resolve()
25+
# Searching for the library in the current directory under the name "libllama" (default name
26+
# for llamacpp) and "llama" (default name for this repo)
27+
_lib_paths: List[pathlib.Path] = []
2228
# Determine the file extension based on the platform
2329
if sys.platform.startswith("linux"):
24-
lib_ext = ".so"
30+
_lib_paths += [
31+
_base_path / f"lib{lib_base_name}.so",
32+
]
2533
elif sys.platform == "darwin":
26-
lib_ext = ".so"
34+
_lib_paths += [
35+
_base_path / f"lib{lib_base_name}.so",
36+
_base_path / f"lib{lib_base_name}.dylib",
37+
]
2738
elif sys.platform == "win32":
28-
lib_ext = ".dll"
39+
_lib_paths += [
40+
_base_path / f"{lib_base_name}.dll",
41+
]
2942
else:
3043
raise RuntimeError("Unsupported platform")
3144

32-
# Construct the paths to the possible shared library names
33-
_base_path = pathlib.Path(__file__).parent.resolve()
34-
# Searching for the library in the current directory under the name "libllama" (default name
35-
# for llamacpp) and "llama" (default name for this repo)
36-
_lib_paths = [
37-
_base_path / f"lib{lib_base_name}{lib_ext}",
38-
_base_path / f"{lib_base_name}{lib_ext}",
39-
]
40-
4145
if "LLAMA_CPP_LIB" in os.environ:
4246
lib_base_name = os.environ["LLAMA_CPP_LIB"]
4347
_lib = pathlib.Path(lib_base_name)
@@ -160,6 +164,7 @@ class llama_token_data_array(Structure):
160164
# bool use_mlock; // force system to keep model in RAM
161165
# bool embedding; // embedding mode only
162166

167+
163168
# // called with a progress value between 0 and 1, pass NULL to disable
164169
# llama_progress_callback progress_callback;
165170
# // context pointer passed to the progress callback

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.