From f0eee3faa0da42fc63c9ec73df22855e80c93d7b Mon Sep 17 00:00:00 2001
From: Bruce MacDonald
Date: Fri, 23 Jun 2023 17:18:47 -0400
Subject: [PATCH] build server executable

---
 .gitignore              |  5 ++++-
 server/README.md        |  6 ++++++
 server/build.py         | 20 ++++++++++++++++++++
 server/requirements.txt |  1 +
 4 files changed, 31 insertions(+), 1 deletion(-)
 create mode 100644 server/build.py

diff --git a/.gitignore b/.gitignore
index 2608ec26..dd5a59ef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
 .DS_Store
-.vscode
\ No newline at end of file
+.vscode
+*.spec
+*/build
+*/dist
\ No newline at end of file
diff --git a/server/README.md b/server/README.md
index 0607716f..848b9792 100644
--- a/server/README.md
+++ b/server/README.md
@@ -18,6 +18,12 @@ pip install llama-cpp-python
 pip install -r requirements.txt
 ```
 
+## Building
+
+```bash
+python3 build.py
+```
+
 ## Running
 
 Put your model in `models/` and run:
diff --git a/server/build.py b/server/build.py
new file mode 100644
index 00000000..3b173db7
--- /dev/null
+++ b/server/build.py
@@ -0,0 +1,20 @@
+import site
+import os
+from PyInstaller.__main__ import run as pyi_run
+
+# Get the directory of site-packages and llama_cpp
+site_packages_dir = site.getsitepackages()[0]
+llama_cpp_dir = os.path.join(site_packages_dir, "llama_cpp")
+
+# Prepare the arguments for PyInstaller
+args = [
+    "server.py",
+    "--paths",
+    site_packages_dir,
+    "--add-data",
+    f"{llama_cpp_dir}{os.pathsep}llama_cpp",
+    "--onefile",
+]
+
+# Generate the .spec file and run PyInstaller
+pyi_run(args)
diff --git a/server/requirements.txt b/server/requirements.txt
index 91c27d18..5366ce47 100644
--- a/server/requirements.txt
+++ b/server/requirements.txt
@@ -1,2 +1,3 @@
 Flask==2.3.2
 flask_cors==3.0.10
+llama-cpp-python==0.1.65
\ No newline at end of file
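
Note (not part of the patch): `build.py` bundles the `llama_cpp` package data into the one-file executable via `--add-data`. When the frozen binary runs, PyInstaller's bootloader unpacks those data files into a temporary directory exposed as `sys._MEIPASS`. The sketch below shows one way `server.py` could resolve such bundled resources at runtime; the `bundled_path` helper is hypothetical and not part of this patch or of PyInstaller itself.

```python
import os
import sys


def bundled_path(relative: str) -> str:
    """Resolve a resource path whether running frozen or from source.

    Hypothetical helper: when frozen by PyInstaller, data added with
    --add-data is unpacked under sys._MEIPASS; when running from source,
    fall back to the directory containing this file.
    """
    base = getattr(sys, "_MEIPASS", os.path.dirname(os.path.abspath(__file__)))
    return os.path.join(base, relative)


# Example: the llama_cpp data bundled by build.py would land under
# bundled_path("llama_cpp") inside the extracted one-file bundle.
```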