diff --git a/examples/modelfiles/midjourneyprompter b/examples/midjourneyprompter
similarity index 100%
rename from examples/modelfiles/midjourneyprompter
rename to examples/midjourneyprompter
diff --git a/examples/python/README.md b/examples/python/README.md
deleted file mode 100644
index ce621a5b..00000000
--- a/examples/python/README.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Python
-
-This is a simple example of calling the Ollama API from a Python app.
-
-First, download a model:
-
-```
-curl -L https://huggingface.co/TheBloke/orca_mini_3B-GGML/resolve/main/orca-mini-3b.ggmlv3.q4_1.bin -o orca.bin
-```
-
-Then run it using the example script. You'll need to have Ollama running on your machine.
-
-```
-python3 main.py orca.bin
-```
diff --git a/examples/python/main.py b/examples/python/main.py
deleted file mode 100644
index b6791b54..00000000
--- a/examples/python/main.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import http.client
-import json
-import os
-import sys
-
-if len(sys.argv) < 2:
-    print("Usage: python main.py <model file>")
-    sys.exit(1)
-
-conn = http.client.HTTPConnection('localhost', 11434)
-
-headers = { 'Content-Type': 'application/json' }
-
-# generate text from the model
-conn.request("POST", "/api/generate", json.dumps({
-    'model': os.path.join(os.getcwd(), sys.argv[1]),
-    'prompt': 'write me a short story',
-    'stream': True
-}), headers)
-
-response = conn.getresponse()
-
-def parse_generate(data):
-    for event in data.decode('utf-8').split("\n"):
-        if not event:
-            continue
-        yield event
-
-if response.status == 200:
-    for chunk in response:
-        for event in parse_generate(chunk):
-            print(json.loads(event)['response'], end="", flush=True)
diff --git a/examples/modelfiles/recipemaker b/examples/recipemaker
similarity index 100%
rename from examples/modelfiles/recipemaker
rename to examples/recipemaker
diff --git a/examples/modelfiles/tweetwriter b/examples/tweetwriter
similarity index 100%
rename from examples/modelfiles/tweetwriter
rename to examples/tweetwriter