From 76bc4d0458c298435c928cad0e46dcb9d95871b4 Mon Sep 17 00:00:00 2001
From: Matt Williams
Date: Wed, 6 Dec 2023 15:44:40 -0800
Subject: [PATCH] Cleanup as per Bruce

Signed-off-by: Matt Williams
---
 examples/python-simplechat/client.py | 6 +++---
 examples/python-simplechat/readme.md | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/python-simplechat/client.py b/examples/python-simplechat/client.py
index 3f1d0bb1..3c480f97 100644
--- a/examples/python-simplechat/client.py
+++ b/examples/python-simplechat/client.py
@@ -15,6 +15,8 @@ def chat(messages):
 
     for line in r.iter_lines():
         body = json.loads(line)
+        if "error" in body:
+            raise Exception(body["error"])
         if body.get("done") is False:
             message = body.get("message", "")
             content = message.get("content", "")
@@ -22,8 +24,6 @@ def chat(messages):
             # the response streams one token at a time, print that as we receive it
             print(content, end="", flush=True)
 
-        if "error" in body:
-            raise Exception(body["error"])
         if body.get("done", False):
             message["content"] = output
             return message
@@ -32,7 +32,7 @@
 
 def main():
     messages = []
-    ) # the context stores a conversation history, you can use this to make the model more context aware
+
     while True:
         user_input = input("Enter a prompt: ")
         print()
diff --git a/examples/python-simplechat/readme.md b/examples/python-simplechat/readme.md
index e4543c6c..abbdfe7e 100644
--- a/examples/python-simplechat/readme.md
+++ b/examples/python-simplechat/readme.md
@@ -21,4 +21,4 @@ In the **main** function, we collect `user_input` and add it as a message to our
 
 ## Next Steps
 
-In this example, all generations are kept. You might want to experiment with summarizing everything older than 10 conversations to enable longer history with less context being used.
\ No newline at end of file
+In this example, all generations are kept. You might want to experiment with summarizing everything older than 10 conversations to enable longer history with less context being used.