From 051272b6fbefbac5ea337f9e8f998dbe794802b3 Mon Sep 17 00:00:00 2001
From: Jan Frenzel <jan.frenzel@tu-dresden.de>
Date: Tue, 20 Aug 2024 17:16:53 +0200
Subject: [PATCH] Add summarize example

---
 README.md                  |  6 ++++-
 summarize/README.md        | 27 ++++++++++++++++++++
 summarize/requirements.txt |  1 +
 summarize/summarize.py     | 51 ++++++++++++++++++++++++++++++++++++++
 4 files changed, 84 insertions(+), 1 deletion(-)
 create mode 100644 summarize/README.md
 create mode 100644 summarize/requirements.txt
 create mode 100755 summarize/summarize.py

diff --git a/README.md b/README.md
index d415464..1d9bc40 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # ScaDS.AI LLM API Examples
 
-This repository contains code snippets to get you started with our API. Before you start with any application example, please go to https://llm.scads.ai and request an API key.
+This repository contains code snippets to get you started with our Large Language Model (LLM) API. Before you start with any application example, please go to https://llm.scads.ai and request an API key.
 
 ## Installation
 Each example comes with its own installation instructions, because there are examples with several languages and environments which makes this necessary.
@@ -8,6 +8,10 @@ Each example comes with its own installation instructions, because there are exa
 ## Usage
 A base example is available in directory [base](base).
 
+Further examples:
+
+- [summarize](summarize) shows how to use an LLM to summarize a (plain) text file
+
 ## Support
 If you have the feeling that the examples contain errors or otherwise need improvement, please [check whether there is an open issues already](https://gitlab.hrz.tu-chemnitz.de/scads-ai-llm/scads-ai-llm-api-examples/-/issues). If there is no issue yet, create one.
 
diff --git a/summarize/README.md b/summarize/README.md
new file mode 100644
index 0000000..c978a9a
--- /dev/null
+++ b/summarize/README.md
@@ -0,0 +1,27 @@
+# Summarize a text file
+
+This example shows you how to:
+
+- Summarize a plain text file
+- Adjust the temperature to reduce the model's creativity (see the sketch below)
+- Set the maximum length of a response
+
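+The parameters that control this in `summarize.py` are `temperature` and `max_tokens`. Below is a minimal sketch of the request itself; it follows the key-file and model-selection conventions of `summarize.py`, and the inline input text is just a placeholder:
+
+```python
+from openai import OpenAI
+
+# Read the API key from the file `my_key`, as summarize.py does
+with open("my_key") as keyfile:
+    my_api_key = keyfile.readline().strip()
+
+client = OpenAI(base_url="https://llm.scads.ai/v1", api_key=my_api_key)
+
+# Pick the first available model whose id contains "llama"
+model_name = next(m.id for m in client.models.list().data if "llama" in m.id)
+
+response = client.chat.completions.create(
+    messages=[{"role": "user", "content": "Summarize the following text: ..."}],
+    model=model_name,
+    temperature=0.1,   # low temperature reduces the model's creativity
+    max_tokens=2048,   # upper bound on the length of the response
+)
+print(response.choices[0].message.content)
+```
+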
+## Installation
+
+Do the following steps in your bash shell:
+
+```bash
+python3 -m venv myenv
+source myenv/bin/activate
+pip install -r requirements.txt
+```
+
+## Usage
+
+1. Put your API key in the file `my_key`.
+2. Then start the script `summarize.py` from your bash shell and provide a plain text file:
+
+```bash
+source myenv/bin/activate
+./summarize.py README.md
+```
diff --git a/summarize/requirements.txt b/summarize/requirements.txt
new file mode 100644
index 0000000..ec838c5
--- /dev/null
+++ b/summarize/requirements.txt
@@ -0,0 +1 @@
+openai
diff --git a/summarize/summarize.py b/summarize/summarize.py
new file mode 100755
index 0000000..e049965
--- /dev/null
+++ b/summarize/summarize.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# See README.md for instructions on how to install dependencies and run this script
+import sys
+from openai import OpenAI
+# Read the API key from the file `my_key`
+with open("my_key") as keyfile:
+    my_api_key = keyfile.readline().strip()
+
+client = OpenAI(base_url="https://llm.scads.ai/v1",api_key=my_api_key)
+
+# Use the first available model whose id contains "llama"
+for model in client.models.list().data:
+    model_name = model.id
+    if "llama" in model_name:
+        break
+else:
+    sys.exit("No model with 'llama' in its id was found")
+
+if len(sys.argv) > 1:
+    textfilename = sys.argv[1]
+else:
+    print("No argument given, using README.md...")
+    textfilename = "README.md"
+
+# Read the whole text file
+with open(textfilename) as textfile:
+    filecontents = textfile.read()
+
+template = """### Instruction:
+Summarize the following text:
+### Input:
+"""
+
+request = template + filecontents
+
+# Use model
+response = client.chat.completions.create(
+    messages=[
+        {"role": "user", "content": request}
+    ],
+    model=model_name,
+    temperature=0.1,  # low temperature reduces the model's creativity
+    max_tokens=2048,  # upper bound on the length of the response
+)
+
+# Print the summary
+print("""
+Summary:
+""")
+summary = response.choices[0].message.content
+print(summary)
-- 
GitLab