Skip to content

Commit

Permalink
update with experiments
Browse files Browse the repository at this point in the history
  • Loading branch information
microchipgnu committed Dec 26, 2023
1 parent e480819 commit bdc4d6f
Show file tree
Hide file tree
Showing 19 changed files with 10,627 additions and 2 deletions.
Binary file modified bun.lockb
Binary file not shown.
10 changes: 10 additions & 0 deletions log
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
[GIN] 2023/12/21 - 15:26:11 | 200 | 17.3221125s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:27:46 | 200 | 18.572235791s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:31:52 | 200 | 4.614720416s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:32:08 | 200 | 3.160842708s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:52:39 | 200 | 8.500764792s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:52:56 | 200 | 4.521970916s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:53:09 | 200 | 3.970299709s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:53:22 | 200 | 4.36826475s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:55:22 | 200 | 57.785192875s | 127.0.0.1 | POST "/api/chat"
[GIN] 2023/12/21 - 15:56:48 | 200 | 29.588289708s | 127.0.0.1 | POST "/api/chat"
9,681 changes: 9,681 additions & 0 deletions misc/OpenAI Open.yaml

Large diffs are not rendered by default.

173 changes: 173 additions & 0 deletions misc/backup-ollama-functions.ts

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions misc/ollama-llava-generate.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
curl http://localhost:11434/api/generate -d '{\n "model": "llava",\n "prompt":"What is in this picture?",\n "images": ["iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/z
RaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUj
KiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC"]\n}'
8 changes: 6 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,25 @@
],
"scripts": {
"run:core": "bun run --cwd ./packages/core .",
"run:agent": "bun run --cwd ./packages/agent ."
"run:agent": "bun run --cwd ./packages/agent .",
"run:ollama": "bun run --cwd ./packages/ollama-functions .",
"run:plan-and-execute": "bun run --cwd ./packages/plan-and-execute ."
},
"devDependencies": {
"@types/node-localstorage": "^1.3.3",
"bun-types": "latest",
"@micro-agi/core": "workspace:*",
"@micro-agi/agent": "workspace:*",
"@micro-agi/plugins": "workspace:*"
"@micro-agi/plugins": "workspace:*",
"@micro-agi/plan-and-execute": "workspace:*"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"@dqbd/tiktoken": "^1.0.7",
"dotenv": "^16.3.1",
"langchain": "^0.0.210",
"node-localstorage": "^3.0.5",
"yaml": "^2.3.4"
}
Expand Down
42 changes: 42 additions & 0 deletions packages/ollama-functions/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel

**/*.trace
**/*.zip
**/*.tar.gz
**/*.tgz
**/*.log
package-lock.json
**/*.bun
15 changes: 15 additions & 0 deletions packages/ollama-functions/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Elysia with Bun runtime

## Getting Started
To get started with this template, simply paste this command into your terminal:
```bash
bun create elysia ./elysia-example
```

## Development
To start the development server run:
```bash
bun run dev
```

Open http://localhost:3000/ with your browser to see the result.
Binary file added packages/ollama-functions/bun.lockb
Binary file not shown.
16 changes: 16 additions & 0 deletions packages/ollama-functions/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"name": "@micro-agi/ollama-functions",
"version": "1.0.50",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "bun run --watch src/index.ts"
},
"dependencies": {
"elysia": "latest",
"langchain": "^0.0.210"
},
"devDependencies": {
"bun-types": "latest"
},
"module": "src/index.js"
}
18 changes: 18 additions & 0 deletions packages/ollama-functions/src/_index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
// Minimal end-to-end demo: stream one chat completion from a local Ollama
// server and print the concatenated text once the stream is drained.
import { ChatOllama } from "langchain/chat_models/ollama";
import { StringOutputParser } from "langchain/schema/output_parser";

// Chat model backed by the default local Ollama endpoint.
const chatModel = new ChatOllama({
  baseUrl: "http://localhost:11434", // Default value
  model: "mistral", // Default value
});

// Piping through StringOutputParser makes the stream yield plain strings
// instead of chat-message chunks.
const textStream = await chatModel
  .pipe(new StringOutputParser())
  .stream(`Translate "I love programming" into German.`);

// Buffer every emitted fragment, then join and print in one go.
const pieces: string[] = [];
for await (const piece of textStream) {
  pieces.push(piece);
}

console.log(pieces.join(""));
158 changes: 158 additions & 0 deletions packages/ollama-functions/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
import Elysia from "elysia";
import { ChatOllama } from "langchain/chat_models/ollama";
import { OllamaFunctions } from "langchain/experimental/chat_models/ollama_functions";
import { HumanMessage } from "langchain/schema";
import { StringOutputParser } from "langchain/schema/output_parser";
import { DynamicStructuredTool, formatToOpenAIFunction } from "langchain/tools";
import { z } from "zod";

// Local Ollama server endpoint and model shared by both model instances below.
const BASE_URL = "http://localhost:11434";
const MODEL = "mistral";

// In-memory key/value store backing the store_db / read_db functions and the
// /db route. Reassigned (not mutated) on every write; lost on process restart.
let db = {} as Record<string, string>;

/**
 * One "function calling" round-trip against the local Ollama server:
 * 1. Ask an OllamaFunctions model which function matches `input`.
 * 2. Execute that function locally (random number, fake weather, db access).
 * 3. Ask a plain chat model to explain the call and its result (streamed).
 *
 * @param input - Free-form user request, e.g. "store foo under bar".
 * @returns `{ explanation, execution }` where `execution` is the raw local
 *   result (number/string, or undefined if no known function was selected)
 *   and `explanation` is the chat model's short summary.
 */
const run = async (input: string) => {
  // NOTE: these tool `func` bodies are only used for their OpenAI-function
  // schema (via formatToOpenAIFunction); actual execution happens in the
  // dispatch chain below.
  const tools = [
    new DynamicStructuredTool({
      name: "random_number_generator",
      description: "generates a random number between two input numbers",
      schema: z.object({
        low: z.number().describe("The lower bound of the generated number"),
        high: z.number().describe("The upper bound of the generated number"),
      }),
      func: async ({ low, high }) =>
        (Math.random() * (high - low) + low).toString(),
      returnDirect: false,
    }),
    new DynamicStructuredTool({
      name: "store_db",
      description: "stores value to key db",
      schema: z.object({
        key: z.string().describe("the key"),
        value: z.string().describe("the value"),
      }),
      func: async ({ key, value }) => {
        db = { ...db, [key]: value };
        return "";
      },
      returnDirect: false,
    }),
    new DynamicStructuredTool({
      name: "read_db",
      description: "get data from db",
      schema: z.object({
        key: z.string().describe("the key"),
      }),
      // Fixed: previously always returned "" and never consulted the store.
      func: async ({ key }) => db[key] ?? "",
      returnDirect: false,
    }),
  ];

  // Model that selects a function + JSON arguments (no free-text answer).
  const functionsModel = new OllamaFunctions({
    temperature: 0.1,
    model: MODEL,
    baseUrl: BASE_URL,
  }).bind({
    functions: [
      ...tools.map((tool) => formatToOpenAIFunction(tool)),
      {
        name: "get_current_weather",
        description: "Get the current weather in a given location",
        parameters: {
          type: "object",
          properties: {
            location: {
              type: "string",
              description: "The city and state, e.g. San Francisco, CA",
            },
            unit: { type: "string", enum: ["celsius", "fahrenheit"] },
          },
          required: ["location"],
        },
      },
    ],
  });

  const functionResponse = await functionsModel.invoke([
    new HumanMessage({
      content: input,
    }),
  ]);

  const functionName = functionResponse.additional_kwargs.function_call?.name;
  const functionArgs =
    functionResponse.additional_kwargs.function_call?.arguments;

  let executionResponse = undefined;

  // Dispatch: execute the selected function locally with the model's args.
  if (functionName === "random_number_generator" && functionArgs) {
    const { low, high } = JSON.parse(functionArgs);
    executionResponse = Math.floor(Math.random() * (high - low + 1) + low);
  } else if (functionName === "get_current_weather" && functionArgs) {
    // Fixed typo: "celcius" -> "celsius". The weather is faked as a random
    // temperature in [-10, 50]; location and unit are currently unused.
    const { location, unit = "celsius" } = JSON.parse(functionArgs);

    executionResponse = Math.floor(Math.random() * (50 - -10 + 1) - 10);
  } else if (functionName === "store_db" && functionArgs) {
    const { key, value } = JSON.parse(functionArgs);

    db = { ...db, [key]: value };

    executionResponse = `key: ${key}, value: ${value}`;
  } else if (functionName === "read_db" && functionArgs) {
    const { key } = JSON.parse(functionArgs);

    // Fixed: a missing key used to surface as the string "undefined" when the
    // store was non-empty; `?? ""` reports it as an empty value instead.
    const value = db[key] ?? "";

    executionResponse = `key: ${key}, value: ${value}`;
  }

  // Second pass: a plain chat model narrates what just happened.
  const chatModel = new ChatOllama({
    baseUrl: BASE_URL,
    model: MODEL,
    temperature: 0.1,
  });

  // Fixed grammar in the prompt: "inputs where" -> "inputs were".
  const stream = await chatModel
    .pipe(new StringOutputParser())
    .stream(
      `The user input is You just ran a function called ${functionName}, the inputs were ${functionArgs} and the result was ${executionResponse}. Explain this in few words.`
    );

  // Drain the streamed explanation into a single string.
  const chunks = [];
  for await (const chunk of stream) {
    chunks.push(chunk);
  }

  return {
    explanation: chunks.join(""),
    execution: executionResponse,
  };
};

// HTTP surface. The static "/db" route is registered before the dynamic
// "/:id" route so a request for /db can never be captured as id === "db",
// regardless of the router's static-vs-dynamic precedence rules.
const app = new Elysia()
  // Debug endpoint: dump the in-memory key/value store.
  .get("/db", async (req) => {
    return Response.json({
      db,
    });
  })
  // Main endpoint: "+"-separated words in the path segment form the user
  // input, e.g. GET /what+is+the+weather -> "what is the weather".
  .get("/:id", async (req) => {
    const { id } = req.params;
    const input = id.split("+").join(" ");
    const { execution, explanation } = await run(input);

    return Response.json({
      input,
      execution,
      explanation,
    });
  })
  .listen(3000);

console.log(
  `🦊 Elysia is running at ${app.server?.hostname}:${app.server?.port}`
);
Loading

0 comments on commit bdc4d6f

Please sign in to comment.