Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

✨OpenAI and Ollama Clients #948

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions modules/ai/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
OpenAI and Ollama Clients

- Streaming output
- The OpenAI interface employs the `ai` prefix for user-friendly input.
- Option for controllable return values
- Supports chat context retention
- Customizable prompt functionality for `ai do`
- Refer to [prompt.nu](prompt.nu) for definition guidelines
- Default model can be overridden using `--model`
- A prompt line containing a placeholder may only consist of `{}` and quotation marks
- Importing and exporting of Ollama models
- Connection details managed through environment variables

Control some options with the following code.
```
$env.OLLAMA_HOST = 'http://localhost:11434'
$env.OPENAI_HOST = 'http://localhost:11434'
$env.OPENAI_API_KEY = 'secret'
$env.OPENAI_PROMPT = $env.OPENAI_PROMPT
| insert 'json2rust' {
prompt: [
"Analyze the following JSON data to convert it into a Rust struct:"
"```{}```"
]
model: '',
description: 'Analyze JSON content, converting it into a Rust struct'
}
```
24 changes: 24 additions & 0 deletions modules/ai/mod.nu
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Load the submodules' export-env blocks so their environment defaults
# (OLLAMA_*, OPENAI_*) are set when this module is used.
export-env {
  use ollama.nu *
  use openai.nu *
  use prompt.nu *
}

export use ollama.nu *
export use openai.nu *


# Cosine similarity of two equal-length numeric vectors.
#
# Returns a float in [-1, 1].
# Raises an error when the vectors differ in length — the previous version
# only printed a warning and then silently computed a wrong value over the
# zip-truncated vectors.
export def 'similarity cosine' [a b] {
  if ($a | length) != ($b | length) {
    error make {msg: "The lengths of the vectors must be equal."}
  }
  $a | zip $b | reduce -f {p: 0, a: 0, b: 0} {|i,acc|
    {
      p: ($acc.p + ($i.0 * $i.1))   # dot product
      a: ($acc.a + ($i.0 * $i.0))   # squared norm of a
      b: ($acc.b + ($i.1 * $i.1))   # squared norm of b
    }
  }
  | $in.p / (($in.a | math sqrt) * ($in.b | math sqrt))
}

186 changes: 186 additions & 0 deletions modules/ai/ollama.nu
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
export-env {
  # Respect values the user already exported (the README documents these as
  # user-configurable); only fill in defaults. This also keeps OLLAMA_CHAT
  # history intact if the module is re-sourced.
  $env.OLLAMA_HOST = ($env.OLLAMA_HOST? | default "http://localhost:11434")
  $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT? | default {})
  $env.OLLAMA_HOME = ($env.OLLAMA_HOME? | default ([$env.HOME .ollama] | path join))
}

# Completion source: locally available Ollama models, described by their
# last-modified timestamp.
def "nu-complete models" [] {
  let tags = http get $"($env.OLLAMA_HOST)/api/tags"
  $tags.models | each {|m| {value: $m.name, description: $m.modified_at} }
}

# Show the metadata record the Ollama /api/show endpoint reports for a model.
export def "ollama info" [model: string@"nu-complete models"] {
  let url = $"($env.OLLAMA_HOST)/api/show"
  http post -t application/json $url {name: $model}
}

# Embed a single string with the given model; returns the embedding vector.
export def "ollama embed" [
  model: string@"nu-complete models"  # model to embed with
  input: string                       # text to embed
] {
  let body = {model: $model, input: [$input]}
  http post -t application/json $"($env.OLLAMA_HOST)/api/embed" $body
  | get embeddings
  | first
}


# One-shot generation (no chat history).
# Piped-in text is substituted for `{}` in the prompt.
export def "ollama gen" [
  model: string@"nu-complete models"
  prompt: string
  --image(-i): path  # optional image to attach, sent base64-encoded
  --full(-f)         # return the whole API response instead of just .response
] {
  let piped = $in | default ""
  let extra = if ($image | is-empty) { {} } else {
    {images: [(open $image | encode base64)]}
  }
  let body = {
    model: $model
    prompt: ($prompt | str replace "{}" $piped)
    stream: false
    ...$extra
  }
  let resp = http post -t application/json $"($env.OLLAMA_HOST)/api/generate" $body
  if $full { $resp } else { $resp.response }
}


# Chat with per-model history kept in $env.OLLAMA_CHAT.
# Piped-in text is substituted for the placeholder (default `{}`) in the message.
export def --env "ollama chat" [
  model: string@"nu-complete models"
  message: string
  --image(-i): path               # optional image to attach, sent base64-encoded
  --reset(-r)                     # clear this model's stored history first
  --forget(-f)                    # one-off message: neither read nor write history
  --placehold(-p): string = '{}'  # marker replaced by piped-in input
  --out(-o)                       # return the reply text instead of only printing it
  --debug                         # echo the outgoing message content
] {
  let content = $in | default ""
  let img = if ($image | is-empty) {
    {}
  } else {
    {images: [(open $image | encode base64)]}
  }
  let msg = {
    role: "user"
    content: ($message | str replace -m $placehold $content)
    ...$img
  }
  if $debug {
    print $"(ansi grey)($msg.content)(ansi reset)"
  }
  if not $forget {
    # Lazily create this model's history list, then optionally reset it,
    # then record the outgoing user message.
    if ($env.OLLAMA_CHAT | is-empty) or ($model not-in $env.OLLAMA_CHAT) {
      $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | insert $model [])
    }
    if $reset {
      $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model [])
      print '✨'
    }
    $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model {|x| $x | get $model | append $msg})
  }

  # Stream the reply: each response line carries a JSON chunk; print each
  # fragment as it arrives while accumulating the full text and a chunk count.
  let r = http post -t application/json $"($env.OLLAMA_HOST)/api/chat" {
    model: $model
    messages: [
      ...(if $forget { [] } else { $env.OLLAMA_CHAT | get $model })
      $msg
    ]
    stream: true
  }
  | lines
  | reduce -f {msg: '', token: 0} {|i,a|
    let x = $i | parse -r '.*?(?<data>\{.*)'  # keep only the JSON part of the line
    if ($x | is-empty) { return $a }
    let x = $x | get 0.data | from json
    let m = $x.message.content
    print -n $m
    $a
    | update msg {|x| $x.msg + $m }
    | update token {|x| $x.token + 1 }
  }
  if not $forget {
    # Append the assistant reply (with chunk count) to the history.
    let r = {role: 'assistant', content: $r.msg, token: $r.token}
    $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model {|x| $x | get $model | append $r })
  }
  if $out { $r.msg }
}




# Completion source: manifest file paths (relative to the manifests dir)
# of locally installed models.
def "nu-complete ollama model" [] {
  let root = $"($env.OLLAMA_HOME)/models/manifests/"
  cd $root
  ls **/*
  | where type == file
  | get name
}

# Export an installed Ollama model into `target` as model.bin + Modelfile,
# plus source.txt holding the original `name:tag` (consumed by `ollama import`).
export def "ollama export" [
  model: string@"nu-complete ollama model"  # manifest path relative to the manifests dir
  target                                    # output directory to create
  --home: string  # NOTE(review): declared but never read — $env.OLLAMA_HOME is used instead
] {
  if ($target | path exists) {
    if ([y n] | input list "already exists, remove it?") == 'y' {
      rm -rf $target
    } else {
      return
    }
  }
  mkdir $target

  let base = {
    blob: ([$env.OLLAMA_HOME models blobs] | path join)
    manifests: ([$env.OLLAMA_HOME models manifests] | path join)
  }

  let tg = {
    bin: ([$target model.bin] | path join)
    model: ([$target Modelfile] | path join)
    source: ([$target source.txt] | path join)
  }

  # Turn the manifest path back into a `name:tag` reference (last path
  # component is the tag). NOTE(review): `$in` is referenced twice inside one
  # interpolation — confirm this evaluates as intended on current nushell.
  $model | split row '/' | $"($in | range 0..<-1 | str join '/'):($in | last)" | save $tg.source

  let manifests = open ([$base.manifests $model] | path join) | from json

  for i in $manifests.layers {

    # NOTE(review): $digest is never used below; $i.digest is re-read instead.
    let digest = $i.digest
    let type = $i.mediaType | split row '.' | last
    # Blobs are stored with ':' replaced by '-' in their file names.
    let blob = [$base.blob ($i.digest | str replace ':' '-')] | path join
    match $type {
      model => {
        # The weights blob becomes model.bin, referenced from the Modelfile.
        cp $blob $tg.bin
        $"FROM ./model.bin(char newline)" | save -a $tg.model
      }
      params => {
        # Expand the params JSON into one PARAMETER line per value.
        let p = open $blob | from json
        $p
        | items {|k,v| {k: $k, v: $v} }
        | each {|x| $x.v | each {|y| $'PARAMETER ($x.k) "($y)"' } }
        | flatten
        | str join (char newline)
        | $"(char newline)($in)"
        | save -a $tg.model
      }
      _ => {
        # Other layer kinds (e.g. template, system) are inlined verbatim,
        # read via the external `cat`.
        $'(char newline)($type | str upcase) """(cat $blob)"""' | save -a $tg.model
      }
    }
  }

  print 'success'
}

# Re-create a model previously written by `ollama export`.
#
# Reads the original `name:tag` from source.txt and runs the external
# `ollama create`, which picks up the Modelfile in that directory.
export def "ollama import" [dir] {
  cd $dir
  # Use the builtin `open` instead of the external `cat`, and trim any stray
  # trailing whitespace so the model name is passed through cleanly.
  let model = open --raw source.txt | str trim
  ollama create $model
}
139 changes: 139 additions & 0 deletions modules/ai/openai.nu
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
export-env {
  # Respect values the user already exported (the README documents these as
  # user-configurable); only fill in defaults. This also keeps OPENAI_CHAT
  # history intact if the module is re-sourced.
  $env.OPENAI_HOST = ($env.OPENAI_HOST? | default "http://localhost:11434")
  $env.OPENAI_CHAT = ($env.OPENAI_CHAT? | default {})
  $env.OPENAI_API_KEY = ($env.OPENAI_API_KEY? | default 'secret')
  $env.OPENAI_ORG_ID = ($env.OPENAI_ORG_ID? | default '')
  $env.OPENAI_PROJECT_ID = ($env.OPENAI_PROJECT_ID? | default '')
}


# Completion source: model ids visible to the configured OpenAI-compatible host.
def "nu-complete models" [] {
  let auth = [
    Authorization $"Bearer ($env.OPENAI_API_KEY)"
    OpenAI-Organization $env.OPENAI_ORG_ID
    OpenAI-Project $env.OPENAI_PROJECT_ID
  ]
  http get --headers $auth $"($env.OPENAI_HOST)/v1/models" | get data.id
}


# Chat against an OpenAI-compatible API with per-model history kept in
# $env.OPENAI_CHAT. Piped-in text is substituted for the placeholder
# (default `{}`) in the message.
export def --env "ai chat" [
  model: string@"nu-complete models"
  message: string
  --image(-i): path               # optional image, sent base64-encoded in an `images` field
  --reset(-r)                     # clear this model's stored history first
  --forget(-f)                    # one-off message: neither read nor write history
  --placehold(-p): string = '{}'  # marker replaced by piped-in input
  --out(-o)                       # return the reply text instead of only printing it
  --debug                         # echo the template, placeholder, and final content
] {
  let content = $in | default ""
  let img = if ($image | is-empty) {
    {}
  } else {
    {images: [(open $image | encode base64)]}
  }
  let msg = {
    role: "user"
    content: ($message | str replace -m $placehold $content)
    ...$img
  }
  if $debug {
    print $"(ansi grey)($message)\n---\n($placehold)\n---(ansi reset)"
    print $"(ansi grey)($msg.content)\n---(ansi reset)"
  }
  if not $forget {
    # Lazily create this model's history list, then optionally reset it,
    # then record the outgoing user message.
    if ($env.OPENAI_CHAT | is-empty) or ($model not-in $env.OPENAI_CHAT) {
      $env.OPENAI_CHAT = ($env.OPENAI_CHAT | insert $model [])
    }
    if $reset {
      $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model [])
      print '✨'
    }
    $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model {|x| $x | get $model | append $msg})
  }

  # Stream the reply: each line carries a JSON chunk whose choices hold delta
  # fragments; print them as they arrive while accumulating the full text.
  let r = http post -t application/json --headers [
    Authorization $"Bearer ($env.OPENAI_API_KEY)"
  ] $"($env.OPENAI_HOST)/v1/chat/completions" {
    model: $model
    messages: [
      ...(if $forget { [] } else { $env.OPENAI_CHAT | get $model })
      $msg
    ]
    stream: true
  }
  | lines
  | reduce -f {msg: '', token: 0} {|i,a|
    let x = $i | parse -r '.*?(?<data>\{.*)'  # drop any non-JSON prefix (e.g. SSE `data: `)
    if ($x | is-empty) { return $a }
    let x = $x | get 0.data | from json
    let m = $x.choices | each { $in.delta.content } | str join
    print -n $m
    $a
    | update msg {|x| $x.msg + $m }
    | update token {|x| $x.token + 1 }
  }
  if not $forget {
    # Append the assistant reply (with chunk count) to the history.
    let r = {role: 'assistant', content: $r.msg, token: $r.token}
    $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model {|x| $x | get $model | append $r })
  }
  if $out { $r.msg }
}


# Embed a single string via /v1/embeddings; returns the embedding vector.
#
# Fix: send the same Authorization header as every other OpenAI call in this
# module — without it, any host that actually checks the API key rejects the
# request.
export def "ai embed" [
  model: string@"nu-complete models"  # model to embed with
  input: string                       # text to embed
] {
  http post -t application/json --headers [
    Authorization $"Bearer ($env.OPENAI_API_KEY)"
  ] $"($env.OPENAI_HOST)/v1/embeddings" {
    model: $model, input: [$input], encoding_format: 'float'
  }
  | get data.0.embedding
}


# Completion source for `ai do`: the first argument completes to prompt names
# from $env.OPENAI_PROMPT; later arguments complete to the keys of that
# prompt's per-position placeholder table.
def 'nu-complete role' [ctx] {
  let words = $ctx | split row '|' | last | str trim -l | split row ' ' | range 2..
  let count = $words | length
  if $count == 1 {
    $env.OPENAI_PROMPT | items {|name, cfg| {value: $name, description: $cfg.description? } }
  } else {
    let cfg = $env.OPENAI_PROMPT | get $words.0
    $cfg.placeholder? | get ($count - 2) | columns
  }
}

# Run a canned prompt from $env.OPENAI_PROMPT: `ai do <name> [values...]`.
# The piped-in text (or, without a pipe, the last positional) fills the
# prompt's input slot; remaining positionals fill the other `{}` slots.
export def 'ai do' [
  ...args: string@"nu-complete role"        # prompt name, then placeholder arguments
  --out(-o)                                 # return the reply instead of only printing it
  --model(-m): string@"nu-complete models"  # override the prompt's default model
  --debug
] {
  # With piped input, every arg after the name is a placeholder value;
  # otherwise the last arg doubles as the input text.
  let input = if ($in | is-empty) { $args | last } else { $in }
  let argv = if ($in | is-empty) { $args | range 1..<-1 } else { $args | range 1.. }
  let role = $env.OPENAI_PROMPT | get $args.0
  # Random marker so user input that itself contains `{}` is not re-substituted.
  let placehold = $"<(random chars -l 6)>"
  let model = if ($model | is-empty) {
    $role | get model
  } else {
    $model
  }
  # Replace the input slot — a prompt line that is just `{}` plus quote
  # characters — with the random marker.
  let prompt = $role | get prompt | each {|x|
    if ($x | str replace -ar "['\"`]+" '' | $in == '{}') {
      $x | str replace '{}' $placehold
    } else {
      $x
    }
  } | str join (char newline)
  # Fill the remaining `{}` slots in order, mapping each positional argument
  # through the role's per-position placeholder table.
  let prompt = $argv | enumerate
  | reduce -f $prompt {|i,a|
    $a | str replace '{}' (($role.placeholder? | get $i.index) | get $i.item)
  }

  $input | ai chat $model -p $placehold --out=$out --debug=$debug $prompt
}
Loading