diff --git a/README.md b/README.md
index b522b0a..d3fb075 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,9 @@ Move it to your `$PATH`;
 sudo mv tlm /usr/local/bin
 ```
 
-⚠️ If you already have CodeLLaMa on your system, you can just use the following command to configure it;
+> [!TIP]
+> If you already have CodeLLaMa on your system, you can configure tlm to use it with the following command:
+
 ```
 tlm config set llm.host
 ```
@@ -59,7 +61,9 @@ Download latest release;
 Invoke-WebRequest -Uri "https://github.com/yusufcanb/tlm/releases/download/1.0-alpha.0/tlama_1.0-alpha.0_windows_amd64.exe" -OutFile "tlm.exe"
 ```
 
-⚠️ If you already have CodeLLaMa on your system, you can just use the following command to configure it;
+> [!TIP]
+> If you already have CodeLLaMa on your system, you can configure tlm to use it with the following command:
+
 ```
 .\tlm.exe config set llm.host
 ```
diff --git a/cmd/Modelfile.explain b/cmd/Modelfile.explain
new file mode 100644
index 0000000..85751ff
--- /dev/null
+++ b/cmd/Modelfile.explain
@@ -0,0 +1,8 @@
+FROM codellama:7b
+
+PARAMETER temperature 0.1
+PARAMETER top_p 0.5
+PARAMETER top_k 40
+PARAMETER seed 1
+
+SYSTEM You are a software program specifically for Command Line Interface usage. The user will give you a UNIX or Windows command. You will briefly explain what the command and its arguments do, without extra commentary.
\ No newline at end of file
diff --git a/cmd/Modelfile.suggest b/cmd/Modelfile.suggest
new file mode 100644
index 0000000..85751ff
--- /dev/null
+++ b/cmd/Modelfile.suggest
@@ -0,0 +1,8 @@
+FROM codellama:7b
+
+PARAMETER temperature 0.1
+PARAMETER top_p 0.5
+PARAMETER top_k 40
+PARAMETER seed 1
+
+SYSTEM You are a software program specifically for Command Line Interface usage. The user will ask you something that can be converted to a UNIX or Windows command. You won't provide information or explanations; your output will be just an executable shell command inside three backticks.
\ No newline at end of file
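Neither Modelfile is wired into the installer yet in this diff (pkg/install's installModelfile is still a stub), while pkg/suggest already generates against a model named tlm:7b. Assuming a local Ollama, the manual equivalent of what the installer is presumably meant to do would be:

```
ollama create tlm:7b -f cmd/Modelfile.suggest
```

(`ollama create` registers a named model from a Modelfile; the explain path currently calls codellama:7b directly, so only the suggest Modelfile strictly needs registering.)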
diff --git a/cmd/VERSION b/cmd/VERSION
new file mode 100644
index 0000000..62289be
--- /dev/null
+++ b/cmd/VERSION
@@ -0,0 +1 @@
+1.0-rc1
\ No newline at end of file
diff --git a/cmd/cli.go b/cmd/cli.go
index 43a81df..4b1da10 100644
--- a/cmd/cli.go
+++ b/cmd/cli.go
@@ -1,17 +1,25 @@
 package main
 
 import (
+	_ "embed"
 	"log"
 	"os"
 
 	"github.com/yusufcanb/tlama/pkg/app"
 )
 
-var version = "1.0"
+//go:embed VERSION
+var version string
+
+//go:embed Modelfile.explain
+var explainModelfile string
+
+//go:embed Modelfile.suggest
+var suggestModelfile string
 
 func main() {
-	tlama := app.New(version)
-	if err := tlama.App.Run(os.Args); err != nil {
+	tlm := app.New(version, suggestModelfile, explainModelfile)
+	if err := tlm.App.Run(os.Args); err != nil {
 		log.Fatal(err)
 	}
 }
diff --git a/go.mod b/go.mod
index aab1dcf..28c7581 100644
--- a/go.mod
+++ b/go.mod
@@ -3,8 +3,11 @@ module github.com/yusufcanb/tlama
 go 1.21
 
 require (
+	github.com/briandowns/spinner v1.23.0
 	github.com/charmbracelet/bubbles v0.18.0
 	github.com/charmbracelet/bubbletea v0.25.0
+	github.com/charmbracelet/huh v0.3.0
+	github.com/charmbracelet/huh/spinner v0.0.0-20240209193029-45947515c4cf
 	github.com/jmorganca/ollama v0.1.25
 	github.com/spf13/viper v1.18.2
 	github.com/urfave/cli/v2 v2.27.1
@@ -13,7 +16,7 @@ require (
 require (
 	github.com/atotto/clipboard v0.1.4 // indirect
 	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
-	github.com/briandowns/spinner v1.23.0 // indirect
+	github.com/catppuccin/go v0.2.0 // indirect
 	github.com/charmbracelet/harmonica v0.2.0 // indirect
 	github.com/charmbracelet/lipgloss v0.9.1 // indirect
 	github.com/containerd/console v1.0.4 // indirect
diff --git a/go.sum b/go.sum
index 5ade1be..8c4ff4d 100644
--- a/go.sum
+++ b/go.sum
@@ -4,12 +4,18 @@ github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiE
 github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
 github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A=
 github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE=
+github.com/catppuccin/go v0.2.0 h1:ktBeIrIP42b/8FGiScP9sgrWOss3lw0Z5SktRoithGA=
+github.com/catppuccin/go v0.2.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
 github.com/charmbracelet/bubbles v0.18.0 h1:PYv1A036luoBGroX6VWjQIE9Syf2Wby2oOl/39KLfy0=
 github.com/charmbracelet/bubbles v0.18.0/go.mod h1:08qhZhtIwzgrtBjAcJnij1t1H0ZRjwHyGsy6AL11PSw=
 github.com/charmbracelet/bubbletea v0.25.0 h1:bAfwk7jRz7FKFl9RzlIULPkStffg5k6pNt5dywy4TcM=
 github.com/charmbracelet/bubbletea v0.25.0/go.mod h1:EN3QDR1T5ZdWmdfDzYcqOCAps45+QIJbLOBxmVNWNNg=
 github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
 github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
+github.com/charmbracelet/huh v0.3.0 h1:CxPplWkgW2yUTDDG0Z4S5HH8SJOosWHd4LxCvi0XsKE=
+github.com/charmbracelet/huh v0.3.0/go.mod h1:fujUdKX8tC45CCSaRQdw789O6uaCRwx8l2NDyKfC4jA=
+github.com/charmbracelet/huh/spinner v0.0.0-20240209193029-45947515c4cf h1:hzfl5rHblaVR/8zfAoCBuqsTcEp/Zvy1IVZBIebZelM=
+github.com/charmbracelet/huh/spinner v0.0.0-20240209193029-45947515c4cf/go.mod h1:2l0nupcBRhdSZQqIiaV2hKwctrYbBbOr9Dn6Smox3f4=
 github.com/charmbracelet/lipgloss v0.9.1 h1:PNyd3jvaJbg4jRHKWXnCj1akQm4rh8dbEzN1p/u1KWg=
 github.com/charmbracelet/lipgloss v0.9.1/go.mod h1:1mPmG4cxScwUQALAAnacHaigiiHB9Pmr+v1VEawJl6I=
 github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
diff --git a/pkg/api/api.go b/pkg/api/api.go
deleted file mode 100644
index a019626..0000000
--- a/pkg/api/api.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package api
-
-import (
-	"bytes"
-	"encoding/json"
-	"fmt"
-	"io"
-	"log"
-	"net/http"
-	"runtime"
-	"strings"
-
-	"github.com/yusufcanb/tlama/pkg/config"
-)
-
-type OllamaAPI struct {
-	Config *config.TlamaConfig
-}
-
-func (o *OllamaAPI) PullModel(model string) {
-	payload := pullModelRequestPayload{
-		Name:   model,
-		Stream: false,
-	}
-
-	jsonBytes, _ := json.Marshal(payload)
-
-	req, err := http.NewRequest("POST", fmt.Sprintf("%s/api/pull", o.Config.Llm.Host), bytes.NewBuffer(jsonBytes))
-	if err != nil {
-		log.Fatal("Error creating HTTP request: ", err.Error())
-	}
-
-	client := http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		log.Fatal("Error making HTTP request: ", err.Error())
-	}
-
-	// Read the response header
-	fmt.Println("Response: Content-length:", resp.Header.Get("Content-length"))
-
-	bytesRead := 0
-	buf := make([]byte, 128)
-
-	// Read the response body
-	for {
-		n, err := resp.Body.Read(buf)
-		bytesRead += n
-
-		if err == io.EOF {
-			break
-		}
-
-		if err != nil {
-			log.Fatal("Error reading HTTP response: ", err.Error())
-		}
-	}
-
-	fmt.Println("Response: Read", bytesRead, "bytes")
-
-}
-
-func (o *OllamaAPI) Generate(prompt string) (string, error) {
-	builder := strings.Builder{}
-	builder.WriteString(prompt)
-	builder.WriteString(fmt.Sprintf(". I'm using %s terminal", o.Config.Shell))
-	builder.WriteString(fmt.Sprintf("on operating system: %s", runtime.GOOS))
-
-	payload := generateRequestPayload{
-		Model:  o.Config.Llm.Model,
-		System: `You are software program specifically for Command Line Interface usage. User will ask you some thing that can be convertible to a UNIX or Windows command. You won't provide information or explanations and your output will be just an executable shell command inside three backticks.`,
-		Prompt: builder.String(),
-		Stream: false,
-		Options: options{
-			Temperature: o.Config.Llm.Parameters.Temperature,
-			TopP:        o.Config.Llm.Parameters.TopP,
-		},
-	}
-
-	jsonBytes, _ := json.Marshal(payload)
-
-	resp, err := http.Post(fmt.Sprintf("%s/api/generate", o.Config.Llm.Host), "application/json", bytes.NewBuffer(jsonBytes))
-	if err != nil {
-		fmt.Println("Error sending request:", err)
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	body, _ := io.ReadAll(resp.Body)
-
-	response := generateResponsePayload{}
-	json.Unmarshal(body, &response)
-	if err != nil {
-		return "", err
-	}
-
-	retval := strings.Replace(response.Response, "```bash", "", -1)
-	retval = strings.Replace(retval, "```", "", -1)
-	retval = strings.Replace(retval, "\n", "", -1)
-
-	return retval, nil
-}
-
-func New(cfg *config.TlamaConfig) *OllamaAPI {
-	return &OllamaAPI{
-		Config: cfg,
-	}
-}
diff --git a/pkg/api/model.go b/pkg/api/model.go
deleted file mode 100644
index acbf72f..0000000
--- a/pkg/api/model.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package api
-
-type pullModelRequestPayload struct {
-	Name     string `json:"name"`
-	Insecure bool   `json:"insecure"`
-	Stream   bool   `json:"stream"`
-}
diff --git a/pkg/api/prompt.go b/pkg/api/prompt.go
deleted file mode 100644
index e59722a..0000000
--- a/pkg/api/prompt.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package api
-
-import (
-	"time"
-)
-
-type options struct { // This is a struct that contains the options for the request
-	Temperature float64 `json:"temperature"`
-	TopP        float64 `json:"top_p"`
-	TopK        int     `json:"top_k"`
-}
-
-type generateRequestPayload struct {
-	Model   string  `json:"model"`
-	System  string  `json:"system"`
-	Prompt  string  `json:"prompt"`
-	Stream  bool    `json:"stream"`
-	Options options `json:"options"`
-}
-
-type generateResponsePayload struct {
-	Model              string    `json:"model"`
-	CreatedAt          time.Time `json:"created_at"`
-	Response           string    `json:"response"`
-	Done               bool      `json:"done"`
-	Context            []int     `json:"context"`
-	TotalDuration      int       `json:"total_duration"`
-	LoadDuration       int       `json:"load_duration"`
-	PromptEvalCount    int       `json:"prompt_eval_count"`
-	PromptEvalDuration int       `json:"prompt_eval_duration"`
-	EvalCount          int       `json:"eval_count"`
-	EvalDuration       int       `json:"eval_duration"`
-}
diff --git a/pkg/app/app.go b/pkg/app/app.go
index 248087d..7057ca2 100644
--- a/pkg/app/app.go
+++ b/pkg/app/app.go
@@ -1,7 +1,8 @@
 package app
 
 import (
-	"github.com/yusufcanb/tlama/pkg/api"
+	_ "embed"
+	ollama "github.com/jmorganca/ollama/api"
 	"github.com/yusufcanb/tlama/pkg/config"
 	"github.com/yusufcanb/tlama/pkg/explain"
 	"github.com/yusufcanb/tlama/pkg/install"
@@ -10,30 +11,36 @@ import (
 	"github.com/urfave/cli/v2"
 )
 
-type TlamaApp struct {
-	App    *cli.App
-	Config *config.TlamaConfig
+type TlmApp struct {
+	App *cli.App
 }
 
-func New(version string) *TlamaApp {
+func New(version string, suggestModelfile string, explainModelfile string) *TlmApp {
+	con := config.New()
+	con.LoadOrCreateConfig()
+
+	o, _ := ollama.ClientFromEnvironment()
+	sug := suggest.New(o, suggestModelfile)
+	exp := explain.New(o, explainModelfile)
+	ins := install.New(o)
 	cliApp := &cli.App{
-		Name:        "tlm",
-		Usage:       "terminal intelligence with local language model.",
-		Description: "tlm is a command line tool to provide terminal intelligence using CodeLLaMa.",
-		Version:     version,
+		Name:            "tlm",
+		Usage:           "local terminal companion powered by CodeLLaMa.",
+		Version:         version,
+		HideHelpCommand: true,
 		Action: func(c *cli.Context) error {
 			return cli.ShowAppHelp(c)
 		},
 		Commands: []*cli.Command{
-			suggest.GetCommand(),
-			explain.GetCommand(),
-			install.GetCommand(),
-			config.GetCommand(),
+			sug.Command(),
+			exp.Command(),
+			ins.Command(),
+			con.Command(),
 			&cli.Command{
 				Name:    "version",
 				Aliases: []string{"v"},
-				Usage:   "Print version.",
+				Usage:   "print version.",
 				Action: func(c *cli.Context) error {
 					cli.ShowVersion(c)
 					return nil
@@ -42,13 +49,7 @@ func New(version string) *TlamaApp {
 	}
 
-	cliApp.HideHelpCommand = true
-	cliApp.Metadata = make(map[string]interface{})
-
-	cliApp.Metadata["config"] = config.New()
-	cliApp.Metadata["api"] = api.New(cliApp.Metadata["config"].(*config.TlamaConfig))
-
-	return &TlamaApp{
+	return &TlmApp{
 		App: cliApp,
 	}
 }
diff --git a/pkg/config/api.go b/pkg/config/api.go
new file mode 100644
index 0000000..2028bcc
--- /dev/null
+++ b/pkg/config/api.go
@@ -0,0 +1,58 @@
+package config
+
+import (
+	"fmt"
+	"github.com/spf13/viper"
+	"github.com/yusufcanb/tlama/pkg/shell"
+	"log"
+	"os"
+	"path"
+)
+
+var defaultLLMHost = "http://localhost:11434"
+
+func isExists(path string) bool {
+	if _, err := os.Stat(path); os.IsNotExist(err) {
+		return false
+	}
+	return true
+}
+
+func (c *Config) LoadOrCreateConfig() {
+	viper.SetConfigName(".tlm")
+	viper.SetConfigType("yaml")
+	viper.AddConfigPath("$HOME")
+
+	homeDir, err := os.UserHomeDir()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	configPath := path.Join(homeDir, ".tlm.yaml")
+	if !isExists(configPath) {
+		viper.Set("shell", shell.GetShell())
+
+		viper.Set("llm.host", defaultLLMHost)
+		viper.Set("llm.suggest", "balanced")
+		viper.Set("llm.explain", "balanced")
+
+		err := os.Setenv("OLLAMA_HOST", defaultLLMHost)
+		if err != nil {
+			fmt.Printf(shell.Err()+" error setting OLLAMA_HOST, %s", err)
+		}
+
+		if err := viper.WriteConfigAs(path.Join(homeDir, ".tlm.yaml")); err != nil {
+			fmt.Printf(shell.Err()+" error writing config file, %s", err)
+		}
+	}
+
+	err = viper.ReadInConfig()
+	if err != nil {
+		log.Fatalf("Error reading config file, %s", err)
+	}
+
+	err = os.Setenv("OLLAMA_HOST", viper.GetString("llm.host"))
+	if err != nil {
+		fmt.Printf(shell.Err()+" %s", err)
+	}
+}
diff --git a/pkg/config/cli.go b/pkg/config/cli.go
new file mode 100644
index 0000000..d2cf48f
--- /dev/null
+++ b/pkg/config/cli.go
@@ -0,0 +1,46 @@
+package config
+
+import (
+	"fmt"
+	"github.com/spf13/viper"
+	"github.com/urfave/cli/v2"
+	"github.com/yusufcanb/tlama/pkg/shell"
+)
+
+func (c *Config) Action(_ *cli.Context) error {
+	var err error
+
+	form := ConfigForm{
+		host:    viper.GetString("llm.host"),
+		shell:   viper.GetString("shell"),
+		explain: viper.GetString("llm.explain"),
+		suggest: viper.GetString("llm.suggest"),
+	}
+
+	err = form.Run()
+	if err != nil {
+		return err
+	}
+
+	viper.Set("shell", form.shell)
+	viper.Set("llm.host", form.host)
+	viper.Set("llm.explain", form.explain)
+	viper.Set("llm.suggest", form.suggest)
+
+	err = viper.WriteConfig()
+	if err != nil {
+		return err
+	}
+
+	fmt.Println(shell.Ok() + " configuration saved")
+	return nil
+}
+
+func (c *Config) Command() *cli.Command {
+	return &cli.Command{
+		Name:    "config",
+		Aliases: []string{"c"},
+		Usage:   "configure preferences.",
+		Action:  c.Action,
+	}
+}
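For reference, the first run of LoadOrCreateConfig above writes a fresh ~/.tlm.yaml. With the defaults in this diff it would look roughly like the following (key order and indentation are whatever viper emits; shell comes from shell.GetShell(), e.g. bash on Linux and powershell on Windows):

```yaml
llm:
    explain: balanced
    host: http://localhost:11434
    suggest: balanced
shell: bash
```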
diff --git a/pkg/config/command.go b/pkg/config/command.go
deleted file mode 100644
index f89f4b1..0000000
--- a/pkg/config/command.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package config
-
-import (
-	"fmt"
-	"strconv"
-
-	"github.com/spf13/viper"
-	"github.com/urfave/cli/v2"
-)
-
-func printAllConfig() {
-	for _, k := range viper.AllKeys() {
-		fmt.Println(fmt.Sprintf("%s = %s", k, viper.GetString(k)))
-	}
-}
-
-func configGetCommand() *cli.Command {
-	return &cli.Command{
-		Name:      "get",
-		Aliases:   []string{"g"},
-		Args:      true,
-		ArgsUsage: "<key> get from config file",
-		HelpName:  "config get",
-		Action: func(c *cli.Context) error {
-
-			arg := c.Args().Get(0)
-			if arg == "" {
-				printAllConfig()
-				return nil
-			}
-
-			val := viper.Get(c.Args().Get(0))
-			if val == "" {
-				fmt.Println(fmt.Sprintf("ERROR: %s not found", c.Args().Get(0)))
-				return nil
-			}
-
-			fmt.Println(val)
-			return nil
-		},
-	}
-}
-
-func configSetCommand() *cli.Command {
-	return &cli.Command{
-		Name:      "set",
-		Args:      true,
-		ArgsUsage: "<key> <value> - set to config file",
-		Action: func(c *cli.Context) error {
-			key := c.Args().Get(0)
-			// check key is in the list
-			if key != "llm.host" && key != "llm.model" && key != "llm.parameters.temperature" && key != "llm.parameters.top_p" {
-				fmt.Println(fmt.Sprintf("%s is not a tlm parameter", key))
-				return nil
-			}
-
-			if key == "llm.parameters.temperature" || key == "llm.parameters.top_p" {
-				value, err := strconv.ParseFloat(c.Args().Get(1), 64)
-				viper.Set(key, value)
-
-				if err != nil {
-					fmt.Println(fmt.Sprintf("%s is not a valid float value", c.Args().Get(1)))
-					return nil
-				}
-				return nil
-			}
-
-			viper.Set(key, c.Args().Get(1))
-			err := viper.WriteConfig()
-			if err != nil {
-				return err
-			}
-
-			printAllConfig()
-			return nil
-		},
-	}
-}
-
-func GetCommand() *cli.Command {
-	return &cli.Command{
-		Name:    "config",
-		Aliases: []string{"c"},
-		Usage:   "Configure tlama parameters.",
-		Action: func(c *cli.Context) error {
-			return nil
-		},
-		Subcommands: []*cli.Command{
-			configGetCommand(),
-			configSetCommand(),
-		},
-	}
-}
diff --git a/pkg/config/config.go b/pkg/config/config.go
index 74bbdfa..31d2d0c 100644
--- a/pkg/config/config.go
+++ b/pkg/config/config.go
@@ -1,63 +1,8 @@
 package config
 
-import (
-	"log"
-	"os"
-	"path"
-
-	"github.com/spf13/viper"
-	"github.com/yusufcanb/tlama/pkg/shell"
-)
-
-func isExists(path string) bool {
-	if _, err := os.Stat(path); os.IsNotExist(err) {
-		return false
-	}
-	return true
+type Config struct {
 }
 
-func loadOrCreateConfig() (*TlamaConfig, error) {
-	viper.SetConfigName(".tlama")
-	viper.SetConfigType("yaml")
-	viper.AddConfigPath("$HOME")
-
-	homeDir, err := os.UserHomeDir()
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	configPath := path.Join(homeDir, ".tlama.yaml")
-	if !isExists(configPath) {
-		viper.Set("shell", shell.GetShell())
-		viper.Set("llm.host", defaultLLMHost)
-		viper.Set("llm.model", defaultLLMModel)
-		viper.Set("llm.parameters.temperature", defaultTemperature)
-		viper.Set("llm.parameters.top_p", defaultTopP)
-
-		if err := viper.WriteConfigAs(path.Join(homeDir, ".tlama.yaml")); err != nil {
-			return nil, err
-		}
-	}
-
-	err = viper.ReadInConfig()
-	if err != nil {
-		return nil, err
-	}
-
-	tlamaConfig := &TlamaConfig{}
-	err = tlamaConfig.LoadConfig()
-	if err != nil {
-		return nil, err
-	}
-
-	return tlamaConfig, nil
-}
-
-func New() *TlamaConfig {
-	cfg, err := loadOrCreateConfig()
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	return cfg
+func New() *Config {
+	return &Config{}
 }
diff --git a/pkg/config/form.go b/pkg/config/form.go
new file mode 100644
index 0000000..a4d97e2
--- /dev/null
+++ b/pkg/config/form.go
@@ -0,0 +1,53 @@
+package config
+
+import "github.com/charmbracelet/huh"
+
+type ConfigForm struct {
+	form *huh.Form
+
+	host    string
+	shell   string
+	explain string
+	suggest string
+}
+
+func (c *ConfigForm) Run() error {
+	c.form = huh.NewForm(
+		huh.NewGroup(
+			huh.NewInput().
+				Title("Ollama Host").
+				Value(&c.host),
+
+			huh.NewSelect[string]().
+				Title("Default Shell (Windows)").
+				Options(
+					huh.NewOption("Windows Powershell", "powershell"),
+					huh.NewOption("Windows Command Prompt", "cmd"),
+				).
+				Value(&c.shell),
+
+			huh.NewSelect[string]().
+				Title("Suggestion Preference").
+				Description("This sets how creative command suggestions will be").
+				Options(
+					huh.NewOption("Stable", "stable"),
+					huh.NewOption("Balanced", "balanced"),
+					huh.NewOption("Creative", "creative"),
+				).
+				Value(&c.suggest),
+
+			huh.NewSelect[string]().
+				Title("Explain Preference").
+				Description("This sets how creative command explanations will be").
+				Options(
+					huh.NewOption("Stable", "stable"),
+					huh.NewOption("Balanced", "balanced"),
+					huh.NewOption("Creative", "creative"),
+				).
+				Value(&c.explain),
+		),
+	)
+
+	return c.form.WithTheme(huh.ThemeBase16()).Run()
+}
diff --git a/pkg/config/model.go b/pkg/config/model.go
deleted file mode 100644
index 00775a4..0000000
--- a/pkg/config/model.go
+++ /dev/null
@@ -1,58 +0,0 @@
-package config
-
-import (
-	"github.com/spf13/viper"
-	"log"
-	"os"
-	"path"
-)
-
-var defaultLLMModel = "codellama:7b"
-var defaultLLMHost = "http://localhost:11434"
-var defaultTemperature = 0.1
-var defaultTopP = 0.5
-
-type llmConfig struct {
-	Host       string
-	Model      string
-	Parameters llmParametersConfig
-}
-
-type llmParametersConfig struct {
-	Temperature float64
-	TopP        float64
-}
-
-type TlamaConfig struct {
-	Shell string
-	Llm   llmConfig
-}
-
-func (t *TlamaConfig) SaveConfig() error {
-	homeDir, err := os.UserHomeDir()
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	if err := viper.WriteConfigAs(path.Join(homeDir, ".tlama.yaml")); err != nil {
-		return err
-	}
-	return nil
-}
-
-func (t *TlamaConfig) LoadConfig() error {
-	t.Shell = viper.Get("shell").(string)
-	t.Llm = llmConfig{
-		Host:  viper.Get("llm.host").(string),
-		Model: viper.Get("llm.model").(string),
-		Parameters: llmParametersConfig{
-			Temperature: viper.Get("llm.parameters.temperature").(float64),
-			TopP:        viper.Get("llm.parameters.top_p").(float64),
-		},
-	}
-	return nil
-}
-
-func (t TlamaConfig) GetOllamaApi() *Ollama {
-	return NewOllama(&t)
-}
diff --git a/pkg/config/ollama.go b/pkg/config/ollama.go
deleted file mode 100644
index eaf52df..0000000
--- a/pkg/config/ollama.go
+++ /dev/null
@@ -1,241 +0,0 @@
-package config
-
-import (
-	"bytes"
-	"context"
-	"encoding/json"
-	"errors"
-	"fmt"
-	"github.com/briandowns/spinner"
-	ollama "github.com/jmorganca/ollama/api"
-	"github.com/yusufcanb/tlama/pkg/shell"
-	"io"
-	"net/http"
-	"os"
-	"os/exec"
-	"strings"
-	"time"
-)
-
-type Ollama struct {
-	cfg *TlamaConfig
-	Api *ollama.Client
-}
-
-func (o *Ollama) createVolume() error {
-	cmd := exec.Command("docker", "volume", "create", "ollama")
-	if err := cmd.Run(); err != nil {
-		return err
-	}
-	return nil
-}
-
-func (o *Ollama) isContainerRunning(name string) (bool, error) {
-	cmd := exec.Command("docker", "ps", "-aqf", "name=ollama")
-	out, err := cmd.Output()
-	if err != nil {
-		return false, err
-	}
-	return strings.TrimSpace(string(out)) != "", nil
-}
-
-func (o *Ollama) removeContainer(name string) error {
-	cmd := exec.Command("docker", "rm", "-f", name)
-	if err := cmd.Run(); err != nil {
-		return err
-	}
-	return nil
-}
-
-func (o *Ollama) IsInstalled() bool {
-	resp, err := http.Get(o.cfg.Llm.Host)
-	if err != nil {
-		return false
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode == http.StatusOK {
-		bodyBytes, err := io.ReadAll(resp.Body)
-		if err != nil {
-			return false
-		}
-		bodyString := string(bodyBytes)
-
-		if bodyString == "Ollama is running" {
-			return true
-		}
-	}
-
-	return false
-}
-
-func (o *Ollama) IsModelExists() bool {
-	fmt.Printf("- Checking %s model is installed...", o.cfg.Llm.Model)
-
-	payload := map[string]string{"name": o.cfg.Llm.Model}
-	jsonPayload, err := json.Marshal(payload)
-	if err != nil {
-		return false
-	}
-
-	client := &http.Client{}
-	req, err := http.NewRequest("POST", o.cfg.Llm.Host+"/api/show", bytes.NewBuffer(jsonPayload))
-	if err != nil {
-		fmt.Println("\t\tno")
-		return false
-	}
-	req.Header.Set("Content-Type", "application/json")
-
-	resp, err := client.Do(req)
-	if err != nil {
-		fmt.Println("\t\tno")
-		return false
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode == http.StatusOK {
-		fmt.Println("\t\tok")
-	}
-
-	return resp.StatusCode == http.StatusOK
-}
-
-func (o *Ollama) IsVolumeInstalled() bool {
-	cmd := exec.Command("docker", "volume", "inspect", "ollama")
-	err := cmd.Run()
-	return err == nil // Returns true if the volume exists, false otherwise
-}
-
-func (o *Ollama) InstallModel() error {
-	payload := map[string]string{"name": "codellama:34b"}
-	jsonPayload, err := json.Marshal(payload)
-	if err != nil {
-		return err
-	}
-
-	// Construct the request
-	req, err := http.NewRequest("POST", o.cfg.Llm.Host+"/api/pull", bytes.NewBuffer(jsonPayload))
-	if err != nil {
-		return err
-	}
-
-	// Execute the request
-	client := &http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		return err
-	}
-	defer resp.Body.Close()
-
-	// Handle streaming response
-	decoder := json.NewDecoder(resp.Body)
-	for {
-		var responseChunk map[string]interface{} // Adjust the type if your API sends structured data
-		if err := decoder.Decode(&responseChunk); err != nil {
-			if err == io.EOF {
-				break // End of stream
-			}
-			return err // Error during decoding
-		}
-		// Log the response chunk
-		fmt.Println(responseChunk["status"]) // Replace with your preferred logging
-	}
-
-	return nil // Installation successful (adjust if needed)
-}
-
-func (o *Ollama) List() error {
-	ctx := context.Background()
-	list, err := o.Api.List(ctx)
-	if err != nil {
-		return err
-	}
-
-	for _, model := range list.Models {
-		fmt.Println(model.Name)
-	}
-	return nil
-}
-
-func (o *Ollama) Pull() error {
-
-	ctx := context.Background()
-	err := o.Api.Pull(ctx, &ollama.PullRequest{
-		Model: o.cfg.Llm.Model,
-	}, func(response ollama.ProgressResponse) error {
-		return nil
-	})
-
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func (o *Ollama) Install() error {
-	var s *spinner.Spinner
-
-	// 1. Check ollama volume exists
-	if !o.IsVolumeInstalled() {
-		fmt.Println("- Ollama volume not found. Creating a new volume.")
-		if err := o.createVolume(); err != nil {
-			return fmt.Errorf("error creating Ollama volume: %v", err)
-		}
-	} else {
-		fmt.Println("- Ollama volume found. Using existing volume.")
-	}
-
-	// 2. Check ollama container exists
-	containerExists, err := o.isContainerRunning("ollama")
-	if err != nil {
-		return fmt.Errorf("error checking for existing container: %v", err)
-	}
-
-	if containerExists {
-		if err := o.removeContainer("ollama"); err != nil {
-			return fmt.Errorf("error removing existing container: %v", err)
-		}
-	}
-
-	// 3. Run the Docker command
-	s = nil
-	s = spinner.New(spinner.CharSets[9], 100*time.Millisecond)
-	s.Suffix = " Creating Ollama container. (might take a few minutes)"
(might take a few minutes)" - s.Start() - cmd, _, stderr := shell.Exec2("docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama") - - err = cmd.Run() - if err != nil { - return errors.New(stderr.String()) - } - s.Stop() - fmt.Println("- Creating Ollama container. done") - - // 4. Pull CodeLlama model - s = nil - s = spinner.New(spinner.CharSets[9], 100*time.Millisecond) - s.Suffix = " Downloading CodeLLaMa. (might take a few minutes)" - s.Start() - err = o.Pull() - if err != nil { - return err - } - s.Stop() - fmt.Println("- Downloading CodeLLaMa. done") - - return nil -} - -func NewOllama(cfg *TlamaConfig) *Ollama { - os.Setenv("OLLAMA_HOST", cfg.Llm.Host) - api, err := ollama.ClientFromEnvironment() - if err != nil { - panic(err) - } - - return &Ollama{ - cfg: cfg, - Api: api, - } -} diff --git a/pkg/config/pull_view.go b/pkg/config/pull_view.go deleted file mode 100644 index 68714aa..0000000 --- a/pkg/config/pull_view.go +++ /dev/null @@ -1,61 +0,0 @@ -package config - -import ( - "github.com/charmbracelet/bubbles/progress" - tea "github.com/charmbracelet/bubbletea" - "github.com/charmbracelet/lipgloss" - "strings" - "time" -) - -const ( - padding = 2 - maxWidth = 80 -) - -type tickMsg time.Time - -var helpStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("#626262")).Render - -type pullViewModel struct { - percent float64 - progress progress.Model -} - -func (m pullViewModel) Init() tea.Cmd { - return tickCmd() -} - -func (m pullViewModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { - switch msg := msg.(type) { - - case tea.KeyMsg: - return m, tea.Quit - - case tea.WindowSizeMsg: - m.progress.Width = msg.Width - padding*2 - 4 - if m.progress.Width > maxWidth { - m.progress.Width = maxWidth - } - return m, nil - - case tickMsg: - return m, tickCmd() - - default: - return m, nil - } -} - -func (m pullViewModel) View() string { - pad := strings.Repeat(" ", padding) - return "\n" + - pad + m.progress.ViewAs(m.percent) + "\n\n" + - pad + helpStyle("Press any key to quit") -} - -func tickCmd() tea.Cmd { - return tea.Tick(time.Second, func(t time.Time) tea.Msg { - return tickMsg(t) - }) -} diff --git a/pkg/explain/api.go b/pkg/explain/api.go new file mode 100644 index 0000000..41b7671 --- /dev/null +++ b/pkg/explain/api.go @@ -0,0 +1,56 @@ +package explain + +import ( + "context" + "fmt" + ollama "github.com/jmorganca/ollama/api" +) + +const ( + Stable string = "stable" + Balanced = "balanced" + Creative = "creative" +) + +func (e *Explain) getParametersFor(preference string) map[string]interface{} { + switch preference { + case Stable: + return map[string]interface{}{ + "temperature": 0.1, + "top_p": 0.25, + } + + case Balanced: + return map[string]interface{}{ + "temperature": 0.5, + "top_p": 0.4, + } + + case Creative: + return map[string]interface{}{ + "temperature": 0.9, + "top_p": 0.7, + } + + default: + return map[string]interface{}{} + } +} + +func (e *Explain) streamExplanationFor(mode, prompt string) error { + onResponseFunc := func(res ollama.GenerateResponse) error { + fmt.Print(res.Response) + return nil + } + + err := e.api.Generate(context.Background(), &ollama.GenerateRequest{ + Model: "codellama:7b", + Prompt: "Explain the command briefly: " + prompt, + Options: e.getParametersFor(mode), + }, onResponseFunc) + + if err != nil { + fmt.Println("Error during generation:", err) + } + return nil +} diff --git a/pkg/explain/cli.go b/pkg/explain/cli.go new file mode 100644 index 0000000..2a0325c --- /dev/null +++ b/pkg/explain/cli.go @@ -0,0 
diff --git a/pkg/explain/cli.go b/pkg/explain/cli.go
new file mode 100644
index 0000000..2a0325c
--- /dev/null
+++ b/pkg/explain/cli.go
@@ -0,0 +1,18 @@
+package explain
+
+import (
+	"github.com/urfave/cli/v2"
+)
+
+func (e *Explain) Action(c *cli.Context) error {
+	return e.streamExplanationFor(Balanced, c.Args().Get(0))
+}
+
+func (e *Explain) Command() *cli.Command {
+	return &cli.Command{
+		Name:    "explain",
+		Aliases: []string{"e"},
+		Usage:   "explain a command.",
+		Action:  e.Action,
+	}
+}
diff --git a/pkg/explain/command.go b/pkg/explain/command.go
deleted file mode 100644
index f141ed5..0000000
--- a/pkg/explain/command.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package explain
-
-import (
-	"github.com/urfave/cli/v2"
-)
-
-func GetCommand() *cli.Command {
-	return &cli.Command{
-		Name:    "explain",
-		Aliases: []string{"e"},
-		Usage:   "Explain a command.",
-		Action:  explainAction,
-	}
-}
diff --git a/pkg/explain/explain.go b/pkg/explain/explain.go
index a454995..1fb6381 100644
--- a/pkg/explain/explain.go
+++ b/pkg/explain/explain.go
@@ -1,35 +1,20 @@
 package explain
 
 import (
-	"context"
-	"fmt"
-	"github.com/jmorganca/ollama/api"
-	"github.com/urfave/cli/v2"
-	"github.com/yusufcanb/tlama/pkg/config"
+	ollama "github.com/jmorganca/ollama/api"
 )
 
-func explainAction(c *cli.Context) error {
-	cfg := c.App.Metadata["config"].(*config.TlamaConfig)
-	ollama := cfg.GetOllamaApi()
-
-	myResponseFunc := func(resp api.GenerateResponse) error {
-		// Process the response here (e.g., print it)
-		fmt.Print(resp.Response)
-		return nil // Or return an error if processing fails
-	}
+type Explain struct {
+	api       *ollama.Client
+	modelfile string
+	system    string
+}
 
-	// Call the Generate function
-	err := ollama.Api.Generate(context.Background(), &api.GenerateRequest{
-		Model:  cfg.Llm.Model,
-		Prompt: "Explain the command briefly: " + c.Args().First(),
-		Options: map[string]interface{}{
-			"num_predict": 128,
-			"temperature": 0.1,
-			"top_p":       0.25,
-		},
-	}, myResponseFunc)
-	if err != nil {
-		fmt.Println("Error during generation:", err)
-	}
-	return nil
+func New(api *ollama.Client, modelfile string) *Explain {
+	e := &Explain{api: api, modelfile: modelfile}
+	e.system = `You are a software program specifically for Command Line Interface usage.
+The user will give you a UNIX or Windows command.
+You will briefly explain what the command and its arguments do.
+`
+	return e
 }
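Worth noting about the explain path: streamExplanationFor leaves GenerateRequest.Stream unset, so the ollama client streams and onResponseFunc fires once per generated chunk, which is what makes the explanation print token by token. A minimal standalone sketch of the same call pattern (assuming an Ollama instance reachable at the default host):

```go
package main

import (
	"context"
	"fmt"
	"log"

	ollama "github.com/jmorganca/ollama/api"
)

func main() {
	// Reads OLLAMA_HOST (default http://localhost:11434), as tlm does.
	client, err := ollama.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	req := &ollama.GenerateRequest{
		Model:  "codellama:7b",
		Prompt: "Explain the command briefly: ls -la",
		// Stream is left unset, so the callback below runs per chunk.
		Options: map[string]interface{}{"temperature": 0.5, "top_p": 0.4},
	}

	err = client.Generate(context.Background(), req, func(res ollama.GenerateResponse) error {
		fmt.Print(res.Response) // print tokens as they arrive
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}
```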
diff --git a/pkg/install/api.go b/pkg/install/api.go
new file mode 100644
index 0000000..ea0834d
--- /dev/null
+++ b/pkg/install/api.go
@@ -0,0 +1,21 @@
+package install
+
+func (i *Install) createVolume(volumeName string) error {
+
+	return nil
+}
+
+func (i *Install) installModelfile(modelfile string) error {
+
+	return nil
+}
+
+func (i *Install) removeContainer() error {
+
+	return nil
+}
+
+func (i *Install) createContainer() error {
+
+	return nil
+}
diff --git a/pkg/install/cli.go b/pkg/install/cli.go
new file mode 100644
index 0000000..9d3dfcd
--- /dev/null
+++ b/pkg/install/cli.go
@@ -0,0 +1,18 @@
+package install
+
+import (
+	"github.com/urfave/cli/v2"
+)
+
+func (i *Install) Action(c *cli.Context) error {
+	return NewInstallForm2().Run()
+}
+
+func (i *Install) Command() *cli.Command {
+	return &cli.Command{
+		Name:    "install",
+		Aliases: []string{"i"},
+		Usage:   "deploy CodeLLaMa to your system.",
+		Action:  i.Action,
+	}
+}
diff --git a/pkg/install/command.go b/pkg/install/command.go
deleted file mode 100644
index efa7a1f..0000000
--- a/pkg/install/command.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package install
-
-import (
-	"fmt"
-	tea "github.com/charmbracelet/bubbletea"
-	"github.com/urfave/cli/v2"
-	"github.com/yusufcanb/tlama/pkg/config"
-	"log"
-)
-
-func GetCommand() *cli.Command {
-	return &cli.Command{
-		Name:    "install",
-		Aliases: []string{"i"},
-		Usage:   "Install LLM to your system.",
-		Action: func(c *cli.Context) error {
-			cfg := c.App.Metadata["config"].(*config.TlamaConfig)
-			ollama := cfg.GetOllamaApi()
-
-			model := initialModel(&initialModelArgs{alreadyInstalled: ollama.IsInstalled()})
-			program := tea.NewProgram(model)
-			_, err := program.Run()
-			if err != nil {
-				log.Fatalf("could not run program: %s", err)
-			}
-			defer program.Quit()
-
-			if model.questions[len(model.questions)-1].answer == false {
-				fmt.Println("\nAbort...")
-				return nil
-			}
-
-			fmt.Println("\n\nInstalling Ollama...\n")
-			err = ollama.Install()
-			if err != nil {
-				fmt.Printf("ERR: %s", err.Error())
-				return nil
-			}
-
-			fmt.Println("\nDone.\nStarting using now;\n\ntlm s \"list all files in cwd\"\n")
-			return nil
-		},
-	}
-}
diff --git a/pkg/install/form.go b/pkg/install/form.go
new file mode 100644
index 0000000..3e0e9fc
--- /dev/null
+++ b/pkg/install/form.go
@@ -0,0 +1,73 @@
+package install
+
+import (
+	"github.com/charmbracelet/huh"
+)
+
+type InstallForm2 struct {
+	form *huh.Form
+
+	redeploy     bool
+	gpuEnabled   bool
+	ollamaImage  string
+	ollamaVolume string
+}
+
+func (i *InstallForm2) Run() error {
+
+	i.form = huh.NewForm(
+		huh.NewGroup(
+			huh.NewSelect[bool]().
+				Title("GPU Support").
+				Options(
+					huh.NewOption("Enable", true),
+					huh.NewOption("Disable", false),
+				).
+				Value(&i.gpuEnabled),
+
+			huh.NewInput().
+				Title("Ollama Image").
+				Value(&i.ollamaImage),
+
+			huh.NewInput().
+				Title("Ollama Volume").
+				Value(&i.ollamaVolume),
+		),
+	)
+
+	if i.redeploy {
+		var c bool
+		err := huh.NewConfirm().
+			Title("Redeploy").
+			Description("An Ollama instance is running on 11434, redeploy?").
+			Affirmative("Proceed").
+			Negative("Abort").
+			Value(&c).
+			WithTheme(huh.ThemeBase16()).
+			Run()
+
+		if err != nil {
+			return err
+		}
+
+		if c {
+			return i.form.WithTheme(huh.ThemeBase16()).Run()
+		}
+
+		return nil
+
+	}
+
+	return i.form.WithTheme(huh.ThemeBase16()).Run()
+}
+
+func NewInstallForm2() *InstallForm2 {
+	ollamaImage := "ollama/ollama:latest"
+	ollamaVolume := "ollama"
+
+	return &InstallForm2{
+		ollamaImage:  ollamaImage,
+		ollamaVolume: ollamaVolume,
+		redeploy:     true,
+	}
+}
diff --git a/pkg/install/install.go b/pkg/install/install.go
index e4cded8..10688b1 100644
--- a/pkg/install/install.go
+++ b/pkg/install/install.go
@@ -1,32 +1,16 @@
 package install
 
 import (
-	"bytes"
-	"fmt"
-	"github.com/yusufcanb/tlama/pkg/shell"
+	_ "embed"
+	ollama "github.com/jmorganca/ollama/api"
 )
 
-func installOllama(gpuSupport bool) error {
-	var stdout, stderr bytes.Buffer
-	var cmdStr string
-
-	if gpuSupport {
-		cmdStr = "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
-	} else {
-		cmdStr = "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
-	}
-
-	cmd := shell.Exec(cmdStr)
-	cmd.Stdout = &stdout
-	cmd.Stderr = &stderr
+type Install struct {
+	api *ollama.Client
+}
 
-	err := cmd.Run()
-	if err != nil {
-		fmt.Println(stderr.String())
-		return err
-	} else {
-		fmt.Println(stdout.String())
+func New(api *ollama.Client) *Install {
+	return &Install{
+		api: api,
 	}
-
-	return nil
 }
diff --git a/pkg/install/setup_view.go b/pkg/install/setup_view.go
deleted file mode 100644
index 7e576a0..0000000
--- a/pkg/install/setup_view.go
+++ /dev/null
@@ -1,129 +0,0 @@
-package install
-
-import (
-	"fmt"
-	tea "github.com/charmbracelet/bubbletea"
-	"slices"
-	"strings"
-)
-
-type (
-	errMsg error
-)
-
-type question struct {
-	question string
-	answer   bool
-	exitOnNo bool
-}
-
-type model struct {
-	questions []question
-	index     int // Index of the current question
-	viewText  strings.Builder
-
-	exited bool
-}
-
-type initialModelArgs struct {
-	alreadyInstalled bool
-}
-
-var confirmText = `
-- Image: ollama:latest
-- Model: codellama:7b
-- Volume: ollama
-- GPU: %v
-
-LLaMa will be deployed and model will be pulled for the first time.
-This process might take a few minutes depending on your network speed.
-
-[enter] to continue
-[ctrl+c] to cancel`
-
-func initialModel(args *initialModelArgs) *model {
-	questions := []question{
-		{question: "Enable GPU support (Only NVIDIA GPUs are supported)? [y/n]", answer: false, exitOnNo: false},
-		{question: fmt.Sprintf(confirmText, args.alreadyInstalled), answer: false, exitOnNo: false}, // Assuming confirmText is defined elsewhere
-	}
-
-	if args.alreadyInstalled {
-		questions = slices.Insert(questions, 0, question{question: "Ollama is already deployed and running, proceed? [y/n]", answer: false, exitOnNo: true})
-	}
-
-	return &model{
-		questions: questions,
-		index:     0,
-		viewText:  strings.Builder{},
-	}
-}
-
-func (m model) Init() tea.Cmd {
-	return nil
-}
-
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
-	if m.index >= len(m.questions) {
-		return m, tea.Quit
-	}
-
-	switch msg := msg.(type) {
-	case tea.KeyMsg:
-		switch msg.String() {
-		case "enter":
-			if m.index == len(m.questions)-1 { // last question
-				m.questions[m.index].answer = true
-				return m, tea.Quit
-			}
-			return m, nil
-		case "y", "Y":
-			if m.index == len(m.questions)-1 { // last question
-				return m, nil
-			}
-			m.questions[m.index].answer = true
-			m.nextQuestion()
-			return m, nil
-		case "n", "N":
-			if m.index == len(m.questions)-1 { // last question
-				return m, nil
-			}
-			m.questions[m.index].answer = false
-			if m.questions[m.index].exitOnNo {
-				return m, tea.Quit
-			}
-			m.nextQuestion()
-		case "ctrl+c":
-			m.exited = true
-			return m, tea.Quit
-		}
-	}
-
-	return m, nil
-}
-
-func (m model) View() string {
-	if m.index >= len(m.questions) {
-		for i := 0; i < m.index; i++ {
-			q := m.questions[i]
-			m.viewText.WriteString(fmt.Sprintf("%s: %v\n", q.question, q.answer))
-		}
-		return m.viewText.String()
-	}
-
-	for i := 0; i < m.index; i++ {
-		q := m.questions[i]
-		m.viewText.WriteString(fmt.Sprintf("%s: %v\n", q.question, q.answer))
-	}
-
-	q := m.questions[m.index]
-	m.viewText.WriteString(fmt.Sprintf("%s\n", q.question))
-
-	return m.viewText.String()
-}
-
-func (m *model) nextQuestion() {
-	m.index++
-	if m.index >= len(m.questions) {
-		return // Reached the end
-	}
-}
diff --git a/pkg/shell/shell.go b/pkg/shell/shell.go
index dcdffd4..f6a5682 100644
--- a/pkg/shell/shell.go
+++ b/pkg/shell/shell.go
@@ -2,10 +2,38 @@ package shell
 
 import (
 	"bytes"
+	"github.com/charmbracelet/lipgloss"
 	"os/exec"
 	"runtime"
 )
 
+func Ok() string {
+	style := lipgloss.NewStyle()
+
+	style = style.Bold(true)
+	style = style.Foreground(lipgloss.Color("2"))
+
+	return style.Render("(ok)")
+}
+
+func SuccessMessage(message string) string {
+	style := lipgloss.NewStyle()
+
+	style = style.Bold(true)
+	style = style.Foreground(lipgloss.Color("2"))
+
+	return style.Render(message)
+}
+
+func Err() string {
+	style := lipgloss.NewStyle()
+
+	style = style.Bold(true)
+	style = style.Foreground(lipgloss.Color("9"))
+
+	return style.Render("[err]")
+}
+
 func GetShell() string {
 	if runtime.GOOS == "windows" {
 		return "powershell"
diff --git a/pkg/suggest/api.go b/pkg/suggest/api.go
new file mode 100644
index 0000000..44c885a
--- /dev/null
+++ b/pkg/suggest/api.go
@@ -0,0 +1,92 @@
+package suggest
+
+import (
+	"context"
+	"fmt"
+	ollama "github.com/jmorganca/ollama/api"
+	"regexp"
+	"runtime"
+	"strings"
+)
+
+const (
+	Stable   string = "stable"
+	Balanced        = "balanced"
+	Creative        = "creative"
+)
+
+func (s *Suggest) getParametersFor(preference string) map[string]interface{} {
+	switch preference {
+	case Stable:
+		return map[string]interface{}{
+			"seed":        42,
+			"temperature": 0.1,
+			"top_p":       0.25,
+		}
+
+	case Balanced:
+		return map[string]interface{}{
+			"seed":        42,
+			"temperature": 0.5,
+			"top_p":       0.4,
+		}
+
+	case Creative:
+		return map[string]interface{}{
+			"seed":        0,
+			"temperature": 0.9,
+			"top_p":       0.7,
+		}
+
+	default:
+		return map[string]interface{}{}
+	}
+}
+
+func (s *Suggest) extractCommandsFromResponse(response string) []string {
+	re := regexp.MustCompile("```([^\n]*)\n([^\n]*)\n```")
+
+	matches := re.FindAllStringSubmatch(response, -1)
+
+	if len(matches) == 0 {
+		return nil
+	}
+
+	var codeSnippets []string
+	for _, match := range matches {
+		if len(match) == 3 {
+			codeSnippets = append(codeSnippets, match[2])
+		}
+	}
+
+	return codeSnippets
+}
+
+func (s *Suggest) getCommandSuggestionFor(mode, shell string, prompt string) (string, error) {
+	var responseText string
+
+	builder := strings.Builder{}
+	builder.WriteString(prompt)
+	builder.WriteString(fmt.Sprintf(". I'm using %s terminal ", shell))
+	builder.WriteString(fmt.Sprintf("on operating system: %s", runtime.GOOS))
+
+	stream := false
+	req := &ollama.GenerateRequest{
+		Model:   "tlm:7b",
+		Prompt:  builder.String(),
+		Stream:  &stream,
+		Options: s.getParametersFor(mode),
+	}
+
+	onResponse := func(res ollama.GenerateResponse) error {
+		responseText = res.Response
+		return nil
+	}
+
+	err := s.api.Generate(context.Background(), req, onResponse)
+	if err != nil {
+		return "", err
+	}
+
+	return responseText, nil
+}
diff --git a/pkg/suggest/cli.go b/pkg/suggest/cli.go
new file mode 100644
index 0000000..5798fe4
--- /dev/null
+++ b/pkg/suggest/cli.go
@@ -0,0 +1,76 @@
+package suggest
+
+import (
+	"errors"
+	"fmt"
+	"github.com/charmbracelet/huh/spinner"
+	"github.com/charmbracelet/lipgloss"
+	"github.com/spf13/viper"
+	"github.com/urfave/cli/v2"
+	"github.com/yusufcanb/tlama/pkg/shell"
+	"time"
+)
+
+func (s *Suggest) Action(c *cli.Context) error {
+	var responseText string
+	var err error
+
+	var t1, t2 time.Time
+
+	prompt := c.Args().Get(0)
+	spinner.New().
+		Type(spinner.Line).
+		Title(" Thinking...").
+		Style(lipgloss.NewStyle().Foreground(lipgloss.Color("2"))).
+		Action(func() {
+			t1 = time.Now()
+			responseText, err = s.getCommandSuggestionFor(Stable, viper.GetString("shell"), prompt)
+			t2 = time.Now()
+		}).
+		Run()
+
+	if err != nil {
+		fmt.Println(shell.Err()+" error getting suggestion:", err)
+		return err
+	}
+
+	fmt.Printf("┃ >"+" Thinking... %s\n", shell.SuccessMessage("("+t2.Sub(t1).String()+")"))
+
+	suggestions := s.extractCommandsFromResponse(responseText)
+	if len(suggestions) == 0 {
+		return errors.New("no command found in model response")
+	}
+
+	form := NewCommandForm(suggestions[0])
+	err = form.Run()
+	if err != nil {
+		fmt.Println(shell.Err() + " " + err.Error())
+		return err
+	}
+
+	fmt.Println("┃ > " + form.command + "\n")
+	if form.confirm {
+		cmd, stdout, stderr := shell.Exec2(form.command)
+		cmd.Run()
+
+		if stderr.String() != "" {
+			fmt.Println(stderr.String())
+			return errors.New("command failed")
+		}
+
+		fmt.Println(stdout.String())
+		return nil
+	}
+
+	fmt.Println("suggestion elapsed time:", t2.Sub(t1))
+	return nil
+}
+
+func (s *Suggest) Command() *cli.Command {
+	return &cli.Command{
+		Name:    "suggest",
+		Aliases: []string{"s"},
+		Usage:   "suggest a command.",
+		Action:  s.Action,
+	}
+}
diff --git a/pkg/suggest/command.go b/pkg/suggest/command.go
deleted file mode 100644
index ade3267..0000000
--- a/pkg/suggest/command.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package suggest
-
-import (
-	"github.com/urfave/cli/v2"
-)
-
-func GetCommand() *cli.Command {
-	return &cli.Command{
-		Name:    "suggest",
-		Aliases: []string{"s"},
-		Usage:   "Suggest a command.",
-		Action:  promptAction,
-	}
-}
diff --git a/pkg/suggest/form.go b/pkg/suggest/form.go
new file mode 100644
index 0000000..683d803
--- /dev/null
+++ b/pkg/suggest/form.go
@@ -0,0 +1,32 @@
+package suggest
+
+import (
+	"github.com/charmbracelet/huh"
+)
+
+type CommandForm struct {
+	command string
+	confirm bool
+}
+
+func (s *CommandForm) Run() error {
+	group := huh.NewGroup(
+		huh.NewInput().
+			Value(&s.command),
+		huh.NewConfirm().
+			Value(&s.confirm).
+			Affirmative("execute").
+			Negative("abort").
+			WithHeight(1),
+	)
+
+	form := huh.NewForm(group).
+		WithTheme(huh.ThemeBase16()).
+		WithKeyMap(huh.NewDefaultKeyMap())
+
+	return form.Run()
+}
+
+func NewCommandForm(command string) *CommandForm {
+	return &CommandForm{command: command, confirm: true}
+}
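To make the extraction contract above concrete: the regexp keeps only the middle line of each three-backtick block, so the model must reply with a fenced, single-line command, which is exactly what the Modelfile.suggest SYSTEM prompt asks for. A small self-contained check (the fence string is built at runtime so this snippet itself stays readable):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	fence := strings.Repeat("`", 3) // three backticks

	// Same pattern as Suggest.extractCommandsFromResponse.
	re := regexp.MustCompile(fence + "([^\n]*)\n([^\n]*)\n" + fence)

	// A typical reply under the Modelfile.suggest system prompt.
	response := "Sure:\n" + fence + "bash\nls -la\n" + fence

	for _, match := range re.FindAllStringSubmatch(response, -1) {
		if len(match) == 3 {
			fmt.Println(match[2]) // prints: ls -la
		}
	}
}
```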
diff --git a/pkg/suggest/suggest.go b/pkg/suggest/suggest.go
index 39b453a..1723659 100644
--- a/pkg/suggest/suggest.go
+++ b/pkg/suggest/suggest.go
@@ -1,60 +1,14 @@
 package suggest
 
 import (
-	tea "github.com/charmbracelet/bubbletea"
-	"github.com/urfave/cli/v2"
-	"github.com/yusufcanb/tlama/pkg/api"
-	"log"
-	"sync"
+	ollama "github.com/jmorganca/ollama/api"
 )
 
-func promptAction(c *cli.Context) error {
-	var wg sync.WaitGroup
-
-	var program *tea.Program
-	var command string
-	var err error
-
-	prompt := c.Args().Get(0)
-
-	if prompt == "" {
-		err := cli.ShowAppHelp(c)
-		if err != nil {
-			return err
-		}
-		return nil
-	}
-
-	wg.Add(1)
-
-	go func() {
-		program = tea.NewProgram(NewRequestView())
-		_, err = program.Run()
-		if err != nil {
-			log.Fatalf("could not run program: %s", err)
-		}
-		defer program.Quit()
-	}()
-
-	go func() {
-		command, err = c.App.Metadata["api"].(*api.OllamaAPI).Generate(prompt)
-		wg.Done()
-		defer program.Quit()
-	}()
-
-	wg.Wait()
-
-	if err != nil {
-		log.Fatal("Couldn't get the prompt from local LLM.", err)
-	}
-
-	if command == "" {
-		log.Fatal("Prompt is empty.")
-	}
-
-	if _, err := tea.NewProgram(NewPromptView(command)).Run(); err != nil {
-		log.Fatalf("could not run program: %s", err)
-	}
+type Suggest struct {
+	api       *ollama.Client
+	modelfile string
+}
 
-	return nil
+func New(api *ollama.Client, modelfile string) *Suggest {
+	return &Suggest{api: api, modelfile: modelfile}
 }
diff --git a/pkg/suggest/prompt_view.go b/pkg/suggest/views/prompt_view.go
similarity index 96%
rename from pkg/suggest/prompt_view.go
rename to pkg/suggest/views/prompt_view.go
index d27f092..6b101b8 100644
--- a/pkg/suggest/prompt_view.go
+++ b/pkg/suggest/views/prompt_view.go
@@ -1,4 +1,4 @@
-package suggest
+package views
 
 import (
 	"bytes"
@@ -73,8 +73,6 @@ func (m suggestViewModel) View() string {
 func NewPromptView(prompt string) suggestViewModel {
 	ti := textinput.New()
 	ti.SetValue(prompt)
-	ti.CharLimit = 256
-	ti.Width = 256
 
 	return suggestViewModel{
 		textInput: ti,
diff --git a/pkg/suggest/request_view.go b/pkg/suggest/views/request_view.go
similarity index 98%
rename from pkg/suggest/request_view.go
rename to pkg/suggest/views/request_view.go
index b1c6dfa..453e654 100644
--- a/pkg/suggest/request_view.go
+++ b/pkg/suggest/views/request_view.go
@@ -1,4 +1,4 @@
-package suggest
+package views
 
 import (
 	"fmt"
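Taken together, the refactor leaves this command surface (names and aliases as registered in pkg/app/app.go). A quick smoke test after an Ollama deployment might look like:

```
tlm install                    # or: tlm i (deploy CodeLLaMa via Docker)
tlm config                     # or: tlm c (interactive preferences form)
tlm suggest "list all files"   # or: tlm s
tlm explain "ls -la"           # or: tlm e
tlm version                    # or: tlm v
```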