diff --git a/pkg/cli/eval.go b/pkg/cli/eval.go
new file mode 100644
index 00000000..bb7e57c8
--- /dev/null
+++ b/pkg/cli/eval.go
@@ -0,0 +1,71 @@
+package cli
+
+import (
+    "fmt"
+    "os"
+    "strconv"
+    "strings"
+
+    "github.com/gptscript-ai/gptscript/pkg/gptscript"
+    "github.com/gptscript-ai/gptscript/pkg/input"
+    "github.com/gptscript-ai/gptscript/pkg/loader"
+    "github.com/gptscript-ai/gptscript/pkg/types"
+    "github.com/spf13/cobra"
+)
+
+type Eval struct {
+    Tools          []string `usage:"Tools available to call"`
+    MaxTokens      int      `usage:"Maximum number of tokens to output"`
+    Model          string   `usage:"The model to use"`
+    JSON           bool     `usage:"Output JSON"`
+    Temperature    string   `usage:"Set the temperature, \"creativity\""`
+    InternalPrompt *bool    `usage:"Set to false to disable the internal prompt"`
+
+    gptscript *GPTScript
+}
+
+func (e *Eval) Run(cmd *cobra.Command, args []string) error {
+    tool := types.Tool{
+        Parameters: types.Parameters{
+            Description:    "inline script",
+            Tools:          e.Tools,
+            MaxTokens:      e.MaxTokens,
+            ModelName:      e.Model,
+            JSONResponse:   e.JSON,
+            InternalPrompt: e.InternalPrompt,
+        },
+        Instructions: strings.Join(args, " "),
+    }
+
+    if e.Temperature != "" {
+        temp, err := strconv.ParseFloat(e.Temperature, 32)
+        if err != nil {
+            return fmt.Errorf("failed to parse %v: %v", e.Temperature, err)
+        }
+        temp32 := float32(temp)
+        tool.Temperature = &temp32
+    }
+
+    prg, err := loader.ProgramFromSource(cmd.Context(), tool.String(), "")
+    if err != nil {
+        return err
+    }
+
+    opts := e.gptscript.NewGPTScriptOpts()
+    runner, err := gptscript.New(&opts)
+    if err != nil {
+        return err
+    }
+
+    toolInput, err := input.FromFile(e.gptscript.Input)
+    if err != nil {
+        return err
+    }
+
+    toolOutput, err := runner.Run(e.gptscript.NewRunContext(cmd), prg, os.Environ(), toolInput)
+    if err != nil {
+        return err
+    }
+
+    return e.gptscript.PrintOutput("", toolOutput)
+}
diff --git a/pkg/cli/gptscript.go b/pkg/cli/gptscript.go
index 609f1dae..6c41b957 100644
--- a/pkg/cli/gptscript.go
+++ b/pkg/cli/gptscript.go
@@ -1,9 +1,11 @@
 package cli
 
 import (
+    "context"
     "fmt"
     "io"
     "os"
+    "sort"
     "strings"
 
     "github.com/acorn-io/cmd"
@@ -41,17 +43,38 @@ type GPTScript struct {
     Quiet         *bool  `usage:"No output logging (set --quiet=false to force on even when there is no TTY)" short:"q"`
     Output        string `usage:"Save output to a file, or - for stdout" short:"o"`
     Input         string `usage:"Read input from a file (\"-\" for stdin)" short:"f"`
-    SubTool       string `usage:"Use tool of this name, not the first tool in file"`
-    Assemble      bool   `usage:"Assemble tool to a single artifact, saved to --output" hidden:"true"`
-    ListModels    bool   `usage:"List the models available and exit"`
-    ListTools     bool   `usage:"List built-in tools and exit"`
-    Server        bool   `usage:"Start server"`
-    ListenAddress string `usage:"Server listen address" default:"127.0.0.1:9090"`
+    SubTool       string `usage:"Use tool of this name, not the first tool in file" local:"true"`
+    Assemble      bool   `usage:"Assemble tool to a single artifact, saved to --output" hidden:"true" local:"true"`
+    ListModels    bool   `usage:"List the models available and exit" local:"true"`
+    ListTools     bool   `usage:"List built-in tools and exit" local:"true"`
+    Server        bool   `usage:"Start server" local:"true"`
+    ListenAddress string `usage:"Server listen address" default:"127.0.0.1:9090" local:"true"`
     Chdir         string `usage:"Change current working directory" short:"C"`
 }
 
 func New() *cobra.Command {
-    return cmd.Command(&GPTScript{})
+    root := &GPTScript{}
+    return cmd.Command(root, &Eval{
+        gptscript: root,
+    })
+}
+
+func (r *GPTScript) NewRunContext(cmd *cobra.Command) context.Context {
+    ctx := cmd.Context()
+    if r.Confirm {
+        ctx = confirm.WithConfirm(ctx, confirm.TextPrompt{})
+    }
+    return ctx
+}
+
+func (r *GPTScript) NewGPTScriptOpts() gptscript.Options {
+    return gptscript.Options{
+        Cache:   cache.Options(r.CacheOptions),
+        OpenAI:  openai.Options(r.OpenAIOptions),
+        Monitor: monitor.Options(r.DisplayOptions),
+        Quiet:   r.Quiet,
+        Env:     os.Environ(),
+    }
 }
 
 func (r *GPTScript) Customize(cmd *cobra.Command) {
@@ -74,16 +97,27 @@ func (r *GPTScript) Customize(cmd *cobra.Command) {
     }
 }
 
-func (r *GPTScript) listTools() error {
+func (r *GPTScript) listTools(ctx context.Context, gptScript *gptscript.GPTScript, prg types.Program) error {
+    tools := gptScript.ListTools(ctx, prg)
+    sort.Slice(tools, func(i, j int) bool {
+        return tools[i].Name < tools[j].Name
+    })
     var lines []string
-    for _, tool := range builtin.ListTools() {
+    for _, tool := range tools {
+        if tool.Name == "" {
+            tool.Name = prg.Name
+        }
+
+        // Don't print instructions
+        tool.Instructions = ""
+
         lines = append(lines, tool.String())
     }
     fmt.Println(strings.Join(lines, "\n---\n"))
     return nil
 }
 
-func (r *GPTScript) Pre(*cobra.Command, []string) error {
+func (r *GPTScript) PersistentPre(*cobra.Command, []string) error {
     // chdir as soon as possible
     if r.Chdir != "" {
         if err := os.Chdir(r.Chdir); err != nil {
@@ -111,10 +145,7 @@ func (r *GPTScript) Pre(*cobra.Command, []string) error {
             mvl.SetError()
         }
     }
-    return nil
-}
 
-func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
     if r.Color != nil {
         color.NoColor = !*r.Color
     }
@@ -123,14 +154,61 @@ func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
         log.Infof("WARNING: Changing the default model can have unknown behavior for existing tools. Use the model field per tool instead.")
     }
 
-    gptOpt := gptscript.Options{
-        Cache:   cache.Options(r.CacheOptions),
-        OpenAI:  openai.Options(r.OpenAIOptions),
-        Monitor: monitor.Options(r.DisplayOptions),
-        Quiet:   r.Quiet,
-        Env:     os.Environ(),
+    return nil
+}
+
+func (r *GPTScript) listModels(ctx context.Context, gptScript *gptscript.GPTScript, args []string) error {
+    models, err := gptScript.ListModels(ctx, args...)
+    if err != nil {
+        return err
+    }
+    fmt.Println(strings.Join(models, "\n"))
+    return nil
+}
+
+func (r *GPTScript) readProgram(ctx context.Context, args []string) (prg types.Program, err error) {
+    if len(args) == 0 {
+        return
     }
 
+    if args[0] == "-" {
+        data, err := io.ReadAll(os.Stdin)
+        if err != nil {
+            return prg, err
+        }
+        return loader.ProgramFromSource(ctx, string(data), r.SubTool)
+    }
+
+    return loader.Program(ctx, args[0], r.SubTool)
+}
+
+func (r *GPTScript) PrintOutput(toolInput, toolOutput string) (err error) {
+    if r.Output != "" {
+        err = os.WriteFile(r.Output, []byte(toolOutput), 0644)
+        if err != nil {
+            return err
+        }
+    } else {
+        if !*r.Quiet {
+            if toolInput != "" {
+                _, _ = fmt.Fprint(os.Stderr, "\nINPUT:\n\n")
+                _, _ = fmt.Fprintln(os.Stderr, toolInput)
+            }
+            _, _ = fmt.Fprint(os.Stderr, "\nOUTPUT:\n\n")
+        }
+        fmt.Print(toolOutput)
+        if !strings.HasSuffix(toolOutput, "\n") {
+            fmt.Println()
+        }
+    }
+
+    return
+}
+
+func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
+    gptOpt := r.NewGPTScriptOpts()
+    ctx := cmd.Context()
+
     if r.Server {
         s, err := server.New(&server.Options{
             ListenAddress: r.ListenAddress,
@@ -140,7 +218,7 @@ func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
             return err
         }
         defer s.Close()
-        return s.Start(cmd.Context())
+        return s.Start(ctx)
     }
 
     gptScript, err := gptscript.New(&gptOpt)
@@ -150,42 +228,22 @@ func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
     defer gptScript.Close()
 
     if r.ListModels {
-        models, err := gptScript.ListModels(cmd.Context(), args...)
-        if err != nil {
-            return err
-        }
-        fmt.Println(strings.Join(models, "\n"))
-        return nil
+        return r.listModels(ctx, gptScript, args)
+    }
+
+    prg, err := r.readProgram(ctx, args)
+    if err != nil {
+        return err
     }
 
     if r.ListTools {
-        return r.listTools()
+        return r.listTools(ctx, gptScript, prg)
     }
 
     if len(args) == 0 {
         return cmd.Help()
     }
 
-    var (
-        prg types.Program
-    )
-
-    if args[0] == "-" {
-        data, err := io.ReadAll(os.Stdin)
-        if err != nil {
-            return err
-        }
-        prg, err = loader.ProgramFromSource(cmd.Context(), string(data), r.SubTool)
-        if err != nil {
-            return err
-        }
-    } else {
-        prg, err = loader.Program(cmd.Context(), args[0], r.SubTool)
-        if err != nil {
-            return err
-        }
-    }
-
     if r.Assemble {
         var out io.Writer = os.Stdout
         if r.Output != "" && r.Output != "-" {
@@ -205,33 +263,10 @@ func (r *GPTScript) Run(cmd *cobra.Command, args []string) error {
         return err
     }
 
-    ctx := cmd.Context()
-    if r.Confirm {
-        ctx = confirm.WithConfirm(ctx, confirm.TextPrompt{})
-    }
-    s, err := gptScript.Run(ctx, prg, os.Environ(), toolInput)
+    s, err := gptScript.Run(r.NewRunContext(cmd), prg, os.Environ(), toolInput)
     if err != nil {
         return err
     }
 
-    if r.Output != "" {
-        err = os.WriteFile(r.Output, []byte(s), 0644)
-        if err != nil {
-            return err
-        }
-    } else {
-        if !*r.Quiet {
-            if toolInput != "" {
-                _, _ = fmt.Fprint(os.Stderr, "\nINPUT:\n\n")
-                _, _ = fmt.Fprintln(os.Stderr, toolInput)
-            }
-            _, _ = fmt.Fprint(os.Stderr, "\nOUTPUT:\n\n")
-        }
-        fmt.Print(s)
-        if !strings.HasSuffix(s, "\n") {
-            fmt.Println()
-        }
-    }
-
-    return nil
+    return r.PrintOutput(toolInput, s)
 }
diff --git a/pkg/engine/daemon.go b/pkg/engine/daemon.go
index 01e77533..a2911512 100644
--- a/pkg/engine/daemon.go
+++ b/pkg/engine/daemon.go
@@ -14,7 +14,7 @@ import (
     "github.com/gptscript-ai/gptscript/pkg/types"
 )
 
-var (
+type Ports struct {
     daemonPorts map[string]int64
     daemonLock  sync.Mutex
 
@@ -23,29 +23,29 @@
     daemonCtx   context.Context
     daemonClose func()
     daemonWG    sync.WaitGroup
-)
+}
 
-func CloseDaemons() {
-    daemonLock.Lock()
-    if daemonCtx == nil {
-        daemonLock.Unlock()
+func (p *Ports) CloseDaemons() {
+    p.daemonLock.Lock()
+    if p.daemonCtx == nil {
+        p.daemonLock.Unlock()
         return
     }
-    daemonLock.Unlock()
+    p.daemonLock.Unlock()
 
-    daemonClose()
-    daemonWG.Wait()
+    p.daemonClose()
+    p.daemonWG.Wait()
 }
 
-func (e *Engine) getNextPort() int64 {
-    if startPort == 0 {
-        startPort = 10240
-        endPort = 11240
+func (p *Ports) NextPort() int64 {
+    if p.startPort == 0 {
+        p.startPort = 10240
+        p.endPort = 11240
     }
     // This is pretty simple and inefficient approach, but also never releases ports
-    count := endPort - startPort + 1
+    count := p.endPort - p.startPort + 1
     toTry := make([]int64, 0, count)
-    for i := startPort; i <= endPort; i++ {
+    for i := p.startPort; i <= p.endPort; i++ {
         toTry = append(toTry, i)
     }
 
@@ -54,13 +54,13 @@ func (e *Engine) getNextPort() int64 {
     })
 
     for _, nextPort := range toTry {
-        if _, ok := usedPorts[nextPort]; ok {
+        if _, ok := p.usedPorts[nextPort]; ok {
             continue
         }
-        if usedPorts == nil {
-            usedPorts = map[int64]struct{}{}
+        if p.usedPorts == nil {
+            p.usedPorts = map[int64]struct{}{}
         }
-        usedPorts[nextPort] = struct{}{}
+        p.usedPorts[nextPort] = struct{}{}
         return nextPort
     }
 
@@ -89,25 +89,25 @@ func getPath(instructions string) (string, string) {
 }
 
 func (e *Engine) startDaemon(_ context.Context, tool types.Tool) (string, error) {
-    daemonLock.Lock()
-    defer daemonLock.Unlock()
+    e.Ports.daemonLock.Lock()
+    defer e.Ports.daemonLock.Unlock()
 
     instructions := strings.TrimPrefix(tool.Instructions, types.DaemonPrefix)
     instructions, path := getPath(instructions)
     tool.Instructions = types.CommandPrefix + instructions
 
-    port, ok := daemonPorts[tool.ID]
+    port, ok := e.Ports.daemonPorts[tool.ID]
     url := fmt.Sprintf("http://127.0.0.1:%d%s", port, path)
     if ok {
         return url, nil
     }
 
-    if daemonCtx == nil {
-        daemonCtx, daemonClose = context.WithCancel(context.Background())
+    if e.Ports.daemonCtx == nil {
+        e.Ports.daemonCtx, e.Ports.daemonClose = context.WithCancel(context.Background())
     }
 
-    ctx := daemonCtx
-    port = e.getNextPort()
+    ctx := e.Ports.daemonCtx
+    port = e.Ports.NextPort()
     url = fmt.Sprintf("http://127.0.0.1:%d%s", port, path)
 
     cmd, stop, err := e.newCommand(ctx, []string{
@@ -135,10 +135,10 @@ func (e *Engine) startDaemon(_ context.Context, tool types.Tool) (string, error)
         return url, err
     }
 
-    if daemonPorts == nil {
-        daemonPorts = map[string]int64{}
+    if e.Ports.daemonPorts == nil {
+        e.Ports.daemonPorts = map[string]int64{}
     }
-    daemonPorts[tool.ID] = port
+    e.Ports.daemonPorts[tool.ID] = port
 
     killedCtx, cancel := context.WithCancelCause(ctx)
     defer cancel(nil)
@@ -153,18 +153,18 @@ func (e *Engine) startDaemon(_ context.Context, tool types.Tool) (string, error)
         cancel(err)
         stop()
 
-        daemonLock.Lock()
-        defer daemonLock.Unlock()
+        e.Ports.daemonLock.Lock()
+        defer e.Ports.daemonLock.Unlock()
 
-        delete(daemonPorts, tool.ID)
+        delete(e.Ports.daemonPorts, tool.ID)
     }()
 
-    daemonWG.Add(1)
+    e.Ports.daemonWG.Add(1)
     context.AfterFunc(ctx, func() {
         if err := cmd.Process.Kill(); err != nil {
             log.Debugf("daemon failed to kill tool [%s] process: %v", tool.Parameters.Name, err)
         }
-        daemonWG.Done()
+        e.Ports.daemonWG.Done()
     })
 
     for i := 0; i < 20; i++ {
diff --git a/pkg/engine/engine.go b/pkg/engine/engine.go
index 31788790..ebc83454 100644
--- a/pkg/engine/engine.go
+++ b/pkg/engine/engine.go
@@ -35,6 +35,7 @@ type Engine struct {
     RuntimeManager RuntimeManager
     Env            []string
     Progress       chan<- types.CompletionStatus
+    Ports          *Ports
 }
 
 type State struct {
diff --git a/pkg/gptscript/gptscript.go b/pkg/gptscript/gptscript.go
index 9c64b2ee..41a0a593 100644
--- a/pkg/gptscript/gptscript.go
+++ b/pkg/gptscript/gptscript.go
@@ -4,6 +4,7 @@ import (
     "context"
     "os"
 
+    "github.com/gptscript-ai/gptscript/pkg/builtin"
     "github.com/gptscript-ai/gptscript/pkg/cache"
     "github.com/gptscript-ai/gptscript/pkg/engine"
     "github.com/gptscript-ai/gptscript/pkg/llm"
@@ -97,13 +98,20 @@ func (g *GPTScript) Run(ctx context.Context, prg types.Program, envs []string, i
 }
 
 func (g *GPTScript) Close() {
-    engine.CloseDaemons()
+    g.Runner.Close()
 }
 
 func (g *GPTScript) GetModel() engine.Model {
     return g.Registry
 }
 
+func (g *GPTScript) ListTools(_ context.Context, prg types.Program) []types.Tool {
+    if prg.EntryToolID == "" {
+        return builtin.ListTools()
+    }
+    return prg.TopLevelTools()
+}
+
 func (g *GPTScript) ListModels(ctx context.Context, providers ...string) ([]string, error) {
     return g.Registry.ListModels(ctx, providers...)
 }
diff --git a/pkg/runner/runner.go b/pkg/runner/runner.go
index 292b2903..77636076 100644
--- a/pkg/runner/runner.go
+++ b/pkg/runner/runner.go
@@ -43,6 +43,7 @@ type Runner struct {
     c              engine.Model
     factory        MonitorFactory
     runtimeManager engine.RuntimeManager
+    ports          engine.Ports
 }
 
 func New(client engine.Model, opts ...Options) (*Runner, error) {
@@ -55,6 +56,10 @@ func New(client engine.Model, opts ...Options) (*Runner, error) {
     }, nil
 }
 
+func (r *Runner) Close() {
+    r.ports.CloseDaemons()
+}
+
 func (r *Runner) Run(ctx context.Context, prg types.Program, env []string, input string) (output string, err error) {
     monitor, err := r.factory.Start(ctx, &prg, env, input)
     if err != nil {
@@ -101,6 +106,7 @@ func (r *Runner) call(callCtx engine.Context, monitor Monitor, env []string, inp
         RuntimeManager: r.runtimeManager,
         Progress:       progress,
         Env:            env,
+        Ports:          &r.ports,
     }
 
     monitor.Event(Event{
diff --git a/pkg/types/tool.go b/pkg/types/tool.go
index 685f3674..777e7953 100644
--- a/pkg/types/tool.go
+++ b/pkg/types/tool.go
@@ -19,10 +19,16 @@ const (
 type ToolSet map[string]Tool
 
 type Program struct {
-    Name        string            `json:"name,omitempty"`
-    EntryToolID string            `json:"entryToolId,omitempty"`
-    ToolSet     ToolSet           `json:"toolSet,omitempty"`
-    Exports     map[string]string `json:"exports,omitempty"`
+    Name        string  `json:"name,omitempty"`
+    EntryToolID string  `json:"entryToolId,omitempty"`
+    ToolSet     ToolSet `json:"toolSet,omitempty"`
+}
+
+func (p Program) TopLevelTools() (result []Tool) {
+    for _, tool := range p.ToolSet[p.EntryToolID].LocalTools {
+        result = append(result, p.ToolSet[tool])
+    }
+    return
 }
 
 func (p Program) SetBlocking() Program {
@@ -82,7 +88,7 @@ func (t Tool) String() string {
         _, _ = fmt.Fprintf(buf, "Max Tokens: %d\n", t.Parameters.MaxTokens)
     }
     if t.Parameters.ModelName != "" {
-        _, _ = fmt.Fprintf(buf, "Model Name: %s\n", t.Parameters.ModelName)
+        _, _ = fmt.Fprintf(buf, "Model: %s\n", t.Parameters.ModelName)
     }
     if t.Parameters.ModelProvider {
         _, _ = fmt.Fprintf(buf, "Model Provider: true\n")
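
Below is a minimal, self-contained sketch (not part of the diff above) of how the refactored embedding API fits together after this change: build gptscript.Options (the CLI does this via NewGPTScriptOpts), load a program with loader.ProgramFromSource, run it, and call Close(), which now goes through Runner.Close() and the per-runner Ports tracker instead of the old package-level engine.CloseDaemons(). The inline instruction text and the mostly zero-value Options are illustrative assumptions; a real caller would populate cache, OpenAI, and monitor options the way the CLI does.

package main

import (
    "context"
    "fmt"
    "os"

    "github.com/gptscript-ai/gptscript/pkg/gptscript"
    "github.com/gptscript-ai/gptscript/pkg/loader"
)

func main() {
    ctx := context.Background()

    // Mostly zero-value Options for brevity (assumption); the CLI fills in
    // cache, OpenAI, and monitor settings via NewGPTScriptOpts.
    g, err := gptscript.New(&gptscript.Options{Env: os.Environ()})
    if err != nil {
        panic(err)
    }
    // Close() now reaches Ports.CloseDaemons() through Runner.Close(),
    // replacing the old package-level engine.CloseDaemons().
    defer g.Close()

    // "Say hello" is a placeholder inline tool body, the same kind of
    // source text the new eval command assembles from its arguments.
    prg, err := loader.ProgramFromSource(ctx, "Say hello", "")
    if err != nil {
        panic(err)
    }

    out, err := g.Run(ctx, prg, os.Environ(), "")
    if err != nil {
        panic(err)
    }
    fmt.Println(out)
}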