From 9e257157a851ec0f572db328c3f151c1e222402f Mon Sep 17 00:00:00 2001
From: Taylor Price
Date: Wed, 5 Jun 2024 09:27:02 -0700
Subject: [PATCH] chore: sync docs with cli

Signed-off-by: Taylor Price
---
 .../01-command-line/gptscript.md       | 65 ++++++++++---------
 .../01-command-line/gptscript_eval.md  |  3 -
 .../01-command-line/gptscript_fmt.md   |  3 -
 .../01-command-line/gptscript_parse.md |  3 -
 4 files changed, 33 insertions(+), 41 deletions(-)

diff --git a/docs/docs/100-reference/01-command-line/gptscript.md b/docs/docs/100-reference/01-command-line/gptscript.md
index 54a88844..694a563e 100644
--- a/docs/docs/100-reference/01-command-line/gptscript.md
+++ b/docs/docs/100-reference/01-command-line/gptscript.md
@@ -12,38 +12,39 @@ gptscript [flags] PROGRAM_FILE [INPUT...]
 ### Options
 
 ```
-      --cache-dir string              Directory to store cache (default: $XDG_CACHE_HOME/gptscript) ($GPTSCRIPT_CACHE_DIR)
-      --chat-state string             The chat state to continue, or null to start a new chat and return the state ($GPTSCRIPT_CHAT_STATE)
-  -C, --chdir string                  Change current working directory ($GPTSCRIPT_CHDIR)
-      --color                         Use color in output (default true) ($GPTSCRIPT_COLOR)
-      --config string                 Path to GPTScript config file ($GPTSCRIPT_CONFIG)
-      --confirm                       Prompt before running potentially dangerous commands ($GPTSCRIPT_CONFIRM)
-      --credential-context string     Context name in which to store credentials ($GPTSCRIPT_CREDENTIAL_CONTEXT) (default "default")
-      --credential-override string    Credentials to override (ex: --credential-override github.com/example/cred-tool:API_TOKEN=1234) ($GPTSCRIPT_CREDENTIAL_OVERRIDE)
-      --debug                         Enable debug logging ($GPTSCRIPT_DEBUG)
-      --debug-messages                Enable logging of chat completion calls ($GPTSCRIPT_DEBUG_MESSAGES)
-      --default-model string          Default LLM model to use ($GPTSCRIPT_DEFAULT_MODEL) (default "gpt-4o")
-      --disable-cache                 Disable caching of LLM API responses ($GPTSCRIPT_DISABLE_CACHE)
-      --disable-tui                   Don't use chat TUI but instead verbose output ($GPTSCRIPT_DISABLE_TUI)
-      --dump-state string             Dump the internal execution state to a file ($GPTSCRIPT_DUMP_STATE)
-      --events-stream-to string       Stream events to this location, could be a file descriptor/handle (e.g. fd://2), filename, or named pipe (e.g. \\.\pipe\my-pipe) ($GPTSCRIPT_EVENTS_STREAM_TO)
-      --force-chat                    Force an interactive chat session if even the top level tool is not a chat tool ($GPTSCRIPT_FORCE_CHAT)
-      --force-sequential              Force parallel calls to run sequentially ($GPTSCRIPT_FORCE_SEQUENTIAL)
-  -h, --help                          help for gptscript
-  -f, --input string                  Read input from a file ("-" for stdin) ($GPTSCRIPT_INPUT)
-      --list-models                   List the models available and exit ($GPTSCRIPT_LIST_MODELS)
-      --list-tools                    List built-in tools and exit ($GPTSCRIPT_LIST_TOOLS)
-      --listen-address string         Server listen address ($GPTSCRIPT_LISTEN_ADDRESS) (default "127.0.0.1:9090")
-      --no-trunc                      Do not truncate long log messages ($GPTSCRIPT_NO_TRUNC)
-      --openai-api-key string         OpenAI API KEY ($OPENAI_API_KEY)
-      --openai-base-url string        OpenAI base URL ($OPENAI_BASE_URL)
-      --openai-org-id string          OpenAI organization ID ($OPENAI_ORG_ID)
-  -o, --output string                 Save output to a file, or - for stdout ($GPTSCRIPT_OUTPUT)
-  -q, --quiet                         No output logging (set --quiet=false to force on even when there is no TTY) ($GPTSCRIPT_QUIET)
-      --server                        Start server ($GPTSCRIPT_SERVER)
-      --sub-tool string               Use tool of this name, not the first tool in file ($GPTSCRIPT_SUB_TOOL)
-      --ui                            Launch the UI ($GPTSCRIPT_UI)
-      --workspace string              Directory to use for the workspace, if specified it will not be deleted on exit ($GPTSCRIPT_WORKSPACE)
+      --cache-dir string               Directory to store cache (default: $XDG_CACHE_HOME/gptscript) ($GPTSCRIPT_CACHE_DIR)
+      --chat-state string              The chat state to continue, or null to start a new chat and return the state ($GPTSCRIPT_CHAT_STATE)
+  -C, --chdir string                   Change current working directory ($GPTSCRIPT_CHDIR)
+      --color                          Use color in output (default true) ($GPTSCRIPT_COLOR)
+      --config string                  Path to GPTScript config file ($GPTSCRIPT_CONFIG)
+      --confirm                        Prompt before running potentially dangerous commands ($GPTSCRIPT_CONFIRM)
+      --credential-context string      Context name in which to store credentials ($GPTSCRIPT_CREDENTIAL_CONTEXT) (default "default")
+      --credential-override string     Credentials to override (ex: --credential-override github.com/example/cred-tool:API_TOKEN=1234) ($GPTSCRIPT_CREDENTIAL_OVERRIDE)
+      --debug                          Enable debug logging ($GPTSCRIPT_DEBUG)
+      --debug-messages                 Enable logging of chat completion calls ($GPTSCRIPT_DEBUG_MESSAGES)
+      --default-model string           Default LLM model to use ($GPTSCRIPT_DEFAULT_MODEL) (default "gpt-4o")
+      --disable-cache                  Disable caching of LLM API responses ($GPTSCRIPT_DISABLE_CACHE)
+      --disable-tui                    Don't use chat TUI but instead verbose output ($GPTSCRIPT_DISABLE_TUI)
+      --dump-state string              Dump the internal execution state to a file ($GPTSCRIPT_DUMP_STATE)
+      --events-stream-to string        Stream events to this location, could be a file descriptor/handle (e.g. fd://2), filename, or named pipe (e.g. \\.\pipe\my-pipe) ($GPTSCRIPT_EVENTS_STREAM_TO)
+      --force-chat                     Force an interactive chat session if even the top level tool is not a chat tool ($GPTSCRIPT_FORCE_CHAT)
+      --force-sequential               Force parallel calls to run sequentially ($GPTSCRIPT_FORCE_SEQUENTIAL)
+  -h, --help                           help for gptscript
+  -f, --input string                   Read input from a file ("-" for stdin) ($GPTSCRIPT_INPUT)
+      --list-models                    List the models available and exit ($GPTSCRIPT_LIST_MODELS)
+      --list-tools                     List built-in tools and exit ($GPTSCRIPT_LIST_TOOLS)
+      --listen-address string          Server listen address ($GPTSCRIPT_LISTEN_ADDRESS) (default "127.0.0.1:0")
+      --no-trunc                       Do not truncate long log messages ($GPTSCRIPT_NO_TRUNC)
+      --openai-api-key string          OpenAI API KEY ($OPENAI_API_KEY)
+      --openai-base-url string         OpenAI base URL ($OPENAI_BASE_URL)
+      --openai-org-id string           OpenAI organization ID ($OPENAI_ORG_ID)
+  -o, --output string                  Save output to a file, or - for stdout ($GPTSCRIPT_OUTPUT)
+  -q, --quiet                          No output logging (set --quiet=false to force on even when there is no TTY) ($GPTSCRIPT_QUIET)
+      --save-chat-state-file string    A file to save the chat state to so that a conversation can be resumed with --chat-state ($GPTSCRIPT_SAVE_CHAT_STATE_FILE)
+      --server                         Start server ($GPTSCRIPT_SERVER)
+      --sub-tool string                Use tool of this name, not the first tool in file ($GPTSCRIPT_SUB_TOOL)
+      --ui                             Launch the UI ($GPTSCRIPT_UI)
+      --workspace string               Directory to use for the workspace, if specified it will not be deleted on exit ($GPTSCRIPT_WORKSPACE)
 ```
 
 ### SEE ALSO
diff --git a/docs/docs/100-reference/01-command-line/gptscript_eval.md b/docs/docs/100-reference/01-command-line/gptscript_eval.md
index 0aaf7e80..94710662 100644
--- a/docs/docs/100-reference/01-command-line/gptscript_eval.md
+++ b/docs/docs/100-reference/01-command-line/gptscript_eval.md
@@ -26,7 +26,6 @@ gptscript eval [flags]
 
 ```
       --cache-dir string              Directory to store cache (default: $XDG_CACHE_HOME/gptscript) ($GPTSCRIPT_CACHE_DIR)
-      --chat-state string             The chat state to continue, or null to start a new chat and return the state ($GPTSCRIPT_CHAT_STATE)
   -C, --chdir string                  Change current working directory ($GPTSCRIPT_CHDIR)
       --color                         Use color in output (default true) ($GPTSCRIPT_COLOR)
       --config string                 Path to GPTScript config file ($GPTSCRIPT_CONFIG)
@@ -39,8 +38,6 @@ gptscript eval [flags]
       --disable-cache                 Disable caching of LLM API responses ($GPTSCRIPT_DISABLE_CACHE)
       --dump-state string             Dump the internal execution state to a file ($GPTSCRIPT_DUMP_STATE)
       --events-stream-to string       Stream events to this location, could be a file descriptor/handle (e.g. fd://2), filename, or named pipe (e.g. \\.\pipe\my-pipe) ($GPTSCRIPT_EVENTS_STREAM_TO)
-      --force-chat                    Force an interactive chat session if even the top level tool is not a chat tool ($GPTSCRIPT_FORCE_CHAT)
-      --force-sequential              Force parallel calls to run sequentially ($GPTSCRIPT_FORCE_SEQUENTIAL)
   -f, --input string                  Read input from a file ("-" for stdin) ($GPTSCRIPT_INPUT)
       --no-trunc                      Do not truncate long log messages ($GPTSCRIPT_NO_TRUNC)
       --openai-api-key string         OpenAI API KEY ($OPENAI_API_KEY)
diff --git a/docs/docs/100-reference/01-command-line/gptscript_fmt.md b/docs/docs/100-reference/01-command-line/gptscript_fmt.md
index 602897ef..c4e37856 100644
--- a/docs/docs/100-reference/01-command-line/gptscript_fmt.md
+++ b/docs/docs/100-reference/01-command-line/gptscript_fmt.md
@@ -20,7 +20,6 @@ gptscript fmt [flags]
 
 ```
       --cache-dir string              Directory to store cache (default: $XDG_CACHE_HOME/gptscript) ($GPTSCRIPT_CACHE_DIR)
-      --chat-state string             The chat state to continue, or null to start a new chat and return the state ($GPTSCRIPT_CHAT_STATE)
   -C, --chdir string                  Change current working directory ($GPTSCRIPT_CHDIR)
       --color                         Use color in output (default true) ($GPTSCRIPT_COLOR)
       --config string                 Path to GPTScript config file ($GPTSCRIPT_CONFIG)
@@ -33,8 +32,6 @@ gptscript fmt [flags]
       --disable-cache                 Disable caching of LLM API responses ($GPTSCRIPT_DISABLE_CACHE)
       --dump-state string             Dump the internal execution state to a file ($GPTSCRIPT_DUMP_STATE)
       --events-stream-to string       Stream events to this location, could be a file descriptor/handle (e.g. fd://2), filename, or named pipe (e.g. \\.\pipe\my-pipe) ($GPTSCRIPT_EVENTS_STREAM_TO)
-      --force-chat                    Force an interactive chat session if even the top level tool is not a chat tool ($GPTSCRIPT_FORCE_CHAT)
-      --force-sequential              Force parallel calls to run sequentially ($GPTSCRIPT_FORCE_SEQUENTIAL)
   -f, --input string                  Read input from a file ("-" for stdin) ($GPTSCRIPT_INPUT)
       --no-trunc                      Do not truncate long log messages ($GPTSCRIPT_NO_TRUNC)
       --openai-api-key string         OpenAI API KEY ($OPENAI_API_KEY)
diff --git a/docs/docs/100-reference/01-command-line/gptscript_parse.md b/docs/docs/100-reference/01-command-line/gptscript_parse.md
index a026f75b..d2322a48 100644
--- a/docs/docs/100-reference/01-command-line/gptscript_parse.md
+++ b/docs/docs/100-reference/01-command-line/gptscript_parse.md
@@ -20,7 +20,6 @@ gptscript parse [flags]
 
 ```
       --cache-dir string              Directory to store cache (default: $XDG_CACHE_HOME/gptscript) ($GPTSCRIPT_CACHE_DIR)
-      --chat-state string             The chat state to continue, or null to start a new chat and return the state ($GPTSCRIPT_CHAT_STATE)
   -C, --chdir string                  Change current working directory ($GPTSCRIPT_CHDIR)
       --color                         Use color in output (default true) ($GPTSCRIPT_COLOR)
       --config string                 Path to GPTScript config file ($GPTSCRIPT_CONFIG)
@@ -33,8 +32,6 @@ gptscript parse [flags]
       --disable-cache                 Disable caching of LLM API responses ($GPTSCRIPT_DISABLE_CACHE)
       --dump-state string             Dump the internal execution state to a file ($GPTSCRIPT_DUMP_STATE)
       --events-stream-to string       Stream events to this location, could be a file descriptor/handle (e.g. fd://2), filename, or named pipe (e.g. \\.\pipe\my-pipe) ($GPTSCRIPT_EVENTS_STREAM_TO)
-      --force-chat                    Force an interactive chat session if even the top level tool is not a chat tool ($GPTSCRIPT_FORCE_CHAT)
-      --force-sequential              Force parallel calls to run sequentially ($GPTSCRIPT_FORCE_SEQUENTIAL)
   -f, --input string                  Read input from a file ("-" for stdin) ($GPTSCRIPT_INPUT)
       --no-trunc                      Do not truncate long log messages ($GPTSCRIPT_NO_TRUNC)
       --openai-api-key string         OpenAI API KEY ($OPENAI_API_KEY)
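
Reviewer note: below is a minimal usage sketch of the new `--save-chat-state-file` flag documented above, paired with `--chat-state` as its help text suggests. The file name, the example script path, and passing the saved state back inline via command substitution are illustrative assumptions, not part of this patch.

```
# Run a chat tool and persist the final chat state to a file (flag added in this patch).
gptscript --disable-tui --save-chat-state-file chat-state.json ./chat.gpt

# Resume the conversation later by feeding the saved state back in.
# Assumption: --chat-state accepts the serialized state written above, passed inline here.
gptscript --disable-tui --chat-state "$(cat chat-state.json)" ./chat.gpt
```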