diff --git a/README.md b/README.md
index 5c4ecae..817263a 100644
--- a/README.md
+++ b/README.md
@@ -81,6 +81,31 @@ Put your OpenAI API key in the platform configuration directory for chap, e.g.,
 
 * `chap grep needle`
 
+## `@FILE` arguments
+
+It's useful to set a bunch of related arguments together, for instance to fully
+configure a back-end. This functionality is implemented via `@FILE` arguments.
+
+Before any other command-line argument parsing is performed, `@FILE` arguments are expanded:
+
+ * An `@FILE` argument is searched for relative to the current directory
+ * An `@:FILE` argument is searched for relative to the configuration directory (e.g., $HOME/.config/chap)
+ * If an argument starts with a literal `@`, double it: `@@`
+ * `@.` stops processing any further `@FILE` arguments and leaves them unchanged.
+The contents of an `@FILE` are parsed according to `shlex.split(comments=True)`.
+Comments starting with `#` are supported.
+A typical file might look like this:
+```
+# gpt-3.5.txt: Use cheaper gpt 3.5 and custom prompt
+--backend openai-chatgpt
+-B model:gpt-3.5-turbo
+-s my-custom-system-message.txt
+```
+and you might use it with
+```
+chap @:gpt-3.5.txt ask what version of gpt is this
+```
+
 ## Interactive terminal usage
 
 The interactive terminal mode is accessed via `chap tui`.
diff --git a/src/chap/backends/openai_chatgpt.py b/src/chap/backends/openai_chatgpt.py
index 060329f..054efa3 100644
--- a/src/chap/backends/openai_chatgpt.py
+++ b/src/chap/backends/openai_chatgpt.py
@@ -72,6 +72,9 @@ class Parameters:
         max_request_tokens: int = 1024
         """The approximate greatest number of tokens to send in a request. When the session is long, the system prompt and 1 or more of the most recent interaction steps are sent."""
 
+        url: str = "https://api.openai.com/v1/chat/completions"
+        """The URL of a ChatGPT-compatible server's completion endpoint."""
+
     def __init__(self) -> None:
         self.parameters = self.Parameters()
 
@@ -97,7 +100,7 @@ def make_full_prompt(self, all_history: Session) -> Session:
     def ask(self, session: Session, query: str, *, timeout: float = 60) -> str:
         full_prompt = self.make_full_prompt(session + [User(query)])
         response = httpx.post(
-            "https://api.openai.com/v1/chat/completions",
+            self.parameters.url,
             json={
                 "model": self.parameters.model,
                 "messages": session_to_list(full_prompt),
@@ -128,7 +131,7 @@ async def aask(
         async with httpx.AsyncClient(timeout=timeout) as client:
             async with client.stream(
                 "POST",
-                "https://api.openai.com/v1/chat/completions",
+                self.parameters.url,
                 headers={"authorization": f"Bearer {self.get_key()}"},
                 json={
                     "model": self.parameters.model,
diff --git a/src/chap/core.py b/src/chap/core.py
index c82bc80..9a3df7f 100644
--- a/src/chap/core.py
+++ b/src/chap/core.py
@@ -3,6 +3,7 @@
 
 # SPDX-License-Identifier: MIT
 
+from collections.abc import Sequence
 import asyncio
 import datetime
 import io
@@ -11,6 +12,7 @@
 import pathlib
 import pkgutil
 import subprocess
+import shlex
 from dataclasses import MISSING, Field, dataclass, fields
 from typing import (
     Any,
@@ -40,6 +42,7 @@
 
 conversations_path = platformdirs.user_state_path("chap") / "conversations"
 conversations_path.mkdir(parents=True, exist_ok=True)
+configuration_path = platformdirs.user_config_path("chap")
 
 
 class ABackend(Protocol):
@@ -333,7 +336,34 @@ class Obj:
     session_filename: Optional[pathlib.Path] = None
 
 
-class MyCLI(click.MultiCommand):
+def expand_splats(args: list[str]) -> list[str]:
+    result: list[str] = []
+    saw_at_dot = False
+    for a in args:
+        if a == "@.":
+            saw_at_dot = True
+            continue
+        if saw_at_dot:
+            result.append(a)
+            continue
+        if a.startswith("@@"):  # a doubled @ escapes an argument that starts with a literal @
+            result.append(a[1:])
+            continue
+        if not a.startswith("@"):
+            result.append(a)
+            continue
+        if a.startswith("@:"):
+            fn: pathlib.Path | str = configuration_path / a[2:]
+        else:
+            fn = a[1:]
+        with open(fn, "r", encoding="utf-8") as f:
+            content = f.read()
+        parts = shlex.split(content, comments=True)
+        result.extend(expand_splats(parts))
+    return result
+
+
+class MyCLI(click.Group):
     def make_context(
         self,
         info_name: Optional[str],
@@ -370,6 +400,42 @@ def format_options(
         if hasattr(api, "parameters"):
             format_backend_help(api, formatter)
 
+    def main(
+        self,
+        args: Sequence[str] | None = None,
+        prog_name: str | None = None,
+        complete_var: str | None = None,
+        standalone_mode: bool = True,
+        windows_expand_args: bool = True,
+        **extra: Any,
+    ) -> Any:
+        if args is None:
+            args = sys.argv[1:]
+        if os.name == "nt" and windows_expand_args:
+            args = click.utils._expand_args(args)
+        else:
+            args = list(args)
+
+        args = expand_splats(args)
+
+        return super().main(
+            args,
+            prog_name=prog_name,
+            complete_var=complete_var,
+            standalone_mode=standalone_mode,
+            windows_expand_args=windows_expand_args,
+            **extra,
+        )
+
+
+class ConfigRelativeFile(click.File):
+    def convert(
+        self, value: Any, param: click.Parameter | None, ctx: click.Context | None
+    ) -> Any:
+        if isinstance(value, str) and value.startswith(":"):
+            value = configuration_path / value[1:]
+        return super().convert(value, param, ctx)
+
 
 main = MyCLI(
     help="Commandline interface to ChatGPT",
@@ -382,11 +448,12 @@ def format_options(
         callback=version_callback,
     ),
     click.Option(
-        ("--system-message-file", "-@"),
-        type=click.File("r"),
+        ("--system-message-file", "-s"),
+        type=ConfigRelativeFile("r"),
         default=None,
         callback=set_system_message_from_file,
         expose_value=False,
+        help=f"Set the system message from a file. If the filename starts with `:`, it is relative to the configuration path {configuration_path}.",
     ),
     click.Option(
         ("--system-message", "-S"),
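As a quick illustration of the `@FILE` expansion added in `src/chap/core.py`, the sketch below exercises `expand_splats` directly. It is illustrative only and not part of the patch; it assumes the patched `chap` package is importable, and the preset file name and arguments are invented.

```
# Sketch: demonstrates the @FILE expansion rules (assumes the patched chap.core is importable).
import pathlib
import tempfile

from chap.core import expand_splats

with tempfile.TemporaryDirectory() as d:
    preset = pathlib.Path(d) / "gpt-3.5.txt"
    preset.write_text(
        "# comment lines are stripped by shlex.split(..., comments=True)\n"
        "--backend openai-chatgpt\n"
        "-B model:gpt-3.5-turbo\n"
    )
    # @FILE is read and split; @. stops expansion, so the final argument is left alone.
    argv = [f"@{preset}", "ask", "hello", "@.", "@left-alone"]
    print(expand_splats(argv))
    # Expected (sketch):
    # ['--backend', 'openai-chatgpt', '-B', 'model:gpt-3.5-turbo', 'ask', 'hello', '@left-alone']
```

Because expansion happens before Click parses anything, a preset file can set any option, including `--backend` and per-backend `-B` parameters.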
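Similarly, here is a minimal sketch of the `:` prefix handling that the new `ConfigRelativeFile` type gives `--system-message-file`/`-s`. The resolver below mirrors `ConfigRelativeFile.convert`; the file names are made up for illustration.

```
# Sketch: mirrors ConfigRelativeFile.convert; a leading ":" means "relative to chap's config dir".
import pathlib

import platformdirs

configuration_path = platformdirs.user_config_path("chap")


def resolve(value: str) -> pathlib.Path | str:
    if value.startswith(":"):
        return configuration_path / value[1:]
    return value


print(resolve(":my-custom-system-message.txt"))  # e.g. ~/.config/chap/my-custom-system-message.txt
print(resolve("local-system-message.txt"))       # unchanged; opened relative to the current directory
```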