ref: ce8088b026fe35c27da09797a8f5b80b4d633650
parent: c30d74014c8bc587f00ef0d178d1a4f16b448035
author: sirjofri <sirjofri@sirjofri.de>
date: Tue Dec 30 14:18:03 EST 2025
quiet prompts in oai on request, add sysprompt customization
--- a/README
+++ b/README
@@ -17,7 +17,7 @@
 USAGE:
 
-	oai [-k apikey] [-m model] [-u baseurl]
+	oai [-q] [-k apikey] [-m model] [-u baseurl] [-s sysprompt]
 	ocomplete [-k apikey] [-m model] [-u baseurl]
 
 	baseurl is the http url without the v1/... stuff, with llama-server this is usually just http://server:8080.
@@ -27,6 +27,8 @@
 After that, you get a user: prompt for your user messages.
 
 Ocomplete: Call the program from within an acme window with some selected text. The whole window contents will be sent to the API as context, and the LLM response will be appended to the selected text.
+
+Oai: -q does not output any prompts, only LLM responses. -s sets the sysprompt.
 
 LIBRARY:
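For illustration, a session with and without the new flags might look like this. The server URL and the model's replies are invented for this example; with -q, only the LLM's message is printed:

	; oai -u http://server:8080
	user: hello
	assistant: Hello! How can I help?

	; oai -q -u http://server:8080 -s 'Answer in one short sentence.'
	hello
	Hello!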
--- a/oai.c
+++ b/oai.c
@@ -6,7 +6,7 @@
 static void
 usage(void)
 {
-	fprint(2, "usage: %s [-k apikey] [-m model] [-u baseurl]\n", argv0);
+	fprint(2, "usage: %s [-q] [-k apikey] [-m model] [-u baseurl] [-s sysprompt]\n", argv0);
 	exits("usage");
 }
@@ -16,8 +16,10 @@
 	Biobuf *bin;
 	char *url = nil;
 	char *s;
+	int quiet = 0;
 	ORequest req;
 	OResult res;
+	char *sysprompt = nil;
 	char *key = nil;
@@ -35,6 +37,12 @@
 	case 'u':
 		url = EARGF(usage());
 		break;
+	case 's':
+		sysprompt = EARGF(usage());
+		break;
+	case 'q':
+		quiet++;
+		break;
 	}ARGEND;
 
 	if (!initoai(url, key))
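For readers unfamiliar with Plan 9's arg(2) macros, the whole option loop plausibly looks like the sketch below once this patch is applied. The -k and -m cases and the model variable name are inferred from the usage string, not shown in this diff:

	ARGBEGIN {
	case 'k':
		key = EARGF(usage());	/* EARGF runs usage() when the argument is missing */
		break;
	case 'm':
		model = EARGF(usage());	/* assumed variable name */
		break;
	case 'u':
		url = EARGF(usage());
		break;
	case 's':
		sysprompt = EARGF(usage());
		break;
	case 'q':
		quiet++;	/* boolean flag, takes no argument */
		break;
	default:
		usage();
	}ARGEND;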
@@ -44,13 +52,17 @@
 	assert(bin);
 
 	req.prompts = nil;
-	print("user: ");
+
+	if (sysprompt)
+		addprompt(&req, "system", "%s", sysprompt);
+
+	if (!quiet) print("user: ");
 	while (s = Brdstr(bin, '\n', 1)) {
 		addprompt(&req, "user", s);
 		res = makerequest(req);
-		print("%s: %s\n\n", res.role, res.message);
+		print("%s%s%s\n\n", (quiet ? "" : res.role), (quiet ? "" : ": "), res.message);
 		addprompt(&req, res.role, "%s", res.message);
-		print("user: ");
+		if (!quiet) print("user: ");
 	}
 	exits(nil);
 }
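The loop relies on two helpers from the accompanying library. Judging from the call sites alone, their shapes are roughly the following; treat these as inferred sketches, since the real declarations live in the library's header:

	void	addprompt(ORequest *req, char *role, char *fmt, ...);	/* append a role/message pair; fmt is a print(2)-style format */
	OResult	makerequest(ORequest req);	/* send the conversation; the result carries .role and .message */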
--