author     Jordan Bracco <href@random.sh>    2022-12-11 02:01:29 +0000
committer  Jordan Bracco <href@random.sh>    2022-12-11 02:03:36 +0000
commit     93e327fb9089751500ad3c69557ca12e7e638738
tree       2454a07ac436bace488e9dceabd99d8e9de6146f
parent     fix(couch): fix error handling in get/2,3
feat(gpt): continue prompt, some fixes
-rw-r--r--  lib/irc/plugin/temp_ref.ex |  2
-rw-r--r--  lib/lsg_irc/gpt_plugin.ex  | 62
2 files changed, 37 insertions, 27 deletions
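
The temp_ref.ex hunk fixes lookup_temp_ref/3: its catch-all clause hard-coded nil, so the default argument was silently ignored on a miss. A minimal sketch of the before/after behavior, assuming state.refs holds {key, _, data} tuples (the middle element does not appear in this diff; a timestamp is a guess):

    # Hypothetical ref table; the tuple shape follows the
    # List.keyfind(state.refs, key, 0) call in the hunk.
    state = %{refs: [{"owl", 0, "run-4242"}]}

    lookup_temp_ref("owl", state)               #=> "run-4242" (unchanged)
    lookup_temp_ref("nope", state, "fallback")  #=> was nil (bug), now "fallback"
    lookup_temp_ref("nope", state)              #=> nil, before and after

This is also why the "show" handler in gpt_plugin.ex below can drop its "|| ref_or_id" workaround: passing ref_or_id as the default now actually takes effect.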
diff --git a/lib/irc/plugin/temp_ref.ex b/lib/irc/plugin/temp_ref.ex
index 84b41bb..923fa1a 100644
--- a/lib/irc/plugin/temp_ref.ex
+++ b/lib/irc/plugin/temp_ref.ex
@@ -69,7 +69,7 @@ defmodule Irc.Plugin.TempRef do
   def lookup_temp_ref(key, state, default \\ nil) do
     case List.keyfind(state.refs, key, 0) do
       {_, _, data} -> data
-      _ -> nil
+      _ -> default
     end
   end
diff --git a/lib/lsg_irc/gpt_plugin.ex b/lib/lsg_irc/gpt_plugin.ex
index bf615fb..ac0373a 100644
--- a/lib/lsg_irc/gpt_plugin.ex
+++ b/lib/lsg_irc/gpt_plugin.ex
@@ -50,7 +50,7 @@ defmodule LSG.IRC.GptPlugin do
     case Couch.get(@couch_db, prompt) do
       {:ok, prompt} -> {:noreply, prompt(m, prompt, Enum.join(args, " "), state)}
       {:error, :not_found} ->
-        m.replyfun.("gpt: no such prompt: #{prompt}")
+        m.replyfun.("gpt: prompt '#{prompt}' does not exist")
         {:noreply, state}
       error ->
         Logger.info("gpt: prompt load error: #{inspect error}")
@@ -101,7 +101,7 @@ defmodule LSG.IRC.GptPlugin do
   end
 
   def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :query, args: ["show", ref_or_id]}}}, state) do
-    id = lookup_temp_ref(ref_or_id, state.temprefs, ref_or_id) || ref_or_id
+    id = lookup_temp_ref(ref_or_id, state.temprefs, ref_or_id)
     url = if m.channel do
       LSGWeb.Router.Helpers.gpt_url(LSGWeb.Endpoint, :result, m.network, LSGWeb.format_chan(m.channel), id)
     else
@@ -130,43 +130,48 @@ defmodule LSG.IRC.GptPlugin do
     prompt_id = Map.get(run, "prompt_id")
     prompt_rev = Map.get(run, "prompt_rev")
-    original_prompt = case Couch.get(@couch_db, Map.get(run, "prompt_id")) do
+    original_prompt = case Couch.get(@couch_db, prompt_id, rev: prompt_rev) do
       {:ok, prompt} -> prompt
       _ -> nil
     end
-    continue_prompt = %{"_id" => Map.get(run, "prompt_id"),
-                        "_rev" => Map.get(original_prompt, "_rev"),
-                        "type" => Map.get(run, "type"),
+    if original_prompt do
+      continue_prompt = %{"_id" => prompt_id,
+                          "_rev" => prompt_rev,
+                          "type" => Map.get(original_prompt, "type"),
                         "parent_run_id" => Map.get(run, "_id"),
                         "openai_params" => Map.get(run, "request") |> Map.delete("prompt")}
-    continue_prompt = if prompt_string = Map.get(original_prompt, "continue_prompt") do
-      full_text = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response")
-      continue_prompt
-      |> Map.put("prompt", prompt_string)
-      |> Map.put("prompt_format", "liquid")
-      |> Map.put("prompt_liquid_variables", %{"previous" => full_text})
+      continue_prompt = if prompt_string = Map.get(original_prompt, "continue_prompt") do
+        full_text = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response")
+        continue_prompt
+        |> Map.put("prompt", prompt_string)
+        |> Map.put("prompt_format", "liquid")
+        |> Map.put("prompt_liquid_variables", %{"previous" => full_text})
+      else
+        prompt_content_tag = if content != "", do: " {{content}}", else: ""
+        string = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response") <> prompt_content_tag
+        continue_prompt
+        |> Map.put("prompt", string)
+        |> Map.put("prompt_format", "liquid")
+      end
+
+      prompt(msg, continue_prompt, content, state)
     else
-      prompt_content_tag = if content != "", do: " {{content}}", else: ""
-      string = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response") <> prompt_content_tag
-      continue_prompt
-      |> Map.put("prompt", string)
-      |> Map.put("prompt_format", "liquid")
+      msg.replyfun.("gpt: cannot continue this prompt: original prompt not found #{prompt_id}@v#{prompt_rev}")
+      state
     end
-
-    prompt(msg, continue_prompt, content, state)
   end
 
-  defp prompt(msg, prompt = %{"type" => "completions", "prompt" => prompt}, content, state) do
+  defp prompt(msg, prompt = %{"type" => "completions", "prompt" => prompt_template}, content, state) do
     Logger.debug("gpt_plugin:prompt/4 #{inspect prompt}")
-    prompt = case Map.get(prompt, "prompt_format", "liquid") do
-      "liquid" -> Tmpl.render(prompt, msg, Map.merge(Map.get(prompt, "prompt_liquid_variables", %{}), %{"content" => content}))
-      "norender" -> prompt
+    prompt_text = case Map.get(prompt, "prompt_format", "liquid") do
+      "liquid" -> Tmpl.render(prompt_template, msg, Map.merge(Map.get(prompt, "prompt_liquid_variables", %{}), %{"content" => content}))
+      "norender" -> prompt_template
     end
     args = Map.get(prompt, "openai_params")
-    |> Map.put("prompt", prompt)
+    |> Map.put("prompt", prompt_text)
     |> Map.put("user", msg.account.id)
 
     {moderate?, moderation} = moderation(content, msg.account.id)
@@ -208,11 +213,16 @@ defmodule LSG.IRC.GptPlugin do
       {nil, nil, state.temprefs}
     end
     stop = cond do
-      finish_reason == "stop" -> "s"
+      finish_reason == "stop" -> ""
       finish_reason == "length" -> " — truncated"
       true -> " — #{finish_reason}"
    end
-    msg.replyfun.(" ↪ #{ref}" <>
+    ref_and_prefix = if Map.get(usage, "completion_tokens", 0) == 0 do
+      "GPT had nothing else to say :( ↪ #{ref || "✗"}"
+    else
+      " ↪ #{ref || "✗"}"
+    end
+    msg.replyfun.(ref_and_prefix <>
                   stop <>
                   " — #{Map.get(usage, "total_tokens", 0)}" <>
                   " (#{Map.get(usage, "prompt_tokens", 0)}/#{Map.get(usage, "completion_tokens", 0)}) tokens" <>
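
The gpt_plugin.ex changes guard the continue flow against a missing original prompt. Previously original_prompt could be nil (prompt deleted, or the run's revision unknown) and building the continue map crashed with a BadMapError on Map.get(nil, "_rev"); now the nil case is reported to the user. A condensed sketch of the new control flow, using the names from the hunk above with the map-building elided:

    original_prompt =
      case Couch.get(@couch_db, prompt_id, rev: prompt_rev) do
        {:ok, prompt} -> prompt
        _ -> nil                    # deleted prompt or unknown revision
      end

    if original_prompt do
      # Rebuild a prompt that replays the previous request/response, either
      # through the prompt's own "continue_prompt" Liquid template or by
      # appending "{{content}}" to the raw exchange, then run it.
      prompt(msg, continue_prompt, content, state)
    else
      msg.replyfun.("gpt: cannot continue this prompt: original prompt not found #{prompt_id}@v#{prompt_rev}")
      state                         # keep the plugin state unchanged
    end

The last hunk also stops appending a stray "s" after normal completions and, when the model returns zero completion tokens, replies "GPT had nothing else to say :(" instead of a bare reference.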