diff --git a/lib/irc/plugin/temp_ref.ex b/lib/irc/plugin/temp_ref.ex
index 84b41bb..923fa1a 100644
--- a/lib/irc/plugin/temp_ref.ex
+++ b/lib/irc/plugin/temp_ref.ex
@@ -1,95 +1,95 @@
defmodule Irc.Plugin.TempRef do
@moduledoc """
This module makes it easy to implement simple, local, temporary references for easy access from IRC.
For example, when your plugin's output can be acted upon, instead of burdening the user with typing
or copying a UUID, you can hand them a small alphanumeric reference to use instead.
You can configure how many references are kept, and for how long.
## Usage
```elixir
defmodule Irc.MyPlugin do
import Irc.Plugin.TempRef
defstruct [:temprefs]
def init(_) do
# …
{:ok, %__MODULE__{temprefs: new_temp_refs()}}
end
end
```
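Once created, the store can hand out and resolve short references. A minimal
sketch (the `max: 100` option and the `"some-uuid"` payload are made up for
illustration):
```elixir
temprefs = new_temp_refs(max: 100)
{ref, temprefs} = put_temp_ref("some-uuid", temprefs)
# ref is a short key such as "abc"; show it to the user, keep temprefs in state
"some-uuid" = lookup_temp_ref(ref, temprefs, :not_found)
```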
"""
defstruct [:refs, :max, :expire, :build_fun, :build_increase_fun, :build_options]
defmodule SimpleAlphaNumericBuilder do
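# Default key builder: draws `length` characters (3 by default) from an
# alphabet that avoids vowels and easily-confused glyphs (0/o, 1/l/i, 5/s);
# increase/1 bumps the length when collisions keep happening.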
def build(options) do
length = Keyword.get(options, :length, 3)
for _ <- 1..length, into: "", do: <<Enum.random('bcdfghjkmpqtrvwxy2346789')>>
end
def increase(options) do
Keyword.put(options, :length, Keyword.get(options, :length, 3) + 1)
end
end
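# Creates a new reference store. Accepted options: :init_refs, :max, :expire,
# :build_fun, :build_increase_fun and :build_options.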
def new_temp_refs(options \\ []) do
%__MODULE__{
refs: Keyword.get(options, :init_refs, []),
max: Keyword.get(options, :max, :infinity),
expire: Keyword.get(options, :expire, :infinity),
build_fun: Keyword.get(options, :build_fun, &__MODULE__.SimpleAlphaNumericBuilder.build/1),
build_increase_fun: Keyword.get(options, :build_increase_fun, &__MODULE__.SimpleAlphaNumericBuilder.increase/1),
build_options: Keyword.get(options, :build_options, [length: 3])
}
end
def janitor_refs(state = %__MODULE__{}) do
if is_integer(state.max) and length(state.refs) > state.max do
# Refs are prepended, so the oldest entry is last: drop it.
%__MODULE__{state | refs: Enum.drop(state.refs, -1)}
else
state
end
end
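# Stores data under a freshly generated short key. Returns {key, state}, or
# {nil, state} when no unused key could be generated.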
def put_temp_ref(data, state = %__MODULE__{}) do
state = janitor_refs(state)
key = new_nonexisting_key(state)
if key do
ref = {key, DateTime.utc_now(), data}
{key, %__MODULE__{state | refs: [ref | state.refs]}}
else
{nil, state}
end
end
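# Resolves a short key to its stored data; returns default when unknown.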
def lookup_temp_ref(key, state, default \\ nil) do
case List.keyfind(state.refs, key, 0) do
{_, _, data} -> data
- _ -> nil
+ _ -> default
end
end
# Tries up to 50 random keys, increasing the key length every 5 attempts;
# returns nil when no free key could be found.
defp new_nonexisting_key(state, i \\ 1)
defp new_nonexisting_key(_state, i) when i > 50 do
nil
end
defp new_nonexisting_key(state = %__MODULE__{refs: refs}, i) do
build_options = if rem(i, 5) == 0 do
state.build_increase_fun.(state.build_options)
else
state.build_options
end
key = state.build_fun.(build_options)
if !List.keymember?(refs, key, 0) do
key
else
new_nonexisting_key(state, i + 1)
end
end
end
diff --git a/lib/lsg_irc/gpt_plugin.ex b/lib/lsg_irc/gpt_plugin.ex
index bf615fb..ac0373a 100644
--- a/lib/lsg_irc/gpt_plugin.ex
+++ b/lib/lsg_irc/gpt_plugin.ex
@@ -1,248 +1,258 @@
defmodule LSG.IRC.GptPlugin do
require Logger
import Irc.Plugin.TempRef
def irc_doc() do
"""
# OpenAI GPT
Uses OpenAI's GPT-3 API to bring natural language prompts to your IRC channel.
_prompts_ are pre-defined prompts and parameters stored in the bot's CouchDB.
_Runs_ (results of the inference of a _prompt_) are also stored in CouchDB and
may be resumed.
* **!gpt** list GPT prompts
* **!gpt `[prompt]` `<prompt or args>`** run a prompt
* **+gpt `[short ref|run id]` `<prompt or args>`** continue a previous run
* **?gpt offensive `<content>`** is the content offensive?
* **?gpt show `[short ref|run id]`** run information and web link
* **?gpt `[prompt]`** prompt information and web link
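
For example, with a hypothetical prompt named `complete`:
`!gpt complete Once upon a time` runs it, and `+gpt <short ref> and then?` resumes that run.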
"""
end
@couch_db "bot-plugin-openai-prompts"
@couch_run_db "bot-plugin-gpt-history"
@trigger "gpt"
def start_link() do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
defstruct [:temprefs]
def get_result(id) do
Couch.get(@couch_run_db, id)
end
def get_prompt(id) do
Couch.get(@couch_db, id)
end
def init(_) do
regopts = [plugin: __MODULE__]
{:ok, _} = Registry.register(IRC.PubSub, "trigger:#{@trigger}", regopts)
{:ok, %__MODULE__{temprefs: new_temp_refs()}}
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :bang, args: [prompt | args]}}}, state) do
case Couch.get(@couch_db, prompt) do
{:ok, prompt} -> {:noreply, prompt(m, prompt, Enum.join(args, " "), state)}
{:error, :not_found} ->
- m.replyfun.("gpt: no such prompt: #{prompt}")
+ m.replyfun.("gpt: prompt '#{prompt}' does not exists")
{:noreply, state}
error ->
Logger.info("gpt: prompt load error: #{inspect error}")
m.replyfun.("gpt: database error")
{:noreply, state}
end
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :bang, args: []}}}, state) do
case Couch.get(@couch_db, "_all_docs") do
{:ok, %{"rows" => []}} -> m.replyfun.("gpt: no prompts available")
{:ok, %{"rows" => prompts}} ->
prompts = prompts |> Enum.map(fn(prompt) -> Map.get(prompt, "id") end) |> Enum.join(", ")
m.replyfun.("gpt: prompts: #{prompts}")
error ->
Logger.info("gpt: prompt load error: #{inspect error}")
m.replyfun.("gpt: database error")
end
{:noreply, state}
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :plus, args: [ref_or_id | args]}}}, state) do
id = lookup_temp_ref(ref_or_id, state.temprefs, ref_or_id)
case Couch.get(@couch_run_db, id) do
{:ok, run} ->
Logger.debug("+gpt run: #{inspect run}")
{:noreply, continue_prompt(m, run, Enum.join(args, " "), state)}
{:error, :not_found} ->
m.replyfun.("gpt: ref or id not found or expired: #{inspect ref_or_id} (if using short ref, try using full id)")
{:noreply, state}
error ->
Logger.info("+gpt: run load error: #{inspect error}")
m.replyfun.("gpt: database error")
{:noreply, state}
end
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :query, args: ["offensive" | text]}}}, state) do
text = Enum.join(text, " ")
{moderate?, moderation} = moderation(text, m.account.id)
reply = cond do
moderate? -> "⚠️ #{Enum.join(moderation, ", ")}"
!moderate? && moderation -> "👍"
!moderate? -> "☠️ error"
end
m.replyfun.(reply)
{:noreply, state}
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :query, args: ["show", ref_or_id]}}}, state) do
- id = lookup_temp_ref(ref_or_id, state.temprefs, ref_or_id) || ref_or_id
+ id = lookup_temp_ref(ref_or_id, state.temprefs, ref_or_id)
url = if m.channel do
LSGWeb.Router.Helpers.gpt_url(LSGWeb.Endpoint, :result, m.network, LSGWeb.format_chan(m.channel), id)
else
LSGWeb.Router.Helpers.gpt_url(LSGWeb.Endpoint, :result, id)
end
m.replyfun.("→ #{url}")
{:noreply, state}
end
def handle_info({:irc, :trigger, @trigger, m = %IRC.Message{trigger: %IRC.Trigger{type: :query, args: [prompt]}}}, state) do
url = if m.channel do
LSGWeb.Router.Helpers.gpt_url(LSGWeb.Endpoint, :prompt, m.network, LSGWeb.format_chan(m.channel), prompt)
else
LSGWeb.Router.Helpers.gpt_url(LSGWeb.Endpoint, :prompt, prompt)
end
m.replyfun.("→ #{url}")
{:noreply, state}
end
def handle_info(info, state) do
Logger.debug("gpt: unhandled info: #{inspect info}")
{:noreply, state}
end
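# Rebuilds a prompt from a stored run so it can be resumed: either through the
# original prompt's "continue_prompt" template, or by concatenating the
# previous request/response and appending the new content.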
defp continue_prompt(msg, run, content, state) do
prompt_id = Map.get(run, "prompt_id")
prompt_rev = Map.get(run, "prompt_rev")
- original_prompt = case Couch.get(@couch_db, Map.get(run, "prompt_id")) do
+ original_prompt = case Couch.get(@couch_db, prompt_id, rev: prompt_rev) do
{:ok, prompt} -> prompt
_ -> nil
end
- continue_prompt = %{"_id" => Map.get(run, "prompt_id"),
- "_rev" => Map.get(original_prompt, "_rev"),
- "type" => Map.get(run, "type"),
+ if original_prompt do
+ continue_prompt = %{"_id" => prompt_id,
+ "_rev" => prompt_rev,
+ "type" => Map.get(original_prompt, "type"),
"parent_run_id" => Map.get(run, "_id"),
"openai_params" => Map.get(run, "request") |> Map.delete("prompt")}
- continue_prompt = if prompt_string = Map.get(original_prompt, "continue_prompt") do
- full_text = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response")
- continue_prompt
- |> Map.put("prompt", prompt_string)
- |> Map.put("prompt_format", "liquid")
- |> Map.put("prompt_liquid_variables", %{"previous" => full_text})
+ continue_prompt = if prompt_string = Map.get(original_prompt, "continue_prompt") do
+ full_text = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response")
+ continue_prompt
+ |> Map.put("prompt", prompt_string)
+ |> Map.put("prompt_format", "liquid")
+ |> Map.put("prompt_liquid_variables", %{"previous" => full_text})
+ else
+ prompt_content_tag = if content != "", do: " {{content}}", else: ""
+ string = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response") <> prompt_content_tag
+ continue_prompt
+ |> Map.put("prompt", string)
+ |> Map.put("prompt_format", "liquid")
+ end
+
+ prompt(msg, continue_prompt, content, state)
else
- prompt_content_tag = if content != "", do: " {{content}}", else: ""
- string = get_in(run, ~w(request prompt)) <> "\n" <> Map.get(run, "response") <> prompt_content_tag
- continue_prompt
- |> Map.put("prompt", string)
- |> Map.put("prompt_format", "liquid")
+ msg.replyfun.("gpt: cannot continue this prompt: original prompt not found #{prompt_id}@v#{prompt_rev}")
+ state
end
-
- prompt(msg, continue_prompt, content, state)
end
- defp prompt(msg, prompt = %{"type" => "completions", "prompt" => prompt}, content, state) do
+ defp prompt(msg, prompt = %{"type" => "completions", "prompt" => prompt_template}, content, state) do
Logger.debug("gpt_plugin:prompt/4 #{inspect prompt}")
- prompt = case Map.get(prompt, "prompt_format", "liquid") do
- "liquid" -> Tmpl.render(prompt, msg, Map.merge(Map.get(prompt, "prompt_liquid_variables", %{}), %{"content" => content}))
- "norender" -> prompt
+ prompt_text = case Map.get(prompt, "prompt_format", "liquid") do
+ "liquid" -> Tmpl.render(prompt_template, msg, Map.merge(Map.get(prompt, "prompt_liquid_variables", %{}), %{"content" => content}))
+ "norender" -> prompt_template
end
args = Map.get(prompt, "openai_params")
- |> Map.put("prompt", prompt)
+ |> Map.put("prompt", prompt_text)
|> Map.put("user", msg.account.id)
{moderate?, moderation} = moderation(content, msg.account.id)
if moderate?, do: msg.replyfun.("⚠️ offensive input: #{Enum.join(moderation, ", ")}")
Logger.debug("GPT: request #{inspect args}")
case OpenAi.post("/v1/completions", args) do
{:ok, %{"choices" => [%{"text" => text, "finish_reason" => finish_reason} | _], "usage" => usage, "id" => gpt_id, "created" => created}} ->
text = String.trim(text)
{o_moderate?, o_moderation} = moderation(text, msg.account.id)
if o_moderate?, do: msg.replyfun.("🚨 offensive output: #{Enum.join(o_moderation, ", ")}")
msg.replyfun.(text)
doc = %{"prompt_id" => Map.get(prompt, "_id"),
"prompt_rev" => Map.get(prompt, "_rev"),
"network" => msg.network,
"channel" => msg.channel,
"nick" => msg.sender.nick,
"account_id" => (if msg.account, do: msg.account.id),
"request" => args,
"response" => text,
"message_at" => msg.at,
"reply_at" => DateTime.utc_now(),
"gpt_id" => gpt_id,
"gpt_at" => created,
"gpt_usage" => usage,
"type" => "completions",
"parent_run_id" => Map.get(prompt, "parent_run_id"),
"moderation" => %{"input" => %{flagged: moderate?, categories: moderation},
"output" => %{flagged: o_moderate?, categories: o_moderation}
}
}
Logger.debug("Saving result to couch: #{inspect doc}")
{id, ref, temprefs} = case Couch.post(@couch_run_db, doc) do
{:ok, id, _rev} ->
{ref, temprefs} = put_temp_ref(id, state.temprefs)
{id, ref, temprefs}
error ->
Logger.error("Failed to save to Couch: #{inspect error}")
{nil, nil, state.temprefs}
end
stop = cond do
- finish_reason == "stop" -> "s"
+ finish_reason == "stop" -> ""
finish_reason == "length" -> " — truncated"
true -> " — #{finish_reason}"
end
- msg.replyfun.(" ↪ #{ref}" <>
+ ref_and_prefix = if Map.get(usage, "completion_tokens", 0) == 0 do
+ "GPT had nothing else to say :( ↪ #{ref || "✗"}"
+ else
+ " ↪ #{ref || "✗"}"
+ end
+ msg.replyfun.(ref_and_prefix <>
stop <>
" — #{Map.get(usage, "total_tokens", 0)}" <>
" (#{Map.get(usage, "prompt_tokens", 0)}/#{Map.get(usage, "completion_tokens", 0)}) tokens" <>
" — #{id || "save failed"}")
%__MODULE__{state | temprefs: temprefs}
{:error, atom} when is_atom(atom) ->
Logger.error("gpt error: #{inspect atom}")
msg.replyfun.("gpt: ☠️ #{to_string(atom)}")
state
error ->
Logger.error("gpt error: #{inspect error}")
msg.replyfun.("gpt: ☠️ ")
state
end
end
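# Checks content against OpenAI's moderation endpoint. Returns {true, categories}
# when flagged, {false, true} when clean, and {false, false} on API error.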
defp moderation(content, user_id) do
case OpenAi.post("/v1/moderations", %{"input" => content, "user" => user_id}) do
{:ok, %{"results" => [%{"flagged" => true, "categories" => categories} | _]}} ->
cat = categories
|> Enum.filter(fn({_key, value}) -> value end)
|> Enum.map(fn({key, _}) -> key end)
{true, cat}
{:ok, moderation} ->
Logger.debug("gpt: moderation: not flagged, #{inspect moderation}")
{false, true}
error ->
Logger.error("gpt: moderation error: #{inspect error}")
{false, false}
end
end
end