diff --git a/README.md b/README.md
index 5de6a32..2b93168 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,63 @@ Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.
 
 Ready to run in production? Please [check our deployment guides](https://hexdocs.pm/phoenix/deployment.html).
 
+## AI Provider Configuration
+
+Bodhi supports multiple AI providers that can be switched via configuration.
+
+### Available Providers
+
+#### OpenRouter (Default)
+- **Module:** `Bodhi.OpenRouter`
+- **Default Model:** `deepseek/deepseek-r1-0528:free`
+- **Environment Variable:** `OPENROUTER_API_KEY`
+- **Website:** https://openrouter.ai/
+
+#### Google Gemini
+- **Module:** `Bodhi.Gemini`
+- **Model:** `gemini-2.0-flash`
+- **Environment Variable:** `GEMINI_API_KEY`
+
+### Switching Providers
+
+To switch AI providers, update `config/config.exs`:
+
+```elixir
+# Use OpenRouter (default)
+config :bodhi, :ai_client, Bodhi.OpenRouter
+
+# Use Google Gemini
+config :bodhi, :ai_client, Bodhi.Gemini
+```
+
+### Setting Up API Keys
+
+1. **OpenRouter:**
+   - Get API key from: https://openrouter.ai/keys
+   - Set in `.envrc`: `export OPENROUTER_API_KEY=sk-or-v1-your_api_key_here`
+
+2. **Google Gemini:**
+   - Get API key from: https://aistudio.google.com/app/apikey
+   - Set in `.envrc`: `export GEMINI_API_KEY=your_api_key_here`
+
+3. Reload environment: `direnv allow` (if using direnv)
+
+### Changing OpenRouter Model
+
+Edit `lib/bodhi/open_router.ex` and modify the `@default_model` attribute:
+
+```elixir
+@default_model "deepseek/deepseek-r1-0528:free" # Current default
+
+# Other popular models:
+# @default_model "anthropic/claude-3.5-sonnet"
+# @default_model "openai/gpt-4-turbo"
+# @default_model "meta-llama/llama-3.1-70b-instruct"
+# @default_model "google/gemini-pro-1.5"
+```
+
+See all available models at: https://openrouter.ai/models
+
 ## Learn more
 
   * Official website: https://www.phoenixframework.org/
diff --git a/config/config.exs b/config/config.exs
index 395b64f..047a4cc 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -81,6 +81,6 @@ config :posthog,
 
 # Configure behaviour implementations (can be overridden in env-specific configs)
 config :bodhi, :telegram_client, Bodhi.Telegram.TelegexAdapter
-config :bodhi, :ai_client, Bodhi.Gemini
+config :bodhi, :ai_client, Bodhi.OpenRouter
 
 import_config "#{config_env()}.exs"
diff --git a/config/runtime.exs b/config/runtime.exs
index afb6552..de99932 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -13,7 +13,8 @@ if System.get_env("PHX_SERVER") && System.get_env("RELEASE_NAME") do
 end
 
 config :bodhi,
-  gemini_token: "#{System.get_env("GEMINI_API_KEY")}"
+  gemini_token: "#{System.get_env("GEMINI_API_KEY")}",
+  openrouter_token: "#{System.get_env("OPENROUTER_API_KEY")}"
 
 config :telegex,
   token: "#{System.get_env("TG_TOKEN")}"
diff --git a/lib/bodhi/ai.ex b/lib/bodhi/ai.ex
index 36567f9..6f40bdb 100644
--- a/lib/bodhi/ai.ex
+++ b/lib/bodhi/ai.ex
@@ -10,8 +10,8 @@ defmodule Bodhi.AI do
   Asks the AI to generate a response based on message history.
""" @impl true - def ask_gemini(messages) do - impl().ask_gemini(messages) + def ask_llm(messages) do + impl().ask_llm(messages) end defp impl do diff --git a/lib/bodhi/application.ex b/lib/bodhi/application.ex index 1b901f7..5373a4d 100644 --- a/lib/bodhi/application.ex +++ b/lib/bodhi/application.ex @@ -20,7 +20,7 @@ defmodule Bodhi.Application do BodhiWeb.Endpoint, Bodhi.TgWebhookHandler, {Finch, - name: Gemini, + name: LLM, pools: %{ :default => [size: 10] }}, diff --git a/lib/bodhi/behaviours/ai_client.ex b/lib/bodhi/behaviours/ai_client.ex index 38e958c..0cf6857 100644 --- a/lib/bodhi/behaviours/ai_client.ex +++ b/lib/bodhi/behaviours/ai_client.ex @@ -8,5 +8,5 @@ defmodule Bodhi.Behaviours.AIClient do @doc """ Asks the AI to generate a response based on message history. """ - @callback ask_gemini([Message.t()]) :: {:ok, String.t()} | {:error, String.t()} + @callback ask_llm([Message.t()]) :: {:ok, String.t()} | {:error, String.t()} end diff --git a/lib/bodhi/gemini.ex b/lib/bodhi/gemini.ex index 8625bf6..c5660d4 100644 --- a/lib/bodhi/gemini.ex +++ b/lib/bodhi/gemini.ex @@ -16,8 +16,8 @@ defmodule Bodhi.Gemini do Request Gemini for bot's response in dialogue. """ @impl true - @spec ask_gemini([Message.t()]) :: {:ok, String.t()} | {:error, String.t()} - def ask_gemini(messages) do + @spec ask_llm([Message.t()]) :: {:ok, String.t()} | {:error, String.t()} + def ask_llm(messages) do %Prompt{text: prompt} = Prompts.get_latest_prompt!() messages @@ -43,7 +43,7 @@ defmodule Bodhi.Gemini do [{"x-goog-api-key", Application.get_env(:bodhi, :gemini_token)}], body ) - |> Finch.request!(Gemini) + |> Finch.request!(LLM) |> handle_finch_response() |> Jason.decode!() end diff --git a/lib/bodhi/open_router.ex b/lib/bodhi/open_router.ex new file mode 100644 index 0000000..ba81fd0 --- /dev/null +++ b/lib/bodhi/open_router.ex @@ -0,0 +1,88 @@ +defmodule Bodhi.OpenRouter do + @moduledoc """ + OpenRouter API wrapper + OpenRouter provides unified access to multiple AI models through OpenAI-compatible API. + """ + @behaviour Bodhi.Behaviours.AIClient + + @openrouter_url "https://openrouter.ai/api/v1/chat/completions" + @default_model "deepseek/deepseek-r1-0528:free" + + alias Bodhi.Chats.Message + alias Bodhi.Prompts + alias Bodhi.Prompts.Prompt + + require Logger + + @doc """ + Request OpenRouter for bot's response in dialogue. 
+ """ + @impl true + @spec ask_llm([Message.t()]) :: {:ok, String.t()} | {:error, String.t()} + def ask_llm(messages) do + %Prompt{text: prompt} = Prompts.get_latest_prompt!() + + messages + |> prepare_messages() + |> request_openrouter(prompt) + |> parse_response() + end + + defp prepare_messages(messages), do: Enum.map(messages, &build_message/1) + + defp build_message(%Message{text: text, chat_id: user_id, user_id: user_id}), + do: %{role: "user", content: text} + + defp build_message(%Message{text: text}), do: %{role: "assistant", content: text} + + defp request_openrouter(messages, prompt) do + body = build_body(messages, prompt) + + :post + |> Finch.build( + @openrouter_url, + [ + {"Authorization", "Bearer #{Application.get_env(:bodhi, :openrouter_token)}"}, + {"Content-Type", "application/json"}, + {"HTTP-Referer", "https://lamabot.io"}, + {"X-Title", "Lama Bot"} + ], + body + ) + |> Finch.request!(LLM) + |> handle_finch_response() + |> Jason.decode!() + end + + defp build_body(messages, prompt) do + %{ + model: @default_model, + messages: [ + %{role: "system", content: prompt} + | messages + ] + } + |> Jason.encode!() + end + + defp handle_finch_response(%Finch.Response{status: 200, body: body}), do: body + + defp handle_finch_response(%Finch.Response{status: code, body: body}) do + Logger.warning("OpenRouter request error code: #{code}, body: '#{body}'") + body + end + + defp parse_response(%{"choices" => [%{"message" => %{"content" => content}} | _]}) do + {:ok, content} + end + + defp parse_response(%{"error" => error}) do + Logger.error("OpenRouter API error: #{inspect(error)}") + {:error, "OpenRouter API error: #{inspect(error)}"} + end + + defp parse_response(response) do + Logger.error("Unexpected OpenRouter response format: #{inspect(response)}") + {:error, "Unexpected response format"} + end +end diff --git a/lib/bodhi/tg_webhook_handler.ex b/lib/bodhi/tg_webhook_handler.ex index 6984b3e..ba2294f 100644 --- a/lib/bodhi/tg_webhook_handler.ex +++ b/lib/bodhi/tg_webhook_handler.ex @@ -128,7 +128,7 @@ defmodule Bodhi.TgWebhookHandler do defp get_answer(%_{chat_id: chat_id}, _) do messages = Bodhi.Chats.get_chat_messages(chat_id) - {:ok, _answer} = Bodhi.AI.ask_gemini(messages) + {:ok, _answer} = Bodhi.AI.ask_llm(messages) end defp get_start_message(lang) do diff --git a/test/bodhi/tg_webhook_handler_test.exs b/test/bodhi/tg_webhook_handler_test.exs index bd9eb71..4d92fc7 100644 --- a/test/bodhi/tg_webhook_handler_test.exs +++ b/test/bodhi/tg_webhook_handler_test.exs @@ -135,7 +135,7 @@ defmodule Bodhi.TgWebhookHandlerTest do # Set up expectations based on test parameters if gemini? 
-        expect(Bodhi.GeminiMock, :ask_gemini, fn _messages ->
+        expect(Bodhi.GeminiMock, :ask_llm, fn _messages ->
           {:ok, Faker.Lorem.paragraph()}
         end)
       end
diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex
index deb12e8..5478408 100644
--- a/test/support/conn_case.ex
+++ b/test/support/conn_case.ex
@@ -88,7 +88,7 @@ defmodule BodhiWeb.ConnCase do
 
     # Set up default stub for Gemini mock
     Bodhi.GeminiMock
-    |> stub(:ask_gemini, fn _ ->
+    |> stub(:ask_llm, fn _ ->
       {:ok, Faker.Lorem.paragraph()}
     end)
 
diff --git a/test/support/oban_case.ex b/test/support/oban_case.ex
index 0c94f96..57b209f 100644
--- a/test/support/oban_case.ex
+++ b/test/support/oban_case.ex
@@ -87,7 +87,7 @@ defmodule Bodhi.ObanCase do
 
     # Set up default stub for Gemini mock
     Bodhi.GeminiMock
-    |> stub(:ask_gemini, fn _ ->
+    |> stub(:ask_llm, fn _ ->
       {:ok, Faker.Lorem.paragraph()}
     end)
 
diff --git a/test/test_helper.exs b/test/test_helper.exs
index 61767b4..a52788a 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -3,6 +3,7 @@
 # Define Mox mocks
 Mox.defmock(Bodhi.TelegramMock, for: Bodhi.Behaviours.TelegramClient)
 Mox.defmock(Bodhi.GeminiMock, for: Bodhi.Behaviours.AIClient)
+Mox.defmock(Bodhi.OpenRouterMock, for: Bodhi.Behaviours.AIClient)
 
 Ecto.Adapters.SQL.Sandbox.mode(Bodhi.Repo, :manual)
 ExUnit.start()
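
The `Bodhi.Behaviours.AIClient` behaviour, with its renamed `ask_llm/1` callback, is the extension point this change establishes: `Bodhi.AI` delegates to whatever module the `:ai_client` key points at. As a minimal sketch of a third provider (the module name `Bodhi.EchoClient` is hypothetical and not part of this diff), an implementation only needs the single callback:

```elixir
# Hypothetical example, not part of this change: a toy client that echoes the
# last message back. It implements the same behaviour as Bodhi.OpenRouter and
# Bodhi.Gemini, so it can be swapped in via the :ai_client config key.
defmodule Bodhi.EchoClient do
  @behaviour Bodhi.Behaviours.AIClient

  alias Bodhi.Chats.Message

  @impl true
  @spec ask_llm([Message.t()]) :: {:ok, String.t()} | {:error, String.t()}
  def ask_llm(messages) do
    case List.last(messages) do
      %Message{text: text} -> {:ok, "You said: #{text}"}
      nil -> {:error, "no messages in chat history"}
    end
  end
end
```

Selecting such a client would then be the same one-line change shown in the README section above, e.g. `config :bodhi, :ai_client, Bodhi.EchoClient`.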