Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 62 additions & 3 deletions cmd/testai/main-testai.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ const (
DefaultAnthropicModel = "claude-sonnet-4-5"
DefaultOpenAIModel = "gpt-5.1"
DefaultOpenRouterModel = "mistralai/mistral-small-3.2-24b-instruct"
DefaultNanoGPTModel = "zai-org/glm-4.7"
DefaultGeminiModel = "gemini-3-pro-preview"
)

Expand Down Expand Up @@ -257,6 +258,55 @@ func testOpenRouter(ctx context.Context, model, message string, tools []uctypes.
}
}

// testNanoGPT exercises the NanoGPT OpenAI-compatible chat endpoint through
// WaveAIPostMessageWrap, streaming the response via an SSE handler.
// It exits the process if the NANOGPT_KEY environment variable is not set.
func testNanoGPT(ctx context.Context, model, message string, tools []uctypes.ToolDefinition) {
	key := os.Getenv("NANOGPT_KEY")
	if key == "" {
		fmt.Println("Error: NANOGPT_KEY environment variable not set")
		os.Exit(1)
	}

	chatID := uuid.New().String()

	// Single-part text message wrapped in the generic AI message envelope.
	msg := &uctypes.AIMessage{
		MessageId: uuid.New().String(),
		Parts: []uctypes.AIMessagePart{{
			Type: uctypes.AIMessagePartTypeText,
			Text: message,
		}},
	}

	fmt.Printf("Testing NanoGPT with WaveAIPostMessageWrap, model: %s\n", model)
	fmt.Printf("Message: %s\n", message)
	fmt.Printf("Chat ID: %s\n", chatID)
	fmt.Println("---")

	// Capture the streamed SSE output through a test writer.
	writer := &TestResponseWriter{}
	handler := sse.MakeSSEHandlerCh(writer, ctx)
	defer handler.Close()

	chatOpts := uctypes.WaveChatOpts{
		ChatId:   chatID,
		ClientId: uuid.New().String(),
		Config: uctypes.AIOptsType{
			APIType:   uctypes.APIType_OpenAIChat,
			APIToken:  key,
			Endpoint:  "https://nano-gpt.com/api/v1/chat/completions",
			Model:     model,
			MaxTokens: 4096,
		},
		Tools:        tools,
		SystemPrompt: []string{"You are a helpful assistant. Be concise and clear in your responses."},
	}
	if err := aiusechat.WaveAIPostMessageWrap(ctx, handler, msg, chatOpts); err != nil {
		fmt.Printf("NanoGPT streaming error: %v\n", err)
	}
}

func testAnthropic(ctx context.Context, model, message string, tools []uctypes.ToolDefinition) {
apiKey := os.Getenv("ANTHROPIC_APIKEY")
if apiKey == "" {
Expand Down Expand Up @@ -381,7 +431,7 @@ func testT4(ctx context.Context) {
}

func printUsage() {
fmt.Println("Usage: go run main-testai.go [--anthropic|--openaicomp|--openrouter|--gemini] [--tools] [--model <model>] [message]")
fmt.Println("Usage: go run main-testai.go [--anthropic|--openaicomp|--openrouter|--nanogpt|--gemini] [--tools] [--model <model>] [message]")
fmt.Println("Examples:")
fmt.Println(" go run main-testai.go 'What is 2+2?'")
fmt.Println(" go run main-testai.go --model o4-mini 'What is 2+2?'")
Expand All @@ -390,6 +440,8 @@ func printUsage() {
fmt.Println(" go run main-testai.go --openaicomp --model gpt-4o 'What is 2+2?'")
fmt.Println(" go run main-testai.go --openrouter 'What is 2+2?'")
fmt.Println(" go run main-testai.go --openrouter --model anthropic/claude-3.5-sonnet 'What is 2+2?'")
fmt.Println(" go run main-testai.go --nanogpt 'What is 2+2?'")
fmt.Println(" go run main-testai.go --nanogpt --model gpt-4o 'What is 2+2?'")
fmt.Println(" go run main-testai.go --gemini 'What is 2+2?'")
fmt.Println(" go run main-testai.go --gemini --model gemini-1.5-pro 'What is 2+2?'")
fmt.Println(" go run main-testai.go --tools 'Help me configure GitHub Actions monitoring'")
Expand All @@ -399,24 +451,27 @@ func printUsage() {
fmt.Printf(" Anthropic: %s\n", DefaultAnthropicModel)
fmt.Printf(" OpenAI Completions: gpt-4o\n")
fmt.Printf(" OpenRouter: %s\n", DefaultOpenRouterModel)
fmt.Printf(" NanoGPT: %s\n", DefaultNanoGPTModel)
fmt.Printf(" Google Gemini: %s\n", DefaultGeminiModel)
fmt.Println("")
fmt.Println("Environment variables:")
fmt.Println(" OPENAI_APIKEY (for OpenAI models)")
fmt.Println(" ANTHROPIC_APIKEY (for Anthropic models)")
fmt.Println(" OPENROUTER_APIKEY (for OpenRouter models)")
fmt.Println(" NANOGPT_KEY (for NanoGPT models)")
fmt.Println(" GOOGLE_APIKEY (for Google Gemini models)")
}

func main() {
var anthropic, openaicomp, openrouter, gemini, tools, help, t1, t2, t3, t4 bool
var anthropic, openaicomp, openrouter, nanogpt, gemini, tools, help, t1, t2, t3, t4 bool
var model string
flag.BoolVar(&anthropic, "anthropic", false, "Use Anthropic API instead of OpenAI")
flag.BoolVar(&openaicomp, "openaicomp", false, "Use OpenAI Completions API")
flag.BoolVar(&openrouter, "openrouter", false, "Use OpenRouter API")
flag.BoolVar(&nanogpt, "nanogpt", false, "Use NanoGPT API")
flag.BoolVar(&gemini, "gemini", false, "Use Google Gemini API")
flag.BoolVar(&tools, "tools", false, "Enable GitHub Actions Monitor tools for testing")
flag.StringVar(&model, "model", "", fmt.Sprintf("AI model to use (defaults: %s for OpenAI, %s for Anthropic, %s for OpenRouter, %s for Gemini)", DefaultOpenAIModel, DefaultAnthropicModel, DefaultOpenRouterModel, DefaultGeminiModel))
flag.StringVar(&model, "model", "", fmt.Sprintf("AI model to use (defaults: %s for OpenAI, %s for Anthropic, %s for OpenRouter, %s for NanoGPT, %s for Gemini)", DefaultOpenAIModel, DefaultAnthropicModel, DefaultOpenRouterModel, DefaultNanoGPTModel, DefaultGeminiModel))
flag.BoolVar(&help, "help", false, "Show usage information")
flag.BoolVar(&t1, "t1", false, fmt.Sprintf("Run preset T1 test (%s with 'what is 2+2')", DefaultAnthropicModel))
flag.BoolVar(&t2, "t2", false, fmt.Sprintf("Run preset T2 test (%s with 'what is 2+2')", DefaultOpenAIModel))
Expand Down Expand Up @@ -457,6 +512,8 @@ func main() {
model = "gpt-4o"
} else if openrouter {
model = DefaultOpenRouterModel
} else if nanogpt {
model = DefaultNanoGPTModel
} else if gemini {
model = DefaultGeminiModel
} else {
Expand All @@ -481,6 +538,8 @@ func main() {
testOpenAIComp(ctx, model, message, toolDefs)
} else if openrouter {
testOpenRouter(ctx, model, message, toolDefs)
} else if nanogpt {
testNanoGPT(ctx, model, message, toolDefs)
} else if gemini {
testGemini(ctx, model, message, toolDefs)
} else {
Expand Down
35 changes: 35 additions & 0 deletions docs/docs/waveai-modes.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ Wave AI now supports provider-based configuration which automatically applies se

- **`openai`** - OpenAI API (automatically configures endpoint and secret name) [[see example](#openai)]
- **`openrouter`** - OpenRouter API (automatically configures endpoint and secret name) [[see example](#openrouter)]
- **`nanogpt`** - NanoGPT API (automatically configures endpoint and secret name) [[see example](#nanogpt)]
- **`google`** - Google AI (Gemini) [[see example](#google-ai-gemini)]
- **`azure`** - Azure OpenAI Service (modern API) [[see example](#azure-openai-modern-api)]
- **`azure-legacy`** - Azure OpenAI Service (legacy deployment API) [[see example](#azure-openai-legacy-deployment-api)]
Expand Down Expand Up @@ -230,6 +231,40 @@ For OpenRouter, you must manually specify `ai:capabilities` based on your model'
```
:::

### NanoGPT

[NanoGPT](https://nano-gpt.com) provides access to multiple AI models at competitive prices. Using the `nanogpt` provider simplifies configuration:

```json
{
"nanogpt-glm47": {
"display:name": "NanoGPT - GLM 4.7",
"ai:provider": "nanogpt",
"ai:model": "zai-org/glm-4.7"
}
}
```

The provider automatically sets:
- `ai:endpoint` to `https://nano-gpt.com/api/v1/chat/completions`
- `ai:apitype` to `openai-chat`
- `ai:apitokensecretname` to `NANOGPT_KEY` (store your NanoGPT API key with this name)

:::note
Because NanoGPT is a proxy to many different models, you must manually specify `ai:capabilities` based on the features of the model you select. NanoGPT supports OpenAI-compatible tool calling for models that offer it. To determine image support, check the model's `capabilities.vision` field from the [NanoGPT models API](https://nano-gpt.com/api/v1/models?detailed=true). Example for a text-only model with tool support:
```json
{
"nanogpt-glm47": {
"display:name": "NanoGPT - GLM 4.7",
"ai:provider": "nanogpt",
"ai:model": "zai-org/glm-4.7",
"ai:capabilities": ["tools"]
}
}
```
For vision-capable models like `openai/gpt-5`, add `"images"` to capabilities.
:::

### Google AI (Gemini)

[Google AI](https://ai.google.dev) provides the Gemini family of models. Using the `google` provider simplifies configuration:
Expand Down
1 change: 1 addition & 0 deletions pkg/aiusechat/uctypes/uctypes.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ const (
AIProvider_Wave = "wave"
AIProvider_Google = "google"
AIProvider_OpenRouter = "openrouter"
AIProvider_NanoGPT = "nanogpt"
AIProvider_OpenAI = "openai"
AIProvider_Azure = "azure"
AIProvider_AzureLegacy = "azure-legacy"
Expand Down
13 changes: 13 additions & 0 deletions pkg/aiusechat/usechat-mode.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ const (
OpenAIResponsesEndpoint = "https://api.openai.com/v1/responses"
OpenAIChatEndpoint = "https://api.openai.com/v1/chat/completions"
OpenRouterChatEndpoint = "https://openrouter.ai/api/v1/chat/completions"
NanoGPTChatEndpoint = "https://nano-gpt.com/api/v1/chat/completions"
AzureLegacyEndpointTemplate = "https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=%s"
AzureResponsesEndpointTemplate = "https://%s.openai.azure.com/openai/v1/responses"
AzureChatEndpointTemplate = "https://%s.openai.azure.com/openai/v1/chat/completions"
Expand All @@ -30,6 +31,7 @@ const (

OpenAIAPITokenSecretName = "OPENAI_KEY"
OpenRouterAPITokenSecretName = "OPENROUTER_KEY"
NanoGPTAPITokenSecretName = "NANOGPT_KEY"
AzureOpenAIAPITokenSecretName = "AZURE_OPENAI_KEY"
GoogleAIAPITokenSecretName = "GOOGLE_AI_KEY"
)
Expand Down Expand Up @@ -99,6 +101,17 @@ func applyProviderDefaults(config *wconfig.AIModeConfigType) {
config.APITokenSecretName = OpenRouterAPITokenSecretName
}
}
if config.Provider == uctypes.AIProvider_NanoGPT {
if config.APIType == "" {
config.APIType = uctypes.APIType_OpenAIChat
}
if config.Endpoint == "" {
config.Endpoint = NanoGPTChatEndpoint
}
if config.APITokenSecretName == "" {
config.APITokenSecretName = NanoGPTAPITokenSecretName
}
}
if config.Provider == uctypes.AIProvider_AzureLegacy {
if config.AzureAPIVersion == "" {
config.AzureAPIVersion = AzureLegacyDefaultAPIVersion
Expand Down
2 changes: 1 addition & 1 deletion pkg/wconfig/settingsconfig.go
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ type AIModeConfigType struct {
DisplayOrder float64 `json:"display:order,omitempty"`
DisplayIcon string `json:"display:icon,omitempty"`
DisplayDescription string `json:"display:description,omitempty"`
Provider string `json:"ai:provider,omitempty" jsonschema:"enum=wave,enum=google,enum=openrouter,enum=openai,enum=azure,enum=azure-legacy,enum=custom"`
Provider string `json:"ai:provider,omitempty" jsonschema:"enum=wave,enum=google,enum=openrouter,enum=nanogpt,enum=openai,enum=azure,enum=azure-legacy,enum=custom"`
APIType string `json:"ai:apitype,omitempty" jsonschema:"enum=google-gemini,enum=openai-responses,enum=openai-chat"`
Model string `json:"ai:model,omitempty"`
ThinkingLevel string `json:"ai:thinkinglevel,omitempty" jsonschema:"enum=low,enum=medium,enum=high"`
Expand Down
1 change: 1 addition & 0 deletions schema/waveai.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
"wave",
"google",
"openrouter",
"nanogpt",
"openai",
"azure",
"azure-legacy",
Expand Down