Skip to main content

AI Model Integration

DEVELOPER Advanced

Integrate AI models and toolsets into your EZ-Console application.

Overview

EZ-Console provides a flexible AI integration system that supports multiple AI providers, tool calling, and streaming responses. You can use built-in providers like OpenAI or register custom AI models.

Built-in AI Providers

OpenAI Provider

EZ-Console includes built-in support for OpenAI-compatible APIs.

Configuration Fields:

  • api_key (required, password): Your OpenAI API key
  • model_id (required, string): Model identifier (e.g., gpt-4, gpt-3.5-turbo)
  • base_url (optional, string): Custom API endpoint URL
  • organization_id (optional, string): OpenAI organization ID

Creating AI Models

Via Admin Console

  1. Navigate to System Settings → AI Models
  2. Click Create AI Model
  3. Fill in configuration:
    • Name: Display name
    • Description: Model description
    • Provider: Select provider (e.g., OpenAI)
    • Config: Provider-specific configuration
    • Default: Set as default model
  4. Click Save

Via API

POST /api/ai/models
Content-Type: application/json
Authorization: Bearer <token>

{
  "name": "GPT-4 Model",
  "description": "Production GPT-4 model",
  "provider": "openai",
  "config": {
    "api_key": "sk-...",
    "model_id": "gpt-4",
    "base_url": "https://api.openai.com/v1"
  },
  "is_default": true
}

Using AI Models in Code

Get AI Model and Create Client

import (
	"context"
	"fmt"
	"io"

	"github.com/sven-victor/ez-console/pkg/clients/ai"
	"github.com/sven-victor/ez-console/pkg/model"
	"github.com/sven-victor/ez-console/pkg/service"
)

// useAIModel demonstrates the full lifecycle of a non-streaming chat:
// resolve the organization's default AI model, build a client for its
// provider, send one user message, and print every reply.
func useAIModel(ctx context.Context, svc *service.Service) error {
	// Resolve the default AI model configured for this organization.
	const organizationID = "org-123"
	aiModel, err := svc.GetDefaultAIModel(ctx, organizationID)
	if err != nil {
		return err
	}

	// Look up the client factory registered for the model's provider.
	factory, ok := ai.GetFactory(aiModel.Provider)
	if !ok {
		return fmt.Errorf("unsupported provider: %s", aiModel.Provider)
	}

	// Instantiate a client from the provider-specific stored config.
	client, err := factory.CreateClient(ctx, organizationID, aiModel.Config)
	if err != nil {
		return err
	}

	// Send a single user message.
	prompt := []ai.ChatMessage{{
		Role:    model.AIChatMessageRoleUser,
		Content: "Hello!",
	}}

	replies, err := client.CreateChat(ctx, prompt)
	if err != nil {
		return err
	}

	// Print the content of each response message.
	for _, reply := range replies {
		fmt.Println(reply.Content)
	}

	return nil
}

Streaming Chat Completion

// streamAIChat demonstrates consuming a streaming chat completion:
// events arrive incrementally and are dispatched by type until the
// provider signals end-of-stream with io.EOF.
func streamAIChat(ctx context.Context, client ai.AIClient) error {
	prompt := []ai.ChatMessage{{
		Role:    model.AIChatMessageRoleUser,
		Content: "Tell me a story",
	}}

	// Open the stream; Close releases the underlying connection.
	stream, err := client.CreateChatStream(ctx, prompt)
	if err != nil {
		return err
	}
	defer stream.Close()

	// Drain events until the stream is exhausted.
	for {
		event, err := stream.Recv(ctx)
		if err == io.EOF {
			// Normal end of stream.
			return nil
		}
		if err != nil {
			return err
		}

		switch event.EventType {
		case ai.EventTypeContent:
			// Incremental text delta — print it as it arrives.
			fmt.Print(event.Content)
		case ai.EventTypeToolCall:
			// The model requested one or more tool invocations.
			for _, call := range event.ToolCalls {
				fmt.Printf("Tool call: %s\n", call.Function.Name)
			}
		}
	}
}

Registering Custom AI Models

Step 1: Implement AIClient Interface

package customai

import (
"context"
"github.com/sven-victor/ez-console/pkg/clients/ai"
)

// CustomAIClient is a minimal ai.AIClient implementation backed by a
// custom HTTP API. Fields are populated by CustomAIClientFactory from
// the model's stored configuration.
type CustomAIClient struct {
	apiKey   string // secret API key used to authenticate requests
	endpoint string // base URL of the provider's API
	modelID  string // provider-side model identifier to invoke
}

// CreateChat implements the blocking half of the ai.AIClient interface:
// it sends the full conversation in messages to the provider and returns
// the assistant's reply messages. This example is a stub — replace the
// body with your provider's request/response logic.
func (c *CustomAIClient) CreateChat(
	ctx context.Context,
	messages []ai.ChatMessage,
	options ...ai.WithChatOptions,
) ([]ai.ChatMessage, error) {
	// Implement chat logic
	return nil, nil
}

// CreateChatStream implements the streaming half of the ai.AIClient
// interface: it opens a streaming completion for messages and returns
// an ai.ChatStream the caller reads with Recv and must Close.
// This example is a stub — replace the body with real streaming logic.
func (c *CustomAIClient) CreateChatStream(
	ctx context.Context,
	messages []ai.ChatMessage,
	options ...ai.WithChatOptions,
) (ai.ChatStream, error) {
	// Implement streaming chat logic
	return nil, nil
}

Step 2: Implement AIClientFactory

type CustomAIClientFactory struct{}

// GetName returns the human-readable provider name shown in the admin
// console's provider selector.
func (f *CustomAIClientFactory) GetName() string {
	return "Custom AI"
}

// GetDescription returns a short description of the provider, displayed
// alongside the name in the admin console.
func (f *CustomAIClientFactory) GetDescription() string {
	return "Custom AI provider"
}

// GetConfigFields declares the configuration schema the admin console
// renders for this provider: a required secret API key and a required
// endpoint URL.
func (f *CustomAIClientFactory) GetConfigFields() []util.ConfigField {
	apiKeyField := util.ConfigField{
		Name:        "api_key",
		DisplayName: "API Key",
		Type:        util.FieldTypePassword, // rendered masked in the UI
		Required:    true,
	}
	endpointField := util.ConfigField{
		Name:        "endpoint",
		DisplayName: "Endpoint",
		Type:        util.FieldTypeString,
		Required:    true,
	}
	return []util.ConfigField{apiKeyField, endpointField}
}

// configFieldError reports a missing or mistyped configuration field.
type configFieldError struct{ field string }

func (e *configFieldError) Error() string {
	return "custom_ai: missing or invalid config field: " + e.field
}

// CreateClient builds a CustomAIClient from the model's stored config.
// The original example used bare type assertions (config["api_key"].(string)),
// which panic when a key is absent or not a string; comma-ok checks turn
// that into a returned error instead. The optional model_id key is also
// propagated to the client, since the struct declares the field.
func (f *CustomAIClientFactory) CreateClient(
	ctx context.Context,
	organizationID string,
	config map[string]interface{},
) (ai.AIClient, error) {
	apiKey, ok := config["api_key"].(string)
	if !ok || apiKey == "" {
		return nil, &configFieldError{field: "api_key"}
	}
	endpoint, ok := config["endpoint"].(string)
	if !ok || endpoint == "" {
		return nil, &configFieldError{field: "endpoint"}
	}

	client := &CustomAIClient{
		apiKey:   apiKey,
		endpoint: endpoint,
	}
	// model_id is optional; populate it when provided.
	if modelID, ok := config["model_id"].(string); ok {
		client.modelID = modelID
	}
	return client, nil
}

Step 3: Register Factory

// Register the factory under the "custom_ai" provider key at package
// load time, so AI models created in the admin console can select it.
func init() {
	ai.RegisterFactory("custom_ai", &CustomAIClientFactory{})
}

Toolsets

Built-in Toolsets

EZ-Console includes built-in toolsets for common operations:

  • Database queries
  • File operations
  • API calls
  • Email sending

Using Toolsets

import "github.com/sven-victor/ez-console/pkg/clients/ai"

// Register toolset
// Register toolset
// Define a weather-lookup tool the model can call. Parameters is a
// JSON-Schema-style description of the arguments the model must supply;
// Handler runs server-side when the model invokes the tool.
toolset := ai.Toolset{
	Name:        "get_weather",
	Description: "Get current weather",
	Function: ai.Function{
		Name:        "get_weather",
		Description: "Get weather for a location",
		Parameters: map[string]interface{}{
			"type": "object",
			"properties": map[string]interface{}{
				"location": map[string]interface{}{
					"type":        "string",
					"description": "Location name",
				},
			},
			"required": []string{"location"},
		},
	},
	// NOTE(review): the bare type assertion below panics if "location"
	// is absent or not a string — prefer a comma-ok check in production.
	Handler: func(ctx context.Context, args map[string]interface{}) (interface{}, error) {
		location := args["location"].(string)
		// Get weather
		return getWeather(location), nil
	},
}

ai.RegisterToolset(toolset)

Best Practices

1. Handle Errors Gracefully

responses, err := client.CreateChat(ctx, messages)
if err != nil {
// Log error
log.Error("AI chat failed", "error", err)
// Return user-friendly error
return fmt.Errorf("failed to get AI response")
}

2. Use Context for Timeouts

ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()

responses, err := client.CreateChat(ctx, messages)

3. Stream for Long Responses

// ✅ Good: Use streaming for long responses
stream, err := client.CreateChatStream(ctx, messages)

// ❌ Bad: Blocking call for long responses
responses, err := client.CreateChat(ctx, messages)

Need help? Ask in GitHub Discussions.