|
package main

import (
	"bufio"
	"context"
	"fmt"
	"log"
	"os"
	"strings"

	"github.com/Azure/AppConfiguration-GoProvider/azureappconfiguration"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	openai "github.com/openai/openai-go"
	"github.com/openai/openai-go/azure"
)
| 15 | + |
// AIConfig aggregates all AI-related settings loaded from Azure App Configuration.
type AIConfig struct {
	ChatCompletion ChatCompletion
	AzureOpenAI    AzureOpenAI
}
| 20 | + |
// ChatCompletion holds the chat-completion request parameters.
// The json tags match the shape of the value stored in App Configuration.
type ChatCompletion struct {
	Model       string    `json:"model"`
	Messages    []Message `json:"messages"`
	MaxTokens   int64     `json:"max_tokens"`
	Temperature float64   `json:"temperature"`
	TopP        float64   `json:"top_p"`
}
| 28 | + |
// AzureOpenAI holds connection settings for the Azure OpenAI service.
// APIKey is optional: when empty, the DefaultAzureCredential is used instead
// (see the client construction in main).
type AzureOpenAI struct {
	Endpoint   string
	APIVersion string
	APIKey     string
}
| 34 | + |
// Message is one configured chat message (e.g. a system prompt); Role is
// matched against "system"/"user"/"assistant" in getAIResponse.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}
| 39 | + |
| 40 | +var aiConfig AIConfig |
| 41 | +var tokenCredential, _ = azidentity.NewDefaultAzureCredential(nil) |
| 42 | + |
| 43 | +func main() { |
| 44 | + configProvider, err := loadAzureAppConfiguration(context.Background()) |
| 45 | + if err != nil { |
| 46 | + log.Fatal("Error loading Azure App Configuration:", err) |
| 47 | + } |
| 48 | + |
| 49 | + // Configure chat completion with AI configuration |
| 50 | + configProvider.Unmarshal(&aiConfig, &azureappconfiguration.ConstructionOptions{Separator: ":"}) |
| 51 | + |
| 52 | + // Register a callback to refresh AI configuration on changes |
| 53 | + configProvider.OnRefreshSuccess(func() { |
| 54 | + configProvider.Unmarshal(&aiConfig, &azureappconfiguration.ConstructionOptions{Separator: ":"}) |
| 55 | + }) |
| 56 | + |
| 57 | + // Create a chat client using API key if available, otherwise use the DefaultAzureCredential |
| 58 | + var openAIClient openai.Client |
| 59 | + if aiConfig.AzureOpenAI.APIKey != "" { |
| 60 | + openAIClient = openai.NewClient(azure.WithAPIKey(aiConfig.AzureOpenAI.APIKey), azure.WithEndpoint(aiConfig.AzureOpenAI.Endpoint, aiConfig.AzureOpenAI.APIVersion)) |
| 61 | + } else { |
| 62 | + openAIClient = openai.NewClient(azure.WithEndpoint(aiConfig.AzureOpenAI.Endpoint, aiConfig.AzureOpenAI.APIVersion), azure.WithTokenCredential(tokenCredential)) |
| 63 | + } |
| 64 | + |
| 65 | + // Initialize chat conversation |
| 66 | + var chatConversation []openai.ChatCompletionMessageParamUnion |
| 67 | + fmt.Println("Chat started! What's on your mind?") |
| 68 | + reader := bufio.NewReader(os.Stdin) |
| 69 | + |
| 70 | + for { |
| 71 | + // Refresh the configuration from Azure App Configuration |
| 72 | + configProvider.Refresh(context.Background()) |
| 73 | + |
| 74 | + // Get user input |
| 75 | + fmt.Print("You: ") |
| 76 | + userInput, _ := reader.ReadString('\n') |
| 77 | + |
| 78 | + // Exit if user input is empty |
| 79 | + if userInput == "" { |
| 80 | + fmt.Println("Exiting Chat. Goodbye!") |
| 81 | + break |
| 82 | + } |
| 83 | + |
| 84 | + // Add user message to chat conversation |
| 85 | + chatConversation = append(chatConversation, openai.UserMessage(userInput)) |
| 86 | + |
| 87 | + // Get AI response and add it to chat conversation |
| 88 | + response, _ := getAIResponse(openAIClient, chatConversation) |
| 89 | + fmt.Printf("AI: %s\n", response) |
| 90 | + chatConversation = append(chatConversation, openai.AssistantMessage(response)) |
| 91 | + |
| 92 | + fmt.Println() |
| 93 | + } |
| 94 | +} |
| 95 | + |
| 96 | +// Load configuration from Azure App Configuration |
| 97 | +func loadAzureAppConfiguration(ctx context.Context) (*azureappconfiguration.AzureAppConfiguration, error) { |
| 98 | + endpoint := os.Getenv("AZURE_APPCONFIGURATION_ENDPOINT") |
| 99 | + if endpoint == "" { |
| 100 | + return nil, fmt.Errorf("AZURE_APPCONFIGURATION_ENDPOINT environment variable is not set") |
| 101 | + } |
| 102 | + |
| 103 | + authOptions := azureappconfiguration.AuthenticationOptions{ |
| 104 | + Endpoint: endpoint, |
| 105 | + Credential: tokenCredential, |
| 106 | + } |
| 107 | + |
| 108 | + options := &azureappconfiguration.Options{ |
| 109 | + Selectors: []azureappconfiguration.Selector{ |
| 110 | + // Load all keys that start with "ChatApp:" and have no label |
| 111 | + { |
| 112 | + KeyFilter: "ChatApp:*", |
| 113 | + }, |
| 114 | + }, |
| 115 | + TrimKeyPrefixes: []string{"ChatApp:"}, |
| 116 | + // Reload configuration if any selected key-values have changed. |
| 117 | + // Use the default refresh interval of 30 seconds. It can be overridden via RefreshOptions.Interval |
| 118 | + RefreshOptions: azureappconfiguration.KeyValueRefreshOptions{ |
| 119 | + Enabled: true, |
| 120 | + }, |
| 121 | + KeyVaultOptions: azureappconfiguration.KeyVaultOptions{ |
| 122 | + Credential: tokenCredential, |
| 123 | + }, |
| 124 | + } |
| 125 | + |
| 126 | + return azureappconfiguration.Load(ctx, authOptions, options) |
| 127 | +} |
| 128 | + |
| 129 | +func getAIResponse(openAIClient openai.Client, chatConversation []openai.ChatCompletionMessageParamUnion) (string, error) { |
| 130 | + var completionMessages []openai.ChatCompletionMessageParamUnion |
| 131 | + |
| 132 | + for _, msg := range aiConfig.ChatCompletion.Messages { |
| 133 | + switch msg.Role { |
| 134 | + case "system": |
| 135 | + completionMessages = append(completionMessages, openai.SystemMessage(msg.Content)) |
| 136 | + case "user": |
| 137 | + completionMessages = append(completionMessages, openai.UserMessage(msg.Content)) |
| 138 | + case "assistant": |
| 139 | + completionMessages = append(completionMessages, openai.AssistantMessage(msg.Content)) |
| 140 | + } |
| 141 | + } |
| 142 | + |
| 143 | + // Add the chat conversation history |
| 144 | + completionMessages = append(completionMessages, chatConversation...) |
| 145 | + |
| 146 | + // Create chat completion parameters |
| 147 | + params := openai.ChatCompletionNewParams{ |
| 148 | + Messages: completionMessages, |
| 149 | + Model: aiConfig.ChatCompletion.Model, |
| 150 | + MaxTokens: openai.Int(aiConfig.ChatCompletion.MaxTokens), |
| 151 | + Temperature: openai.Float(aiConfig.ChatCompletion.Temperature), |
| 152 | + TopP: openai.Float(aiConfig.ChatCompletion.TopP), |
| 153 | + } |
| 154 | + |
| 155 | + if completion, err := openAIClient.Chat.Completions.New(context.Background(), params); err != nil { |
| 156 | + return "", err |
| 157 | + } else { |
| 158 | + return completion.Choices[0].Message.Content, nil |
| 159 | + } |
| 160 | +} |
0 commit comments