Friday, June 27, 2025

Create a generative AI app that uses your own data

https://microsoftlearning.github.io/mslearn-ai-studio/Instructions/04-Use-own-data.html

C#:
// rm -r mslearn-ai-foundry -f
// git clone https://github.com/microsoftlearning/mslearn-ai-studio mslearn-ai-foundry
// cd mslearn-ai-foundry/labfiles/rag-app/c-sharp
// dotnet add package Azure.AI.OpenAI
// dotnet run

using System;
using Azure;
using System.IO;
using System.Text;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using Azure.AI.OpenAI;
using System.ClientModel;
using Azure.AI.OpenAI.Chat;
using OpenAI.Chat;

namespace rag_app
{
    class Program
    {
        static void Main(string[] args)
        {
            // Clear the console
            Console.Clear();

            try
            {
// {
//     "OPEN_AI_ENDPOINT": "https://ai-myhub1588559212155.openai.azure.com/",
//     "OPEN_AI_KEY": "31IQTmnEDSqTsGIrEighShdn3VJrFdVF78JD9fgBiPHrcjVy0aG2JQQJ99BF
        ACHYHv6XJ3w3AAAAACOGXGSQ",
//     "CHAT_MODEL": "gpt-4o",
//     "EMBEDDING_MODEL": "text-embedding-ada-002",
//     "SEARCH_ENDPOINT": "https://rg1aisearchservice1.search.windows.net",
//     "SEARCH_KEY": "3JfXmon3dnBbi9UDymp3kmO5fa1cPdGiQDiNG9xjcTAzSeBX8Wkm",
//     "INDEX_NAME": "brochures-index"
// }

                // Get config settings
                IConfigurationBuilder builder = new ConfigurationBuilder()
                .AddJsonFile("appsettings.json");
                IConfigurationRoot configuration = builder.Build();
                string open_ai_endpoint = configuration["OPEN_AI_ENDPOINT"];
                string open_ai_key = configuration["OPEN_AI_KEY"];
                string chat_model = configuration["CHAT_MODEL"];
                string embedding_model = configuration["EMBEDDING_MODEL"];
                string search_url = configuration["SEARCH_ENDPOINT"];
                string search_key = configuration["SEARCH_KEY"];
                string index_name = configuration["INDEX_NAME"];

                // Get an Azure OpenAI chat client
                AzureOpenAIClient azureClient = new(
                    new Uri(open_ai_endpoint),
                    new AzureKeyCredential(open_ai_key));
                ChatClient chatClient = azureClient.GetChatClient(chat_model);


                // Initialize prompt with system message
                var prompt = new List<ChatMessage>()
                {
                    new SystemChatMessage("You are a travel assistant that provides information on travel services available from Margie's Travel.")
                };

                // Loop until the user types 'quit'
                string input_text = "";
                while (input_text.ToLower() != "quit")
                {
                    // Get user input
                    Console.WriteLine("Enter the prompt (or type 'quit' to exit):");
                    input_text = Console.ReadLine();

                    if (input_text.ToLower() != "quit")
                    {
                        // Add the user input message to the prompt
                        prompt.Add(new UserChatMessage(input_text));

                        // (DataSource is in preview and subject to breaking changes)
                        #pragma warning disable AOAI001

                        // Additional parameters to apply the RAG pattern using the AI Search index
                        ChatCompletionOptions options = new();
                        options.AddDataSource(new AzureSearchChatDataSource()
                        {
                            // The following params are used to search the index
                            Endpoint = new Uri(search_url),
                            IndexName = index_name,
                            Authentication = DataSourceAuthentication.FromApiKey(search_key),
                            // The following params are used to vectorize the query
                            QueryType = "vector",
                            VectorizationSource = DataSourceVectorizer.FromDeploymentName(embedding_model),
                        });

                        // Submit the prompt with the data source options and display the response
                        ChatCompletion completion = chatClient.CompleteChat(prompt, options);
                        var completionText = completion.Content[0].Text;
                        Console.WriteLine(completionText);

                        // Add the response to the chat history
                        prompt.Add(new AssistantChatMessage(completionText));

                        #pragma warning restore AOAI001
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
    }
}


Output:


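For anyone curious what those AddDataSource options actually send: they serialize into a data_sources array in the chat completions request body, which is the same structure the Python version below passes through extra_body. The following is a minimal sketch (not part of the lab) of that request made directly against the REST endpoint with the requests package; it assumes the same settings, loaded here from the .env file the Python app uses, and "Where can I stay in New York?" is just a sample question.

# pip install requests python-dotenv
import os
import requests
from dotenv import load_dotenv

load_dotenv()

# The deployment name goes in the URL; the api-key header carries the Azure OpenAI key
url = (f"{os.getenv('OPEN_AI_ENDPOINT').rstrip('/')}"
       f"/openai/deployments/{os.getenv('CHAT_MODEL')}/chat/completions"
       "?api-version=2024-12-01-preview")

body = {
    "messages": [
        {"role": "system", "content": "You are a travel assistant that provides information on travel services available from Margie's Travel."},
        {"role": "user", "content": "Where can I stay in New York?"}
    ],
    # Same shape the C# AddDataSource options (and the Python rag_params below) produce
    "data_sources": [
        {
            "type": "azure_search",
            "parameters": {
                "endpoint": os.getenv("SEARCH_ENDPOINT"),
                "index_name": os.getenv("INDEX_NAME"),
                "authentication": {"type": "api_key", "key": os.getenv("SEARCH_KEY")},
                "query_type": "vector",
                "embedding_dependency": {
                    "type": "deployment_name",
                    "deployment_name": os.getenv("EMBEDDING_MODEL")
                }
            }
        }
    ]
}

response = requests.post(url, headers={"api-key": os.getenv("OPEN_AI_KEY")}, json=body)
response.raise_for_status()
print(response.json()["choices"][0]["message"]["content"])
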
Python:
# rm -r mslearn-ai-foundry -f
# git clone https://github.com/microsoftlearning/mslearn-ai-studio mslearn-ai-foundry
# cd mslearn-ai-foundry/labfiles/rag-app/python
# python -m venv labenv
# ./labenv/bin/Activate.ps1
# pip install -r requirements.txt openai
# code rag-app.py
# python rag-app.py

import os
from dotenv import load_dotenv
from openai import AzureOpenAI

def main():
    # Clear the console
    os.system('cls' if os.name == 'nt' else 'clear')

# OPEN_AI_ENDPOINT="https://ai-myhub1588559212155.openai.azure.com/"
# OPEN_AI_KEY="31IQTmnEDSqTsGIrEighShdn3VJrFdVF78JD9fgBiPHrcjVy0aG2JQQJ99BFACHYHv6XJ3w3AAAAACOGXGSQ"
# CHAT_MODEL="gpt-4o"
# EMBEDDING_MODEL="text-embedding-ada-002"
# SEARCH_ENDPOINT="https://rg1aisearchservice1.search.windows.net"
# SEARCH_KEY="3JfXmon3dnBbi9UDymp3kmO5fa1cPdGiQDiNG9xjcTAzSeBX8Wkm"
# INDEX_NAME="brochures-index"

    try:
        # Get configuration settings
        load_dotenv()
        open_ai_endpoint = os.getenv("OPEN_AI_ENDPOINT")
        open_ai_key = os.getenv("OPEN_AI_KEY")
        chat_model = os.getenv("CHAT_MODEL")
        embedding_model = os.getenv("EMBEDDING_MODEL")
        search_url = os.getenv("SEARCH_ENDPOINT")
        search_key = os.getenv("SEARCH_KEY")
        index_name = os.getenv("INDEX_NAME")

        # Get an Azure OpenAI chat client
        chat_client = AzureOpenAI(
            api_version = "2024-12-01-preview",
            azure_endpoint = open_ai_endpoint,
            api_key = open_ai_key
        )

        # Initialize prompt with system message
        prompt = [
            {"role": "system", "content": "You are a travel assistant that provides
            information on travel services available from Margie's Travel."}
        ]

        # Loop until the user types 'quit'
        while True:
            # Get input text
            input_text = input("Enter the prompt (or type 'quit' to exit): ")
            if input_text.lower() == "quit":
                break
            if len(input_text) == 0:
                print("Please enter a prompt.")
                continue

            # Add the user input message to the prompt
            prompt.append({"role": "user", "content": input_text})

            # Additional parameters to apply RAG pattern using the AI Search index
            rag_params = {
                "data_sources": [
                    {
                        # The following params are used to search the index
                        "type": "azure_search",
                        "parameters": {
                            "endpoint": search_url,
                            "index_name": index_name,
                            "authentication": {
                                "type": "api_key",
                                "key": search_key,
                            },
                            # The following params are used to vectorize the query
                            "query_type": "vector",
                            "embedding_dependency": {
                                "type": "deployment_name",
                                "deployment_name": embedding_model,
                            },
                        }
                    }
                ],
            }

            # Submit the prompt with the data source options and display the response
            response = chat_client.chat.completions.create(
                model=chat_model,
                messages=prompt,
                extra_body=rag_params
            )
            completion = response.choices[0].message.content
            print(completion)

            # Add the response to the chat history
            prompt.append({"role": "assistant", "content": completion})

    except Exception as ex:
        print(ex)

if __name__ == '__main__':
    main()

Output:
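
One optional extension, not in the lab script: when a request includes data_sources, the service attaches a context block with the citations it retrieved from the index to the response message. The helper below is only a sketch; print_citations is a made-up name, and it assumes the openai SDK keeps that extra field on the message object (field names per the documented Azure OpenAI "on your data" response format). Calling it right after print(completion) in the loop above would list the brochure documents each answer was grounded on.

def print_citations(response):
    # Assumption: Azure adds a "context" field (with "citations") to the message when
    # data_sources is used, and model_dump() exposes it because the SDK keeps unknown
    # response fields.
    context = response.choices[0].message.model_dump().get("context", {})
    for citation in context.get("citations", []):
        print("  [source]", citation.get("title") or citation.get("filepath"))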


