diff --git a/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandler.cs b/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandler.cs
index 69a0aa43..53dc3da5 100644
--- a/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandler.cs
+++ b/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandler.cs
@@ -14,6 +14,7 @@
 
 namespace Firebase.Sample.FirebaseAI {
   using Firebase;
+  using Firebase.AI;
   using Firebase.Extensions;
   using System;
   using System.Collections;
@@ -43,10 +44,6 @@ protected virtual void Start() {
       UIEnabled = true;
     }
 
-    void PlaceholderFunction() {
-      DebugLog("Placeholder Function called");
-    }
-
     protected void InitializeFirebase() {
       FirebaseApp.CheckAndFixDependenciesAsync().ContinueWithOnMainThread(task => {
         dependencyStatus = task.Result;
@@ -59,6 +56,47 @@ protected void InitializeFirebase() {
       });
     }
 
+    public string ModelName = "gemini-2.0-flash";
+
+    private int backendSelection = 0;
+    private string[] backendChoices = new string[] { "Google AI Backend", "Vertex AI Backend" };
+    private GenerativeModel GetModel() {
+      var backend = backendSelection == 0
+          ? FirebaseAI.Backend.GoogleAI()
+          : FirebaseAI.Backend.VertexAI();
+
+      return FirebaseAI.GetInstance(backend).GetGenerativeModel(ModelName);
+    }
+
+    // Send a single message to the Generative Model, without any history.
+    async Task SendSingleMessage(string message) {
+      DebugLog("Sending message to model: " + message);
+      var response = await GetModel().GenerateContentAsync(message);
+      DebugLog("Response: " + response.Text);
+    }
+
+    private Chat chatSession = null;
+    void StartChatSession() {
+      chatSession = GetModel().StartChat();
+    }
+
+    void CloseChatSession() {
+      chatSession = null;
+    }
+
+    // Send a message to the ongoing Chat with the Generative Model, which
+    // will preserve the history.
+    async Task SendChatMessage(string message) {
+      if (chatSession == null) {
+        DebugLog("Missing Chat Session");
+        return;
+      }
+
+      DebugLog("Sending chat message: " + message);
+      var response = await chatSession.SendMessageAsync(message);
+      DebugLog("Chat response: " + response.Text);
+    }
+
     // Exit if escape (or back, on mobile) is pressed.
     protected virtual void Update() {
       if (Input.GetKeyDown(KeyCode.Escape)) {
@@ -86,6 +124,8 @@ void GUIDisplayLog() {
       GUILayout.EndScrollView();
     }
 
+    private string textfieldString = "Hello";
+
     // Render the buttons and other controls.
    void GUIDisplayControls() {
      if (UIEnabled) {
@@ -93,8 +133,28 @@ void GUIDisplayControls() {
 
         GUILayout.BeginVertical();
 
-        if (GUILayout.Button("Placeholder Button")) {
-          PlaceholderFunction();
+        if (chatSession == null) {
+          backendSelection = GUILayout.SelectionGrid(backendSelection, backendChoices, backendChoices.Length);
+
+          textfieldString = GUILayout.TextField(textfieldString);
+
+          if (GUILayout.Button("Send Single Message")) {
+            _ = SendSingleMessage(textfieldString);
+          }
+
+          if (GUILayout.Button("Start Chat Session")) {
+            StartChatSession();
+          }
+        } else {
+          textfieldString = GUILayout.TextField(textfieldString);
+
+          if (GUILayout.Button("Send Chat Message")) {
+            _ = SendChatMessage(textfieldString);
+          }
+
+          if (GUILayout.Button("Close Chat Session")) {
+            CloseChatSession();
+          }
         }
 
         GUILayout.EndVertical();
diff --git a/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandlerAutomated.cs b/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandlerAutomated.cs
index 534b5a1b..b9fd553f 100644
--- a/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandlerAutomated.cs
+++ b/firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandlerAutomated.cs
@@ -157,7 +157,7 @@ private bool ValidProbability(float value) {
     }
 
     // The model name to use for the tests.
-    private readonly string ModelName = "gemini-2.0-flash";
+    private readonly string TestModelName = "gemini-2.0-flash";
 
     private FirebaseAI GetFirebaseAI(Backend backend) {
       return backend switch {
@@ -170,7 +170,7 @@ private FirebaseAI GetFirebaseAI(Backend backend) {
 
     // Get a basic version of the GenerativeModel to test against.
     private GenerativeModel CreateGenerativeModel(Backend backend) {
-      return GetFirebaseAI(backend).GetGenerativeModel(ModelName);
+      return GetFirebaseAI(backend).GetGenerativeModel(TestModelName);
     }
 
     // Test if it can create the GenerativeModel.
@@ -273,7 +273,7 @@ async Task TestBasicImage(Backend backend) {
     async Task TestModelOptions(Backend backend) {
       // Note that most of these settings are hard to reliably verify, so as
      // long as the call works we are generally happy.
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         generationConfig: new GenerationConfig(
           temperature: 0.4f,
           topP: 0.4f,
@@ -315,7 +315,7 @@ async Task TestModelOptions(Backend backend) {
     async Task TestMultipleCandidates(Backend backend) {
       var genConfig = new GenerationConfig(candidateCount: 2);
 
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         generationConfig: genConfig
       );
 
@@ -387,7 +387,7 @@ private GenerativeModel CreateGenerativeModelWithBasicFunctionCall(
             })
           }
         }));
-      return GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      return GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         tools: new Tool[] { tool },
         toolConfig: toolConfig
       );
@@ -439,7 +439,7 @@ async Task TestFunctionCallingNone(Backend backend) {
     // Test if setting a response schema with an enum works.
     async Task TestEnumSchemaResponse(Backend backend) {
       string enumValue = "MyTestEnum";
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         generationConfig: new GenerationConfig(
           responseMimeType: "text/x.enum",
           responseSchema: Schema.Enum(new string[] { enumValue })));
@@ -452,7 +452,7 @@ async Task TestEnumSchemaResponse(Backend backend) {
 
     // Test if setting a response schema with an enum works.
    async Task TestAnyOfSchemaResponse(Backend backend) {
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         generationConfig: new GenerationConfig(
           responseMimeType: "application/json",
           responseSchema: Schema.Array(
@@ -525,7 +525,7 @@ async Task TestChatFunctionCalling(Backend backend) {
           new Dictionary<string, Schema>() {
             { "input", Schema.String("Input string") },
           }));
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         tools: new Tool[] { tool }
       );
       var chat = model.StartChat();
@@ -619,7 +619,7 @@ async Task TestChatBasicTextStream(Backend backend) {
     // Test if calling CountTokensAsync works as expected.
     async Task TestCountTokens(Backend backend) {
       // Include some additional settings, since they are used in the call.
-      var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
+      var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
         generationConfig: new GenerationConfig(temperature: 0.8f),
         systemInstruction: ModelContent.Text("This is a test SystemInstruction")
       );