
[FirebaseAI] Add basic logic to the sample #1256

Merged
merged 2 commits on May 20, 2025

firebaseai/testapp/Assets/Firebase/Sample/FirebaseAI/UIHandler.cs (72 changes: 66 additions & 6 deletions)
@@ -14,6 +14,7 @@

namespace Firebase.Sample.FirebaseAI {
using Firebase;
using Firebase.AI;
using Firebase.Extensions;
using System;
using System.Collections;
@@ -43,10 +44,6 @@ protected virtual void Start() {
UIEnabled = true;
}

void PlaceholderFunction() {
DebugLog("Placeholder Function called");
}

protected void InitializeFirebase() {
FirebaseApp.CheckAndFixDependenciesAsync().ContinueWithOnMainThread(task => {
dependencyStatus = task.Result;
@@ -59,6 +56,47 @@ protected void InitializeFirebase() {
});
}

public string ModelName = "gemini-2.0-flash";

private int backendSelection = 0;
private string[] backendChoices = new string[] { "Google AI Backend", "Vertex AI Backend" };
private GenerativeModel GetModel() {
var backend = backendSelection == 0
? FirebaseAI.Backend.GoogleAI()
: FirebaseAI.Backend.VertexAI();

return FirebaseAI.GetInstance(backend).GetGenerativeModel(ModelName);
}

// Send a single message to the Generative Model, without any history.
async Task SendSingleMessage(string message) {
DebugLog("Sending message to model: " + message);
var response = await GetModel().GenerateContentAsync(message);
DebugLog("Response: " + response.Text);
}

private Chat chatSession = null;
void StartChatSession() {
chatSession = GetModel().StartChat();
}

void CloseChatSession() {
chatSession = null;
}

// Send a message to the ongoing Chat with the Generative Model, which
// will preserve the history.
async Task SendChatMessage(string message) {
if (chatSession == null) {
DebugLog("Missing Chat Session");
return;
}

DebugLog("Sending chat message: " + message);
var response = await chatSession.SendMessageAsync(message);
DebugLog("Chat response: " + response.Text);
}

// Exit if escape (or back, on mobile) is pressed.
protected virtual void Update() {
if (Input.GetKeyDown(KeyCode.Escape)) {
@@ -86,15 +124,37 @@ void GUIDisplayLog() {
GUILayout.EndScrollView();
}

private string textfieldString = "Hello";

// Render the buttons and other controls.
void GUIDisplayControls() {
if (UIEnabled) {
controlsScrollViewVector = GUILayout.BeginScrollView(controlsScrollViewVector);

GUILayout.BeginVertical();

if (GUILayout.Button("Placeholder Button")) {
PlaceholderFunction();
if (chatSession == null) {
backendSelection = GUILayout.SelectionGrid(backendSelection, backendChoices, backendChoices.Length);

textfieldString = GUILayout.TextField(textfieldString);

if (GUILayout.Button("Send Single Message")) {
_ = SendSingleMessage(textfieldString);
}

if (GUILayout.Button("Start Chat Session")) {
StartChatSession();
}
} else {
textfieldString = GUILayout.TextField(textfieldString);

if (GUILayout.Button("Send Chat Message")) {
_ = SendChatMessage(textfieldString);
}

if (GUILayout.Button("Close Chat Session")) {
CloseChatSession();
}
}

GUILayout.EndVertical();
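
For readers skimming the hunks above, here is a minimal, self-contained sketch of the flow this PR adds, using only the Firebase.AI calls that appear in the diff. The wrapper class name and the useVertexAI parameter are illustrative, not part of the PR; the MonoBehaviour and IMGUI plumbing of UIHandler.cs is omitted.

    using Firebase.AI;
    using System.Threading.Tasks;

    // Illustrative consolidation of the new sample logic in UIHandler.cs.
    public class FirebaseAISampleSketch {
      public string ModelName = "gemini-2.0-flash";
      private Chat chatSession = null;

      // Build a GenerativeModel against the selected backend.
      private GenerativeModel GetModel(bool useVertexAI) {
        var backend = useVertexAI
            ? FirebaseAI.Backend.VertexAI()
            : FirebaseAI.Backend.GoogleAI();
        return FirebaseAI.GetInstance(backend).GetGenerativeModel(ModelName);
      }

      // One-shot request: no history is kept between calls.
      public async Task<string> SendSingleMessage(string message, bool useVertexAI = false) {
        var response = await GetModel(useVertexAI).GenerateContentAsync(message);
        return response.Text;
      }

      // Chat request: StartChat() returns a session that accumulates history,
      // so follow-up messages can build on earlier turns.
      public async Task<string> SendChatMessage(string message, bool useVertexAI = false) {
        if (chatSession == null) {
          chatSession = GetModel(useVertexAI).StartChat();
        }
        var response = await chatSession.SendMessageAsync(message);
        return response.Text;
      }
    }
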
Second changed file (the sample's automated tests; file path not shown):
@@ -157,7 +157,7 @@ private bool ValidProbability(float value) {
}

// The model name to use for the tests.
private readonly string ModelName = "gemini-2.0-flash";
private readonly string TestModelName = "gemini-2.0-flash";

private FirebaseAI GetFirebaseAI(Backend backend) {
return backend switch {
@@ -170,7 +170,7 @@ private FirebaseAI GetFirebaseAI(Backend backend) {

// Get a basic version of the GenerativeModel to test against.
private GenerativeModel CreateGenerativeModel(Backend backend) {
return GetFirebaseAI(backend).GetGenerativeModel(ModelName);
return GetFirebaseAI(backend).GetGenerativeModel(TestModelName);
}

// Test if it can create the GenerativeModel.
@@ -273,7 +273,7 @@ async Task TestBasicImage(Backend backend) {
async Task TestModelOptions(Backend backend) {
// Note that most of these settings are hard to reliably verify, so as
// long as the call works we are generally happy.
var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
generationConfig: new GenerationConfig(
temperature: 0.4f,
topP: 0.4f,
@@ -315,7 +315,7 @@ async Task TestModelOptions(Backend backend) {
async Task TestMultipleCandidates(Backend backend) {
var genConfig = new GenerationConfig(candidateCount: 2);

var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
generationConfig: genConfig
);

@@ -387,7 +387,7 @@ private GenerativeModel CreateGenerativeModelWithBasicFunctionCall(
}) }
}));

return GetFirebaseAI(backend).GetGenerativeModel(ModelName,
return GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
tools: new Tool[] { tool },
toolConfig: toolConfig
);
@@ -439,7 +439,7 @@ async Task TestFunctionCallingNone(Backend backend) {
// Test if setting a response schema with an enum works.
async Task TestEnumSchemaResponse(Backend backend) {
string enumValue = "MyTestEnum";
var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
generationConfig: new GenerationConfig(
responseMimeType: "text/x.enum",
responseSchema: Schema.Enum(new string[] { enumValue })));
@@ -452,7 +452,7 @@ async Task TestEnumSchemaResponse(Backend backend) {

// Test if setting a response schema with anyOf works.
async Task TestAnyOfSchemaResponse(Backend backend) {
var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
generationConfig: new GenerationConfig(
responseMimeType: "application/json",
responseSchema: Schema.Array(
@@ -525,7 +525,7 @@ async Task TestChatFunctionCalling(Backend backend) {
new Dictionary<string, Schema>() {
{ "input", Schema.String("Input string") },
}));
var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
tools: new Tool[] { tool }
);
var chat = model.StartChat();
@@ -619,7 +619,7 @@ async Task TestChatBasicTextStream(Backend backend) {
// Test if calling CountTokensAsync works as expected.
async Task TestCountTokens(Backend backend) {
// Include some additional settings, since they are used in the call.
var model = GetFirebaseAI(backend).GetGenerativeModel(ModelName,
var model = GetFirebaseAI(backend).GetGenerativeModel(TestModelName,
generationConfig: new GenerationConfig(temperature: 0.8f),
systemInstruction: ModelContent.Text("This is a test SystemInstruction")
);