diff --git a/copy_mock_google_services_json.sh b/copy_mock_google_services_json.sh index c33f738b3..0970405b1 100755 --- a/copy_mock_google_services_json.sh +++ b/copy_mock_google_services_json.sh @@ -14,6 +14,7 @@ cp mock-google-services.json crash/app/google-services.json cp mock-google-services.json database/app/google-services.json cp mock-google-services.json dataconnect/app/google-services.json cp mock-google-services.json dynamiclinks/app/google-services.json +cp mock-google-services.json firebase-ai/app/google-services.json cp mock-google-services.json firestore/app/google-services.json cp mock-google-services.json functions/app/google-services.json cp mock-google-services.json inappmessaging/app/google-services.json diff --git a/firebase-ai/.gitignore b/firebase-ai/.gitignore new file mode 100644 index 000000000..aa724b770 --- /dev/null +++ b/firebase-ai/.gitignore @@ -0,0 +1,15 @@ +*.iml +.gradle +/local.properties +/.idea/caches +/.idea/libraries +/.idea/modules.xml +/.idea/workspace.xml +/.idea/navEditor.xml +/.idea/assetWizardSettings.xml +.DS_Store +/build +/captures +.externalNativeBuild +.cxx +local.properties diff --git a/firebase-ai/README.md b/firebase-ai/README.md new file mode 100644 index 000000000..236fbef38 --- /dev/null +++ b/firebase-ai/README.md @@ -0,0 +1,32 @@ +# Firebase AI Logic quickstart sample app + +This Android sample app demonstrates how to use state-of-the-art +generative AI models (like Gemini) to build AI-powered features and applications. +For more information about Firebase AI Logic, visit the [documentation](https://firebase.google.com/docs/ai-logic). + +## Getting Started + +To try out this sample app, you need to use the latest stable version of Android Studio. +However, if you want the latest lint checks and AI productivity features in Android +Studio, use the latest preview version of [Android Studio](https://developer.android.com/studio/preview). 
+ +## Features + +There are 2 main files that demonstrate the use of Firebase AI Logic: + +- [ChatViewModel.kt](app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatViewModel.kt) + which can do things such as: + - [Generate Text](https://firebase.google.com/docs/ai-logic/generate-text) + - [Generate structured output (JSON)](https://firebase.google.com/docs/ai-logic/generate-structured-output) + - [Analyze images](https://firebase.google.com/docs/ai-logic/analyze-images) + - [Analyze video](https://firebase.google.com/docs/ai-logic/analyze-video) + - [Analyze audio](https://firebase.google.com/docs/ai-logic/analyze-audio) + - [Analyze documents (PDFs)](https://firebase.google.com/docs/ai-logic/analyze-documents) + - [Generate images using Gemini 2.0](https://firebase.google.com/docs/ai-logic/generate-images-imagen) +- [ImagenViewModel](app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenViewModel.kt) + which shows how to [Generate images using Imagen models](https://firebase.google.com/docs/ai-logic/generate-images-imagen) + +## All samples + +The full list of available samples can be found in the +[FirebaseAISamples.kt file](app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt). 
diff --git a/firebase-ai/app/.gitignore b/firebase-ai/app/.gitignore new file mode 100644 index 000000000..42afabfd2 --- /dev/null +++ b/firebase-ai/app/.gitignore @@ -0,0 +1 @@ +/build \ No newline at end of file diff --git a/firebase-ai/app/build.gradle.kts b/firebase-ai/app/build.gradle.kts new file mode 100644 index 000000000..362f6bc91 --- /dev/null +++ b/firebase-ai/app/build.gradle.kts @@ -0,0 +1,75 @@ +plugins { + id("com.android.application") + id("org.jetbrains.kotlin.android") + id("org.jetbrains.kotlin.plugin.compose") + kotlin("plugin.serialization") version "2.1.20" + id("com.google.gms.google-services") +} + +android { + namespace = "com.google.firebase.quickstart.ai" + compileSdk = 35 + + defaultConfig { + applicationId = "com.google.firebase.quickstart.ai" + minSdk = 23 + targetSdk = 35 + versionCode = 1 + versionName = "1.0" + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + isMinifyEnabled = false + proguardFiles( + getDefaultProguardFile("proguard-android-optimize.txt"), + "proguard-rules.pro" + ) + } + } + compileOptions { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 + } + kotlinOptions { + jvmTarget = "11" + } + buildFeatures { + compose = true + } +} + +dependencies { + + implementation(libs.androidx.core.ktx) + implementation(libs.androidx.lifecycle.runtime.ktx) + implementation(libs.androidx.activity.compose) + + implementation(platform(libs.androidx.compose.bom)) + implementation(libs.androidx.ui) + implementation(libs.androidx.ui.graphics) + implementation(libs.androidx.ui.tooling.preview) + implementation(libs.androidx.material3) + implementation(libs.androidx.material.icons.extended) + implementation(libs.androidx.material3.adaptive.navigation.suite) + implementation(libs.compose.navigation) + implementation(libs.androidx.lifecycle.viewmodel.ktx) + // ViewModel utilities for Compose + 
implementation(libs.androidx.lifecycle.viewmodel.compose) + implementation(libs.androidx.lifecycle.viewmodel.savedstate) + implementation(libs.kotlinx.serialization.json) + + // Firebase + implementation(platform(libs.firebase.bom)) + implementation(libs.firebase.ai) + + testImplementation(libs.junit) + androidTestImplementation(libs.androidx.junit) + androidTestImplementation(libs.androidx.espresso.core) + androidTestImplementation(platform(libs.androidx.compose.bom)) + androidTestImplementation(libs.androidx.ui.test.junit4) + debugImplementation(libs.androidx.ui.tooling) + debugImplementation(libs.androidx.ui.test.manifest) +} \ No newline at end of file diff --git a/firebase-ai/app/proguard-rules.pro b/firebase-ai/app/proguard-rules.pro new file mode 100644 index 000000000..481bb4348 --- /dev/null +++ b/firebase-ai/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. 
+#-renamesourcefileattribute SourceFile \ No newline at end of file diff --git a/firebase-ai/app/src/main/AndroidManifest.xml b/firebase-ai/app/src/main/AndroidManifest.xml new file mode 100644 index 000000000..0ca921589 --- /dev/null +++ b/firebase-ai/app/src/main/AndroidManifest.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt new file mode 100644 index 000000000..c16f5c399 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt @@ -0,0 +1,207 @@ +package com.google.firebase.quickstart.ai + +import com.google.firebase.ai.type.ResponseModality +import com.google.firebase.ai.type.content +import com.google.firebase.ai.type.generationConfig +import com.google.firebase.quickstart.ai.ui.navigation.Category +import com.google.firebase.quickstart.ai.ui.navigation.Sample + +val FIREBASE_AI_SAMPLES = listOf( + Sample( + title = "Travel tips", + description = "The user wants the model to help a new traveler" + + " with travel tips", + navRoute = "chat", + categories = listOf(Category.TEXT), + systemInstructions = content { + text( + "You are a Travel assistant. You will answer" + + " questions the user asks based on the information listed" + + " in Relevant Information. Do not hallucinate. Do not use" + + " the internet." + ) + }, + chatHistory = listOf( + content { + role = "user" + text("I have never traveled before. When should I book a flight?") + }, + content { + role = "model" + text( + "You should book flights a couple of months ahead of time." + + " It will be cheaper and more flexible for you." 
+ ) + }, + content { + role = "user" + text("Do I need a passport?") + }, + content { + role = "model" + text( + "If you are traveling outside your own country, make sure" + + " your passport is up-to-date and valid for more" + + " than 6 months during your travel." + ) + } + ), + initialPrompt = content { text("What else is important when traveling?") } + ), + Sample( + title = "Chatbot recommendations for courses", + description = "A chatbot suggests courses for a performing arts program.", + navRoute = "chat", + categories = listOf(Category.TEXT), + systemInstructions = content { + text( + "You are a chatbot for the county's performing and fine arts" + + " program. You help students decide what course they will" + + " take during the summer." + ) + }, + initialPrompt = content { + text("I am interested in Performing Arts. I have taken Theater 1A.") + } + ), + Sample( + title = "Audio Summarization", + description = "Summarize an audio file", + navRoute = "chat", + categories = listOf(Category.AUDIO), + chatHistory = listOf( + content { text("Can you help me summarize an audio file?") }, + content("model") { + text( + "Of course! Click on the attach button" + + " below and choose an audio file for me to summarize." + ) + } + ), + initialPrompt = content { + text( + "I have attached the audio file. Please analyze it and summarize the contents" + + " of the audio as bullet points." 
+ ) + } + ), + Sample( + title = "Translation from audio", + description = "Translate an audio file", + navRoute = "chat", + categories = listOf(Category.AUDIO), + initialPrompt = content { + fileData( + "https://storage.googleapis.com/cloud-samples-data/generative-ai/audio/" + + "How_to_create_a_My_Map_in_Google_Maps.mp3", + "audio/mpeg" + ) + text("Please translate the audio to Mandarin.") + } + ), + Sample( + title = "Blog post creator", + description = "Create a blog post from an image file.", + navRoute = "chat", + categories = listOf(Category.IMAGE), + initialPrompt = content { + fileData( + "https://storage.googleapis.com/cloud-samples-data/generative-ai/image/meal-prep.jpeg", + "image/jpeg" + ) + text( + "Write a short, engaging blog post based on this picture." + + " It should include a description of the meal in the" + + " photo and talk about my journey meal prepping." + ) + } + ), + Sample( + title = "Imagen 3 - image generation", + description = "Generate images using Imagen 3", + navRoute = "imagen", + categories = listOf(Category.IMAGE), + initialPrompt = content { + text( + "A photo of a modern building with water in the background" + ) + } + ), + Sample( + title = "Gemini 2.0 Flash - image generation", + description = "Generate and/or edit images using Gemini 2.0 Flash", + navRoute = "chat", + categories = listOf(Category.IMAGE), + modelName = "gemini-2.0-flash-preview-image-generation", + initialPrompt = content { + text( + "Hi, can you create a 3d rendered image of a pig " + + "with wings and a top hat flying over a happy " + + "futuristic scifi city with lots of greenery?" 
+ ) + }, + generationConfig = generationConfig { + responseModalities = listOf(ResponseModality.TEXT, ResponseModality.IMAGE) + } + ), + Sample( + title = "Document comparison", + description = "Compare the contents of 2 documents", + navRoute = "chat", + categories = listOf(Category.DOCUMENT), + initialPrompt = content { + fileData( + "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2013.pdf", + "application/pdf" + ) + fileData( + "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2023.pdf", + "application/pdf" + ) + text( + "The first document is from 2013, and the second document is" + + " from 2023. How did the standard deduction evolve?" + ) + } + ), + Sample( + title = "Hashtags for a video", + description = "Generate hashtags for a video ad", + navRoute = "chat", + categories = listOf(Category.VIDEO), + initialPrompt = content { + fileData( + "https://storage.googleapis.com/cloud-samples-data/generative-ai/video/google_home_celebrity_ad.mp4", + "video/mpeg" + ) + text( + "Generate 5-10 hashtags that relate to the video content." + + " Try to use more popular and engaging terms," + + " e.g. #Viral. Do not add content not related to" + + " the video.\n Start the output with 'Tags:'" + ) + } + ), + Sample( + title = "Summarize video", + description = "Summarize a video and extract important dialogue.", + navRoute = "chat", + categories = listOf(Category.VIDEO), + chatHistory = listOf( + content { text("Can you help me with the description of a video file?") }, + content("model") { + text( + "Sure! Click on the attach button below and choose a" + + " video file for me to describe." + ) + } + ), + initialPrompt = content { + text( + "I have attached the video file. Provide a description of" + + " the video. The description should also contain" + + " anything important which people say in the video." 
+ ) + } + ) +) diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/MainActivity.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/MainActivity.kt new file mode 100644 index 000000000..998e4612a --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/MainActivity.kt @@ -0,0 +1,113 @@ +package com.google.firebase.quickstart.ai + +import android.os.Bundle +import androidx.activity.ComponentActivity +import androidx.activity.compose.setContent +import androidx.activity.enableEdgeToEdge +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.foundation.layout.padding +import androidx.compose.material3.ExperimentalMaterial3Api +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.Scaffold +import androidx.compose.material3.Text +import androidx.compose.material3.TopAppBar +import androidx.compose.material3.TopAppBarDefaults +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.saveable.rememberSaveable +import androidx.compose.runtime.setValue +import androidx.compose.ui.Modifier +import androidx.navigation.NavController +import androidx.navigation.NavDestination +import androidx.navigation.compose.NavHost +import androidx.navigation.compose.composable +import androidx.navigation.compose.rememberNavController +import com.google.firebase.quickstart.ai.feature.live.StreamRealtimeRoute +import com.google.firebase.quickstart.ai.feature.live.StreamRealtimeScreen +import com.google.firebase.quickstart.ai.feature.media.imagen.ImagenRoute +import com.google.firebase.quickstart.ai.feature.media.imagen.ImagenScreen +import com.google.firebase.quickstart.ai.feature.text.ChatRoute +import com.google.firebase.quickstart.ai.feature.text.ChatScreen +import com.google.firebase.quickstart.ai.ui.navigation.MainMenuScreen +import com.google.firebase.quickstart.ai.ui.theme.FirebaseAILogicTheme + 
+class MainActivity : ComponentActivity() { + @OptIn(ExperimentalMaterial3Api::class) + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + enableEdgeToEdge() + setContent { + val navController = rememberNavController() + + var topBarTitle: String by rememberSaveable { mutableStateOf(getString(R.string.app_name)) } + FirebaseAILogicTheme { + Scaffold( + topBar = { + TopAppBar( + colors = TopAppBarDefaults.topAppBarColors( + containerColor = MaterialTheme.colorScheme.primaryContainer, + titleContentColor = MaterialTheme.colorScheme.primary + ), + title = { + Text(topBarTitle) + } + ) + }, + modifier = Modifier.fillMaxSize() + ) { innerPadding -> + NavHost( + navController, + startDestination = "mainMenu", + modifier = Modifier + .fillMaxSize() + .padding(innerPadding) + ) { + composable("mainMenu") { + MainMenuScreen( + onSampleClicked = { + topBarTitle = it.title + when (it.navRoute) { + "chat" -> { + navController.navigate(ChatRoute(it.id)) + } + + "imagen" -> { + navController.navigate(ImagenRoute(it.id)) + } + + "stream" -> { + navController.navigate(StreamRealtimeRoute(it.id)) + } + } + } + ) + } + // Text Samples + composable { + ChatScreen() + } + // Imagen Samples + composable { + ImagenScreen() + } + // Stream Realtime Samples + composable { + StreamRealtimeScreen() + } + } + } + } + navController.addOnDestinationChangedListener(object : NavController.OnDestinationChangedListener { + override fun onDestinationChanged( + controller: NavController, + destination: NavDestination, + arguments: Bundle? 
+ ) { + if (destination.route == "mainMenu") { + topBarTitle = getString(R.string.app_name) + } + } + }) + } + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/live/StreamRealtimeScreen.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/live/StreamRealtimeScreen.kt new file mode 100644 index 000000000..8cbdd721f --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/live/StreamRealtimeScreen.kt @@ -0,0 +1,20 @@ +package com.google.firebase.quickstart.ai.feature.live + +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.ui.Modifier +import kotlinx.serialization.Serializable + +@Serializable +class StreamRealtimeRoute(val sampleId: String) + +@Composable +fun StreamRealtimeScreen() { + Box( + modifier = Modifier.fillMaxSize() + ) { + Text("Coming soon") + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenScreen.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenScreen.kt new file mode 100644 index 000000000..e5d654a89 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenScreen.kt @@ -0,0 +1,122 @@ +package com.google.firebase.quickstart.ai.feature.media.imagen + +import androidx.compose.foundation.Image +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.Column +import androidx.compose.foundation.layout.fillMaxWidth +import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.lazy.grid.GridCells +import androidx.compose.foundation.lazy.grid.LazyVerticalGrid +import androidx.compose.foundation.lazy.grid.items +import androidx.compose.material3.Card +import 
androidx.compose.material3.CardDefaults +import androidx.compose.material3.CircularProgressIndicator +import androidx.compose.material3.ElevatedCard +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.OutlinedTextField +import androidx.compose.material3.Text +import androidx.compose.material3.TextButton +import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.saveable.rememberSaveable +import androidx.compose.runtime.setValue +import androidx.compose.ui.Alignment +import androidx.compose.ui.Modifier +import androidx.compose.ui.graphics.asImageBitmap +import androidx.compose.ui.unit.dp +import androidx.lifecycle.compose.collectAsStateWithLifecycle +import androidx.lifecycle.viewmodel.compose.viewModel +import kotlinx.serialization.Serializable + +@Serializable +class ImagenRoute(val sampleId: String) + +@Composable +fun ImagenScreen( + imagenViewModel: ImagenViewModel = viewModel() +) { + var imagenPrompt by rememberSaveable { mutableStateOf(imagenViewModel.initialPrompt) } + val errorMessage by imagenViewModel.errorMessage.collectAsStateWithLifecycle() + val isLoading by imagenViewModel.isLoading.collectAsStateWithLifecycle() + val generatedImages by imagenViewModel.generatedBitmaps.collectAsStateWithLifecycle() + + Column( + modifier = Modifier + ) { + ElevatedCard( + modifier = Modifier + .padding(all = 16.dp) + .fillMaxWidth(), + shape = MaterialTheme.shapes.large + ) { + OutlinedTextField( + value = imagenPrompt, + label = { Text("Prompt") }, + placeholder = { Text("Enter text to generate image") }, + onValueChange = { imagenPrompt = it }, + modifier = Modifier + .padding(16.dp) + .fillMaxWidth() + ) + TextButton( + onClick = { + if (imagenPrompt.isNotBlank()) { + imagenViewModel.generateImages(imagenPrompt) + } + }, + modifier = Modifier + .padding(end = 16.dp, bottom = 16.dp) + .align(Alignment.End) + ) { + 
Text("Generate") + } + } + + if (isLoading) { + Box( + contentAlignment = Alignment.Center, + modifier = Modifier + .padding(all = 8.dp) + .align(Alignment.CenterHorizontally) + ) { + CircularProgressIndicator() + } + } + errorMessage?.let { + Card( + modifier = Modifier + .padding(horizontal = 16.dp) + .fillMaxWidth(), + shape = MaterialTheme.shapes.large, + colors = CardDefaults.cardColors( + containerColor = MaterialTheme.colorScheme.errorContainer + ) + ) { + Text( + text = it, + color = MaterialTheme.colorScheme.error, + modifier = Modifier.padding(all = 16.dp) + ) + } + } + LazyVerticalGrid( + columns = GridCells.Fixed(2), + modifier = Modifier.padding(16.dp) + ) { + items(generatedImages) { image -> + Card( + modifier = Modifier + .padding(8.dp) + .fillMaxWidth(), + shape = MaterialTheme.shapes.large, + colors = CardDefaults.cardColors( + containerColor = MaterialTheme.colorScheme.onSecondaryContainer + ) + ) { + Image(bitmap = image.asImageBitmap(), "Generated image") + } + } + } + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenViewModel.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenViewModel.kt new file mode 100644 index 000000000..bd1b58b01 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/ImagenViewModel.kt @@ -0,0 +1,80 @@ +package com.google.firebase.quickstart.ai.feature.media.imagen + +import android.graphics.Bitmap +import androidx.lifecycle.SavedStateHandle +import androidx.lifecycle.ViewModel +import androidx.lifecycle.viewModelScope +import androidx.navigation.toRoute +import com.google.firebase.Firebase +import com.google.firebase.ai.ImagenModel +import com.google.firebase.ai.ai +import com.google.firebase.ai.type.GenerativeBackend +import com.google.firebase.ai.type.ImagenAspectRatio +import com.google.firebase.ai.type.ImagenImageFormat +import 
com.google.firebase.ai.type.ImagenPersonFilterLevel +import com.google.firebase.ai.type.ImagenSafetyFilterLevel +import com.google.firebase.ai.type.ImagenSafetySettings +import com.google.firebase.ai.type.PublicPreviewAPI +import com.google.firebase.ai.type.asTextOrNull +import com.google.firebase.ai.type.imagenGenerationConfig +import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.launch + +@OptIn(PublicPreviewAPI::class) +class ImagenViewModel( + savedStateHandle: SavedStateHandle +) : ViewModel() { + private val sampleId = savedStateHandle.toRoute().sampleId + private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId } + val initialPrompt = sample.initialPrompt?.parts?.first()?.asTextOrNull().orEmpty() + + private val _errorMessage: MutableStateFlow = MutableStateFlow(null) + val errorMessage: StateFlow = _errorMessage + + private val _isLoading = MutableStateFlow(false) + val isLoading: StateFlow = _isLoading + + private val _generatedBitmaps = MutableStateFlow(listOf()) + val generatedBitmaps: StateFlow> = _generatedBitmaps + + // Firebase AI Logic + private val imagenModel: ImagenModel + + init { + val config = imagenGenerationConfig { + numberOfImages = 4 + aspectRatio = ImagenAspectRatio.SQUARE_1x1 + imageFormat = ImagenImageFormat.png() + } + val settings = ImagenSafetySettings( + safetyFilterLevel = ImagenSafetyFilterLevel.BLOCK_LOW_AND_ABOVE, + personFilterLevel = ImagenPersonFilterLevel.BLOCK_ALL + ) + imagenModel = Firebase.ai( + backend = GenerativeBackend.googleAI() + ).imagenModel( + modelName = sample.modelName ?: "imagen-3.0-generate-002", + generationConfig = config, + safetySettings = settings + ) + } + + fun generateImages(inputText: String) { + viewModelScope.launch { + _isLoading.value = true + try { + val imageResponse = imagenModel.generateImages( + inputText + ) + _generatedBitmaps.value = 
imageResponse.images.map { it.asBitmap() } + _errorMessage.value = null // clear error message + } catch (e: Exception) { + _errorMessage.value = e.localizedMessage + } finally { + _isLoading.value = false + } + } + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatScreen.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatScreen.kt new file mode 100644 index 000000000..b55cc89ce --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatScreen.kt @@ -0,0 +1,473 @@ +package com.google.firebase.quickstart.ai.feature.text + +import android.graphics.Bitmap +import android.net.Uri +import android.provider.OpenableColumns +import android.text.format.Formatter +import androidx.activity.compose.rememberLauncherForActivityResult +import androidx.activity.result.contract.ActivityResultContracts +import androidx.compose.foundation.Image +import androidx.compose.foundation.background +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.BoxWithConstraints +import androidx.compose.foundation.layout.Column +import androidx.compose.foundation.layout.Row +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.foundation.layout.fillMaxWidth +import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.layout.widthIn +import androidx.compose.foundation.lazy.LazyColumn +import androidx.compose.foundation.lazy.LazyListState +import androidx.compose.foundation.lazy.items +import androidx.compose.foundation.lazy.rememberLazyListState +import androidx.compose.foundation.shape.CircleShape +import androidx.compose.foundation.shape.RoundedCornerShape +import androidx.compose.foundation.text.KeyboardOptions +import androidx.compose.material.icons.Icons +import androidx.compose.material.icons.automirrored.filled.Send +import androidx.compose.material.icons.filled.AttachFile +import 
androidx.compose.material.icons.filled.Attachment +import androidx.compose.material3.Card +import androidx.compose.material3.CardDefaults +import androidx.compose.material3.DropdownMenu +import androidx.compose.material3.DropdownMenuItem +import androidx.compose.material3.Icon +import androidx.compose.material3.IconButton +import androidx.compose.material3.IconButtonDefaults +import androidx.compose.material3.LinearProgressIndicator +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.OutlinedTextField +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.remember +import androidx.compose.runtime.rememberCoroutineScope +import androidx.compose.runtime.saveable.rememberSaveable +import androidx.compose.runtime.setValue +import androidx.compose.ui.Alignment +import androidx.compose.ui.Modifier +import androidx.compose.ui.draw.clip +import androidx.compose.ui.graphics.asImageBitmap +import androidx.compose.ui.platform.LocalContext +import androidx.compose.ui.text.input.KeyboardCapitalization +import androidx.compose.ui.text.style.TextAlign +import androidx.compose.ui.unit.dp +import androidx.lifecycle.compose.collectAsStateWithLifecycle +import androidx.lifecycle.viewmodel.compose.viewModel +import com.google.firebase.ai.type.Content +import com.google.firebase.ai.type.FileDataPart +import com.google.firebase.ai.type.ImagePart +import com.google.firebase.ai.type.InlineDataPart +import com.google.firebase.ai.type.TextPart +import kotlinx.coroutines.launch +import kotlinx.serialization.Serializable + +@Serializable +class ChatRoute(val sampleId: String) + +@Composable +fun ChatScreen( + chatViewModel: ChatViewModel = viewModel() +) { + val messages: List by chatViewModel.messages.collectAsStateWithLifecycle() + val isLoading: Boolean by chatViewModel.isLoading.collectAsStateWithLifecycle() 
+ val errorMessage: String? by chatViewModel.errorMessage.collectAsStateWithLifecycle() + val attachments: List by chatViewModel.attachments.collectAsStateWithLifecycle() + + val initialPrompt: String = chatViewModel.initialPrompt + + val listState = rememberLazyListState() + val coroutineScope = rememberCoroutineScope() + + Column( + modifier = Modifier + .fillMaxSize() + ) { + ChatList( + messages, + listState, + modifier = Modifier + .fillMaxSize() + .weight(0.5f) + ) + Box( + contentAlignment = Alignment.BottomCenter + ) { + Column( + modifier = Modifier + .fillMaxWidth() + .background(color = MaterialTheme.colorScheme.surfaceContainer) + ) { + if (isLoading) { + LinearProgressIndicator( + modifier = Modifier + .fillMaxWidth() + .padding(horizontal = 16.dp, vertical = 8.dp) + ) + } + errorMessage?.let { + Card( + colors = CardDefaults.cardColors( + containerColor = MaterialTheme.colorScheme.errorContainer + ), + modifier = Modifier.fillMaxWidth() + ) { + Text( + text = it, + modifier = Modifier.padding(16.dp), + color = MaterialTheme.colorScheme.onErrorContainer + ) + } + } + AttachmentsList(attachments) + val context = LocalContext.current + val contentResolver = context.contentResolver + MessageInput( + initialPrompt = initialPrompt, + onSendMessage = { inputText -> + chatViewModel.sendMessage(inputText) + }, + resetScroll = { + coroutineScope.launch { + listState.scrollToItem(0) + } + }, + onFileAttached = { uri -> + val mimeType = contentResolver.getType(uri).orEmpty() + var fileName: String? 
= null + // Fetch file name and size + contentResolver.query(uri, null, null, null, null)?.use { cursor -> + val nameIndex = cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME) + val sizeIndex = cursor.getColumnIndex(OpenableColumns.SIZE) + cursor.moveToFirst() + val humanReadableSize = Formatter.formatShortFileSize( + context, + cursor.getLong(sizeIndex) + ) + fileName = "${cursor.getString(nameIndex)} ($humanReadableSize)" + } + + contentResolver.openInputStream(uri)?.use { stream -> + val bytes = stream.readBytes() + chatViewModel.attachFile(bytes, mimeType, fileName) + } + }, + isLoading = isLoading + ) + } + } + } +} + +@Composable +fun ChatBubbleItem( + chatMessage: Content +) { + val isModelMessage = chatMessage.role == "model" + + val backgroundColor = when (chatMessage.role) { + "user" -> MaterialTheme.colorScheme.tertiaryContainer + else -> MaterialTheme.colorScheme.secondaryContainer + } + + val textColor = if (isModelMessage) { + MaterialTheme.colorScheme.onSecondaryContainer + } else { + MaterialTheme.colorScheme.onTertiaryContainer + } + + val bubbleShape = if (isModelMessage) { + RoundedCornerShape(4.dp, 20.dp, 20.dp, 20.dp) + } else { + RoundedCornerShape(20.dp, 4.dp, 20.dp, 20.dp) + } + + val horizontalAlignment = if (isModelMessage) { + Alignment.Start + } else { + Alignment.End + } + + Column( + horizontalAlignment = horizontalAlignment, + modifier = Modifier + .padding(horizontal = 8.dp, vertical = 4.dp) + .fillMaxWidth() + ) { + Text( + text = chatMessage.role?.uppercase() ?: "USER", + style = MaterialTheme.typography.bodySmall, + modifier = Modifier.padding(bottom = 4.dp) + ) + Row { + BoxWithConstraints { + Card( + colors = CardDefaults.cardColors(containerColor = backgroundColor), + shape = bubbleShape, + modifier = Modifier.widthIn(0.dp, maxWidth * 0.9f) + ) { + Column( + modifier = Modifier + .padding(16.dp) + .fillMaxWidth() + ) { + chatMessage.parts.forEach { part -> + when (part) { + is TextPart -> { + Text( + text = 
part.text.trimIndent(), + modifier = Modifier.fillMaxWidth(), + color = textColor + ) + } + + is ImagePart -> { + Image( + bitmap = part.image.asImageBitmap(), + contentDescription = "Attached image", + modifier = Modifier + .fillMaxWidth() + .padding(bottom = 4.dp) + ) + } + + is InlineDataPart -> { + // TODO: show a human readable version of audio, PDFs and videos + val attachmentType = if (part.mimeType.contains("audio")) { + "audio attached" + } else if (part.mimeType.contains("application/pdf")) { + "PDF attached" + } else if (part.mimeType.contains("video")) { + "video" + } else { + "file attached" + } + Text( + text = "($attachmentType)", + modifier = Modifier + .padding(4.dp) + .fillMaxWidth(), + style = MaterialTheme.typography.bodySmall, + textAlign = TextAlign.End + ) + } + + is FileDataPart -> { + Text( + text = part.uri, + style = MaterialTheme.typography.bodySmall, + textAlign = TextAlign.End, + modifier = Modifier + .background( + backgroundColor.copy( + red = backgroundColor.red * 0.7f, + green = backgroundColor.green * 0.7f, + blue = backgroundColor.blue * 0.7f + ) + ) + .padding(4.dp) + .fillMaxWidth() + ) + } + } + } + } + } + } + } + } +} + +@Composable +fun ChatList( + chatMessages: List, + listState: LazyListState, + modifier: Modifier = Modifier +) { + LazyColumn( + reverseLayout = true, + state = listState, + modifier = modifier + ) { + items(chatMessages.reversed()) { message -> + ChatBubbleItem(message) + } + } +} + +@Composable +fun MessageInput( + initialPrompt: String, + onSendMessage: (String) -> Unit, + resetScroll: () -> Unit = {}, + onFileAttached: (Uri) -> Unit, + isLoading: Boolean = false +) { + var userMessage by rememberSaveable { mutableStateOf(initialPrompt) } + + Row( + modifier = Modifier + .padding(8.dp) + .fillMaxWidth() + ) { + OutlinedTextField( + value = userMessage, + label = { Text("Message") }, + onValueChange = { userMessage = it }, + keyboardOptions = KeyboardOptions( + capitalization = 
KeyboardCapitalization.Sentences + ), + modifier = Modifier + .align(Alignment.CenterVertically) + .padding(end = 4.dp) + .fillMaxWidth() + .weight(1f) + ) + AttachmentsMenu( + modifier = Modifier.align(Alignment.CenterVertically), + onFileAttached = onFileAttached + ) + IconButton( + onClick = { + if (userMessage.isNotBlank()) { + onSendMessage(userMessage) + userMessage = "" + resetScroll() + } + }, + enabled = !isLoading, + modifier = Modifier + .align(Alignment.CenterVertically) + .clip(CircleShape) + .background( + color = if (isLoading) { + IconButtonDefaults.iconButtonColors().disabledContainerColor + } else { + MaterialTheme.colorScheme.primary + } + ) + ) { + Icon( + Icons.AutoMirrored.Default.Send, + contentDescription = "Send", + tint = MaterialTheme.colorScheme.onPrimary, + modifier = Modifier + .fillMaxSize() + .padding(8.dp) + ) + } + } +} + +@Composable +fun AttachmentsMenu( + modifier: Modifier = Modifier, + onFileAttached: (Uri) -> Unit +) { + var expanded by remember { mutableStateOf(false) } + + val openDocument = rememberLauncherForActivityResult(ActivityResultContracts.OpenDocument()) { uri: Uri? 
-> + uri?.let { + onFileAttached(it) + } + } + Box( + modifier = modifier + .padding(end = 4.dp) + ) { + IconButton( + onClick = { + expanded = !expanded + } + ) { + Icon( + Icons.Default.AttachFile, + contentDescription = "Attach", + modifier = Modifier + .fillMaxSize() + .padding(4.dp) + ) + } + DropdownMenu( + expanded = expanded, + onDismissRequest = { expanded = false } + ) { + Text( + text = "Attach", + modifier = Modifier.padding(horizontal = 12.dp, vertical = 4.dp), + style = MaterialTheme.typography.bodySmall, + color = MaterialTheme.colorScheme.primary + ) + DropdownMenuItem( + text = { Text("Image / Video") }, + onClick = { + openDocument.launch(arrayOf("image/*", "video/*")) + expanded = !expanded + } + ) + DropdownMenuItem( + text = { Text("Audio") }, + onClick = { + openDocument.launch(arrayOf("audio/*")) + expanded = !expanded + } + ) + DropdownMenuItem( + text = { Text("Document (PDF)") }, + onClick = { + openDocument.launch(arrayOf("application/pdf")) + expanded = !expanded + } + ) + } + } +} + +/** + * Meant to present attachments in the UI + */ +data class Attachment( + val fileName: String, + val image: Bitmap? 
= null // only for image attachments +) + +@Composable +fun AttachmentsList( + attachments: List +) { + LazyColumn( + modifier = Modifier.fillMaxWidth() + ) { + items(attachments) { attachment -> + Row( + modifier = Modifier + .fillMaxWidth() + .padding(horizontal = 8.dp, vertical = 4.dp) + ) { + Icon( + Icons.Default.Attachment, + contentDescription = "Attachment icon", + modifier = Modifier + .padding(4.dp) + .align(Alignment.CenterVertically) + ) + attachment.image?.let { + Image( + bitmap = it.asImageBitmap(), + contentDescription = attachment.fileName, + modifier = Modifier + .align(Alignment.CenterVertically) + ) + } + Text( + text = attachment.fileName, + style = MaterialTheme.typography.bodySmall, + modifier = Modifier + .align(Alignment.CenterVertically) + .padding(horizontal = 4.dp) + ) + } + } + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatViewModel.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatViewModel.kt new file mode 100644 index 000000000..59742a404 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/text/ChatViewModel.kt @@ -0,0 +1,117 @@ +package com.google.firebase.quickstart.ai.feature.text + +import android.graphics.BitmapFactory +import androidx.compose.runtime.mutableStateListOf +import androidx.compose.runtime.toMutableStateList +import androidx.lifecycle.SavedStateHandle +import androidx.lifecycle.ViewModel +import androidx.lifecycle.viewModelScope +import androidx.navigation.toRoute +import com.google.firebase.Firebase +import com.google.firebase.ai.Chat +import com.google.firebase.ai.ai +import com.google.firebase.ai.type.Content +import com.google.firebase.ai.type.FileDataPart +import com.google.firebase.ai.type.GenerativeBackend +import com.google.firebase.ai.type.TextPart +import com.google.firebase.ai.type.asTextOrNull +import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES +import 
kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.launch + +class ChatViewModel( + savedStateHandle: SavedStateHandle +) : ViewModel() { + private val sampleId = savedStateHandle.toRoute().sampleId + private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId } + val initialPrompt: String = + sample.initialPrompt?.parts + ?.filterIsInstance() + ?.first() + ?.asTextOrNull().orEmpty() + + private val _isLoading = MutableStateFlow(false) + val isLoading: StateFlow = _isLoading + + private val _errorMessage = MutableStateFlow(null) + val errorMessage: StateFlow = _errorMessage + + private val _messageList: MutableList = + sample.chatHistory.toMutableStateList() + private val _messages = MutableStateFlow>(_messageList) + val messages: StateFlow> = + _messages + + private val _attachmentsList: MutableList = + sample.initialPrompt?.parts?.filterIsInstance()?.map { + Attachment(it.uri) + }?.toMutableStateList() ?: mutableStateListOf() + private val _attachments = MutableStateFlow>(_attachmentsList) + val attachments: StateFlow> + get() = _attachments + + // Firebase AI Logic + private var contentBuilder = Content.Builder() + private val chat: Chat + + init { + val generativeModel = Firebase.ai( + backend = GenerativeBackend.googleAI() + ).generativeModel( + modelName = sample.modelName ?: "gemini-2.0-flash", + systemInstruction = sample.systemInstructions, + generationConfig = sample.generationConfig + ) + chat = generativeModel.startChat(sample.chatHistory) + + // add attachments from initial prompt + sample.initialPrompt?.parts?.forEach { part -> + if (part is TextPart) { + /* Ignore text parts, as the text will come from the textInputField */ + } else { + contentBuilder.part(part) + } + } + } + + fun sendMessage(userMessage: String) { + val prompt = contentBuilder + .text(userMessage) + .build() + + _messageList.add(prompt) + + viewModelScope.launch { + _isLoading.value = true + try { + val response = 
chat.sendMessage(prompt) + _messageList.add(response.candidates.first().content) + _errorMessage.value = null // clear errors + } catch (e: Exception) { + _errorMessage.value = e.localizedMessage + } finally { + _isLoading.value = false + contentBuilder = Content.Builder() // reset the builder + _attachmentsList.clear() + } + } + } + + fun attachFile( + fileInBytes: ByteArray, + mimeType: String?, + fileName: String? = "Unnamed file" + ) { + if (mimeType?.contains("image") == true) { + // images should be attached as ImageParts + contentBuilder.image(decodeBitmapFromImage(fileInBytes)) + } else { + contentBuilder.inlineData(fileInBytes, mimeType ?: "text/plain") + } + _attachmentsList.add(Attachment(fileName ?: "Unnamed file")) + } + + private fun decodeBitmapFromImage(input: ByteArray) = + BitmapFactory.decodeByteArray(input, 0, input.size) +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/MainMenuScreen.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/MainMenuScreen.kt new file mode 100644 index 000000000..aa14ed497 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/MainMenuScreen.kt @@ -0,0 +1,137 @@ +package com.google.firebase.quickstart.ai.ui.navigation + +import androidx.compose.foundation.clickable +import androidx.compose.foundation.layout.Column +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.foundation.layout.fillMaxWidth +import androidx.compose.foundation.layout.heightIn +import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.layout.size +import androidx.compose.foundation.lazy.LazyRow +import androidx.compose.foundation.lazy.grid.GridCells +import androidx.compose.foundation.lazy.grid.LazyVerticalGrid +import androidx.compose.foundation.lazy.grid.items +import androidx.compose.foundation.lazy.items +import androidx.compose.material.icons.Icons +import 
androidx.compose.material.icons.filled.Done +import androidx.compose.material3.Card +import androidx.compose.material3.FilterChip +import androidx.compose.material3.FilterChipDefaults +import androidx.compose.material3.Icon +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.saveable.rememberSaveable +import androidx.compose.runtime.setValue +import androidx.compose.ui.Modifier +import androidx.compose.ui.unit.dp +import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES + +val MIN_CARD_SIZE = 180.dp + +@Composable +fun MainMenuScreen( + onSampleClicked: (Sample) -> Unit +) { + MenuScreen( + filterTitle = "Filter by use case:", + filters = Category.entries.toList(), + samples = FIREBASE_AI_SAMPLES, + onSampleClicked = { + onSampleClicked(it) + } + ) +} + +@Composable +fun MenuScreen( + filterTitle: String, + filters: List, + samples: List, + onSampleClicked: (sample: Sample) -> Unit = {} +) { + Column( + modifier = Modifier + .fillMaxSize() + .padding(16.dp) + ) { + var selectedCategory by rememberSaveable { mutableStateOf(filters.first()) } + Text(text = filterTitle, style = MaterialTheme.typography.titleLarge) + LazyRow { + items(filters) { capability -> + FilterChip( + onClick = { selectedCategory = capability }, + label = { + Text(capability.label) + }, + selected = selectedCategory == capability, + leadingIcon = if (selectedCategory == capability) { + { + Icon( + imageVector = Icons.Filled.Done, + contentDescription = "Done icon", + modifier = Modifier.size(FilterChipDefaults.IconSize) + ) + } + } else { + null + }, + modifier = Modifier.padding(end = 8.dp) + ) + } + } + Text( + text = "Samples", + style = MaterialTheme.typography.titleLarge, + modifier = Modifier.padding(top = 16.dp) + ) + val filteredSamples = samples.filter { + 
it.categories.contains(selectedCategory) + } + LazyVerticalGrid( + columns = GridCells.Adaptive(MIN_CARD_SIZE), + modifier = Modifier + ) { + items(filteredSamples) { sample -> + SampleItem(sample.title, sample.description, onItemClicked = { + onSampleClicked(sample) + }) + } + } + } +} + +@Composable +fun SampleItem( + titleResId: String, + descriptionResId: String, + onItemClicked: () -> Unit = {} +) { + Card( + modifier = Modifier + .fillMaxWidth() + .heightIn(min = MIN_CARD_SIZE) + .padding(4.dp) + .clickable { + onItemClicked() + } + ) { + Column( + modifier = Modifier + .padding(all = 16.dp) + .fillMaxSize() + ) { + Text( + text = titleResId, + style = MaterialTheme.typography.labelLarge + ) + Text( + text = descriptionResId, + style = MaterialTheme.typography.bodyMedium, + modifier = Modifier.padding(top = 8.dp) + ) + } + } +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/Sample.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/Sample.kt new file mode 100644 index 000000000..65a1fa06c --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/Sample.kt @@ -0,0 +1,29 @@ +package com.google.firebase.quickstart.ai.ui.navigation + +import com.google.firebase.ai.type.Content +import com.google.firebase.ai.type.GenerationConfig +import java.util.UUID + +enum class Category( + val label: String +) { + TEXT("Text"), + IMAGE("Image"), + VIDEO("Video"), + AUDIO("Audio"), + DOCUMENT("Document") +} + +data class Sample( + val id: String = UUID.randomUUID().toString(), // used for navigation + val title: String, + val description: String, + val navRoute: String, + val categories: List, + // Optional parameters + val modelName: String? = null, + val initialPrompt: Content? = null, + val systemInstructions: Content? = null, + val generationConfig: GenerationConfig? 
= null, + val chatHistory: List = emptyList() +) diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Color.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Color.kt new file mode 100644 index 000000000..cd3cab035 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Color.kt @@ -0,0 +1,11 @@ +package com.google.firebase.quickstart.ai.ui.theme + +import androidx.compose.ui.graphics.Color + +val Purple80 = Color(0xFFD0BCFF) +val PurpleGrey80 = Color(0xFFCCC2DC) +val Pink80 = Color(0xFFEFB8C8) + +val Purple40 = Color(0xFF6650a4) +val PurpleGrey40 = Color(0xFF625b71) +val Pink40 = Color(0xFF7D5260) diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Theme.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Theme.kt new file mode 100644 index 000000000..df426fc1d --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Theme.kt @@ -0,0 +1,57 @@ +package com.google.firebase.quickstart.ai.ui.theme + +import android.os.Build +import androidx.compose.foundation.isSystemInDarkTheme +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.darkColorScheme +import androidx.compose.material3.dynamicDarkColorScheme +import androidx.compose.material3.dynamicLightColorScheme +import androidx.compose.material3.lightColorScheme +import androidx.compose.runtime.Composable +import androidx.compose.ui.platform.LocalContext + +private val DarkColorScheme = darkColorScheme( + primary = Purple80, + secondary = PurpleGrey80, + tertiary = Pink80 +) + +private val LightColorScheme = lightColorScheme( + primary = Purple40, + secondary = PurpleGrey40, + tertiary = Pink40 + + /* Other default colors to override + background = Color(0xFFFFFBFE), + surface = Color(0xFFFFFBFE), + onPrimary = Color.White, + onSecondary = Color.White, + onTertiary = Color.White, + onBackground = 
Color(0xFF1C1B1F), + onSurface = Color(0xFF1C1B1F), + */ +) + +@Composable +fun FirebaseAILogicTheme( + darkTheme: Boolean = isSystemInDarkTheme(), + // Dynamic color is available on Android 12+ + dynamicColor: Boolean = true, + content: @Composable () -> Unit +) { + val colorScheme = when { + dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> { + val context = LocalContext.current + if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context) + } + + darkTheme -> DarkColorScheme + else -> LightColorScheme + } + + MaterialTheme( + colorScheme = colorScheme, + typography = Typography, + content = content + ) +} diff --git a/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Type.kt b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Type.kt new file mode 100644 index 000000000..54e7bef06 --- /dev/null +++ b/firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/theme/Type.kt @@ -0,0 +1,34 @@ +package com.google.firebase.quickstart.ai.ui.theme + +import androidx.compose.material3.Typography +import androidx.compose.ui.text.TextStyle +import androidx.compose.ui.text.font.FontFamily +import androidx.compose.ui.text.font.FontWeight +import androidx.compose.ui.unit.sp + +// Set of Material typography styles to start with +val Typography = Typography( + bodyLarge = TextStyle( + fontFamily = FontFamily.Default, + fontWeight = FontWeight.Normal, + fontSize = 16.sp, + lineHeight = 24.sp, + letterSpacing = 0.5.sp + ) + /* Other default text styles to override + titleLarge = TextStyle( + fontFamily = FontFamily.Default, + fontWeight = FontWeight.Normal, + fontSize = 22.sp, + lineHeight = 28.sp, + letterSpacing = 0.sp + ), + labelSmall = TextStyle( + fontFamily = FontFamily.Default, + fontWeight = FontWeight.Medium, + fontSize = 11.sp, + lineHeight = 16.sp, + letterSpacing = 0.5.sp + ) + */ +) diff --git a/firebase-ai/app/src/main/res/drawable-v24/ic_launcher_foreground.xml 
b/firebase-ai/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 000000000..cc32292cb --- /dev/null +++ b/firebase-ai/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,13 @@ + + + diff --git a/firebase-ai/app/src/main/res/drawable/ic_launcher_background.xml b/firebase-ai/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 000000000..6e5984bd7 --- /dev/null +++ b/firebase-ai/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 000000000..6f3b755bf --- /dev/null +++ b/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml new file mode 100644 index 000000000..6f3b755bf --- /dev/null +++ b/firebase-ai/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher.webp b/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher.webp new file mode 100644 index 000000000..c209e78ec Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp b/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp new file mode 100644 index 000000000..b2dfe3d1b Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher.webp b/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher.webp new file mode 100644 index 
000000000..4f0f1d64e Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp b/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp new file mode 100644 index 000000000..62b611da0 Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher.webp b/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher.webp new file mode 100644 index 000000000..948a3070f Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp b/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp new file mode 100644 index 000000000..1b9a6956b Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp b/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp new file mode 100644 index 000000000..28d4b77f9 Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp b/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp new file mode 100644 index 000000000..9287f5083 Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp b/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp new file mode 100644 index 000000000..aa7d6427e Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp differ diff --git a/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp b/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp new file mode 
100644 index 000000000..9126ae37c Binary files /dev/null and b/firebase-ai/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp differ diff --git a/firebase-ai/app/src/main/res/values/colors.xml b/firebase-ai/app/src/main/res/values/colors.xml new file mode 100644 index 000000000..f8c6127d3 --- /dev/null +++ b/firebase-ai/app/src/main/res/values/colors.xml @@ -0,0 +1,10 @@ + + + #FFBB86FC + #FF6200EE + #FF3700B3 + #FF03DAC5 + #FF018786 + #FF000000 + #FFFFFFFF + \ No newline at end of file diff --git a/firebase-ai/app/src/main/res/values/strings.xml b/firebase-ai/app/src/main/res/values/strings.xml new file mode 100644 index 000000000..82e6c7e22 --- /dev/null +++ b/firebase-ai/app/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + Firebase AI Logic + \ No newline at end of file diff --git a/firebase-ai/app/src/main/res/values/themes.xml b/firebase-ai/app/src/main/res/values/themes.xml new file mode 100644 index 000000000..d82976cfb --- /dev/null +++ b/firebase-ai/app/src/main/res/values/themes.xml @@ -0,0 +1,5 @@ + + + +