Commit 47e37b5

Update LiveModelFutures to return LiveSessionFutures instead of LiveSession (#6834)
Update the `connect` method of the `LiveModelFutures` class to return `ListenableFuture<LiveSessionFutures>` instead of `ListenableFuture<LiveSession>`. This change reduces the burden on Java developers, who would otherwise have to create a `LiveSessionFutures` object themselves. Additionally, the `startAudioConversation` method is now annotated with `@JvmOverloads` to maintain compatibility with Java.
1 parent 24fba9b commit 47e37b5
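
For Java callers, the change described above collapses two steps into one: `connect()` now resolves directly to a `LiveSessionFutures`. Below is a minimal sketch of the post-change usage, assuming an app-provided `LiveGenerativeModel` and `Executor`; the wrapper class and method names are illustrative, not part of the SDK.

```java
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.firebase.vertexai.LiveGenerativeModel;
import com.google.firebase.vertexai.java.LiveModelFutures;
import com.google.firebase.vertexai.java.LiveSessionFutures;
import java.util.concurrent.Executor;

final class ConnectExample {
  // `model` and `executor` are assumed to be supplied by the app.
  static void connectAndSend(LiveGenerativeModel model, Executor executor) {
    LiveModelFutures modelFutures = LiveModelFutures.from(model);

    // connect() now resolves to LiveSessionFutures directly; previously it resolved
    // to LiveSession and callers had to wrap it with LiveSessionFutures.from(...).
    ListenableFuture<LiveSessionFutures> sessionFuture = modelFutures.connect();

    Futures.addCallback(
        sessionFuture,
        new FutureCallback<LiveSessionFutures>() {
          @Override
          public void onSuccess(LiveSessionFutures session) {
            session.send("Hello"); // stream a text message to the server
          }

          @Override
          public void onFailure(Throwable t) {
            // Surfaces as ServiceConnectionHandshakeFailedException if the
            // handshake with the server fails.
          }
        },
        executor);
  }
}
```

The only migration step is the declared type: variables that held `ListenableFuture<LiveSession>` become `ListenableFuture<LiveSessionFutures>`, and any manual `LiveSessionFutures.from(...)` wrapping can be deleted.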

File tree

6 files changed: +28 -19 lines changed


firebase-vertexai/CHANGELOG.md

+7

@@ -1,4 +1,11 @@
 # Unreleased
+* [changed] **Breaking Change**: `LiveModelFutures.connect` now returns `ListenableFuture<LiveSessionFutures>` instead of `ListenableFuture<LiveSession>`.
+  * **Action Required:** Remove any transformations from `LiveSession` objects to `LiveSessionFutures` objects.
+  * **Action Required:** Change the type of variables handling `LiveModelFutures.connect` to `ListenableFuture<LiveSessionFutures>`.
+* [changed] **Breaking Change**: Removed the `UNSPECIFIED` value from the enum class `ResponseModality`.
+  * **Action Required:** Remove all references to `ResponseModality.UNSPECIFIED`.
+* [changed] **Breaking Change**: Renamed `LiveGenerationConfig.setResponseModalities` to `LiveGenerationConfig.setResponseModality`.
+  * **Action Required:** Replace all references to `LiveGenerationConfig.setResponseModalities` with `LiveGenerationConfig.setResponseModality`.
 * [feature] Added support for `HarmBlockThreshold.OFF`. See the
   [model documentation](https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters#how_to_configure_content_filters){: .external}
   for more information.
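
A minimal Java sketch of the `ResponseModality`-related migration steps above, assuming your code already holds a `LiveGenerationConfig.Builder` (how the builder is obtained is unchanged by this commit and omitted here); the wrapper class and method names are illustrative.

```java
import com.google.firebase.vertexai.type.LiveGenerationConfig;
import com.google.firebase.vertexai.type.ResponseModality;

final class ResponseModalityMigration {
  // `builder` is assumed to come from wherever existing code builds its LiveGenerationConfig.
  static LiveGenerationConfig.Builder applyModality(LiveGenerationConfig.Builder builder) {
    // Before: builder.setResponseModalities(ResponseModality.UNSPECIFIED);
    // After:  the setter is singular and UNSPECIFIED is gone, so pick a concrete
    //         modality such as TEXT, IMAGE, or AUDIO.
    return builder.setResponseModality(ResponseModality.TEXT);
  }
}
```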

firebase-vertexai/api.txt

+3 -3

@@ -115,7 +115,7 @@ package com.google.firebase.vertexai.java {
   }
 
   @com.google.firebase.vertexai.type.PublicPreviewAPI public abstract class LiveModelFutures {
-    method public abstract com.google.common.util.concurrent.ListenableFuture<com.google.firebase.vertexai.type.LiveSession> connect();
+    method public abstract com.google.common.util.concurrent.ListenableFuture<com.google.firebase.vertexai.java.LiveSessionFutures> connect();
     method public static final com.google.firebase.vertexai.java.LiveModelFutures from(com.google.firebase.vertexai.LiveGenerativeModel model);
     field public static final com.google.firebase.vertexai.java.LiveModelFutures.Companion Companion;
   }
@@ -132,6 +132,7 @@ package com.google.firebase.vertexai.java {
     method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> send(String text);
     method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> sendFunctionResponse(java.util.List<com.google.firebase.vertexai.type.FunctionResponsePart> functionList);
     method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> sendMediaStream(java.util.List<com.google.firebase.vertexai.type.MediaData> mediaChunks);
+    method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation();
     method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.vertexai.type.FunctionCallPart,com.google.firebase.vertexai.type.FunctionResponsePart>? functionCallHandler);
     method public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> stopAudioConversation();
     method public abstract void stopReceiving();
@@ -597,7 +598,7 @@ package com.google.firebase.vertexai.type {
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setFrequencyPenalty(Float? frequencyPenalty);
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setMaxOutputTokens(Integer? maxOutputTokens);
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setPresencePenalty(Float? presencePenalty);
-    method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setResponseModalities(com.google.firebase.vertexai.type.ResponseModality? responseModalities);
+    method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setResponseModality(com.google.firebase.vertexai.type.ResponseModality? responseModality);
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setSpeechConfig(com.google.firebase.vertexai.type.SpeechConfig? speechConfig);
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setTemperature(Float? temperature);
     method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setTopK(Integer? topK);
@@ -696,7 +697,6 @@ package com.google.firebase.vertexai.type {
     field public static final com.google.firebase.vertexai.type.ResponseModality.Companion Companion;
     field public static final com.google.firebase.vertexai.type.ResponseModality IMAGE;
     field public static final com.google.firebase.vertexai.type.ResponseModality TEXT;
-    field public static final com.google.firebase.vertexai.type.ResponseModality UNSPECIFIED;
   }
 
   public static final class ResponseModality.Companion {

firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveModelFutures.kt

+5 -6

@@ -19,7 +19,6 @@ package com.google.firebase.vertexai.java
 import androidx.concurrent.futures.SuspendToFutureAdapter
 import com.google.common.util.concurrent.ListenableFuture
 import com.google.firebase.vertexai.LiveGenerativeModel
-import com.google.firebase.vertexai.type.LiveSession
 import com.google.firebase.vertexai.type.PublicPreviewAPI
 import com.google.firebase.vertexai.type.ServiceConnectionHandshakeFailedException
 
@@ -32,16 +31,16 @@ import com.google.firebase.vertexai.type.ServiceConnectionHandshakeFailedExcepti
 public abstract class LiveModelFutures internal constructor() {
 
   /**
-   * Start a [LiveSession] with the server for bidirectional streaming.
-   * @return A [LiveSession] that you can use to stream messages to and from the server.
+   * Start a [LiveSessionFutures] with the server for bidirectional streaming.
+   * @return A [LiveSessionFutures] that you can use to stream messages to and from the server.
    * @throws [ServiceConnectionHandshakeFailedException] If the client was not able to establish a
    * connection with the server.
    */
-  public abstract fun connect(): ListenableFuture<LiveSession>
+  public abstract fun connect(): ListenableFuture<LiveSessionFutures>
 
   private class FuturesImpl(private val model: LiveGenerativeModel) : LiveModelFutures() {
-    override fun connect(): ListenableFuture<LiveSession> {
-      return SuspendToFutureAdapter.launchFuture { model.connect() }
+    override fun connect(): ListenableFuture<LiveSessionFutures> {
+      return SuspendToFutureAdapter.launchFuture { LiveSessionFutures.from(model.connect()) }
     }
   }

firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveSessionFutures.kt

+9

@@ -48,6 +48,12 @@ public abstract class LiveSessionFutures internal constructor() {
     functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)?
   ): ListenableFuture<Unit>
 
+  /**
+   * Starts an audio conversation with the Gemini server, which can only be stopped using
+   * [stopAudioConversation].
+   */
+  public abstract fun startAudioConversation(): ListenableFuture<Unit>
+
   /**
    * Stops the audio conversation with the Gemini Server.
    *
@@ -124,6 +130,9 @@ public abstract class LiveSessionFutures internal constructor() {
       functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)?
     ) = SuspendToFutureAdapter.launchFuture { session.startAudioConversation(functionCallHandler) }
 
+    override fun startAudioConversation() =
+      SuspendToFutureAdapter.launchFuture { session.startAudioConversation() }
+
     override fun stopAudioConversation() =
       SuspendToFutureAdapter.launchFuture { session.stopAudioConversation() }

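With the new no-argument overload above, Java callers that have no function-call handler no longer need to pass one. A minimal usage sketch, assuming a `LiveSessionFutures` obtained from `LiveModelFutures.connect()`; the wrapper class and method names are illustrative.

```java
import com.google.common.util.concurrent.ListenableFuture;
import com.google.firebase.vertexai.java.LiveSessionFutures;
import kotlin.Unit;

final class AudioConversationExample {
  // `session` is assumed to come from LiveModelFutures.connect().
  static void runAudio(LiveSessionFutures session) {
    // New overload: start an audio conversation without a function-call handler.
    ListenableFuture<Unit> started = session.startAudioConversation();

    // Per the KDoc, the conversation can only be stopped with stopAudioConversation().
    ListenableFuture<Unit> stopped = session.stopAudioConversation();
  }
}
```
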
firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveGenerationConfig.kt

+2 -2

@@ -130,8 +130,8 @@ private constructor(
     public fun setFrequencyPenalty(frequencyPenalty: Float?): Builder = apply {
       this.frequencyPenalty = frequencyPenalty
     }
-    public fun setResponseModalities(responseModalities: ResponseModality?): Builder = apply {
-      this.responseModality = responseModalities
+    public fun setResponseModality(responseModality: ResponseModality?): Builder = apply {
+      this.responseModality = responseModality
     }
     public fun setSpeechConfig(speechConfig: SpeechConfig?): Builder = apply {
       this.speechConfig = speechConfig

firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ResponseModality.kt

+2 -8

@@ -18,7 +18,6 @@ package com.google.firebase.vertexai.type
 
 import com.google.firebase.vertexai.common.util.FirstOrdinalSerializer
 import kotlinx.serialization.KSerializer
-import kotlinx.serialization.SerialName
 import kotlinx.serialization.Serializable
 
 /** Represents the type of content present in a response (e.g., text, image, audio). */
@@ -27,7 +26,6 @@ public class ResponseModality private constructor(public val ordinal: Int) {
 
   @Serializable(Internal.Serializer::class)
   internal enum class Internal {
-    @SerialName("MODALITY_UNSPECIFIED") UNSPECIFIED,
     TEXT,
     IMAGE,
     AUDIO;
@@ -38,21 +36,17 @@ public class ResponseModality private constructor(public val ordinal: Int) {
       when (this) {
         TEXT -> ResponseModality.TEXT
         IMAGE -> ResponseModality.IMAGE
-        AUDIO -> ResponseModality.AUDIO
-        else -> ResponseModality.UNSPECIFIED
+        else -> ResponseModality.AUDIO
       }
   }
 
   internal fun toInternal() =
     when (this) {
       TEXT -> "TEXT"
       IMAGE -> "IMAGE"
-      AUDIO -> "AUDIO"
-      else -> "UNSPECIFIED"
+      else -> "AUDIO"
     }
 
   public companion object {
-    /** Unspecified modality. */
-    @JvmField public val UNSPECIFIED: ResponseModality = ResponseModality(0)
 
     /** Represents a plain text response modality. */
     @JvmField public val TEXT: ResponseModality = ResponseModality(1)
