Skip to content

Commit 423a0aa

Browse files
committed
Bug 2010698 - add a basic concept interface for an LLM r=segun
Pull request: #45
1 parent b7e309c commit 423a0aa

File tree

11 files changed

+480
-64
lines changed

11 files changed

+480
-64
lines changed

gradle/libs.versions.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -220,6 +220,7 @@ play-services-base = { group = "com.google.android.gms", name = "play-services-b
220220
play-services-fido = { group = "com.google.android.gms", name = "play-services-fido", version.ref = "play-services-fido" }
221221
protobuf-compiler = { group = "com.google.protobuf", name = "protoc", version.ref = "protobuf" }
222222
protobuf-javalite = { group = "com.google.protobuf", name = "protobuf-javalite", version.ref = "protobuf" }
223+
mlkit-prompt = { group = "com.google.mlkit", name = "genai-prompt", version = "1.0.0-alpha1" }
223224

224225
# Gradle plugins
225226
semanticdb-java = { group = "com.sourcegraph", name = "semanticdb-javac", version.ref = "semanticdb-javac" }

mobile/android/android-components/.buildconfig.yml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1750,6 +1750,11 @@ projects:
17501750
path: components/lib/llm-gemininano
17511751
publish: true
17521752
upstream_dependencies:
1753+
- components:concept-base
1754+
- components:concept-fetch
1755+
- components:concept-llm
1756+
- components:support-base
1757+
- components:support-test
17531758
- components:tooling-lint
17541759
components:lib-llm-mlpa:
17551760
description: A off-device implementation of the LLM concept that uses the MLPA

mobile/android/android-components/components/concept/llm/build.gradle

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ android {
2222
}
2323

2424
dependencies {
25+
implementation libs.kotlinx.coroutines
26+
2527
testImplementation libs.androidx.test.junit
2628
}
2729

mobile/android/android-components/components/concept/llm/src/main/java/mozilla/components/concept/llm/Example.kt

Lines changed: 0 additions & 17 deletions
This file was deleted.
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
/* This Source Code Form is subject to the terms of the Mozilla Public
2+
* License, v. 2.0. If a copy of the MPL was not distributed with this
3+
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4+
5+
package mozilla.components.concept.llm
6+
7+
import kotlinx.coroutines.flow.Flow
8+
9+
/**
 * A zero-cost wrapper around the raw text of a prompt that can be delivered to an LLM.
 *
 * Using a dedicated value class rather than a bare [String] gives call sites
 * type safety with no runtime allocation overhead.
 */
@JvmInline
value class Prompt(val value: String)
14+
15+
/**
 * An abstract definition of an LLM that can receive prompts.
 */
interface Llm {
    /**
     * A prompt request delivered to the LLM for inference, which will stream a series
     * of [Response]s as they are made available.
     */
    suspend fun prompt(prompt: Prompt): Flow<Response>

    /**
     * A response from prompting an LLM.
     */
    sealed class Response {

        /**
         * A successful response from the LLM has occurred. This may include partial data,
         * or be an indication that the reply has completed.
         */
        sealed class Success : Response() {
            /**
             * A (potentially) partial reply from the LLM. This may be a complete reply if
             * it is short or the underlying implementation does not stream responses.
             */
            data class ReplyPart(val value: String) : Success()

            /**
             * An indication that the reply from the LLM is finished.
             */
            data object ReplyFinished : Success()
        }

        /**
         * The LLM is engaged in getting ready to receive prompts. This may include actions like
         * authenticating with a remote or downloading a local model.
         */
        data class Preparing(val status: String) : Response()

        /**
         * A failure response from an LLM.
         */
        data class Failure(val reason: String) : Response()
    }
}

mobile/android/android-components/components/concept/llm/src/test/java/mozilla/components/concept/llm/ExampleTest.kt

Lines changed: 0 additions & 15 deletions
This file was deleted.

mobile/android/android-components/components/lib/llm-gemininano/build.gradle

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,13 @@ android {
2222
}
2323

2424
dependencies {
25+
api project(":components:concept-llm")
26+
implementation project(':components:support-base')
27+
implementation libs.mlkit.prompt
28+
29+
testImplementation project(':components:support-test')
2530
testImplementation libs.androidx.test.junit
31+
testImplementation libs.kotlinx.coroutines.test
2632
}
2733

2834
apply from: '../../../common-config.gradle'

mobile/android/android-components/components/lib/llm-gemininano/src/main/java/mozilla/components/lib/llm/gemini/nano/Example.kt

Lines changed: 0 additions & 17 deletions
This file was deleted.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
/* This Source Code Form is subject to the terms of the Mozilla Public
2+
* License, v. 2.0. If a copy of the MPL was not distributed with this
3+
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4+
5+
package mozilla.components.lib.llm.gemini.nano
6+
7+
import com.google.mlkit.genai.common.DownloadStatus
8+
import com.google.mlkit.genai.common.FeatureStatus
9+
import com.google.mlkit.genai.common.GenAiException
10+
import com.google.mlkit.genai.prompt.Generation
11+
import com.google.mlkit.genai.prompt.GenerativeModel
12+
import kotlinx.coroutines.flow.Flow
13+
import kotlinx.coroutines.flow.FlowCollector
14+
import kotlinx.coroutines.flow.first
15+
import kotlinx.coroutines.flow.flow
16+
import kotlinx.coroutines.flow.onEach
17+
import kotlinx.coroutines.sync.Mutex
18+
import kotlinx.coroutines.sync.withLock
19+
import mozilla.components.concept.llm.Llm
20+
import mozilla.components.concept.llm.Prompt
21+
import mozilla.components.support.base.log.logger.Logger
22+
23+
/**
 * An [Llm] implementation that uses local, on-device capabilities provided by Gemini Nano
 * to handle inference.
 *
 * @param buildModel Factory for the underlying [GenerativeModel]; defaults to the ML Kit
 * client and is invoked lazily on first use.
 * @param logger Sink for informational log messages; defaults to the component's [Logger].
 */
class GeminiNanoLlm(
    private val buildModel: () -> GenerativeModel = { Generation.getClient() },
    private val logger: (String) -> Unit = { message -> Logger("mozac/GeminiNanoLlm").info(message) },
) : Llm {

    // Built lazily so constructing the LLM wrapper is cheap until it is first prompted.
    private val model by lazy {
        buildModel()
    }

    // Serializes model downloads so concurrent prompts trigger at most one download
    // from this instance at a time.
    private val downloadMutex = Mutex()

    override suspend fun prompt(prompt: Prompt): Flow<Llm.Response> = flow {
        // Capture the lazy property once so all branches observe the same instance.
        val model = model
        when (model.checkStatus()) {
            FeatureStatus.AVAILABLE -> {
                streamPromptResponses(prompt)
            }
            FeatureStatus.DOWNLOADING -> {
                emit(Llm.Response.Preparing("Downloading model"))
                // await the completion of the ongoing download
                // NOTE(review): this only blocks if the download was started by THIS instance
                // (via the DOWNLOADABLE branch below); a download initiated elsewhere does not
                // hold our mutex, so the status re-check may still fail — confirm intended.
                downloadMutex.withLock {
                    if (model.checkStatus() == FeatureStatus.AVAILABLE) {
                        streamPromptResponses(prompt)
                    } else {
                        emit(Llm.Response.Failure("Model should be downloaded and is not"))
                    }
                }
            }
            FeatureStatus.DOWNLOADABLE -> {
                emit(Llm.Response.Preparing("Downloading model"))
                // Hold the mutex for the whole download so concurrent prompts wait rather
                // than kicking off redundant downloads.
                val result = downloadMutex.withLock {
                    model.download().onEach { status ->
                        logger("Download update: $status")
                    }.first { status ->
                        status == DownloadStatus.DownloadCompleted || status is DownloadStatus.DownloadFailed
                    }
                }
                if (result is DownloadStatus.DownloadFailed) {
                    val message = "Download failed ${result.e.message}"
                    logger(message)
                    emit(Llm.Response.Failure(message))
                } else {
                    streamPromptResponses(prompt)
                }
            }
            else -> emit(Llm.Response.Failure("Unavailable"))
        }
    }

    /**
     * Streams the model's replies for [prompt] into the collector as [Llm.Response.Success.ReplyPart]s,
     * emitting [Llm.Response.Success.ReplyFinished] once the model reports a finish reason,
     * or [Llm.Response.Failure] if inference throws a [GenAiException].
     */
    private suspend fun FlowCollector<Llm.Response>.streamPromptResponses(prompt: Prompt) = try {
        // consume replies from the model until it provides a finish reason
        logger("Beginning model response stream")
        model.generateContentStream(prompt.value).onEach { response ->
            emit(Llm.Response.Success.ReplyPart(response.candidates[0].text))
        }.first { response ->
            val finishReason = response.candidates[0].finishReason
            // Fix: previously `(finishReason != null).also { logger(...) }` logged
            // "Model stream completed with: null" for every partial chunk; only log
            // completion when a finish reason is actually present.
            if (finishReason != null) {
                logger("Model stream completed with: $finishReason")
            }
            finishReason != null
        }
        emit(Llm.Response.Success.ReplyFinished)
    } catch (e: GenAiException) {
        val message = "Gemini Nano inference failed: ${e.message}"
        logger(message)
        emit(Llm.Response.Failure(message))
    }
}

mobile/android/android-components/components/lib/llm-gemininano/src/test/java/mozilla/components/lib/llm/gemini/nano/ExampleTest.kt

Lines changed: 0 additions & 15 deletions
This file was deleted.

0 commit comments

Comments
 (0)