Skip to content

Commit 0ff40fb

Browse files
committed
add ollama support
1 parent 8aa61e8 commit 0ff40fb

File tree

8 files changed

+96
-2
lines changed

8 files changed

+96
-2
lines changed

core/src/main/kotlin/io/github/llmagentbuilder/core/tool/AgentToolFactory.kt

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@ import java.util.function.Supplier
99
*/
1010
interface AgentToolFactory<out T : AgentTool<*, *>> {
1111
/**
12-
* @param T Agent tool type
1312
* @return Agent tool
1413
*/
1514
fun create(): T
@@ -21,7 +20,6 @@ interface AgentToolFactory<out T : AgentTool<*, *>> {
2120
interface ConfigurableAgentToolFactory<CONFIG, out T : ConfigurableAgentTool<*, *, CONFIG>> :
2221
AgentToolFactory<T> {
2322
/**
24-
* @param T Agent tool type
2523
* @param config Tool configuration object
2624
* @return Agent tool
2725
*/

llm/ollama/pom.xml

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
2+
<project xmlns="http://maven.apache.org/POM/4.0.0"
3+
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4+
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5+
<modelVersion>4.0.0</modelVersion>
6+
<parent>
7+
<groupId>io.github.llmagentbuilder</groupId>
8+
<artifactId>llm</artifactId>
9+
<version>0.4.2</version>
10+
</parent>
11+
12+
<artifactId>llm-ollama</artifactId>
13+
<name>LLM Adapter :: Ollama</name>
14+
15+
<properties>
16+
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
17+
<maven.compiler.source>21</maven.compiler.source>
18+
<maven.compiler.target>21</maven.compiler.target>
19+
</properties>
20+
21+
<dependencies>
22+
<dependency>
23+
<groupId>org.springframework.ai</groupId>
24+
<artifactId>spring-ai-ollama</artifactId>
25+
</dependency>
26+
<dependency>
27+
<groupId>io.github.llmagentbuilder</groupId>
28+
<artifactId>core</artifactId>
29+
<version>${project.version}</version>
30+
</dependency>
31+
</dependencies>
32+
</project>
Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
package io.github.llmagentbuilder.llm.ollama

import io.github.llmagentbuilder.core.ChatModelProvider
import io.github.llmagentbuilder.core.MapToObject
import org.springframework.ai.chat.model.ChatModel
import org.springframework.ai.model.function.FunctionCallbackResolver
import org.springframework.ai.ollama.OllamaChatModel
import org.springframework.ai.ollama.api.OllamaApi
import org.springframework.ai.ollama.api.OllamaModel
import org.springframework.ai.ollama.api.OllamaOptions

/**
 * [ChatModelProvider] that builds a Spring AI [OllamaChatModel].
 *
 * Reads its settings from the `"ollama"` section of the agent configuration
 * (see [OllamaConfig]); falls back to the PHI3 model id when no model is set.
 */
class OllamaChatModelProvider : ChatModelProvider {

    /** Configuration key under which this provider's settings are looked up. */
    override fun configKey(): String = "ollama"

    /**
     * Creates the chat model, or returns `null` when the configuration
     * explicitly sets `enabled: false`.
     *
     * A missing or partial config section still produces a model using
     * default settings.
     */
    override fun provideChatModel(
        functionCallbackResolver: FunctionCallbackResolver,
        config: Map<String, Any?>?
    ): ChatModel? {
        val parsed = MapToObject.toObject<OllamaConfig>(config)
        // Only an explicit `enabled == false` disables the provider;
        // null config (or null `enabled`) keeps it active.
        if (parsed?.enabled == false) {
            return null
        }
        val modelId = parsed?.model ?: OllamaModel.PHI3.id()
        val defaults = OllamaOptions.builder().model(modelId).build()
        return OllamaChatModel.builder()
            .ollamaApi(OllamaApi())
            .defaultOptions(defaults)
            .functionCallbackResolver(functionCallbackResolver)
            .build()
    }
}
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
package io.github.llmagentbuilder.llm.ollama

import io.github.llmagentbuilder.core.ChatOptionsConfigurer
import org.springframework.ai.chat.prompt.ChatOptions
import org.springframework.ai.ollama.api.OllamaOptions

/**
 * [ChatOptionsConfigurer] for Ollama: appends agent-level stop sequences
 * to [OllamaOptions].
 */
class OllamaChatOptionsConfigurer : ChatOptionsConfigurer {

    /** Applies only to [OllamaOptions] instances. */
    override fun supports(chatOptions: ChatOptions?): Boolean =
        chatOptions is OllamaOptions

    /**
     * Returns a copy of [chatOptions] with [config]'s stop sequences appended,
     * or fresh [OllamaOptions] carrying only those stop sequences when
     * [chatOptions] is `null`.
     */
    override fun configure(
        chatOptions: ChatOptions?,
        config: ChatOptionsConfigurer.ChatOptionsConfig
    ): ChatOptions {
        val extraStops = config.stopSequence ?: listOf()
        if (chatOptions == null) {
            return OllamaOptions.builder().stop(extraStops).build()
        }
        // Work on a copy so the caller's options object is never mutated.
        // Safe cast: supports() guarantees only OllamaOptions reach here.
        val merged = OllamaOptions.fromOptions(chatOptions as OllamaOptions)
        merged.stop = (merged.stopSequences ?: listOf()) + extraStops
        return merged
    }
}
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
package io.github.llmagentbuilder.llm.ollama

/**
 * Settings for the `"ollama"` configuration section consumed by
 * [OllamaChatModelProvider].
 *
 * @property enabled whether the Ollama provider is active; only an explicit
 *   `false` disables it (null is treated the same as the default `true`
 *   by the provider)
 * @property model Ollama model id to use; when null the provider falls back
 *   to its built-in default
 */
data class OllamaConfig(
    val enabled: Boolean? = true,
    val model: String? = null,
)
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
io.github.llmagentbuilder.llm.ollama.OllamaChatModelProvider
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
io.github.llmagentbuilder.llm.ollama.OllamaChatOptionsConfigurer

llm/pom.xml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
<packaging>pom</packaging>
1515
<modules>
1616
<module>openai</module>
17+
<module>ollama</module>
1718
</modules>
1819

1920
<properties>

0 commit comments

Comments
 (0)