
Commit 1af1922

Update java-llama to 2.0
1 parent 96f39a0 commit 1af1922

File tree: 3 files changed, 25 additions (+), 11 deletions (-)

  pom.xml
  src/main/java/org/myrobotlab/service/Llama.java
  src/main/java/org/myrobotlab/service/meta/LlamaMeta.java

pom.xml

Lines changed: 11 additions & 2 deletions
@@ -13,6 +13,9 @@
 # fast build
 mvn -DskipTests package -o
 
+# execute
+mvn exec:java -Dexec.mainClass=org.myrobotlab.service.Runtime -Dexec.args="-s webgui WebGui intro Intro python Python"
+
 # specific test
 mvn test -Dtest="org.myrobotlab.service.WebGuiTest#postTest"
 
@@ -80,7 +83,7 @@
   <!-- force overriding property at command line, use ${maven.build.timestamp}-->
   <timestamp>${maven.build.timestamp}</timestamp>
   <maven.build.timestamp.format>yyyyMMddHHmm</maven.build.timestamp.format>
-  <version>${version}</version>
+  <version>${version}</version>
   <GitBranch>${git.branch}</GitBranch>
   <username>${NODE_NAME}</username>
   <platform>${NODE_LABELS}</platform>
@@ -618,7 +621,7 @@
   <dependency>
     <groupId>de.kherud</groupId>
     <artifactId>llama</artifactId>
-    <version>1.1.4</version>
+    <version>2.2.1</version>
     <scope>provided</scope>
   </dependency>
   <!-- Llama end -->
@@ -1027,6 +1030,12 @@
     <artifactId>jovr</artifactId>
     <version>1.8.0.0</version>
     <scope>provided</scope>
+    <exclusions>
+      <exclusion>
+        <groupId>net.java.dev.jna</groupId>
+        <artifactId>jna</artifactId>
+      </exclusion>
+    </exclusions>
   </dependency>
   <dependency>
     <groupId>slick-util</groupId>
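
The new <exclusions> block keeps jovr's transitive JNA artifact off the classpath, presumably to avoid clashing with the JNA version other dependencies expect; that motivation is an assumption, not stated in the commit. A minimal, hypothetical sketch (not part of this commit) for checking which JNA actually wins at runtime, assuming some other dependency still provides it:

// Hypothetical helper: prints the JNA version and the jar it was loaded from,
// useful after changing exclusions. Assumes com.sun.jna.Native is still resolvable.
public class JnaVersionCheck {
  public static void main(String[] args) {
    System.out.println("JNA version: " + com.sun.jna.Native.VERSION);
    System.out.println("Loaded from: " + com.sun.jna.Native.class
        .getProtectionDomain().getCodeSource().getLocation());
  }
}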

src/main/java/org/myrobotlab/service/Llama.java

Lines changed: 13 additions & 8 deletions
@@ -1,7 +1,8 @@
 package org.myrobotlab.service;
 
+import de.kherud.llama.InferenceParameters;
 import de.kherud.llama.LlamaModel;
-import de.kherud.llama.Parameters;
+import de.kherud.llama.ModelParameters;
 import org.myrobotlab.framework.Platform;
 import org.myrobotlab.framework.Service;
 import org.myrobotlab.logging.Level;
@@ -36,25 +37,29 @@ public Llama(String reservedKey, String inId) {
   }
 
   public void loadModel(String modelPath) {
-    Parameters params = new Parameters.Builder()
+    ModelParameters params = new ModelParameters.Builder()
         .setNGpuLayers(0)
         .setNThreads(Platform.getLocalInstance().getNumPhysicalProcessors())
-        .setTemperature(0.7f)
-        .setPenalizeNl(true)
-        .setMirostat(Parameters.MiroStat.V2)
-        .setAntiPrompt(new String[]{config.userPrompt})
         .build();
     model = new LlamaModel(modelPath, params);
   }
 
   public Response getResponse(String text) {
+    InferenceParameters inferenceParameters = new InferenceParameters.Builder()
+        .setTemperature(0.7f)
+        .setPenalizeNl(true)
+        .setMirostat(InferenceParameters.MiroStat.V2)
+        .setAntiPrompt(new String[]{config.userPrompt})
+        .build();
+
+
     if (model == null) {
       error("Model is not loaded.");
       return null;
     }
 
     String prompt = config.systemPrompt + config.systemMessage + "\n" + text + "\n";
-    String response = StreamSupport.stream(model.generate(prompt).spliterator(), false)
+    String response = StreamSupport.stream(model.generate(prompt, inferenceParameters).spliterator(), false)
         .map(LlamaModel.Output::toString)
         .reduce("", (a, b) -> a + b);
 
@@ -132,7 +137,7 @@ public Response publishResponse(Response response) {
   }
 
   public void reset() {
-    model.reset();
+    model.close();
 
   }
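
For context on the Llama.java changes: java-llama.cpp 1.x took a single Parameters object at load time that also carried sampling options, while 2.x splits these into ModelParameters (passed to the LlamaModel constructor) and InferenceParameters (passed to each generate() call), and close() releases the native model. A minimal standalone sketch of the 2.x pattern used above, assuming only the builder methods that appear in this diff; the model path, thread count, anti-prompt string, and prompt text are placeholders:

import java.util.stream.StreamSupport;

import de.kherud.llama.InferenceParameters;
import de.kherud.llama.LlamaModel;
import de.kherud.llama.ModelParameters;

public class LlamaQuickstart {
  public static void main(String[] args) {
    // Load-time options: CPU only, placeholder thread count.
    ModelParameters modelParams = new ModelParameters.Builder()
        .setNGpuLayers(0)
        .setNThreads(4)
        .build();

    // Sampling options now travel with each generate() call.
    InferenceParameters inferParams = new InferenceParameters.Builder()
        .setTemperature(0.7f)
        .setPenalizeNl(true)
        .setMirostat(InferenceParameters.MiroStat.V2)
        .setAntiPrompt(new String[]{"User:"}) // placeholder anti-prompt
        .build();

    // "/path/to/model.gguf" is a placeholder, not a path from this commit.
    LlamaModel model = new LlamaModel("/path/to/model.gguf", modelParams);
    try {
      // Concatenate the streamed output chunks, as the service code above does.
      String answer = StreamSupport
          .stream(model.generate("User: Hello\nAssistant:", inferParams).spliterator(), false)
          .map(LlamaModel.Output::toString)
          .reduce("", (a, b) -> a + b);
      System.out.println(answer);
    } finally {
      model.close(); // 2.x cleanup; the service's reset() now delegates to this
    }
  }
}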

src/main/java/org/myrobotlab/service/meta/LlamaMeta.java

Lines changed: 1 addition & 1 deletion
@@ -10,6 +10,6 @@ public LlamaMeta() {
         "llama.cpp project. Can run most GGUF models."
     );
 
-    addDependency("de.kherud", "llama", "1.1.4");
+    addDependency("de.kherud", "llama", "2.2.1");
   }
 }
