Skip to content

Commit

Permalink
#4292 - ollama-based recommender
Browse files Browse the repository at this point in the history
- Refactored prompt generation and response extraction
  • Loading branch information
reckart committed Nov 25, 2023
1 parent 6f48e52 commit 0c6bfec
Show file tree
Hide file tree
Showing 16 changed files with 590 additions and 230 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,20 +17,12 @@
*/
package de.tudarmstadt.ukp.inception.recommendation.imls.ollama;

import static de.tudarmstadt.ukp.clarin.webanno.api.annotation.util.WebAnnoCasUtil.selectOverlapping;

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.fit.util.CasUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -40,23 +32,24 @@
import com.hubspot.jinjava.loader.ResourceLocator;
import com.hubspot.jinjava.loader.ResourceNotFoundException;

import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.inception.recommendation.api.model.Recommender;
import de.tudarmstadt.ukp.inception.recommendation.api.recommender.NonTrainableRecommenderEngineImplBase;
import de.tudarmstadt.ukp.inception.recommendation.api.recommender.RecommendationException;
import de.tudarmstadt.ukp.inception.recommendation.api.recommender.RecommenderContext;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.client.OllamaClient;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.client.OllamaGenerateRequest;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PerAnnotationBindingsGenerator;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PerDocumentBindingsGenerator;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PerSentenceBindingsGenerator;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PromptBindingsGenerator;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PromptContext;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.response.MentionsFromJsonExtractor;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.response.ResponseAsLabelExtractor;
import de.tudarmstadt.ukp.inception.rendering.model.Range;
import de.tudarmstadt.ukp.inception.support.json.JSONUtil;

public class OllamaRecommender
extends NonTrainableRecommenderEngineImplBase
{
private static final String VAR_TEXT = "text";
private static final String VAR_SENTENCE = "sentence";
private static final String VAR_DOCUMENT = "document";

private final static Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private final OllamaRecommenderTraits traits;
Expand Down Expand Up @@ -93,239 +86,39 @@ public Range predict(RecommenderContext aContext, CAS aCas, int aBegin, int aEnd
{
switch (traits.getPromptingMode()) {
case PER_ANNOTATION:
return predictPerAnnotation(aContext, aCas, aBegin, aEnd);
return predict(new PerAnnotationBindingsGenerator(), aContext, aCas, aBegin, aEnd);
case PER_SENTENCE:
return predictPerSentence(aContext, aCas, aBegin, aEnd);
return predict(new PerSentenceBindingsGenerator(), aContext, aCas, aBegin, aEnd);
case PER_DOCUMENT:
return predictPerDocument(aContext, aCas, aBegin, aEnd);
return predict(new PerDocumentBindingsGenerator(), aContext, aCas, aBegin, aEnd);
default:
throw new RecommendationException(
"Unsupported mode [" + traits.getPromptingMode() + "]");
}
}

private Range predictPerDocument(RecommenderContext aContext, CAS aCas, int aBegin, int aEnd)
{
var bindings = Map.of(VAR_TEXT, aCas.getDocumentText());
var prompt = jinjava.render(traits.getPrompt(), bindings);

try {
var candidate = aCas.getDocumentAnnotation();

var response = generate(prompt);

extractPredictions(candidate, response);
}
catch (IOException e) {
LOG.error("Ollama [{}] failed to respond: {}", traits.getModel(),
ExceptionUtils.getRootCauseMessage(e));
}

return new Range(aBegin, aEnd);
}

private void extractPredictions(AnnotationFS aCandidate, String aResponse)
{
switch (traits.getExtractionMode()) {
case RESPONSE_AS_LABEL:
predictResultAsLabel(aCandidate, aResponse);
break;
case MENTIONS_FROM_JSON:
var mentions = extractMentionFromJson(aCandidate, aResponse);
mentionsToPredictions(aCandidate, mentions);
break;
default:
throw new IllegalArgumentException(
"Unsupported extraction mode [" + traits.getExtractionMode() + "]");
}
}

private ArrayList<Pair<String, String>> extractMentionFromJson(AnnotationFS aCandidate,
String aResponse)
{
var mentions = new ArrayList<Pair<String, String>>();
try {
// Ollama JSON mode always returns a JSON object
// See:
// https://github.com/jmorganca/ollama/commit/5cba29b9d666854706a194805c9d66518fe77545#diff-a604f7ba9b7f66dd7b59a9e884d3c82c96e5269fee85c906a7cca5f0c3eff7f8R30-R57
var rootNode = JSONUtil.getObjectMapper().readTree(aResponse);

var fieldIterator = rootNode.fields();
while (fieldIterator.hasNext()) {
var fieldEntry = fieldIterator.next();
if (fieldEntry.getValue().isArray()) {
for (var item : fieldEntry.getValue()) {
if (item.isTextual()) {
// Looks like this
// "Person": ["John"],
// "Location": ["diner", "Starbucks"]
mentions.add(Pair.of(item.asText(), fieldEntry.getKey()));
}
if (item.isObject()) {
// Looks like this
// "politicians": [
// { "name": "President Livingston" },
// { "name": "John" },
// { "name": "Don Horny" }
// ]
var subFieldIterator = item.fields();
while (subFieldIterator.hasNext()) {
var subEntry = subFieldIterator.next();
if (subEntry.getValue().isTextual()) {
mentions.add(Pair.of(subEntry.getValue().asText(),
fieldEntry.getKey()));
}
// We assume that the first item is the most relevant one (the
// mention) so we do not get a bad mention in cases like this:
// {
// "name": "Don Horny",
// "affiliation": "Lord of Darkness"
// }
break;
}
}
}
}

// Looks like this
// "John": {"type": "PERSON"},
// "diner": {"type": "LOCATION"},
// "Starbucks": {"type": "LOCATION"}
if (fieldEntry.getValue().isObject()) {
mentions.add(Pair.of(fieldEntry.getKey(), null));
}

// Looks like this
// "John": "politician",
// "President Livingston": "politician",
// "minister of foreign affairs": "politician",
// "Don Horny": "politician"
if (fieldEntry.getValue().isTextual()) {
mentions.add(Pair.of(fieldEntry.getKey(), fieldEntry.getValue().asText()));
}
}
}
catch (IOException e) {
LOG.error("Unable to extract mentions - not valid JSON: [" + aResponse + "]");
}
return mentions;
}

private void mentionsToPredictions(AnnotationFS aCandidate, List<Pair<String, String>> mentions)
{
var cas = aCandidate.getCAS();
var text = aCandidate.getCoveredText();
var predictedType = getPredictedType(cas);
var predictedFeature = getPredictedFeature(cas);
var isPredictionFeature = getIsPredictionFeature(cas);

for (var entry : mentions) {
var mention = entry.getKey();
if (mention.isBlank()) {
LOG.debug("Blank mention ignored");
continue;
}

var label = entry.getValue();
var lastIndex = 0;
var index = text.indexOf(mention, lastIndex);
var hitCount = 0;
while (index >= 0) {
int begin = aCandidate.getBegin() + index;
var prediction = cas.createAnnotation(predictedType, begin,
begin + mention.length());
prediction.setBooleanValue(isPredictionFeature, true);
if (label != null) {
prediction.setStringValue(predictedFeature, label);
}
cas.addFsToIndexes(prediction);
LOG.debug("Prediction generated [{}] -> [{}]", mention, label);
hitCount++;

lastIndex = index + mention.length();
index = text.indexOf(mention, lastIndex);

if (hitCount > text.length() / mention.length()) {
LOG.error(
"Mention detection seems to have entered into an endless loop - aborting");
break;
}
}

if (hitCount == 0) {
LOG.debug("Mention [{}] not found", mention);
}
}
}

private void predictResultAsLabel(AnnotationFS aCandidate, String aResponse)
private Range predict(PromptBindingsGenerator aGenerator, RecommenderContext aContext, CAS aCas,
int aBegin, int aEnd)
{
var aCas = aCandidate.getCAS();

var predictedType = getPredictedType(aCas);
var predictedFeature = getPredictedFeature(aCas);
var isPredictionFeature = getIsPredictionFeature(aCas);

var prediction = aCas.createAnnotation(predictedType, aCandidate.getBegin(),
aCandidate.getEnd());
prediction.setFeatureValueFromString(predictedFeature, aResponse);
prediction.setBooleanValue(isPredictionFeature, true);
aCas.addFsToIndexes(prediction);

LOG.debug("Prediction generated [{}] -> [{}]", prediction.getCoveredText(), aResponse);
}

private Range predictPerSentence(RecommenderContext aContext, CAS aCas, int aBegin, int aEnd)
{
var candidateType = CasUtil.getAnnotationType(aCas, Sentence.class);

for (var candidate : selectOverlapping(aCas, candidateType, aBegin, aEnd)) {
var bindings = Map.of(VAR_TEXT, candidate.getCoveredText());
var prompt = jinjava.render(traits.getPrompt(), bindings);

aGenerator.generate(aCas, aBegin, aEnd).forEach(promptContext -> {
try {
var response = generate(prompt);
var prompt = jinjava.render(traits.getPrompt(), promptContext.getBindings());
var response = query(prompt);

extractPredictions(candidate, response);
extractPredictions(aCas, promptContext, response);
}
catch (IOException e) {
LOG.error("Ollama [{}] failed to respond: {}", traits.getModel(),
ExceptionUtils.getRootCauseMessage(e));
}
}

return new Range(aBegin, aEnd);
}

private Range predictPerAnnotation(RecommenderContext aContext, CAS aCas, int aBegin, int aEnd)
{
var predictedType = getPredictedType(aCas);

for (var candidate : selectOverlapping(aCas, predictedType, aBegin, aEnd)) {
String sentence = aCas.select(Sentence.class).covering(candidate)
.map(Sentence::getCoveredText).findFirst().orElse("");
var bindings = Map.of( //
VAR_TEXT, candidate.getCoveredText(), //
VAR_SENTENCE, sentence);
var prompt = jinjava.render(traits.getPrompt(), bindings);

try {
var response = generate(prompt);

extractPredictions(candidate, response);
}
catch (IOException e) {
LOG.error("Ollama [{}] failed to respond: {}", traits.getModel(),
ExceptionUtils.getRootCauseMessage(e));
}
}
});

return new Range(aBegin, aEnd);
}

private String generate(String prompt) throws IOException
private String query(String prompt) throws IOException
{
LOG.trace("Asking ollama [{}]: [{}]", traits.getModel(), prompt);
LOG.trace("Querying ollama [{}]: [{}]", traits.getModel(), prompt);
var request = OllamaGenerateRequest.builder() //
.withModel(traits.getModel()) //
.withPrompt(prompt) //
Expand All @@ -337,4 +130,19 @@ private String generate(String prompt) throws IOException
LOG.trace("Ollama [{}] responds: [{}]", traits.getModel(), response);
return response;
}

/**
 * Dispatches the raw LLM response to the extractor that matches the configured
 * extraction mode.
 *
 * @param aCas the CAS into which predictions are written by the extractor.
 * @param aContext the prompt context the response was generated for.
 * @param aResponse the raw text returned by the Ollama model.
 * @throws IllegalArgumentException if the configured extraction mode is not supported.
 */
private void extractPredictions(CAS aCas, PromptContext aContext, String aResponse)
{
    var mode = traits.getExtractionMode();
    switch (mode) {
    case RESPONSE_AS_LABEL -> new ResponseAsLabelExtractor().extract(this, aCas, aContext,
            aResponse);
    case MENTIONS_FROM_JSON -> new MentionsFromJsonExtractor().extract(this, aCas, aContext,
            aResponse);
    default -> throw new IllegalArgumentException(
            "Unsupported extraction mode [" + mode + "]");
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import com.fasterxml.jackson.annotation.JsonInclude;

import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.client.OllamaGenerateResponseFormat;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PromptingMode;

@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaRecommenderTraits
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.client.OllamaGenerateResponseFormat;
import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PromptingMode;

@JsonIgnoreProperties(ignoreUnknown = true)
public class Preset
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
import org.apache.wicket.markup.html.form.EnumChoiceRenderer;
import org.apache.wicket.model.IModel;

import de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt.PromptingMode;

public class PromptingModeSelect
extends DropDownChoice<PromptingMode>
{
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
/*
* Licensed to the Technische Universität Darmstadt under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The Technische Universität Darmstadt
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.inception.recommendation.imls.ollama.prompt;

import static de.tudarmstadt.ukp.clarin.webanno.api.annotation.util.WebAnnoCasUtil.selectOverlapping;

import java.util.stream.Stream;

import org.apache.uima.cas.CAS;
import org.apache.uima.fit.util.CasUtil;

import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;

/**
 * Generates one {@link PromptContext} per candidate annotation in the given window, binding the
 * candidate's covered text and the text of the first sentence covering it.
 */
public class PerAnnotationBindingsGenerator
    implements PromptBindingsGenerator
{

    @Override
    public Stream<PromptContext> generate(CAS aCas, int aBegin, int aEnd)
    {
        // NOTE(review): the candidate type here is Sentence, which makes this generator behave
        // like a per-sentence generator. The per-annotation code this replaces iterated over the
        // recommender's predicted type instead. This looks like a copy-paste from
        // PerSentenceBindingsGenerator — confirm whether the candidate type should come from the
        // recommender's layer (the current generate(CAS, int, int) interface provides no access
        // to it).
        var candidateType = CasUtil.getAnnotationType(aCas, Sentence.class);
        return selectOverlapping(aCas, candidateType, aBegin, aEnd).stream().map(candidate -> {
            // First sentence covering the candidate; empty string if none covers it.
            var sentence = aCas.select(Sentence.class).covering(candidate) //
                    .map(Sentence::getCoveredText) //
                    .findFirst().orElse("");
            var context = new PromptContext(candidate);
            // Bindings are rendered into the user-defined Jinja prompt template.
            context.set(VAR_TEXT, candidate.getCoveredText());
            context.set(VAR_SENTENCE, sentence);
            return context;

        });
    }
}
Loading

0 comments on commit 0c6bfec

Please sign in to comment.