This is an automated email from the ASF dual-hosted git repository.

aldettinger pushed a commit to branch data-extraction-example
in repository https://gitbox.apache.org/repos/asf/camel-quarkus-examples.git

commit ce748946d6094119410fc99e58356a91e5e9eeb0
Author: aldettinger <[email protected]>
AuthorDate: Fri Jul 26 15:49:18 2024 +0200

    Add mocking of the LLM
---
 data-extract-langchain4j/pom.xml                   |  8 ++-
 .../org/acme/extraction/OllamaTestResource.java    | 70 +++++++++++++++++++---
 ..._chat-64441899-7a8b-456d-9dba-8dcf21cdc6e6.json | 24 ++++++++
 3 files changed, 92 insertions(+), 10 deletions(-)

diff --git a/data-extract-langchain4j/pom.xml b/data-extract-langchain4j/pom.xml
index bebd8bd..3fc9c02 100644
--- a/data-extract-langchain4j/pom.xml
+++ b/data-extract-langchain4j/pom.xml
@@ -87,7 +87,7 @@
             <groupId>org.apache.camel.quarkus</groupId>
             <artifactId>camel-quarkus-bean</artifactId>
         </dependency>
-         <dependency>
+        <dependency>
             <groupId>org.apache.camel.quarkus</groupId>
             <artifactId>camel-quarkus-platform-http</artifactId>
         </dependency>
@@ -121,6 +121,12 @@
             <artifactId>testcontainers</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.wiremock</groupId>
+            <artifactId>wiremock-standalone</artifactId>
+            <version>3.9.1</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
diff --git 
a/data-extract-langchain4j/src/test/java/org/acme/extraction/OllamaTestResource.java
 
b/data-extract-langchain4j/src/test/java/org/acme/extraction/OllamaTestResource.java
index 9604ff0..18c42f8 100644
--- 
a/data-extract-langchain4j/src/test/java/org/acme/extraction/OllamaTestResource.java
+++ 
b/data-extract-langchain4j/src/test/java/org/acme/extraction/OllamaTestResource.java
@@ -3,6 +3,7 @@ package org.acme.extraction;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.github.tomakehurst.wiremock.WireMockServer;
 import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -10,33 +11,78 @@ import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.containers.wait.strategy.Wait;
 
+import static java.lang.String.format;
+
 public class OllamaTestResource implements QuarkusTestResourceLifecycleManager 
{
 
     private static final Logger LOG = 
LoggerFactory.getLogger(OllamaTestResource.class);
+    // TODO: don't use latest version, switch to explicit version
     private static final String OLLAMA_IMAGE = 
"langchain4j/ollama-codellama:latest";
     private static final int OLLAMA_SERVER_PORT = 11434;
 
     private GenericContainer<?> ollamaContainer;
 
+    private WireMockServer wireMockServer;
+    private String baseUrl;
+
+    private static final String MODE_MOCK = "mock";
+    private static final String MODE_RECORDING = "record";
+
+    /**
+     * The testMode value could be defined, for instance by invoking: mvn 
clean test -DtestMode=mock. With the default
+     * value "mock", the llm is faked based on the last recorded run. With the 
value "record", tests are run against a
+     * containerized llm while the HTTP interactions with the llm are 
recorded. Any other value would run the test
+     * against a containzeried llm without recording.
+     */
+    private boolean mock;
+    private boolean recording;
+
+    private static final String BASE_URL_FORMAT = "http://%s:%s";
+
     @Override
     public Map<String, String> start() {
+
+        // Check the test running mode
+        String testMode = System.getProperty("testMode", MODE_MOCK);
+        mock = MODE_MOCK.equals(testMode);
+        recording = MODE_RECORDING.equals(testMode);
+
         Map<String, String> properties = new HashMap<>();
 
-        LOG.info("Starting Ollama container resource");
-        ollamaContainer = new GenericContainer<>(OLLAMA_IMAGE)
-                .withExposedPorts(OLLAMA_SERVER_PORT)
-                .withLogConsumer(new 
Slf4jLogConsumer(LOG).withPrefix("basicAuthContainer"))
-                .waitingFor(Wait.forLogMessage(".* msg=\"inference compute\" 
.*", 1));
+        if (mock) {
+            LOG.info("Starting a fake Ollama server backed by wiremock");
+            initWireMockServer();
+        } else {
+            LOG.info("Starting an Ollama server backed by testcontainers");
+            ollamaContainer = new GenericContainer<>(OLLAMA_IMAGE)
+                    .withExposedPorts(OLLAMA_SERVER_PORT)
+                    .withLogConsumer(new 
Slf4jLogConsumer(LOG).withPrefix("basicAuthContainer"))
+                    .waitingFor(Wait.forLogMessage(".* msg=\"inference 
compute\" .*", 1));
+            ollamaContainer.start();
+
+            baseUrl = format(BASE_URL_FORMAT, ollamaContainer.getHost(), 
ollamaContainer.getMappedPort(OLLAMA_SERVER_PORT));
 
-        ollamaContainer.start();
+            if (recording) {
+                LOG.info("Recording interactions with the Ollama server backed 
by testcontainers");
+                initWireMockServer();
+            }
+        }
 
-        String baseUrl = String.format("http://%s:%s";, 
ollamaContainer.getHost(),
-                ollamaContainer.getMappedPort(OLLAMA_SERVER_PORT));
         properties.put("quarkus.langchain4j.ollama.base-url", baseUrl);
 
         return properties;
     }
 
+    private void initWireMockServer() {
+        wireMockServer = new WireMockServer();
+        wireMockServer.start();
+        if (recording) {
+            wireMockServer.resetMappings();
+            wireMockServer.startRecording(baseUrl);
+        }
+        baseUrl = format(BASE_URL_FORMAT, "localhost", wireMockServer.port());
+    }
+
     @Override
     public void stop() {
         try {
@@ -46,6 +92,12 @@ public class OllamaTestResource implements 
QuarkusTestResourceLifecycleManager {
         } catch (Exception ex) {
             LOG.error("An issue occurred while stopping " + 
ollamaContainer.getNetworkAliases(), ex);
         }
-    }
 
+        if (mock) {
+            wireMockServer.stop();
+        } else if (recording) {
+            wireMockServer.stopRecording();
+            wireMockServer.saveMappings();
+        }
+    }
 }
diff --git 
a/data-extract-langchain4j/src/test/resources/mappings/api_chat-64441899-7a8b-456d-9dba-8dcf21cdc6e6.json
 
b/data-extract-langchain4j/src/test/resources/mappings/api_chat-64441899-7a8b-456d-9dba-8dcf21cdc6e6.json
new file mode 100644
index 0000000..6477e57
--- /dev/null
+++ 
b/data-extract-langchain4j/src/test/resources/mappings/api_chat-64441899-7a8b-456d-9dba-8dcf21cdc6e6.json
@@ -0,0 +1,24 @@
+{
+  "id" : "64441899-7a8b-456d-9dba-8dcf21cdc6e6",
+  "name" : "api_chat",
+  "request" : {
+    "url" : "/api/chat",
+    "method" : "POST",
+    "bodyPatterns" : [ {
+      "equalToJson" : "{\n  \"model\" : \"codellama\",\n  \"messages\" : [ {\n 
   \"role\" : \"user\",\n    \"content\" : \"Extract information about a 
customer from the text delimited by triple backticks: ```Operator: Hello, how 
may I help you?\\nCustomer: Hello, I am currently at the police station because 
I've got an accident. The police would need a proof that I have an insurance. 
Could you please help me?\\nOperator: Sure, could you please remind me your 
name and birth date?\\nCusto [...]
+      "ignoreArrayOrder" : true,
+      "ignoreExtraElements" : true
+    } ]
+  },
+  "response" : {
+    "status" : 200,
+    "body" : 
"{\"model\":\"codellama\",\"created_at\":\"2024-07-26T13:48:19.801608167Z\",\"message\":{\"role\":\"assistant\",\"content\":\"{\\n\\\"customerSatisfied\\\":
 true,\\n\\\"customerName\\\": \\\"Kate Boss\\\",\\n\\\"customerBirthday\\\": 
\\\"1999-08-13\\\",\\n\\\"summary\\\": \\\"Customer Kate Boss is satisfied with 
the assistance provided by the operator. The customer was able to provide their 
name and birth date correctly, and the operator was able to locate their 
insurance co [...]
+    "headers" : {
+      "Date" : "Fri, 26 Jul 2024 13:48:19 GMT",
+      "Content-Type" : "application/json; charset=utf-8"
+    }
+  },
+  "uuid" : "64441899-7a8b-456d-9dba-8dcf21cdc6e6",
+  "persistent" : true,
+  "insertionIndex" : 2
+}
\ No newline at end of file

Reply via email to