一、引入依赖
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the LangChain4j OpenAI examples.
     NOTE: the original paste was missing whitespace between the xmlns,
     xmlns:xsi and xsi:schemaLocation attributes, which made the XML invalid. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>dev.langchain4j</groupId>
    <artifactId>open-ai-examples</artifactId>
    <version>0.35.0</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- OpenAI integration for LangChain4j -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-open-ai</artifactId>
            <version>0.35.0</version>
        </dependency>
        <!-- LangChain4j core -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j</artifactId>
            <version>0.35.0</version>
        </dependency>
        <!-- Local (in-process) embedding model -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-embeddings-all-minilm-l6-v2</artifactId>
            <version>0.35.0</version>
        </dependency>
        <!-- Logging backend used by the examples -->
        <dependency>
            <groupId>org.tinylog</groupId>
            <artifactId>tinylog-impl</artifactId>
            <version>2.6.2</version>
        </dependency>
        <dependency>
            <groupId>org.tinylog</groupId>
            <artifactId>slf4j-tinylog</artifactId>
            <version>2.6.2</version>
        </dependency>
    </dependencies>

</project>
二、代码实例
1、ApiKeys定义
import static dev.langchain4j.internal.Utils.getOrDefault;

/**
 * Central place for the API keys used by the examples.
 *
 * <p>Reads {@code OPENAI_API_KEY} from the environment and falls back to the
 * {@code "demo"} key (a LangChain4j demo proxy key) when the variable is unset.
 */
public class ApiKeys {

    /** OpenAI API key: the {@code OPENAI_API_KEY} env var, or {@code "demo"} if absent. */
    public static final String OPENAI_API_KEY = getOrDefault(System.getenv("OPENAI_API_KEY"), "demo");

    // Utility holder class — not meant to be instantiated.
    private ApiKeys() {
    }
}
2、OpenAiChatModel调用
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_4_O_MINI;public class OpenAiChatModelExamples {static class Simple_Prompt {public static void main(String[] args) {ChatLanguageModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).modelName(GPT_4_O_MINI).build();String joke = model.generate("Tell me a joke about Java");System.out.println(joke);}}static class Image_Inputs {public static void main(String[] args) {ChatLanguageModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY) // Please use your own OpenAI API key.modelName(GPT_4_O_MINI).maxTokens(50).build();UserMessage userMessage = UserMessage.from(TextContent.from("What do you see?"),ImageContent.from("https://upload.wikimedia.org/wikipedia/commons/4/47/PNG_transparency_demonstration_1.png"));Response<AiMessage> response = model.generate(userMessage);System.out.println(response.content().text());}}
}
3、OpenAiEmbeddingModel调用
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.output.Response;

import static dev.langchain4j.model.openai.OpenAiEmbeddingModelName.TEXT_EMBEDDING_3_SMALL;

/**
 * Example of computing a text embedding with an OpenAI embedding model.
 */
public class OpenAiEmbeddingModelExamples {

    public static void main(String[] args) {
        // CONSISTENCY FIX: the original hard-coded "demo" here while every other
        // example uses ApiKeys.OPENAI_API_KEY (env var with "demo" fallback).
        EmbeddingModel model = OpenAiEmbeddingModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName(TEXT_EMBEDDING_3_SMALL)
                .build();

        Response<Embedding> response = model.embed("I love Java");

        Embedding embedding = response.content();

        System.out.println(embedding);
    }
}
4、OpenAiFunctionCalling调用
import dev.langchain4j.agent.tool.*;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.tool.DefaultToolExecutor;
import dev.langchain4j.service.tool.ToolExecutor;import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_4_O;public class OpenAiFunctionCallingExamples {/*** This example demonstrates how to programmatically configure the low-level tool APIs, such as ToolSpecification,* ToolExecutionRequest, and ToolExecutor.* This sample is used in the LangChain4j tutorial: https://docs.langchain4j.dev/tutorials/tools/#low-level-tool-api.* But it is recommended to use higher-level APIs as demonstrated here: https://docs.langchain4j.dev/tutorials/tools/#high-level-tool-api* <p>* This sample goes through 4 different steps:* 1. Specify the tools (WeatherTools) and the query ("What will the weather be like in London tomorrow?")* 2. Model generate function arguments (model decides which tools to invoke)* 3. User execute function to obtain tool results (using ToolExecutor)* 4. Model generate final response based on the query and the tool results*/static class Weather_Low_Level_Configuration {static ChatLanguageModel openAiModel = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).modelName(GPT_4_O).strictTools(true) // https://docs.langchain4j.dev/integrations/language-models/open-ai#structured-outputs-for-tools.logRequests(true).logResponses(true).build();public static void main(String[] args) {// STEP 1: User specify tools and query// ToolsWeatherTools weatherTools = new WeatherTools();List<ToolSpecification> toolSpecifications = ToolSpecifications.toolSpecificationsFrom(weatherTools);// User queryList<ChatMessage> chatMessages = new ArrayList<>();UserMessage userMessage = userMessage("What will the weather be like in London tomorrow?");chatMessages.add(userMessage);// STEP 2: Model generate function argumentsAiMessage aiMessage = openAiModel.generate(chatMessages, toolSpecifications).content();List<ToolExecutionRequest> toolExecutionRequests = aiMessage.toolExecutionRequests();System.out.println("Out of the " + toolSpecifications.size() + " functions declared in WeatherTools, " + toolExecutionRequests.size() + " 
will be invoked:");toolExecutionRequests.forEach(toolExecutionRequest -> {System.out.println("Function name: " + toolExecutionRequest.name());System.out.println("Function args:" + toolExecutionRequest.arguments());});chatMessages.add(aiMessage);// STEP 3: User execute function to obtain tool resultstoolExecutionRequests.forEach(toolExecutionRequest -> {ToolExecutor toolExecutor = new DefaultToolExecutor(weatherTools, toolExecutionRequest);System.out.println("Now let's execute the function " + toolExecutionRequest.name());String result = toolExecutor.execute(toolExecutionRequest, UUID.randomUUID().toString());ToolExecutionResultMessage toolExecutionResultMessages = ToolExecutionResultMessage.from(toolExecutionRequest, result);chatMessages.add(toolExecutionResultMessages);});// STEP 4: Model generate final responseAiMessage finalResponse = openAiModel.generate(chatMessages).content();System.out.println(finalResponse.text()); //According to the payment data, the payment status of transaction T1005 is Pending.}}static class WeatherTools {@Tool("Returns the weather forecast for tomorrow for a given city")String getWeather(@P("The city for which the weather forecast should be returned") String city) {return "The weather tomorrow in " + city + " is 25°C";}@Tool("Returns the date for tomorrow")LocalDate getTomorrow() {return LocalDate.now().plusDays(1);}@Tool("Transforms Celsius degrees into Fahrenheit")double celsiusToFahrenheit(@P("The celsius degree to be transformed into fahrenheit") double celsius) {return (celsius * 1.8) + 32;}String iAmNotATool() {return "I am not a method annotated with @Tool";}}
}