All of 欣宸's original articles (with companion source code) are categorized and summarized here: https://github.com/zq2599/blog_demos
Name | Link | Remarks |
---|---|---|
Project homepage | https://github.com/zq2599/blog_demos | The project's homepage on GitHub |
Git repository address (https) | https://github.com/zq2599/blog_demos.git | The project's source repository, https protocol |
Git repository address (ssh) | git@github.com:zq2599/blog_demos.git | The project's source repository, ssh protocol |
The complete pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>springai-tutorials</artifactId>
        <groupId>com.bolingcavalry</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>ollama-chat</artifactId>

    <dependencies>
        <!-- reactive web stack -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-webflux</artifactId>
        </dependency>
        <dependency>
            <groupId>io.projectreactor</groupId>
            <artifactId>reactor-core</artifactId>
        </dependency>
        <!-- server-side templating for the demo page -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-thymeleaf</artifactId>
        </dependency>
        <!-- Spring AI's Ollama starter -->
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
        </dependency>
        <!-- testing utilities for reactive streams -->
        <dependency>
            <groupId>io.projectreactor</groupId>
            <artifactId>reactor-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
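Since reactor-test is declared above, here is a minimal, self-contained sketch of what it is for: StepVerifier asserts the elements of a Flux, such as the stream of answer chunks produced by the controller shown later. The class name and values are illustrative, not from the original project.

import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

public class FluxVerifySketch {
    public static void main(String[] args) {
        // a stand-in for the streamed answer chunks
        Flux<String> chunks = Flux.just("Hello", ", ", "world");
        // StepVerifier (from reactor-test) checks the emissions in order
        StepVerifier.create(chunks)
                .expectNext("Hello", ", ", "world")
                .verifyComplete();
    }
}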
The complete configuration file, application.properties:

# Ollama service address; when this application and Ollama are packaged and deployed
# together via docker-compose, the host can simply be the Ollama container name
spring.ai.ollama.base-url=http://ollama:11434
# The model to use; here it is Tongyi Qianwen (qwen) 1.8b
spring.ai.ollama.chat.options.model=qwen:1.8b
# Lower values make answers more rigorous, higher values more creative
spring.ai.ollama.chat.options.temperature=0.7
# Run as a reactive web service
spring.main.web-application-type=reactive
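These properties set application-wide defaults. Assuming the Spring AI 0.8.x API this project uses, the same options can also be overridden for a single request by attaching OllamaOptions to the Prompt; a minimal sketch (the class name is illustrative):

import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.api.OllamaOptions;

public class PromptOptionsSketch {
    // builds a Prompt that overrides the configured defaults for one call
    public static Prompt strictPrompt(String message) {
        return new Prompt(message,
                OllamaOptions.create()
                        .withModel("qwen:1.8b")   // same model as the property above
                        .withTemperature(0.1f));  // stricter than the configured 0.7
    }
}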
The application entry class:

package com.bolingcavalry.ollamachat;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
A configuration class that exposes a RestClient.Builder bean (in a pure WebFlux application this builder may not be auto-configured, while the Spring AI Ollama starter relies on it):

package com.bolingcavalry.ollamachat;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestClient;

@Configuration
public class RestClientConfig {

    // provide the RestClient.Builder used by the Ollama auto-configuration
    @Bean
    public RestClient.Builder restClientBuilder() {
        return RestClient.builder();
    }
}
The controller that exposes the streaming chat endpoint:

package com.bolingcavalry.ollamachat.controller;

import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
public class ChatController {

    private final OllamaChatClient chatClient;

    public ChatController(OllamaChatClient chatClient) {
        // the Ollama client class is injected by Spring
        this.chatClient = chatClient;
    }

    @GetMapping(value = "/ai/streamresp", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> streamResp(@RequestParam(value = "message", defaultValue = "Hello!") String message) {
        // the Prompt wraps the question sent by the user
        Prompt prompt = new Prompt(new UserMessage(message));
        // send the question to Ollama via the client API and get a stream;
        // Ollama's response data is emitted continuously through this Flux
        Flux<ChatResponse> chatResp = chatClient.stream(prompt);
        // each ChatResponse carries more than the answer itself (e.g. role and
        // metadata), so map it down to just the model's output text for the front end
        return chatResp.map(chatObj -> chatObj.getResult().getOutput().getContent());
    }
}
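To see the endpoint from the consumer side, here is a small self-contained sketch that reads the SSE stream with WebClient, which is already on the classpath via spring-boot-starter-webflux. The class name is illustrative, and the host and port are assumptions matching the docker-compose mapping shown later:

import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;

public class StreamRespClientSketch {
    public static void main(String[] args) {
        // 18080 is the host port mapped to the app in docker-compose below
        WebClient client = WebClient.create("http://localhost:18080");
        client.get()
                .uri(uriBuilder -> uriBuilder.path("/ai/streamresp")
                        .queryParam("message", "Why is the sky blue?")
                        .build())
                .accept(MediaType.TEXT_EVENT_STREAM)
                .retrieve()
                .bodyToFlux(String.class)      // each element is one data chunk of the SSE stream
                .doOnNext(System.out::print)   // print chunks as they arrive
                .blockLast();                  // keep the demo alive until the stream completes
    }
}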
The front-end page:

<!DOCTYPE html>
<html xmlns:th="http://www.thymeleaf.org">
<head>
    <meta charset="UTF-8">
    <title>Reactive Flux Display</title>
    <script src="https://unpkg.com/rxjs@^7/dist/bundles/rxjs.umd.min.js"></script>
    <script th:src="@{/js/custom.js}"></script>
</head>
<body>
<div>
    <input type="text" id="inputField" placeholder="Enter something...">
    <button id="sendButton">Submit</button>
    <div id="displayArea"></div>
</div>
<script>
    // the custom JavaScript goes here
    document.getElementById('sendButton').addEventListener('click', function () {
        const inputField = document.getElementById('inputField');
        const displayArea = document.getElementById('displayArea');
        const input = inputField.value;
        if (input) {
            displayArea.textContent = "";
            // call the streaming endpoint and read the response body as a stream
            const url = `/ai/streamresp?message=${encodeURIComponent(input)}`;
            fetch(url)
                .then(response => {
                    if (response.ok) return response.body
                        .pipeThrough(new TextDecoderStream())
                        .pipeTo(new WritableStream({
                            write(chunk) {
                                // strip the SSE "data:" prefixes before appending
                                displayArea.textContent += chunk.replace(/data:/g, "").trim();
                            }
                        }));
                })
                .catch(error => console.error('Error:', error));
        }
    });
</script>
</body>
</html>
The Dockerfile used to package the application:

# Use a slim OpenJDK 17 image as the base image
FROM openjdk:17-jdk-slim
# Set environment variables
ENV SPRING_OUTPUT_ANSI_ENABLED=ALWAYS \
    JHIPSTER_SLEEP=0 \
    JAVA_OPTS=""
# Copy the project jar into the image
COPY target/*.jar /app.jar
# Run the application, reading its configuration from /app/application.properties
# (the file is mounted into the container by docker-compose below)
ENTRYPOINT ["sh", "-c", "java $JAVA_OPTS -Djava.security.egd=file:/dev/./urandom -jar /app.jar --spring.config.location=file:/app/application.properties"]
Build the jar (skipping tests), then build and push the image:

mvn clean compile package -U -DskipTests
docker build -t bolingcavalry/ollam-tutorial:0.0.1-SNAPSHOT .
docker push bolingcavalry/ollam-tutorial:0.0.1-SNAPSHOT
The complete docker-compose.yml, which starts ollama, open-webui, and this application:

version: '3.8'

services:
  ollama:
    image: ollama/ollama:latest
    ports:
      - 11434:11434
    volumes:
      - /home/will/data/ollama:/root/.ollama
    container_name: ollama
    pull_policy: if_not_present
    tty: true
    restart: always
    networks:
      - ollama-docker

  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    pull_policy: if_not_present
    volumes:
      - /home/will/data/webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      - 13000:8080
    environment:
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY=123456'
      - 'HF_ENDPOINT=https://hf-mirror.com'
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
    networks:
      - ollama-docker

  java-app:
    image: bolingcavalry/ollam-tutorial:0.0.1-SNAPSHOT
    volumes:
      - /home/will/temp/202405/15/application.properties:/app/application.properties
    container_name: java-app
    pull_policy: if_not_present
    depends_on:
      - ollama
    ports:
      - 18080:8080
    restart: always
    networks:
      - ollama-docker

networks:
  ollama-docker:
    external: false
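With this file in place, `docker-compose up -d` starts all three containers. Note that a fresh ollama container has no models downloaded yet, so pull the model once before chatting, e.g. `docker exec ollama ollama pull qwen:1.8b`; only then can the java-app's /ai/streamresp endpoint return answers.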