diff --git a/build.gradle.kts b/build.gradle.kts
index 4576e4c..4c6ab04 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -22,9 +22,25 @@ extra["springAiVersion"] = "1.0.0-M6"
 
 dependencies {
     implementation("org.springframework.boot:spring-boot-starter-web")
+
+    // Kotlin support
     implementation("com.fasterxml.jackson.module:jackson-module-kotlin")
     implementation("org.jetbrains.kotlin:kotlin-reflect")
+
+    // Coroutine dependencies
+    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core")
+    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-reactor")
+
+    // Spring AI dependency
     implementation("org.springframework.ai:spring-ai-openai-spring-boot-starter")
+
+    // Swagger/OpenAPI dependency
+    implementation("org.springdoc:springdoc-openapi-starter-webmvc-ui:2.3.0")
+
+    // Logging library
+    implementation("io.github.oshai:kotlin-logging:6.0.3")
+
+    // Test dependencies
     testImplementation("org.springframework.boot:spring-boot-starter-test")
     testImplementation("org.jetbrains.kotlin:kotlin-test-junit5")
     testRuntimeOnly("org.junit.platform:junit-platform-launcher")
diff --git a/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenAiConfig.kt b/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenAiConfig.kt
new file mode 100644
index 0000000..8ad303d
--- /dev/null
+++ b/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenAiConfig.kt
@@ -0,0 +1,29 @@
+package com.example.spring_ai_tutorial.config
+
+import io.github.oshai.kotlinlogging.KotlinLogging
+import org.springframework.ai.openai.api.OpenAiApi
+import org.springframework.beans.factory.annotation.Value
+import org.springframework.context.annotation.Bean
+import org.springframework.context.annotation.Configuration
+
+/**
+ * OpenAI API configuration.
+ */
+@Configuration
+class OpenAiConfig {
+    private val logger = KotlinLogging.logger {}
+
+    @Value("\${spring.ai.openai.api-key}")
+    private lateinit var apiKey: String
+
+    /**
+     * Registers the OpenAI API client bean.
+     */
+    @Bean
+    fun openAiApi(): OpenAiApi {
+        logger.debug { "Initializing OpenAI API client" }
+        return OpenAiApi.builder()
+            .apiKey(apiKey)
+            .build()
+    }
+}
\ No newline at end of file
diff --git a/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenApiConfig.kt b/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenApiConfig.kt
new file mode 100644
index 0000000..400db70
--- /dev/null
+++ b/src/main/kotlin/com/example/spring_ai_tutorial/config/OpenApiConfig.kt
@@ -0,0 +1,21 @@
+package com.example.spring_ai_tutorial.config
+
+import io.swagger.v3.oas.models.OpenAPI
+import io.swagger.v3.oas.models.info.Info
+import org.springframework.context.annotation.Bean
+import org.springframework.context.annotation.Configuration
+
+@Configuration
+class OpenApiConfig {
+
+    @Bean
+    fun springOpenAPI(): OpenAPI {
+        return OpenAPI()
+            .info(
+                Info()
+                    .title("Spring AI Tutorial API")
+                    .version("1.0")
+                    .description("Chatbot API built with Spring AI")
+            )
+    }
+}
diff --git a/src/main/kotlin/com/example/spring_ai_tutorial/controller/ChatController.kt b/src/main/kotlin/com/example/spring_ai_tutorial/controller/ChatController.kt
new file mode 100644
index 0000000..8c14733
--- /dev/null
+++ b/src/main/kotlin/com/example/spring_ai_tutorial/controller/ChatController.kt
@@ -0,0 +1,116 @@
+package com.example.spring_ai_tutorial.controller
+
+import com.example.spring_ai_tutorial.service.ChatService
+import io.github.oshai.kotlinlogging.KotlinLogging
+import io.swagger.v3.oas.annotations.Operation
+import io.swagger.v3.oas.annotations.Parameter
+import io.swagger.v3.oas.annotations.media.Content
+import io.swagger.v3.oas.annotations.media.Schema
+import io.swagger.v3.oas.annotations.responses.ApiResponse as SwaggerResponse
+import io.swagger.v3.oas.annotations.tags.Tag
+import org.springframework.http.HttpStatus
+import org.springframework.http.ResponseEntity
+import org.springframework.web.bind.annotation.*
+
+/**
+ * Chat API controller.
+ *
+ * Provides chat functionality through an LLM API.
+ */
+@RestController
+@RequestMapping("/api/v1/chat")
+@Tag(name = "Chat API", description = "Chat functionality via the OpenAI API")
+class ChatController(
+    private val chatService: ChatService
+) {
+    private val logger = KotlinLogging.logger {}
+
+    /**
+     * Receives the user's message and generates a response via the OpenAI API.
+     */
+    @Operation(
+        summary = "Send an LLM chat message",
+        description = "Receives the user's message and generates a response through the OpenAI API."
+    )
+    @SwaggerResponse(
+        responseCode = "200",
+        description = "LLM response succeeded",
+        content = [Content(schema = Schema(implementation = ApiResponse::class))]
+    )
+    @SwaggerResponse(responseCode = "400", description = "Bad request")
+    @SwaggerResponse(responseCode = "500", description = "Server error")
+    @PostMapping("/query")
+    suspend fun sendMessage(
+        @Parameter(description = "Chat request object", required = true)
+        @RequestBody request: ChatRequest
+    ): ResponseEntity<ApiResponse<Map<String, String>>> {
+        logger.info { "Chat API request received: model=${request.model}" }
+
+        // Validate the request
+        if (request.query.isBlank()) {
+            logger.warn { "Received a blank query" }
+            return ResponseEntity.badRequest().body(
+                ApiResponse(success = false, error = "The query is empty.")
+            )
+        }
+
+        return try {
+            // Set the system prompt
+            val systemMessage = "You are a helpful AI assistant."
+
+            // Generate the AI response
+            val response = chatService.openAiChat(
+                userInput = request.query,
+                systemMessage = systemMessage,
+                model = request.model
+            )
+            logger.debug { "LLM response generated: $response" }
+
+            response?.let { chatResponse ->
+                ResponseEntity.ok(
+                    ApiResponse(
+                        success = true,
+                        data = mapOf("answer" to chatResponse.result.output.text)
+                    )
+                )
+            } ?: run {
+                logger.error { "Failed to generate LLM response" }
+                ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(
+                    ApiResponse(
+                        success = false,
+                        error = "An error occurred while generating the LLM response"
+                    )
+                )
+            }
+        } catch (e: Exception) {
+            logger.error(e) { "Error while handling the Chat API request" }
+            ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(
+                ApiResponse(
+                    success = false,
+                    error = e.message ?: "Unknown error occurred"
+                )
+            )
+        }
+    }
+}
+
+@Schema(description = "Chat request data model")
+data class ChatRequest(
+    @Schema(description = "User question", example = "Hello")
+    val query: String,
+
+    @Schema(description = "LLM model to use", example = "gpt-3.5-turbo", defaultValue = "gpt-3.5-turbo")
+    val model: String = "gpt-3.5-turbo"
+)
+
+@Schema(description = "API response format")
+data class ApiResponse<T>(
+    @Schema(description = "Whether the request was processed successfully")
+    val success: Boolean,
+
+    @Schema(description = "Response data on success")
+    val data: T? = null,
+
+    @Schema(description = "Error message on failure")
+    val error: String? = null
+)
diff --git a/src/main/kotlin/com/example/spring_ai_tutorial/service/ChatService.kt b/src/main/kotlin/com/example/spring_ai_tutorial/service/ChatService.kt
new file mode 100644
index 0000000..8470353
--- /dev/null
+++ b/src/main/kotlin/com/example/spring_ai_tutorial/service/ChatService.kt
@@ -0,0 +1,48 @@
+package com.example.spring_ai_tutorial.service
+
+import io.github.oshai.kotlinlogging.KotlinLogging
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.springframework.ai.chat.model.ChatResponse
+import org.springframework.ai.openai.api.OpenAiApi
+import org.springframework.stereotype.Service
+
+/**
+ * Service that performs question answering using the OpenAI API.
+ */
+@Service
+class ChatService(
+    private val openAiApi: OpenAiApi
+) {
+    private val logger = KotlinLogging.logger {}
+
+    /**
+     * Generates a response using the OpenAI chat API.
+     *
+     * @param userInput the user's input message
+     * @param systemMessage the system prompt
+     * @param model the LLM model name to use
+     * @return the chat response object, or null on error
+     */
+    suspend fun openAiChat(
+        userInput: String,
+        systemMessage: String,
+        model: String = "gpt-3.5-turbo"
+    ): ChatResponse? = withContext(Dispatchers.IO) {
+        logger.debug { "Starting OpenAI chat call - model: $model" }
+        try {
+            // Build the messages
+
+            // Configure the chat options
+
+            // Create the prompt
+
+            // Create the chat model and call it
+
+            return@withContext TODO("Write the response generation logic")
+        } catch (e: Exception) {
+            logger.error(e) { "Error during OpenAI chat call: ${e.message}" }
+            return@withContext null
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 3b15fda..29c8587 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -1 +1,14 @@
 spring.application.name=spring-ai-tutorial
+
+# Server Configuration
+server.port=8080
+
+# OpenAI API Configuration
+spring.ai.openai.api-key=${OPENAI_API_KEY}
+
+# File Upload Settings
+spring.servlet.multipart.max-file-size=20MB
+spring.servlet.multipart.max-request-size=20MB
+
+# Swagger/OpenAPI Configuration
+springdoc.api-docs.path=/api-docs
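The ChatService diff above deliberately leaves the body of openAiChat as a TODO for the reader to complete. The snippet below is a minimal sketch of one possible completion, assuming the Spring AI 1.0.0-M6 OpenAiChatModel, OpenAiChatOptions, Prompt, SystemMessage, and UserMessage APIs; the helper name generateChatResponse and the direct construction of OpenAiChatModel from the OpenAiApi bean are illustrative assumptions, not the tutorial's prescribed solution.

package com.example.spring_ai_tutorial.service

import org.springframework.ai.chat.messages.Message
import org.springframework.ai.chat.messages.SystemMessage
import org.springframework.ai.chat.messages.UserMessage
import org.springframework.ai.chat.model.ChatResponse
import org.springframework.ai.chat.prompt.Prompt
import org.springframework.ai.openai.OpenAiChatModel
import org.springframework.ai.openai.OpenAiChatOptions
import org.springframework.ai.openai.api.OpenAiApi

// Hypothetical helper mirroring the four placeholder comments in ChatService.openAiChat:
// build the messages, configure the chat options, create the prompt, then call the chat model.
fun generateChatResponse(
    openAiApi: OpenAiApi,
    userInput: String,
    systemMessage: String,
    model: String
): ChatResponse {
    // Build the messages: system prompt first, then the user's query
    val messages: List<Message> = listOf(SystemMessage(systemMessage), UserMessage(userInput))

    // Configure the chat options with the requested model
    val options = OpenAiChatOptions.builder()
        .model(model)
        .build()

    // Create the prompt from the messages and options
    val prompt = Prompt(messages, options)

    // Create the chat model around the OpenAiApi client and call it
    val chatModel = OpenAiChatModel(openAiApi, options)
    return chatModel.call(prompt)
}

In the tutorial's service, these statements would sit inside the withContext(Dispatchers.IO) try block in place of the TODO, with the resulting ChatResponse returned via return@withContext.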