Skip to content

Commit

Permalink
Merge branch 'main' into feature/backend
Browse files Browse the repository at this point in the history
  • Loading branch information
since1909 committed Nov 5, 2024
2 parents da3ec79 + 105ce37 commit 2fad034
Show file tree
Hide file tree
Showing 8 changed files with 115 additions and 18 deletions.
10 changes: 10 additions & 0 deletions .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# 📋 Pull Request

### 📌 변경 사항 요약
- 이번 PR에서 변경된 주요 내용을 간단히 작성해 주세요.

### 📂 관련 이슈
- 관련된 이슈가 있다면 여기에 적어주세요. 예: #123

### 🚀 추가 설명
- 추가로 설명이 필요한 사항이 있다면 여기에 적어 주세요.
3 changes: 2 additions & 1 deletion DEVELOP.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,8 @@ IntelliJ에서 `docker-compose`로 실행 환경을 구성하는 방법은 다
### 2.2 Docker 설정
`File > Settings > Build, Execution, Deployment`에서 설치한 Docker 추가
### 2.3 Run Configuration 추가
`Run/Debug Configuration`에서 Docker 환경 추가 후 `Compose Files``./docker-compose.yml` 설정
`Run/Debug Configuration`에서 Docker 환경 추가 후 `Compose Files`에 `./docker-compose.yml` 설정
`Modify Option` 클릭 후 `Build > always` 옵션 추가

## 3. 테스트 방법 ✅
todo
Expand Down
4 changes: 2 additions & 2 deletions backend/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# 1단계: 빌드 단계
FROM openjdk:21-jdk-slim AS builder
FROM openjdk:17-slim AS builder

WORKDIR /build

Expand All @@ -15,7 +15,7 @@ RUN chmod +x ./gradlew
RUN ./gradlew clean bootJar --info

# 2단계: 실행 단계
FROM openjdk:21-jdk-slim
FROM openjdk:17-slim

WORKDIR /app

Expand Down
2 changes: 1 addition & 1 deletion backend/src/main/resources/application.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ spring:
application:
name: backend
kafka:
bootstrap-servers: ${SPRING_KAFKA_BOOTSTRAP_SERVERS:localhost:9092}
bootstrap-servers: broker-1:29092
consumer:
group-id: my-group
auto-offset-reset: earliest
Expand Down
29 changes: 18 additions & 11 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,31 +7,38 @@ services:
ZOOKEEPER_TICK_TIME: 2000
ports:
- "2181:2181"
networks:
- app-network


kafka:
broker-1:
image: 'confluentinc/cp-kafka:7.2.1'
hostname: broker-1
container_name: broker-1
depends_on:
- zookeeper
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,INTERNAL://kafka:9093
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,INTERNAL:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker-1:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 # 토픽 오프셋 복제 계수
ports:
- "9092:9092"
networks:
- app-network

backend:
build:
context: ./backend
dockerfile: Dockerfile
environment:
SPRING_KAFKA_BOOTSTRAP_SERVERS: kafka:9093
SPRING_KAFKA_BOOTSTRAP_SERVERS: broker-1:29092
depends_on: # kafka 실행 후 백엔드 실행되도록 설정
- kafka
- broker-1
networks:
- kafka-network
- app-network
ports:
- "8180:8180"

Expand All @@ -41,14 +48,14 @@ services:
context: ./report
dockerfile: Dockerfile
environment:
SPRING_KAFKA_BOOTSTRAP_SERVERS: kafka:9093
SPRING_KAFKA_BOOTSTRAP_SERVERS: broker-1:29092
depends_on: # kafka 실행 후 레포트 실행되도록 설정
- kafka
- broker-1
networks:
- kafka-network
- app-network
ports:
- "9090:9090"

networks:
kafka-network:
app-network:
driver: bridge
4 changes: 2 additions & 2 deletions report/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# 1단계: 빌드 단계
FROM openjdk:21-jdk-slim AS builder
FROM openjdk:17-slim AS builder

WORKDIR /build

Expand All @@ -15,7 +15,7 @@ RUN chmod +x ./gradlew
RUN ./gradlew clean bootJar --info

# 2단계: 실행 단계
FROM openjdk:21-jdk-slim
FROM openjdk:17-slim

WORKDIR /app

Expand Down
2 changes: 1 addition & 1 deletion report/src/main/resources/application.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ spring:
application:
name: report
kafka:
bootstrap-servers: ${SPRING_KAFKA_BOOTSTRAP_SERVERS:localhost:9092}
bootstrap-servers: broker-1:29092
consumer:
group-id: my-group
auto-offset-reset: earliest
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
package org.devpalsboot.report;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.devpalsboot.report.domain.ReportComplete;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;

/**
 * Report-service Kafka integration test.
 *
 * <p>Runs against an embedded broker and exercises the round trip:
 * a creation-request event is published to {@code TEST_REPORT_CREATION},
 * consumed by the {@link KafkaListener} below, then a {@link ReportComplete}
 * event is published to {@code TEST_REPORT_COMPLETE} and read back with a
 * raw consumer to verify its JSON payload.
 */
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = {"TEST_REPORT_CREATION", "TEST_REPORT_COMPLETE"})
public class KafkaIntegrationTest {
    /** Payload sent as the report-creation request event. */
    public static final String CREATE_REQUEST_EVENT_VALUE = "report create test value";
    /** Consumer group used by the test listener. */
    public static final String EXAMPLE_GROUP = "test-example-group";

    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;
    @Autowired
    private EmbeddedKafkaBroker embeddedKafkaBroker;
    @Autowired
    private ConsumerFactory<String, Object> consumerFactory;

    /** Released once the listener has consumed the creation-request event. */
    private CountDownLatch latch;
    /**
     * Creation-request payload observed by the listener thread.
     * Renamed from {@code receivedCompleteEvent}: it holds the CREATION
     * event value, not the completion event. Cross-thread visibility is
     * guaranteed by the latch's happens-before edge (write before
     * countDown(), read after await()).
     */
    private String receivedCreateEvent;
    private ObjectMapper objectMapper;

    @BeforeEach
    public void setUp() {
        latch = new CountDownLatch(1);
        receivedCreateEvent = null;
        objectMapper = new ObjectMapper();
    }

    /**
     * End-to-end happy path: request event is consumed, a completion event
     * is produced, and the completion event's JSON matches the sent object.
     */
    @Test
    public void testReportKafkaMainLogic() throws InterruptedException, JsonProcessingException {
        // given: publish the creation request and wait for the listener to pick it up
        kafkaTemplate.send("TEST_REPORT_CREATION", CREATE_REQUEST_EVENT_VALUE);
        boolean messageConsumed = latch.await(30, TimeUnit.SECONDS);
        assertThat(messageConsumed).isTrue();
        assertThat(receivedCreateEvent).isEqualTo(CREATE_REQUEST_EVENT_VALUE);

        // when: simulate report generation, then publish the completion event
        System.out.println("보고서 생성 시작!");
        Thread.sleep(3000); // stand-in for real report-generation work
        ReportComplete reportComplete = new ReportComplete("test/report.pdf", ReportStatus.COMPLETE);
        System.out.println("보고서 생성 완료!");
        kafkaTemplate.send("TEST_REPORT_COMPLETE", reportComplete);

        // then: read the completion topic directly and compare the serialized payload
        try (Consumer<String, Object> consumer = consumerFactory.createConsumer()) {
            embeddedKafkaBroker.consumeFromAnEmbeddedTopic(consumer, "TEST_REPORT_COMPLETE");
            ConsumerRecord<String, Object> completeEventRecord = KafkaTestUtils.getSingleRecord(consumer, "TEST_REPORT_COMPLETE");
            assertThat(completeEventRecord.value()).isEqualTo(objectMapper.writeValueAsString(reportComplete));
        }
    }

    /**
     * Listener side of the test: records the creation-request payload
     * (stripping the JSON quoting added by the serializer) and releases
     * the latch so the test thread can proceed.
     */
    @KafkaListener(topics = "TEST_REPORT_CREATION", groupId = EXAMPLE_GROUP)
    public void consumeCreateRequest(ConsumerRecord<String, Object> record) {
        String value = (String) record.value();
        receivedCreateEvent = value.replace("\"", "");
        latch.countDown(); // mark the request event as received
    }

}

0 comments on commit 2fad034

Please sign in to comment.