6 changes: 3 additions & 3 deletions Dockerfile
@@ -13,18 +13,18 @@ RUN ./gradlew dependencies
COPY . .
RUN chmod +x gradlew

-RUN ./gradlew clean build --no-daemon
+RUN ./gradlew clean build --no-daemon -x test

FROM amazoncorretto:17

WORKDIR /app

COPY --from=builder /app/build/libs/*.jar /app

-EXPOSE 80
+EXPOSE 8080

ENV PROJECT_NAME=discodeit \
-PROJECT_VERSION=1.2-M8 \
+PROJECT_VERSION=3.0-M12 \
JVM_OPTS=""

ENTRYPOINT ["/bin/bash", "-c", "java -jar ${JVM_OPTS} ${PROJECT_NAME}-${PROJECT_VERSION}.jar"]
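
Two behavioral notes on this file: the build stage now skips test execution inside the image build (-x test), and the runtime stage exposes 8080, which is Spring Boot's default server.port and matches the compose port mapping below. Because the ENTRYPOINT uses a shell, JVM_OPTS is expanded at container start, so flags such as heap settings can be injected per environment without rebuilding the image.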
13 changes: 12 additions & 1 deletion build.gradle
@@ -18,9 +18,10 @@ jacocoTestReport {
}

group = 'com.sprint.mission'
-version = '2.0-M9'
+version = '3.0-M12'

java {
modularity.inferModulePath = false
toolchain {
languageVersion = JavaLanguageVersion.of(17)
}
@@ -42,11 +43,21 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-data-jpa'
implementation 'org.springframework.boot:spring-boot-starter-validation'
implementation 'org.springframework.boot:spring-boot-starter-actuator'
implementation 'org.springframework.retry:spring-retry'
implementation 'software.amazon.awssdk:s3:2.31.7'

implementation 'org.springframework.boot:spring-boot-starter-security'
implementation 'io.jsonwebtoken:jjwt:0.12.6'
testImplementation 'org.springframework.security:spring-security-test'

implementation 'org.springframework.boot:spring-boot-starter-cache'
implementation 'org.springframework.boot:spring-boot-starter-data-redis'

implementation 'org.springframework.kafka:spring-kafka'
testImplementation 'org.springframework.kafka:spring-kafka-test'

implementation 'org.springframework.boot:spring-boot-starter-websocket'

compileOnly 'org.projectlombok:lombok'
annotationProcessor 'org.projectlombok:lombok'
developmentOnly 'org.springframework.boot:spring-boot-devtools'
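
Of the new dependencies, jjwt 0.12.x has the least obvious API: it replaced the old setSubject/setExpiration setters with a fluent builder. Below is a minimal sketch of issuing and verifying an HS256 token with it, assuming a secret like the SECURITY_JWT_SECRET variable wired through docker-compose; the class and method names are illustrative, not from this PR. One caveat worth checking: jjwt 0.12.x is normally published as the split jjwt-api / jjwt-impl / jjwt-jackson artifacts, so if the single io.jsonwebtoken:jjwt coordinate fails to resolve at 0.12.6, that split is the likely fix.

import java.util.Date;
import javax.crypto.SecretKey;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.security.Keys;

public class JwtSketch {

  // The secret must be at least 32 bytes for HS256.
  public static String issue(String secret, String subject, long validitySeconds) {
    SecretKey key = Keys.hmacShaKeyFor(secret.getBytes());
    return Jwts.builder()
        .subject(subject)
        .issuedAt(new Date())
        .expiration(new Date(System.currentTimeMillis() + validitySeconds * 1000L))
        .signWith(key)
        .compact();
  }

  // Throws a JwtException if the signature or expiry check fails.
  public static String subjectOf(String secret, String token) {
    SecretKey key = Keys.hmacShaKeyFor(secret.getBytes());
    return Jwts.parser()
        .verifyWith(key)
        .build()
        .parseSignedClaims(token)
        .getPayload()
        .getSubject();
  }
}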
51 changes: 48 additions & 3 deletions docker-compose.yml
@@ -3,7 +3,7 @@ services:
build: .
image: discodeit
ports:
- "8080:80"
- "8080:8080"
environment:
SPRING_DATASOURCE_URL: ${SPRING_DATASOURCE_URL}
SPRING_DATASOURCE_USERNAME: ${SPRING_DATASOURCE_USERNAME}
@@ -15,11 +15,23 @@
AWS_S3_BUCKET: ${AWS_S3_BUCKET}
AWS_S3_PRESIGNED_URL_EXPIRATION: ${AWS_S3_PRESIGNED_URL_EXPIRATION}
SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE}
JVM_OPTS: ${JVM_OPTS}
DISCODEIT_ADMIN_USERNAME: ${DISCODEIT_ADMIN_USERNAME}
DISCODEIT_ADMIN_EMAIL: ${DISCODEIT_ADMIN_EMAIL}
DISCODEIT_ADMIN_PASSWORD: ${DISCODEIT_ADMIN_PASSWORD}
SECURITY_REMEMBER_ME_KEY: ${SECURITY_REMEMBER_ME_KEY}
SECURITY_REMEMBER_ME_TOKEN_VALIDITY_SECONDS: ${SECURITY_REMEMBER_ME_TOKEN_VALIDITY_SECONDS}
SECURITY_JWT_SECRET: ${SECURITY_JWT_SECRET}
SECURITY_JWT_ACCESS_TOKEN_VALIDITY_SECONDS: ${SECURITY_JWT_ACCESS_TOKEN_VALIDITY_SECONDS}
SECURITY_JWT_REFRESH_TOKEN_VALIDITY_SECONDS: ${SECURITY_JWT_REFRESH_TOKEN_VALIDITY_SECONDS}
KAFKA_BOOTSTRAP_SERVERS: ${KAFKA_BOOTSTRAP_SERVERS}
REDIS_HOST: ${REDIS_HOST}
REDIS_PORT: ${REDIS_PORT}
volumes:
- binary-content-storage:/app/files
depends_on:
- postgres
- broker
- redis

postgres:
image: postgres
@@ -31,6 +43,39 @@
- postgres-data:/var/lib/postgresql/data
- ./src/main/resources/init.sql:/docker-entrypoint-initdb.d/01-init.sql

broker:
image: apache/kafka:latest
hostname: broker
container_name: broker
ports:
- "9092:9092"
environment:
KAFKA_BROKER_ID: 1
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,CONTROLLER:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_PROCESS_ROLES: broker,controller
KAFKA_NODE_ID: 1
KAFKA_CONTROLLER_QUORUM_VOTERS: 1@broker:29093
KAFKA_LISTENERS: PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
KAFKA_LOG_DIRS: /tmp/kraft-combined-logs
CLUSTER_ID: MkU3OEVBNTcwNTJENDM2Qk

redis:
image: redis:7.2-alpine
container_name: redis
ports:
- "6379:6379"
volumes:
- redis-data:/data
command: redis-server --appendonly yes

volumes:
binary-content-storage:
postgres-data:
redis-data:
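
A note on the broker's listener layout: containers on the compose network reach Kafka at broker:29092 (the PLAINTEXT listener), while host-side tooling connects to localhost:9092 (PLAINTEXT_HOST), so KAFKA_BOOTSTRAP_SERVERS for the app container would presumably be broker:29092, just as REDIS_HOST would be the service name redis. A minimal producer sketch against the spring-kafka dependency added in build.gradle follows; the topic and class names are illustrative, not from this PR.

import lombok.RequiredArgsConstructor;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

// Assumes spring.kafka.bootstrap-servers is bound to KAFKA_BOOTSTRAP_SERVERS.
@Component
@RequiredArgsConstructor
public class NotificationProducer {

  private final KafkaTemplate<String, String> kafkaTemplate;

  public void publish(String userId, String payload) {
    // Keying by user id keeps each user's notifications in partition order.
    kafkaTemplate.send("discodeit.notifications", userId, payload);
  }
}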
55 changes: 55 additions & 0 deletions src/main/java/com/sprint/mission/discodeit/config/AppConfig.java
@@ -1,10 +1,65 @@
package com.sprint.mission.discodeit.config;

import java.util.Map;
import org.slf4j.MDC;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.core.task.TaskDecorator;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import org.springframework.retry.annotation.EnableRetry;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.security.task.DelegatingSecurityContextAsyncTaskExecutor;

@Configuration
@EnableJpaAuditing
@EnableAsync
@EnableRetry
public class AppConfig {

@Bean(name = "binaryContentExecutor")
public AsyncTaskExecutor binaryContentExecutor() {
ThreadPoolTaskExecutor delegate = new ThreadPoolTaskExecutor();
delegate.setCorePoolSize(4);
delegate.setMaxPoolSize(16);
delegate.setQueueCapacity(100);
delegate.setThreadNamePrefix("binaryContent-");
delegate.setTaskDecorator(new MdcTaskDecorator());
delegate.initialize();
return new DelegatingSecurityContextAsyncTaskExecutor(delegate);
}

@Bean(name = "eventExecutor")
public AsyncTaskExecutor eventExecutor() {
ThreadPoolTaskExecutor delegate = new ThreadPoolTaskExecutor();
delegate.setCorePoolSize(2);
delegate.setMaxPoolSize(4);
delegate.setQueueCapacity(100);
delegate.setThreadNamePrefix("event-");
delegate.setTaskDecorator(new MdcTaskDecorator());
delegate.initialize();
return new DelegatingSecurityContextAsyncTaskExecutor(delegate);
}

private static class MdcTaskDecorator implements TaskDecorator {
@Override
public Runnable decorate(Runnable runnable) {
Map<String, String> contextMap = MDC.getCopyOfContextMap();

return () -> {
Map<String, String> previous = MDC.getCopyOfContextMap();
try {
if (contextMap != null) {
MDC.setContextMap(contextMap);
}
runnable.run();
} finally {
MDC.clear();
if (previous != null) MDC.setContextMap(previous);
}
};
}
}

}
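
A sketch of how these executors are consumed (the service below is illustrative, not part of this PR): an @Async method that names the bean runs on that pool, and the MDC map plus SecurityContext captured on the submitting thread travel with the task, so logs keep their request correlation and authorization checks still see the caller.

import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
public class BinaryContentUploadService {

  private static final Logger log = LoggerFactory.getLogger(BinaryContentUploadService.class);

  @Async("binaryContentExecutor")
  public CompletableFuture<UUID> upload(UUID id, byte[] bytes) {
    // Runs on a "binaryContent-" thread; MDC entries (e.g. a request id) and the
    // caller's SecurityContext were propagated by MdcTaskDecorator and
    // DelegatingSecurityContextAsyncTaskExecutor above.
    log.info("storing binary content {} ({} bytes)", id, bytes.length);
    return CompletableFuture.completedFuture(id);
  }
}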
52 changes: 52 additions & 0 deletions src/main/java/com/sprint/mission/discodeit/config/CacheConfig.java
@@ -0,0 +1,52 @@
package com.sprint.mission.discodeit.config;

import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectMapper.DefaultTyping;
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator;
import java.time.Duration;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import org.springframework.boot.autoconfigure.cache.RedisCacheManagerBuilderCustomizer;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializationContext;

@Configuration
@EnableCaching
@RequiredArgsConstructor
public class CacheConfig {

private final ObjectMapper objectMapper;

private RedisCacheConfiguration redisCacheConfiguration() {
ObjectMapper redisObjectMapper = objectMapper.copy();
redisObjectMapper.activateDefaultTyping(
LaissezFaireSubTypeValidator.instance,
DefaultTyping.EVERYTHING,
As.PROPERTY
);

return RedisCacheConfiguration.defaultCacheConfig()
.serializeValuesWith(
RedisSerializationContext.SerializationPair.fromSerializer(
new GenericJackson2JsonRedisSerializer(redisObjectMapper)
)
)
.prefixCacheNameWith("discodeit:")
.entryTtl(Duration.ofSeconds(600))
.disableCachingNullValues();
}

@Bean
public RedisCacheManagerBuilderCustomizer redisCustomizer() {
return builder -> builder
.cacheDefaults(redisCacheConfiguration())
.initialCacheNames(Set.of(CacheName.ALL))
.enableStatistics()
.build();
}
}
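
enableStatistics() is what makes the manager record hit/miss counters, exposed per cache via RedisCache.getStatistics(). A small sketch of reading them (the component is illustrative, not part of this PR):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.CacheManager;
import org.springframework.data.redis.cache.CacheStatistics;
import org.springframework.data.redis.cache.RedisCache;
import org.springframework.stereotype.Component;

@Component
public class CacheStatsLogger {

  private static final Logger log = LoggerFactory.getLogger(CacheStatsLogger.class);
  private final CacheManager cacheManager;

  public CacheStatsLogger(CacheManager cacheManager) {
    this.cacheManager = cacheManager;
  }

  public void logUsersCache() {
    RedisCache users = (RedisCache) cacheManager.getCache(CacheName.USERS);
    CacheStatistics stats = users.getStatistics();
    log.info("users cache: gets={} hits={} misses={}",
        stats.getGets(), stats.getHits(), stats.getMisses());
  }
}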
9 changes: 9 additions & 0 deletions src/main/java/com/sprint/mission/discodeit/config/CacheName.java
@@ -0,0 +1,9 @@
package com.sprint.mission.discodeit.config;

public class CacheName {
public static final String USERS = "users";
public static final String NOTIFICATIONS_BY_USER = "notificationsByUser";
public static final String CHANNELS_BY_USER = "channelsByUser";

public static final String[] ALL = {USERS, NOTIFICATIONS_BY_USER, CHANNELS_BY_USER};
}
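
These constants exist so that @Cacheable annotations and the initialCacheNames set in CacheConfig stay in sync. A usage sketch follows; the service and DTO are illustrative, not from this PR.

import java.util.UUID;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

@Service
public class UserQueryService {

  @Cacheable(cacheNames = CacheName.USERS, key = "#userId")
  public UserDto find(UUID userId) {
    // On a miss this body runs and the result is written to Redis under
    // "discodeit:users::<userId>" with the 600-second TTL from CacheConfig.
    return loadFromDatabase(userId);
  }

  @CacheEvict(cacheNames = CacheName.USERS, key = "#userId")
  public void invalidate(UUID userId) {
    // Eviction only; the next find() repopulates the entry.
  }

  private UserDto loadFromDatabase(UUID userId) {
    return new UserDto(userId, "placeholder"); // stand-in for a repository call
  }

  public record UserDto(UUID id, String username) {}
}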