Skip to content

Commit

Permalink
feat: Profanity 도메인 분리 및 추가(파일, 단일), 삭제, csv 다운 기능 구현
Browse files Browse the repository at this point in the history
  • Loading branch information
juno-junho committed May 18, 2024
1 parent 8cb6e2f commit 270c8e5
Show file tree
Hide file tree
Showing 3 changed files with 120 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package com.spaceclub.global.annotation.profanity.domain;

import com.spaceclub.global.BaseTimeEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import lombok.Getter;

/**
 * JPA entity for a single banned word ("profanity") managed by the system.
 * <p>
 * The word itself is stored in {@code banWord} and is unique at the database
 * level; {@code useCount} starts at zero and is only initialized here —
 * incrementing logic lives elsewhere (not visible in this file).
 * Creation/update timestamps come from {@link BaseTimeEntity}.
 */
@Entity
@Getter
public class Profanity extends BaseTimeEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    // The banned word; uniqueness is enforced by a DB constraint.
    @Column(nullable = false, unique = true)
    private String banWord;

    // Usage counter; presumably bumped when the word is matched — TODO confirm against caller.
    @Column(nullable = false)
    private long useCount;

    /** No-arg constructor required by the JPA provider; not for application use. */
    protected Profanity() {
    }

    /**
     * Registers a new banned word with a usage count of zero.
     *
     * @param banWord the word to ban; must not collide with an existing row
     */
    public Profanity(String banWord) {
        this.banWord = banWord;
        this.useCount = 0L;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
package com.spaceclub.global.annotation.profanity.domain;

import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link Profanity} rows.
 * Both derived-query methods key on the unique {@code banWord} column.
 */
public interface ProfanityRepository extends JpaRepository<Profanity, Long> {

// True if a row with exactly this banned word already exists (exact match, case-sensitive per DB collation).
boolean existsByBanWord(String word);

// Derived delete query; must execute inside a transaction — callers provide @Transactional.
void deleteByBanWord(String word);

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
package com.spaceclub.global.annotation.profanity.service;

import com.spaceclub.global.annotation.profanity.domain.Profanity;
import com.spaceclub.global.annotation.profanity.domain.ProfanityRepository;
import com.spaceclub.global.aws.s3.S3FileUploader;
import lombok.RequiredArgsConstructor;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;

import static com.spaceclub.global.annotation.profanity.ProfanityExceptionMessage.BAD_WORD_ALREADY_EXISTS;
import static com.spaceclub.global.annotation.profanity.ProfanityExceptionMessage.FAILED_TO_CREATE_CSV;
import static com.spaceclub.global.annotation.profanity.ProfanityExceptionMessage.FAILED_TO_SAVE;

/**
 * Application service for managing banned words: bulk import from an uploaded
 * file, single add/delete, and exporting the full list as a CSV stored via S3.
 */
@Service
@RequiredArgsConstructor
public class ProfanityService {

    private final ProfanityRepository profanityRepository;
    private final S3FileUploader s3FileUploader;

    /**
     * Reads one banned word per line from the uploaded file and persists all of
     * them in a single atomic batch.
     *
     * @param file text file with one ban word per line
     * @throws IllegalStateException if the file cannot be read
     */
    @Transactional // the batch insert is meant to be all-or-nothing (see comment below)
    public void saveProfanitiesFromFile(MultipartFile file) {
        // Decode as UTF-8 explicitly: the platform-default charset would corrupt
        // non-ASCII (e.g. Korean) ban words on hosts not configured for UTF-8.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(file.getInputStream(), StandardCharsets.UTF_8))) {
            List<Profanity> profanities = reader.lines()
                    .map(String::strip)
                    .filter(word -> !word.isEmpty()) // skip blank lines — they would otherwise become empty ban-word rows
                    .map(Profanity::new)
                    .toList();

            profanityRepository.saveAllAndFlush(profanities); // 트랜잭션 원자화
        } catch (IOException e) {
            // Keep the cause so the underlying I/O failure is not lost.
            throw new IllegalStateException(FAILED_TO_SAVE.getMessage(), e);
        }
    }

    /**
     * Registers a single banned word.
     *
     * @param word the word to ban
     * @throws IllegalArgumentException if the word is already registered
     */
    @Transactional
    public void saveProfanity(String word) {
        if (profanityRepository.existsByBanWord(word)) {
            throw new IllegalArgumentException(BAD_WORD_ALREADY_EXISTS.getMessage());
        }

        profanityRepository.save(new Profanity(word));
    }

    /**
     * Removes a banned word; a no-op if the word is not registered.
     *
     * @param word the word to unban
     */
    @Transactional
    public void deleteProfanity(String word) {
        profanityRepository.deleteByBanWord(word);
    }

    /**
     * Exports every banned word as a CSV (header: ID, 금칙어, 사용횟수), uploads
     * it through {@link S3FileUploader}, and returns the uploader's result
     * (presumably the file URL/key — determined by the uploader, not here).
     *
     * @return the value returned by {@code s3FileUploader.uploadProfanityInfo}
     * @throws IllegalStateException if CSV generation fails
     */
    public String createCsvFile() {
        List<Profanity> profanities = profanityRepository.findAll();

        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
                .setHeader("ID", "금칙어", "사용횟수")
                .build();
        try (
                final StringWriter writer = new StringWriter();
                final CSVPrinter printer = new CSVPrinter(writer, csvFormat)
        ) {
            for (Profanity profanity : profanities) {
                printer.printRecord(profanity.getId(), profanity.getBanWord(), profanity.getUseCount());
            }
            // Flush before reading the buffer so all records are in the writer.
            printer.flush();

            return s3FileUploader.uploadProfanityInfo(writer.toString());
        } catch (IOException e) {
            // Keep the cause so the underlying CSV/IO failure is not lost.
            throw new IllegalStateException(FAILED_TO_CREATE_CSV.getMessage(), e);
        }
    }

}

0 comments on commit 270c8e5

Please sign in to comment.