Skip to content

Commit

Permalink
Merge remote-tracking branch 'remotes/origin/dev_FixTest_Hao_CrawlNew…
Browse files Browse the repository at this point in the history
…sService'
  • Loading branch information
tuansu2021998 committed Apr 24, 2020
2 parents 22a677f + 345b3ed commit 460ae87
Show file tree
Hide file tree
Showing 4 changed files with 162 additions and 47 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,23 +20,25 @@

import capstone.lip.landinformationportal.business.repository.CrawledNewsRepository;
import capstone.lip.landinformationportal.business.service.Interface.ICrawledNewsService;
import capstone.lip.landinformationportal.business.validation.CrawledNewsValidation;
import capstone.lip.landinformationportal.common.config.CrawlNewsNowJob;
import capstone.lip.landinformationportal.common.config.CrawlNewsScheduleJob;
import capstone.lip.landinformationportal.common.constant.StatusCrawledNewsConstant;
import capstone.lip.landinformationportal.common.entity.CrawledNews;

@Service
public class CrawledNewsService implements ICrawledNewsService{
public class CrawledNewsService implements ICrawledNewsService {

private JobKey jobKey = new JobKey("crawlerNewsJob", "crawlerNews");
private TriggerKey triggerKey = new TriggerKey("crawlerNewsTriggler", "crawlerNews");
private JobKey jobKeyNow = new JobKey("crawlerNewsNowJob", "crawlerNews");

@Autowired
Scheduler scheduler;

private Trigger trigger;
private JobDetail job;

@Autowired
private CrawledNewsRepository crawledNewsRepository;

Expand All @@ -45,38 +47,42 @@ public String initCrawlJob() {
try {
String timeCrawl = "";
JobDetail jobDetail;
//find current job if exist
// find current job if exist
jobDetail = scheduler.getJobDetail(jobKey);
if (jobDetail == null) return timeCrawl;
if (jobDetail == null)
return timeCrawl;
List<? extends Trigger> triggers = scheduler.getTriggersOfJob(jobDetail.getKey());
for (Trigger trigger : triggers) {

SimpleScheduleBuilder scheduleBuilder = (SimpleScheduleBuilder)trigger.getScheduleBuilder();
if (scheduleBuilder != null) {

Field privateStringField = SimpleScheduleBuilder.class.
getDeclaredField("interval");

privateStringField.setAccessible(true);
Long fieldValue = ((Long) privateStringField.get(scheduleBuilder))/3600000;
System.out.println("fieldValue = " + fieldValue);
timeCrawl = String.valueOf(fieldValue);
}
}
return timeCrawl;
for (Trigger trigger : triggers) {

SimpleScheduleBuilder scheduleBuilder = (SimpleScheduleBuilder) trigger.getScheduleBuilder();
if (scheduleBuilder != null) {

Field privateStringField = SimpleScheduleBuilder.class.getDeclaredField("interval");

privateStringField.setAccessible(true);
Long fieldValue = ((Long) privateStringField.get(scheduleBuilder)) / 3600000;
System.out.println("fieldValue = " + fieldValue);
timeCrawl = String.valueOf(fieldValue);
}
}
return timeCrawl;
} catch (Exception e) {
e.printStackTrace();
return null;
}

}

@Override
public boolean setTimeCrawlJob(int value) {
try {
if (value == 0) {
throw new Exception("time crawl job can not be zero");
}
trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey)
.withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInSeconds(value).repeatForever()).build();

.withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInSeconds(value).repeatForever())
.build();

job = JobBuilder.newJob(CrawlNewsScheduleJob.class).withIdentity(jobKey).build();
return true;
} catch (Exception e) {
Expand All @@ -88,7 +94,7 @@ public boolean setTimeCrawlJob(int value) {
@Override
public boolean turnOnCrawler() {
try {
if (scheduler!= null) {
if (scheduler != null) {
if (!scheduler.isStarted()) {
scheduler.start();
}
Expand All @@ -104,26 +110,27 @@ public boolean turnOnCrawler() {
@Override
public boolean turnOffCrawler() {
try {
if (scheduler!= null) {
if (scheduler != null) {
scheduler.deleteJob(jobKey);
}
return true;
} catch (SchedulerException e) {
e.printStackTrace();
return false;
}

}

@Override
public boolean crawlNow() {
try {
if (scheduler!= null) {
if (scheduler != null) {
if (!scheduler.isStarted()) {
scheduler.start();
}
}
JobDetail jobNow = JobBuilder.newJob(CrawlNewsNowJob.class).storeDurably(true).withIdentity(jobKeyNow).build();
JobDetail jobNow = JobBuilder.newJob(CrawlNewsNowJob.class).storeDurably(true).withIdentity(jobKeyNow)
.build();
scheduler.addJob(jobNow, true);
scheduler.triggerJob(jobKeyNow);
scheduler.deleteJob(jobKeyNow);
Expand All @@ -137,19 +144,33 @@ public boolean crawlNow() {
@Override
public boolean delete(CrawledNews news) {
    // Validates a single crawled-news record, then removes it from the repository.
    // Returns false when validation fails or the repository throws; never propagates.
    // NOTE: reconstructed post-merge version; the scraped diff carried duplicate
    // pre/post catch lines.
    try {
        CrawledNewsValidation crawledNewsValidation = new CrawledNewsValidation();
        String error = crawledNewsValidation.isValidCrawledNewsOne(news);
        if (!error.isEmpty()) {
            throw new Exception(error);
        }
        crawledNewsRepository.delete(news);
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
}

@Override
public Page<CrawledNews> findByCrawledNewsStatus(String status, Pageable page) {
public Page<CrawledNews> findByCrawledNewsStatus(String status, Pageable page) {
try {
if (status == null || (status != null && status.isEmpty())) {
throw new Exception("Status can not be null or empty");
}
if (status.equals(StatusCrawledNewsConstant.DISPLAY)
|| status.equals(StatusCrawledNewsConstant.NON_DISPLAY)) {

} else {
throw new Exception("Status must be display or non display");
}
return crawledNewsRepository.findByCrawledNewsStatus(status, page);
}catch(Exception e) {
} catch (Exception e) {
e.printStackTrace();
return null;
}
Expand All @@ -158,20 +179,39 @@ public Page<CrawledNews> findByCrawledNewsStatus(String status, Pageable page) {
@Override
public long countByStatus(String status) {
    // Counts crawled news having the given display status.
    // status must be non-null, non-empty, and one of the two known constants
    // (DISPLAY / NON_DISPLAY); returns -1 on invalid input or repository failure.
    try {
        if (status == null || status.isEmpty()) {
            // `status != null &&` in the original was redundant after the null check.
            throw new Exception("Status can not be null or empty");
        }
        // Inverted the original's empty then-branch: reject anything that is not
        // one of the two recognized status constants.
        if (!status.equals(StatusCrawledNewsConstant.DISPLAY)
                && !status.equals(StatusCrawledNewsConstant.NON_DISPLAY)) {
            throw new Exception("Status must be display or non display");
        }
        return crawledNewsRepository.countByCrawledNewsStatus(status);
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}

@Override
public boolean delete(List<CrawledNews> listNews) {
try {
CrawledNewsValidation crawledNewsValidation = new CrawledNewsValidation();
String error = crawledNewsValidation.isValidCrawledNews(listNews);
if (!error.isEmpty()) {
throw new Exception(error);
}
for (CrawledNews news : listNews) {
Optional<CrawledNews> newsTemp = crawledNewsRepository.findById(news.getCrawledNewsID());
if (!newsTemp.isPresent()) {
throw new Exception("can't delete if news ID is not exist");
}
}
crawledNewsRepository.deleteAll(listNews);
return true;
}catch(Exception e) {
} catch (Exception e) {
e.printStackTrace();
return false;
}
Expand All @@ -180,31 +220,45 @@ public boolean delete(List<CrawledNews> listNews) {
@Override
public List<CrawledNews> saveAll(List<CrawledNews> listNews) {
    // Validates a batch of crawled-news records and persists them in one call.
    // Returns the saved entities, or null when validation or persistence fails.
    // NOTE: reconstructed post-merge version; the scraped diff carried duplicate
    // pre/post catch lines.
    try {
        CrawledNewsValidation crawledNewsValidation = new CrawledNewsValidation();
        String error = crawledNewsValidation.isValidCrawledNews(listNews);
        if (!error.isEmpty()) {
            throw new Exception(error);
        }
        return crawledNewsRepository.saveAll(listNews);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}

@Override
public CrawledNews findById(Long newsId) {
Optional<CrawledNews> news = crawledNewsRepository.findById(newsId);
if (news.isPresent()) {
try {
if (newsId == null) {
throw new Exception("userId can not be null");
}
Optional<CrawledNews> news = crawledNewsRepository.findById(newsId);
if (!news.isPresent()) {
return null;
}
return news.get();
} catch (Exception e) {
e.printStackTrace();
return null;
}
return null;
}

@Override
public CrawledNews findByCrawledNewsLink(String link) {
    // Fetches the crawled-news record identified by its source URL.
    // Returns null if the repository lookup throws; a missing record's result
    // is whatever the repository returns (presumably null — TODO confirm).
    try {
        // Removed the redundant local; the repository result is returned directly.
        return crawledNewsRepository.findByCrawledNewsLink(link);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
package capstone.lip.landinformationportal.business.validation;

import java.sql.Timestamp;
import java.util.List;
import java.util.regex.Pattern;

import capstone.lip.landinformationportal.common.constant.StatusCrawledNewsConstant;
import capstone.lip.landinformationportal.common.entity.CrawledNews;

public class CrawledNewsValidation {

public String isValidCrawledNews(List<CrawledNews> listNews) {
Timestamp time = new Timestamp(System.currentTimeMillis());
if(listNews == null || (listNews != null && listNews.isEmpty()) ) {
return "List news can not be null or empty";
}
for (CrawledNews news : listNews) {
if (time.getTime() < news.getCrawledNewsTime().getTime()) {
return "Can't insert future time";
}
if (news.getCrawledNewsLink() == null
|| (news.getCrawledNewsLink() != null && news.getCrawledNewsLink().trim().isEmpty())) {
return "Link of news can not be null or empty";
}
if (news.getCrawledNewsWebsite() == null
|| (news.getCrawledNewsWebsite() != null && news.getCrawledNewsWebsite().trim().isEmpty())) {
return "Website of crawled news can not be null or empty";
}
if (news.getCrawledNewsStatus().equals(StatusCrawledNewsConstant.DISPLAY)
|| news.getCrawledNewsStatus().equals(StatusCrawledNewsConstant.NON_DISPLAY)) {

} else {
return "News status muse be display or non display";
}
}
return "";
}

public String isValidCrawledNewsOne(CrawledNews news) {
Timestamp time = new Timestamp(System.currentTimeMillis());
if(news.getCrawledNewsID() <= 0) {
return "News ID can not be negative or equal zero";
}
if (time.getTime() < news.getCrawledNewsTime().getTime()) {
return "Can't insert future time";
}
if (news.getCrawledNewsLink() == null
|| (news.getCrawledNewsLink() != null && news.getCrawledNewsLink().trim().isEmpty())) {
return "Link of news can not be null or empty";
}
if (news.getCrawledNewsWebsite() == null
|| (news.getCrawledNewsWebsite() != null && news.getCrawledNewsWebsite().trim().isEmpty())) {
return "Website of crawled news can not be null or empty";
}
if (news.getCrawledNewsStatus().equals(StatusCrawledNewsConstant.DISPLAY)
|| news.getCrawledNewsStatus().equals(StatusCrawledNewsConstant.NON_DISPLAY)) {

} else {
return "News status muse be display or non display";
}
return "";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ public void FT_CNS_1_08() {
List<CrawledNews> result = instance.saveAll(input);

//TEST RESULT
testFail(result);
testInsertSuccess(result);
}

/**
Expand Down Expand Up @@ -299,7 +299,7 @@ public void FT_CNS_1_15() {
List<CrawledNews> input = new ArrayList();
input.add(sampleCrawledNews
.setCrawledNewsID(NOT_EXISTED_ID)
.setCrawledNewsLink(DEFAULT_LINK));
.setCrawledNewsLink(NULL_STRING));

//TEST METHOD
List<CrawledNews> result = instance.saveAll(input);
Expand Down Expand Up @@ -330,7 +330,7 @@ public void FT_CNS_1_16() {

/**
* @Description: Save list contain 1 record
* @Dependency: News contains special character website source
* @Dependency: News contains all space website source
* @Expected Result: Fail
*/
@Test
Expand Down Expand Up @@ -419,7 +419,7 @@ public void FT_CNS_1_21() {
List<CrawledNews> input = new ArrayList();
input.add(sampleCrawledNews
.setCrawledNewsID(NOT_EXISTED_ID)
.setCrawledNewsStatus(STATUS_INVALID));
.setCrawledNewsStatus(STATUS_VALID_NOT_EXISTED));

//TEST METHOD
List<CrawledNews> result = instance.saveAll(input);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -210,10 +210,8 @@ public void FT_CNS_8_06() {
//TEST DATA
List<CrawledNews> input = new ArrayList();
for (int i = 0; i < EXISTED_IDs.length; i++) {
input.add(sampleCrawledNews
.setCrawledNewsID(EXISTED_IDs[i]));
input.add(repository.findById(EXISTED_IDs[i]).get());
}
input.set(input.size()-1, null);

//TEST METHOD
boolean result = instance.delete(input);
Expand Down

0 comments on commit 460ae87

Please sign in to comment.