diff --git a/.gitignore b/.gitignore
index fd6154d..7c2953d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,3 +37,4 @@ build/
/data/**
.DS_Store
+/logs/
diff --git a/pom.xml b/pom.xml
index e1eb2fc..c376383 100644
--- a/pom.xml
+++ b/pom.xml
@@ -23,13 +23,18 @@
org.springframework.boot
- spring-boot-starter-quartz
+ spring-boot-starter-web
+
+
+ org.springframework.boot
+ spring-boot-starter-logging
+
+
org.springframework.boot
- spring-boot-starter-web
+ spring-boot-starter-log4j2
-
com.h2database
h2
@@ -56,6 +61,18 @@
1.0.0-rc1
test
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>3.23.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>1.33</version>
+ </dependency>
+
diff --git a/src/main/java/hub/event/adminapi/AdminController.java b/src/main/java/hub/event/adminapi/AdminController.java
index 3790a86..4c62a2e 100644
--- a/src/main/java/hub/event/adminapi/AdminController.java
+++ b/src/main/java/hub/event/adminapi/AdminController.java
@@ -1,9 +1,5 @@
package hub.event.adminapi;
-import hub.event.auth.AuthService;
-import hub.event.users.UserService;
-import hub.event.statistics.StatsService;
-
class AdminController {
diff --git a/src/main/java/hub/event/newsletter/NewsLetterService.java b/src/main/java/hub/event/newsletter/NewsLetterService.java
index dcc1424..99addcf 100644
--- a/src/main/java/hub/event/newsletter/NewsLetterService.java
+++ b/src/main/java/hub/event/newsletter/NewsLetterService.java
@@ -1,7 +1,4 @@
package hub.event.newsletter;
-import hub.event.mailmodule.MailService;
-import hub.event.events.EventService;
-
public class NewsLetterService {
}
diff --git a/src/main/java/hub/event/scrapers/ScrapersService.java b/src/main/java/hub/event/scrapers/ScrapersService.java
deleted file mode 100644
index 50eb8c4..0000000
--- a/src/main/java/hub/event/scrapers/ScrapersService.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package hub.event.scrapers;
-
-import hub.event.events.EventService;
-
-public class ScrapersService {
-
-}
diff --git a/src/main/java/hub/event/scrapers/Scrappable.java b/src/main/java/hub/event/scrapers/Scrappable.java
deleted file mode 100644
index 2a1a9ee..0000000
--- a/src/main/java/hub/event/scrapers/Scrappable.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package hub.event.scrapers;
-
-public interface Scrappable {
-}
diff --git a/src/main/java/hub/event/scrapers/core/EntityLastScrapedEventMarker.java b/src/main/java/hub/event/scrapers/core/EntityLastScrapedEventMarker.java
new file mode 100644
index 0000000..98ab746
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/EntityLastScrapedEventMarker.java
@@ -0,0 +1,86 @@
+package hub.event.scrapers.core;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.time.Instant;
+import java.util.Objects;
+
+@Entity(name = "scraper_scraped_event_maker")
+class EntityLastScrapedEventMarker implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer markerId;
+ @Column(nullable = false)
+ private Instant runTime;
+ private String eventTitle;
+ @Column(nullable = false)
+ private String marker;
+ @Column(nullable = false)
+ private Boolean isComplete;
+ @Column(nullable = false)
+ private Integer scraperId;
+
+ EntityLastScrapedEventMarker() {
+ }
+
+ Integer getMarkerId() {
+ return markerId;
+ }
+
+ void setMarkerId(Integer markerId) {
+ this.markerId = markerId;
+ }
+
+ Instant getRunTime() {
+ return runTime;
+ }
+
+ void setRunTime(Instant runTime) {
+ this.runTime = runTime;
+ }
+
+ String getEventTitle() {
+ return eventTitle;
+ }
+
+ void setEventTitle(String eventTitle) {
+ this.eventTitle = eventTitle;
+ }
+
+ String getMarker() {
+ return marker;
+ }
+
+ void setMarker(String marker) {
+ this.marker = marker;
+ }
+
+ Boolean getComplete() {
+ return isComplete;
+ }
+
+ void setComplete(Boolean complete) {
+ isComplete = complete;
+ }
+
+ Integer getScraperId() {
+ return scraperId;
+ }
+
+ void setScraperId(Integer scraperId) {
+ this.scraperId = scraperId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityLastScrapedEventMarker that = (EntityLastScrapedEventMarker) o;
+ return Objects.equals(markerId, that.markerId) && Objects.equals(runTime, that.runTime) && Objects.equals(eventTitle, that.eventTitle) && Objects.equals(marker, that.marker) && Objects.equals(isComplete, that.isComplete) && Objects.equals(scraperId, that.scraperId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(markerId, runTime, eventTitle, marker, isComplete, scraperId);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/EntityScraperConfig.java b/src/main/java/hub/event/scrapers/core/EntityScraperConfig.java
new file mode 100644
index 0000000..73ddbb9
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/EntityScraperConfig.java
@@ -0,0 +1,72 @@
+package hub.event.scrapers.core;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.util.Objects;
+
+@Entity(name = "scraper_config")
+class EntityScraperConfig implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer scraperId;
+ @Column(unique = true)
+ private String configurationName;
+ @Column(nullable = false)
+ private String timeZone;
+ @Column(nullable = false)
+ private boolean isActive;
+
+ EntityScraperConfig() {
+ }
+
+ EntityScraperConfig(String configurationName, String timeZone, boolean isActive) {
+ this.configurationName = configurationName;
+ this.timeZone = timeZone;
+ this.isActive = isActive;
+ }
+
+ Integer getScraperId() {
+ return scraperId;
+ }
+
+ void setScraperId(Integer scraperId) {
+ this.scraperId = scraperId;
+ }
+
+ String getConfigurationName() {
+ return configurationName;
+ }
+
+ void setConfigurationName(String configurationName) {
+ this.configurationName = configurationName;
+ }
+
+ String getTimeZone() {
+ return timeZone;
+ }
+
+ void setTimeZone(String timeZone) {
+ this.timeZone = timeZone;
+ }
+
+ boolean isActive() {
+ return isActive;
+ }
+
+ void setActive(boolean active) {
+ isActive = active;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityScraperConfig that = (EntityScraperConfig) o;
+ return isActive == that.isActive && Objects.equals(scraperId, that.scraperId) && Objects.equals(configurationName, that.configurationName) && Objects.equals(timeZone, that.timeZone);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(scraperId, configurationName, timeZone, isActive);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/EntityScraperRunErrorLog.java b/src/main/java/hub/event/scrapers/core/EntityScraperRunErrorLog.java
new file mode 100644
index 0000000..db6aa05
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/EntityScraperRunErrorLog.java
@@ -0,0 +1,85 @@
+package hub.event.scrapers.core;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.time.Instant;
+import java.util.Objects;
+
+@Entity(name = "scraperErrorLog")
+class EntityScraperRunErrorLog implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer logId;
+
+ @Column(nullable = false, name = "error_time")
+ private Instant time;
+
+ @Column(nullable = false)
+ private String errorCode;
+ private String description;
+ @Column(nullable = false)
+ private Integer scraperId;
+
+ EntityScraperRunErrorLog() {
+ }
+
+ EntityScraperRunErrorLog(Integer scraperId, Instant time, String errorCode, String description) {
+ this.scraperId = scraperId;
+ this.time = time;
+ this.errorCode = errorCode;
+ this.description = description;
+ }
+
+ Integer getLogId() {
+ return logId;
+ }
+
+ void setLogId(Integer logId) {
+ this.logId = logId;
+ }
+
+ Instant getTime() {
+ return time;
+ }
+
+ void setTime(Instant time) {
+ this.time = time;
+ }
+
+ String getErrorCode() {
+ return errorCode;
+ }
+
+ void setErrorCode(String errorCode) {
+ this.errorCode = errorCode;
+ }
+
+ String getDescription() {
+ return description;
+ }
+
+ void setDescription(String description) {
+ this.description = description;
+ }
+
+ Integer getScraperId() {
+ return scraperId;
+ }
+
+ void setScraperId(Integer scraperId) {
+ this.scraperId = scraperId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityScraperRunErrorLog that = (EntityScraperRunErrorLog) o;
+ return Objects.equals(logId, that.logId) && Objects.equals(time, that.time) && Objects.equals(errorCode, that.errorCode) && Objects.equals(description, that.description) && Objects.equals(scraperId, that.scraperId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(logId, time, errorCode, description, scraperId);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/EntityScraperRunStatusLog.java b/src/main/java/hub/event/scrapers/core/EntityScraperRunStatusLog.java
new file mode 100644
index 0000000..1d95e6c
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/EntityScraperRunStatusLog.java
@@ -0,0 +1,97 @@
+package hub.event.scrapers.core;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.time.Instant;
+import java.util.Objects;
+
+@Entity(name = "scraperStatusLog")
+class EntityScraperRunStatusLog implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer logId;
+ @Column(nullable = false)
+ private Instant startTime;
+ @Column(nullable = false)
+ private Instant finishTime;
+ @Column(nullable = false)
+ private Integer scannedEventCount;
+
+ @Column(nullable = false)
+ private Integer errorCount;
+
+ @Column(nullable = false)
+ private Integer scraperId;
+
+ EntityScraperRunStatusLog() {
+ }
+
+ EntityScraperRunStatusLog(Integer scraperId, Instant startTime, Instant finishTime, Integer scannedEventCount, Integer errorCount) {
+ this.scraperId = scraperId;
+ this.startTime = startTime;
+ this.finishTime = finishTime;
+ this.scannedEventCount = scannedEventCount;
+ this.errorCount = errorCount;
+ }
+
+ Integer getLogId() {
+ return logId;
+ }
+
+ void setLogId(Integer logId) {
+ this.logId = logId;
+ }
+
+ Instant getStartTime() {
+ return startTime;
+ }
+
+ void setStartTime(Instant startTime) {
+ this.startTime = startTime;
+ }
+
+ Instant getFinishTime() {
+ return finishTime;
+ }
+
+ void setFinishTime(Instant finishTime) {
+ this.finishTime = finishTime;
+ }
+
+ Integer getScannedEventCount() {
+ return scannedEventCount;
+ }
+
+ void setScannedEventCount(Integer scannedEventCount) {
+ this.scannedEventCount = scannedEventCount;
+ }
+
+ Integer getErrorCount() {
+ return errorCount;
+ }
+
+ void setErrorCount(Integer errorCount) {
+ this.errorCount = errorCount;
+ }
+
+ Integer getScraperId() {
+ return scraperId;
+ }
+
+ void setScraperId(Integer scraperId) {
+ this.scraperId = scraperId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityScraperRunStatusLog that = (EntityScraperRunStatusLog) o;
+ return Objects.equals(logId, that.logId) && Objects.equals(startTime, that.startTime) && Objects.equals(finishTime, that.finishTime) && Objects.equals(scannedEventCount, that.scannedEventCount) && Objects.equals(errorCount, that.errorCount) && Objects.equals(scraperId, that.scraperId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(logId, startTime, finishTime, scannedEventCount, errorCount, scraperId);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/EventFacadeAdapter.java b/src/main/java/hub/event/scrapers/core/EventFacadeAdapter.java
new file mode 100644
index 0000000..826a417
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/EventFacadeAdapter.java
@@ -0,0 +1,71 @@
+package hub.event.scrapers.core;
+
+import hub.event.events.EventFacade;
+import hub.event.events.event.Event;
+import hub.event.events.type.Type;
+import hub.event.scrapers.core.datewithlocation.MultipleEventDateWithLocations;
+import hub.event.scrapers.core.datewithlocation.MultipleMappedEvents;
+import hub.event.scrapers.core.datewithlocation.SingleDateMappedToEvent;
+import hub.event.scrapers.core.datewithlocation.SingleEventDateWithLocation;
+import org.springframework.stereotype.Service;
+
+import java.util.Collection;
+import java.util.List;
+
+@Service
+class EventFacadeAdapter {
+ private final EventFacade eventFacade;
+
+ EventFacadeAdapter(EventFacade eventFacade) {
+ this.eventFacade = eventFacade;
+ }
+
+ public void saveAll(List<ScrapedEvent> scrapedEventList) {
+ List<Event> eventList = mapScrapedEventListToEventList(scrapedEventList);
+ eventFacade.saveEvents(eventList);
+ }
+
+ private List<Event> mapScrapedEventListToEventList(List<ScrapedEvent> scrapedEventList) {
+ return scrapedEventList.stream()
+ .map(this::mapEvent)
+ .flatMap(Collection::stream)
+ .toList();
+ }
+
+ private List<Event> mapEvent(ScrapedEvent scrapedEvent) {
+ return scrapedEvent.hasMultipleDateAndLocations()
+ ? mapMultipleDateToEvent(scrapedEvent)
+ : mapSingleDateToEvent(scrapedEvent);
+ }
+
+ private List<Event> mapMultipleDateToEvent(ScrapedEvent scrapedEvent) {
+ final MultipleEventDateWithLocations multipleEventDateWithLocations = scrapedEvent.multipleEventDateWithLocations();
+
+ List<Event> eventList = new MultipleMappedEvents(multipleEventDateWithLocations).events();
+
+ for (Event event : eventList) {
+ event.setTitle(scrapedEvent.title());
+ event.setDescription(scrapedEvent.description());
+ event.setTypes(mapTypes(scrapedEvent.types()));
+ }
+
+ return eventList;
+ }
+
+ private List<Event> mapSingleDateToEvent(ScrapedEvent scrapedEvent) {
+ final SingleEventDateWithLocation eventDateWithLocation = scrapedEvent.singleEventDateWithLocation();
+ final Event event = new SingleDateMappedToEvent(eventDateWithLocation);
+
+ event.setTitle(scrapedEvent.title());
+ event.setDescription(scrapedEvent.description());
+ event.setTypes(mapTypes(scrapedEvent.types()));
+
+ return List.of(event);
+ }
+
+ private List<Type> mapTypes(List<String> types) {
+ return types.stream()
+ .map(type -> new Type(null, type))
+ .toList();
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/JpaLastScrapedEventMarkerRepository.java b/src/main/java/hub/event/scrapers/core/JpaLastScrapedEventMarkerRepository.java
new file mode 100644
index 0000000..ce7f76d
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/JpaLastScrapedEventMarkerRepository.java
@@ -0,0 +1,21 @@
+package hub.event.scrapers.core;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Modifying;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import javax.transaction.Transactional;
+import java.time.Instant;
+import java.util.List;
+import java.util.Optional;
+
+interface JpaLastScrapedEventMarkerRepository extends JpaRepository<EntityLastScrapedEventMarker, Integer> {
+
+ Optional<EntityLastScrapedEventMarker> findByScraperId(Integer scraperId);
+
+ @Modifying
+ @Query("UPDATE scraper_scraped_event_maker m SET m.isComplete = true WHERE m.isComplete = false AND m.runTime >= :date AND m.scraperId IN :configurationIds")
+ @Transactional
+ void setAllAsCompleteByConfigurationsIds(@Param("configurationIds") List<Integer> configurationIds, @Param("date") Instant date);
+}
diff --git a/src/main/java/hub/event/scrapers/core/JpaScraperConfigRepository.java b/src/main/java/hub/event/scrapers/core/JpaScraperConfigRepository.java
new file mode 100644
index 0000000..542e64e
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/JpaScraperConfigRepository.java
@@ -0,0 +1,17 @@
+package hub.event.scrapers.core;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Modifying;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import javax.transaction.Transactional;
+
+interface JpaScraperConfigRepository extends JpaRepository<EntityScraperConfig, Integer> {
+
+ @Modifying
+ @Transactional
+ @Query("update scraper_config e set e.isActive = :state where e.scraperId = :id")
+ void setActiveState(@Param("id") Integer scraperId, @Param("state") boolean activeState);
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/JpaScraperRunErrorRepository.java b/src/main/java/hub/event/scrapers/core/JpaScraperRunErrorRepository.java
new file mode 100644
index 0000000..10f9606
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/JpaScraperRunErrorRepository.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+
+interface JpaScraperRunErrorRepository extends JpaRepository<EntityScraperRunErrorLog, Integer>, JpaSpecificationExecutor<EntityScraperRunErrorLog> {
+}
diff --git a/src/main/java/hub/event/scrapers/core/JpaScraperRunLogRepository.java b/src/main/java/hub/event/scrapers/core/JpaScraperRunLogRepository.java
new file mode 100644
index 0000000..6c38f4d
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/JpaScraperRunLogRepository.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+
+interface JpaScraperRunLogRepository extends JpaRepository<EntityScraperRunStatusLog, Integer>, JpaSpecificationExecutor<EntityScraperRunStatusLog> {
+}
diff --git a/src/main/java/hub/event/scrapers/core/LastScrapedEventMarkerRepository.java b/src/main/java/hub/event/scrapers/core/LastScrapedEventMarkerRepository.java
new file mode 100644
index 0000000..9eaa9b7
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/LastScrapedEventMarkerRepository.java
@@ -0,0 +1,58 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.LastScrapedEventMarker;
+import org.springframework.stereotype.Repository;
+
+import java.time.Instant;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.Optional;
+
+@Repository
+class LastScrapedEventMarkerRepository {
+ private final JpaLastScrapedEventMarkerRepository jpaLastScrapedEventMarkerRepository;
+ private final ScraperIdNameCache scraperIdNameCache;
+
+ LastScrapedEventMarkerRepository(JpaLastScrapedEventMarkerRepository lastScrapedEventMarkerEntityRepository, ScraperIdNameCache scraperIdNameCache) {
+ this.jpaLastScrapedEventMarkerRepository = lastScrapedEventMarkerEntityRepository;
+ this.scraperIdNameCache = scraperIdNameCache;
+ }
+
+ void store(LastScrapedEventMarker lastScrapedEventMarker) {
+ final EntityLastScrapedEventMarker entityLastScrapedEventMarker = mapToEntity(lastScrapedEventMarker);
+ jpaLastScrapedEventMarkerRepository.save(entityLastScrapedEventMarker);
+ }
+
+ void setAllAsCompleteByConfigurationsIds(List<Integer> ids) {
+ jpaLastScrapedEventMarkerRepository.setAllAsCompleteByConfigurationsIds(ids, ZonedDateTime.now().minusDays(1).toInstant());
+ }
+
+ Optional<LastScrapedEventMarker> findLastCompletedByScraperConfigurationId(Integer scraperId) {
+ final Optional<EntityLastScrapedEventMarker> scraperConfigurationName = jpaLastScrapedEventMarkerRepository.findByScraperId(scraperId);
+ return scraperConfigurationName.map(this::mapToMaker);
+ }
+
+ private EntityLastScrapedEventMarker mapToEntity(LastScrapedEventMarker lastScrapedEventMarker) {
+ final Integer scraperId = scraperIdNameCache.getIdByScraperName(lastScrapedEventMarker.scraperConfigurationName());
+ final EntityLastScrapedEventMarker entityLastScrapedEventMarker = new EntityLastScrapedEventMarker();
+
+ entityLastScrapedEventMarker.setMarker(lastScrapedEventMarker.marker());
+ entityLastScrapedEventMarker.setEventTitle(lastScrapedEventMarker.eventTitle());
+ entityLastScrapedEventMarker.setRunTime(lastScrapedEventMarker.runDateTime());
+ entityLastScrapedEventMarker.setScraperId(scraperId);
+ entityLastScrapedEventMarker.setComplete(lastScrapedEventMarker.complete());
+
+ return entityLastScrapedEventMarker;
+ }
+
+ private LastScrapedEventMarker mapToMaker(EntityLastScrapedEventMarker entityLastScrapedEventMarker) {
+ final String marker = entityLastScrapedEventMarker.getMarker();
+ final Instant date = entityLastScrapedEventMarker.getRunTime();
+ final String title = entityLastScrapedEventMarker.getEventTitle();
+ final String configurationName = scraperIdNameCache.getScraperNameById(entityLastScrapedEventMarker.getScraperId());
+ final Boolean isComplete = entityLastScrapedEventMarker.getComplete();
+
+ return new LastScrapedEventMarker(configurationName, date, title, marker, isComplete);
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/PageScraperPort.java b/src/main/java/hub/event/scrapers/core/PageScraperPort.java
new file mode 100644
index 0000000..629f639
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/PageScraperPort.java
@@ -0,0 +1,53 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.runlog.ScraperRunErrorLog;
+import hub.event.scrapers.core.runlog.ScraperRunStatusLog;
+import hub.event.scrapers.core.scraper.LastScrapedEventMarker;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.time.Instant;
+import java.time.ZoneId;
+import java.util.Collection;
+import java.util.Optional;
+
+public abstract class PageScraperPort {
+ @Autowired
+ private ScraperLogRepository scraperLogRepository;
+ @Autowired
+ private LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository;
+
+ @Autowired
+ private ScraperIdNameCache scraperIdNameCache;
+
+ protected String configurationName() {
+ // the class name should be enough as a unique identifier in a modular monolith
+ return this.getClass().getName();
+ }
+
+ protected abstract Collection<ScrapedEvent> scrap();
+
+ protected ZoneId timeZone() {
+ return ZoneId.systemDefault();
+ }
+
+ protected void logError(Instant time, String errorCode, String description) {
+ final ScraperRunErrorLog scraperRunErrorLog = new ScraperRunErrorLog(configurationName(), time, errorCode, description);
+ scraperLogRepository.save(scraperRunErrorLog);
+ }
+
+ protected void logStatus(Instant startTime, Instant finishTime, Integer scannedEventCount, Integer errorCount) {
+ final ScraperRunStatusLog scraperRunStatusLog = new ScraperRunStatusLog(configurationName(), startTime, finishTime, scannedEventCount, errorCount);
+ scraperLogRepository.save(scraperRunStatusLog);
+ }
+
+ protected Optional<LastScrapedEventMarker> lastScrapedEventMarkerByConfigurationName() {
+ Integer id = scraperIdNameCache.getIdByScraperName(configurationName());
+ return lastScrapedEventMarkerRepository.findLastCompletedByScraperConfigurationId(id);
+ }
+
+ protected void saveLastScrapedEventMarker(Instant runDateTime, String eventTitle, String marker) {
+ final LastScrapedEventMarker newScrapedEventMarkerToSave = new LastScrapedEventMarker(configurationName(), runDateTime, eventTitle, marker);
+ lastScrapedEventMarkerRepository.store(newScrapedEventMarkerToSave);
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScrapedEvent.java b/src/main/java/hub/event/scrapers/core/ScrapedEvent.java
new file mode 100644
index 0000000..85a7387
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScrapedEvent.java
@@ -0,0 +1,126 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.datewithlocation.MultipleEventDateWithLocations;
+import hub.event.scrapers.core.datewithlocation.SingleEventDateWithLocation;
+
+import java.util.*;
+
+public class ScrapedEvent {
+
+ private final Map<String, String> metadata;
+ private final String title;
+ private final String description;
+ private final String sourceLink;
+ private final List<String> types;
+ private final SingleEventDateWithLocation singleEventDateWithLocation;
+ private final MultipleEventDateWithLocations multipleEventDateWithLocations;
+
+ // new class for a recurring event
+ // Period - the range from when until when it runs
+ // List - the list of dates on which it occurs
+ // Interval - how often it repeats
+ // can be left out for now
+
+ private ScrapedEvent(String title, String description, String sourceLink, SingleEventDateWithLocation singleEventDateWithLocation, MultipleEventDateWithLocations multipleEventDateWithLocations, Map<String, String> metadata, List<String> types) {
+ this.metadata = metadata;
+ this.title = title;
+ this.description = description;
+ this.sourceLink = sourceLink;
+ this.types = types;
+ this.singleEventDateWithLocation = singleEventDateWithLocation;
+ this.multipleEventDateWithLocations = multipleEventDateWithLocations;
+ }
+
+ public static ScrapedEventBuilder builder(SingleEventDateWithLocation singleEventDateWithLocation) {
+ return new ScrapedEventBuilder(singleEventDateWithLocation);
+ }
+
+ public static ScrapedEventBuilder builder(MultipleEventDateWithLocations multipleEventDateWithLocations) {
+ return new ScrapedEventBuilder(multipleEventDateWithLocations);
+ }
+
+ String title() {
+ return title;
+ }
+
+ String description() {
+ return description;
+ }
+
+ String sourceLink() {
+ return sourceLink;
+ }
+
+ Map<String, String> metadata() {
+ return metadata;
+ }
+
+ SingleEventDateWithLocation singleEventDateWithLocation() {
+ return singleEventDateWithLocation;
+ }
+
+ MultipleEventDateWithLocations multipleEventDateWithLocations() {
+ return multipleEventDateWithLocations;
+ }
+
+ boolean hasMultipleDateAndLocations() {
+ return Objects.nonNull(multipleEventDateWithLocations);
+ }
+
+ List<String> types() {
+ return types;
+ }
+
+ public static class ScrapedEventBuilder {
+ private final Map<String, String> metadata;
+ private final List<String> types;
+ private String title;
+ private String description;
+ private String sourceLink;
+ private SingleEventDateWithLocation singleEventDateWithLocation;
+ private MultipleEventDateWithLocations multipleEventDateWithLocations;
+
+ private ScrapedEventBuilder() {
+ this.metadata = new HashMap<>();
+ this.types = new ArrayList<>();
+ }
+
+ public ScrapedEventBuilder(SingleEventDateWithLocation singleEventDateWithLocation) {
+ this();
+ this.singleEventDateWithLocation = singleEventDateWithLocation;
+ }
+
+ public ScrapedEventBuilder(MultipleEventDateWithLocations multipleEventDateWithLocations) {
+ this();
+ this.multipleEventDateWithLocations = multipleEventDateWithLocations;
+ }
+
+ public ScrapedEventBuilder title(String title) {
+ this.title = title;
+ return this;
+ }
+
+ public ScrapedEventBuilder description(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public ScrapedEventBuilder sourceLink(String sourceLink) {
+ this.sourceLink = sourceLink;
+ return this;
+ }
+
+ public ScrapedEventBuilder metadata(String key, String value) {
+ this.metadata.put(key, value);
+ return this;
+ }
+ public ScrapedEventBuilder type(String type) {
+ this.types.add(type);
+ return this;
+ }
+
+ public ScrapedEvent build() {
+ return new ScrapedEvent(title, description, sourceLink, singleEventDateWithLocation, multipleEventDateWithLocations, metadata, types);
+ }
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScraperConfigRepository.java b/src/main/java/hub/event/scrapers/core/ScraperConfigRepository.java
new file mode 100644
index 0000000..accc246
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScraperConfigRepository.java
@@ -0,0 +1,47 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.ScraperConfig;
+import org.springframework.stereotype.Repository;
+
+import java.time.ZoneId;
+import java.util.List;
+
+@Repository
+class ScraperConfigRepository {
+ private final JpaScraperConfigRepository jpaScraperConfigRepository;
+
+ ScraperConfigRepository(JpaScraperConfigRepository jpaScraperConfigRepository) {
+ this.jpaScraperConfigRepository = jpaScraperConfigRepository;
+ }
+
+
+ boolean exists(Integer scraperId) {
+ return jpaScraperConfigRepository.existsById(scraperId);
+ }
+
+ ScraperConfig create(String scraperName, ZoneId timeZone, boolean activeState) {
+ final EntityScraperConfig entityScraperConfig = new EntityScraperConfig(scraperName, timeZone.toString(), activeState);
+ final EntityScraperConfig savedEntity = jpaScraperConfigRepository.save(entityScraperConfig);
+ return mapToScraperConfig(savedEntity);
+ }
+
+ void activate(Integer scraperId) {
+ jpaScraperConfigRepository.setActiveState(scraperId, true);
+ }
+
+ void deactivate(Integer scraperId) {
+ jpaScraperConfigRepository.setActiveState(scraperId, false);
+ }
+
+ public List<ScraperConfig> allScraperConfigs() {
+ return jpaScraperConfigRepository.findAll()
+ .stream()
+ .map(this::mapToScraperConfig)
+ .toList();
+ }
+
+ private ScraperConfig mapToScraperConfig(EntityScraperConfig entityScraperConfig) {
+ ZoneId timeZone = ZoneId.of(entityScraperConfig.getTimeZone());
+ return new ScraperConfig(entityScraperConfig.getScraperId(), entityScraperConfig.getConfigurationName(), timeZone, entityScraperConfig.isActive());
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScraperFacade.java b/src/main/java/hub/event/scrapers/core/ScraperFacade.java
new file mode 100644
index 0000000..47e29ec
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScraperFacade.java
@@ -0,0 +1,34 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.exceptions.ScraperConfigurationByNameNotExists;
+import org.springframework.stereotype.Service;
+
+@Service
+public class ScraperFacade {
+
+ private final ScraperConfigRepository scraperConfigRepository;
+ private final ScraperIdNameCache scraperIdNameCache;
+
+ public ScraperFacade(ScraperConfigRepository scraperConfigRepository, ScraperIdNameCache scraperIdNameCache) {
+ this.scraperConfigRepository = scraperConfigRepository;
+ this.scraperIdNameCache = scraperIdNameCache;
+ }
+
+ public void activateScraperByConfigurationName(String scraperName) throws ScraperConfigurationByNameNotExists {
+ Integer scraperId = scraperIdNameCache.getIdByScraperName(scraperName);
+ validateScraperExists(scraperName, scraperId);
+ scraperConfigRepository.activate(scraperId);
+ }
+
+ public void deactivateScraperByConfigurationName(String scraperName) throws ScraperConfigurationByNameNotExists {
+ Integer scraperId = scraperIdNameCache.getIdByScraperName(scraperName);
+ validateScraperExists(scraperName, scraperId);
+ scraperConfigRepository.deactivate(scraperId);
+ }
+
+ private void validateScraperExists(String scraperName, Integer scraperId) throws ScraperConfigurationByNameNotExists {
+ if (!scraperConfigRepository.exists(scraperId)) {
+ throw new ScraperConfigurationByNameNotExists(scraperName);
+ }
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScraperIdNameCache.java b/src/main/java/hub/event/scrapers/core/ScraperIdNameCache.java
new file mode 100644
index 0000000..35591df
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScraperIdNameCache.java
@@ -0,0 +1,42 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.ScraperConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.util.Collection;
+import java.util.concurrent.ConcurrentHashMap;
+
+@Component
+class ScraperIdNameCache {
+
+ private final Logger logger = LoggerFactory.getLogger(ScraperIdNameCache.class);
+ private final ConcurrentHashMap<String, Integer> nameToIdMap;
+ private final ConcurrentHashMap<Integer, String> idToNameMap;
+
+ ScraperIdNameCache() {
+ nameToIdMap = new ConcurrentHashMap<>();
+ idToNameMap = new ConcurrentHashMap<>();
+ }
+
+ Integer getIdByScraperName(String scraperConfigurationName) {
+ return nameToIdMap.get(scraperConfigurationName);
+ }
+
+ String getScraperNameById(Integer scraperId) {
+ return idToNameMap.get(scraperId);
+ }
+
+ void add(Collection<ScraperConfig> scraperConfigs) {
+ for (ScraperConfig scraperConfig : scraperConfigs) {
+ add(scraperConfig);
+ }
+ }
+
+ public void add(ScraperConfig scraperConfig) {
+ idToNameMap.put(scraperConfig.scraperId(), scraperConfig.configurationName());
+ nameToIdMap.put(scraperConfig.configurationName(), scraperConfig.scraperId());
+ logger.debug("Added values to cache for config: {}", scraperConfig);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScraperLogRepository.java b/src/main/java/hub/event/scrapers/core/ScraperLogRepository.java
new file mode 100644
index 0000000..f62f6ce
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScraperLogRepository.java
@@ -0,0 +1,60 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.runlog.ScraperRunErrorLog;
+import hub.event.scrapers.core.runlog.ScraperRunStatusLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class ScraperLogRepository {
+ private final Logger logger = LoggerFactory.getLogger(ScraperLogRepository.class);
+ private final JpaScraperRunLogRepository jpaScraperRunLogRepository;
+ private final JpaScraperRunErrorRepository jpaScraperRunErrorRepository;
+ private final ScraperIdNameCache scraperIdNameCache;
+
+ @Autowired
+ public ScraperLogRepository(JpaScraperRunLogRepository jpaScraperRunLogRepository, JpaScraperRunErrorRepository jpaScraperRunErrorRepository, ScraperIdNameCache scraperIdNameCache) {
+ this.jpaScraperRunLogRepository = jpaScraperRunLogRepository;
+ this.jpaScraperRunErrorRepository = jpaScraperRunErrorRepository;
+ this.scraperIdNameCache = scraperIdNameCache;
+ }
+
+ void save(ScraperRunStatusLog scraperRunStatusLog) {
+ EntityScraperRunStatusLog entityScraperRunStatusLog = mapToEntity(scraperRunStatusLog);
+ logger.debug("Mapped EntityScraperRunStatusLog :{}", entityScraperRunStatusLog);
+ jpaScraperRunLogRepository.save(entityScraperRunStatusLog);
+ }
+
+ void save(ScraperRunErrorLog scraperRunError) {
+ EntityScraperRunErrorLog entityScraperRunErrorLog = mapToEntity(scraperRunError);
+ logger.debug("Mapped EntityScraperRunErrorLog :{}", entityScraperRunErrorLog);
+ jpaScraperRunErrorRepository.save(entityScraperRunErrorLog);
+ }
+
+
+ private EntityScraperRunStatusLog mapToEntity(ScraperRunStatusLog scraperRunStatusLog) {
+ final Integer scraperId = scraperIdNameCache.getIdByScraperName(scraperRunStatusLog.configurationName());
+
+ return new EntityScraperRunStatusLog(
+ scraperId,
+ scraperRunStatusLog.startTime(),
+ scraperRunStatusLog.finishTime(),
+ scraperRunStatusLog.scannedEventCount(),
+ scraperRunStatusLog.errorCount()
+ );
+ }
+
+ private EntityScraperRunErrorLog mapToEntity(ScraperRunErrorLog scraperRunError) {
+ final Integer scraperId = scraperIdNameCache.getIdByScraperName(scraperRunError.configurationName());
+
+ return new EntityScraperRunErrorLog(
+ scraperId,
+ scraperRunError.time(),
+ scraperRunError.errorCode(),
+ scraperRunError.description()
+ );
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/ScraperRunService.java b/src/main/java/hub/event/scrapers/core/ScraperRunService.java
new file mode 100644
index 0000000..7a350cb
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/ScraperRunService.java
@@ -0,0 +1,121 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.ScraperConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+
+@Service
+@EnableScheduling
+class ScraperRunService implements ApplicationRunner {
+
+ private final Logger logger = LoggerFactory.getLogger(ScraperRunService.class);
+ private final ScraperConfigRepository scraperConfigRepository;
+ private final EventFacadeAdapter eventFacadeAdapter;
+ private final LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository;
+ private final List pageScrapers;
+ private final ScraperIdNameCache scraperIdNameCache;
+
+ @Autowired
+ ScraperRunService(ScraperConfigRepository scraperConfigRepository, EventFacadeAdapter eventFacadeAdapter, LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository, List pageScrapers, ScraperIdNameCache scraperIdNameCache) {
+ this.scraperConfigRepository = scraperConfigRepository;
+ this.eventFacadeAdapter = eventFacadeAdapter;
+ this.lastScrapedEventMarkerRepository = lastScrapedEventMarkerRepository;
+ this.pageScrapers = pageScrapers;
+ this.scraperIdNameCache = scraperIdNameCache;
+ }
+
+ @Scheduled(cron = "${scrapers.run.cron.expression}")
+// @Scheduled(cron = "0 0 * * *")
+ void start() {
+ logger.info("Run scraper schedule");
+ final Collection scraperConfigs = scraperConfigRepository.allScraperConfigs();
+
+ final List pageScrapersToRun = getActiveScrapersThatShouldBeRun(scraperConfigs);
+ logger.debug("Scrapers to run:");
+ pageScrapersToRun.forEach(pageScraperPort -> logger.debug(pageScraperPort.configurationName()));
+ final List scrapedEventList = runScrapersForEvents(pageScrapersToRun);
+
+ logger.info("Scan done by all active scrapers, founded events = {}", scrapedEventList.size());
+ logger.debug("Scanned events:");
+ scrapedEventList.forEach(scrapedEvent -> logger.debug(scrapedEvent.toString()));
+ eventFacadeAdapter.saveAll(scrapedEventList);
+
+ final List runScraperConfigurationsIds = getRunScraperConfigurationsIds(pageScrapersToRun);
+ lastScrapedEventMarkerRepository.setAllAsCompleteByConfigurationsIds(runScraperConfigurationsIds);
+ logger.info("Events saved, markers completed ");
+ logger.info("Run scraper schedule done");
+ }
+
+ private List getScraperThatConfigNotFoundByConfigurationName(Collection scraperConfigs) {
+ final List availableScraperConfigByName = scraperConfigs.stream()
+ .map(ScraperConfig::configurationName)
+ .toList();
+
+ return pageScrapers.stream()
+ .filter(pageScraper -> !availableScraperConfigByName.contains(pageScraper.configurationName()))
+ .toList();
+ }
+
+ private List getActiveScrapersThatShouldBeRun(Collection scraperConfigs) {
+ final Map configStatusByScraperConfigurationNameMap = scraperConfigs.stream()
+ .collect(Collectors.toMap(ScraperConfig::configurationName, ScraperConfig::isActive));
+
+ return pageScrapers.stream()
+ .filter(scraper -> configStatusByScraperConfigurationNameMap.get(scraper.configurationName()))
+ .toList();
+ }
+
+ private List runScrapersForEvents(List pageScrapersToRun) {
+ return pageScrapersToRun.parallelStream()
+ .map(this::tryRunScraperTask)
+ .flatMap(Collection::stream)
+ .toList();
+ }
+
+ private Collection tryRunScraperTask(PageScraperPort pageScraperPort) {
+ try {
+ return pageScraperPort.scrap();
+ } catch (Exception runScraperTaskException) {
+ logger.error(String.format("Run scraper %s failed", pageScraperPort.configurationName()), runScraperTaskException);
+ return new ArrayList<>();
+ }
+ }
+
+ private List getRunScraperConfigurationsIds(List pageScrapersToRun) {
+ return pageScrapersToRun.stream()
+ .map(PageScraperPort::configurationName)
+ .map(scraperIdNameCache::getIdByScraperName)
+ .toList();
+ }
+
+ @Override
+ public void run(ApplicationArguments args) {
+ createScrapersConfigsIfMissingAndFillIdNameCache();
+ }
+
+ void createScrapersConfigsIfMissingAndFillIdNameCache() {
+ final Collection scraperConfigs = scraperConfigRepository.allScraperConfigs();
+ scraperIdNameCache.add(scraperConfigs);
+
+ final List scrapersWithoutConfig = getScraperThatConfigNotFoundByConfigurationName(scraperConfigs);
+
+ for (PageScraperPort pageScraperPort : scrapersWithoutConfig) {
+ ScraperConfig scraperConfig = scraperConfigRepository.create(pageScraperPort.configurationName(), pageScraperPort.timeZone(), true);
+ scraperIdNameCache.add(scraperConfig);
+ }
+ logger.info("Scrapers config validated and added to cache");
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateType.java b/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateType.java
new file mode 100644
index 0000000..eb4a752
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateType.java
@@ -0,0 +1,6 @@
package hub.event.scrapers.core.datewithlocation;

/**
 * Kind of an event date: a single occurrence or a continuous period.
 */
enum EventDateType {
  SINGLE,
  PERIOD,
}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateWithLocation.java b/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateWithLocation.java
new file mode 100644
index 0000000..b14c390
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/EventDateWithLocation.java
@@ -0,0 +1,75 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+
+class EventDateWithLocation {
+ private final LocalDate startDate;
+ private final LocalDate endDate;
+ private final LocalTime startTime;
+ private final LocalTime endTime;
+
+ private final ZoneId timeZone;
+
+ private final EventLocation eventLocation;
+
+ public EventDateWithLocation(LocalDate startDate, LocalTime startTime, LocalDate endDate, LocalTime endTime, ZoneId timeZone, String city, String address, String locationName) {
+ this.startDate = startDate;
+ this.endDate = endDate;
+ this.startTime = startTime;
+ this.endTime = endTime;
+ this.timeZone = timeZone;
+ this.eventLocation = new EventLocation(city, address, locationName);
+ }
+
+ public EventDateWithLocation(LocalDate date, LocalTime time, ZoneId timeZone, String city, String address, String locationName) throws EventDateInPastException {
+ inputDateValidation(date);
+ this.timeZone = timeZone;
+ this.startDate = date;
+ this.endDate = null;
+ this.startTime = time;
+ this.endTime = null;
+ this.eventLocation = new EventLocation(city, address, locationName);
+ }
+
+ String city() {
+ return eventLocation.city();
+ }
+
+ LocalDate startDate() {
+ return startDate;
+ }
+
+ LocalTime startTime() {
+ return startTime;
+ }
+
+ LocalDate endDate() {
+ return endDate;
+ }
+
+ LocalTime endTime() {
+ return endTime;
+ }
+
+ String address() {
+ return eventLocation.address();
+ }
+
+ String locationName() {
+ return eventLocation.name();
+ }
+
+ ZoneId timeZone() {
+ return timeZone;
+ }
+
+ private void inputDateValidation(LocalDate date) throws EventDateInPastException {
+ if (date.isBefore(LocalDate.now())) {
+ throw new EventDateInPastException(date);
+ }
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/EventLocation.java b/src/main/java/hub/event/scrapers/core/datewithlocation/EventLocation.java
new file mode 100644
index 0000000..fbd13fa
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/EventLocation.java
@@ -0,0 +1,5 @@
package hub.event.scrapers.core.datewithlocation;

/**
 * Immutable location of a scraped event: city, street address and the venue name.
 */
record EventLocation(String city, String address, String name) {

}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleDateMappedToEvent.java b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleDateMappedToEvent.java
new file mode 100644
index 0000000..f79567c
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleDateMappedToEvent.java
@@ -0,0 +1,24 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.events.city.City;
+import hub.event.events.event.Event;
+import hub.event.events.place.Place;
+
+import java.time.LocalDateTime;
+import java.time.ZonedDateTime;
+
+public class MultipleDateMappedToEvent extends Event {
+ public MultipleDateMappedToEvent(EventDateWithLocation dateWithLocation) {
+ super();
+
+ final LocalDateTime startDateTime = LocalDateTime.of(dateWithLocation.startDate(), dateWithLocation.startTime());
+ final ZonedDateTime zonedStartDateTime = ZonedDateTime.of(startDateTime, dateWithLocation.timeZone());
+
+ final City city = new City(null, dateWithLocation.city());
+ final Place place = new Place(null, dateWithLocation.address(), null, null);
+
+ setCity(city);
+ setPlace(place);
+ setStartDate(zonedStartDateTime);
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocations.java b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocations.java
new file mode 100644
index 0000000..98b2956
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocations.java
@@ -0,0 +1,38 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.Collection;
+
+public class MultipleEventDateWithLocations {
+ private final Collection eventDateWithLocations;
+
+ private MultipleEventDateWithLocations() {
+ eventDateWithLocations = new ArrayList<>();
+ }
+
+ private MultipleEventDateWithLocations(LocalDate date, LocalTime time, ZoneId timeZone, String city, String address, String locationName) throws EventDateInPastException {
+ this();
+ final EventDateWithLocation eventDateWithLocation = new EventDateWithLocation(date, time, timeZone, city, address, locationName);
+ this.eventDateWithLocations.add(eventDateWithLocation);
+ }
+
+ public static MultipleEventDateWithLocations create(LocalDate date, LocalTime time, ZoneId timeZone, String city, String address, String locationName) throws EventDateInPastException {
+ return new MultipleEventDateWithLocations(date, time, timeZone,city, address, locationName);
+ }
+
+ public MultipleEventDateWithLocations add(LocalDate date, LocalTime time, ZoneId timeZone, String city, String address, String locationName) throws EventDateInPastException {
+ final EventDateWithLocation eventDateWithLocation = new EventDateWithLocation(date, time, timeZone, city, address, locationName);
+ this.eventDateWithLocations.add(eventDateWithLocation);
+ return this;
+ }
+
+ public Collection eventDateWithLocations() {
+ return this.eventDateWithLocations;
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleMappedEvents.java b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleMappedEvents.java
new file mode 100644
index 0000000..5e1bbdb
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/MultipleMappedEvents.java
@@ -0,0 +1,22 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.events.event.Event;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class MultipleMappedEvents {
+ private MultipleEventDateWithLocations multipleEventDateWithLocations;
+
+ public MultipleMappedEvents(MultipleEventDateWithLocations multipleEventDateWithLocations) {
+
+ this.multipleEventDateWithLocations = multipleEventDateWithLocations;
+ }
+
+ public List events() {
+ return multipleEventDateWithLocations.eventDateWithLocations()
+ .stream()
+ .map(MultipleDateMappedToEvent::new)
+ .collect(Collectors.toList());
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/SingleDateMappedToEvent.java b/src/main/java/hub/event/scrapers/core/datewithlocation/SingleDateMappedToEvent.java
new file mode 100644
index 0000000..52fc6d1
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/SingleDateMappedToEvent.java
@@ -0,0 +1,33 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.events.city.City;
+import hub.event.events.event.Event;
+import hub.event.events.place.Place;
+
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZonedDateTime;
+import java.util.Objects;
+import java.util.Optional;
+
+public class SingleDateMappedToEvent extends Event {
+
+ public SingleDateMappedToEvent(SingleEventDateWithLocation dateWithLocation) {
+ final LocalDateTime startDateTime = LocalDateTime.of(dateWithLocation.startDate(), dateWithLocation.startTime());
+ final ZonedDateTime zonedStartDateTime = ZonedDateTime.of(startDateTime, dateWithLocation.timeZone());
+
+
+ final City city = new City(null, dateWithLocation.city());
+ final Place place = new Place(null, dateWithLocation.address(), null, null);
+
+ setCity(city);
+ setPlace(place);
+ setStartDate(zonedStartDateTime);
+
+ if (Objects.nonNull(dateWithLocation.endDate())) {
+ final LocalDateTime endDateTime = LocalDateTime.of(dateWithLocation.endDate(), Optional.ofNullable(dateWithLocation.endTime()).orElse(LocalTime.of(0, 0)));
+ final ZonedDateTime endStartDateTime = ZonedDateTime.of(endDateTime, dateWithLocation.timeZone());
+ setEndDate(endStartDateTime);
+ }
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocation.java b/src/main/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocation.java
new file mode 100644
index 0000000..a87effa
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocation.java
@@ -0,0 +1,107 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.scrapers.core.exceptions.EventDateEndDateTimeBeforeStartDateTimeException;
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneId;
+
+public class SingleEventDateWithLocation {
+ private final EventDateType eventDateType;
+ private final EventDateWithLocation eventDateWithLocation;
+
+ private SingleEventDateWithLocation(EventDateType eventDateType, LocalDate startDate, LocalTime startTime, LocalDate endDate, LocalTime endTime, ZoneId timeZone, String city, String address, String locationName) {
+ this.eventDateType = eventDateType;
+ this.eventDateWithLocation = new EventDateWithLocation(startDate, startTime, endDate, endTime, timeZone, city, address, locationName);
+ }
+
+ public static SingleEventDateWithLocation single(LocalDate startDate, LocalTime startTime, ZoneId timeZone, String city, String address, String locationName) throws EventDateInPastException {
+ validateStartDateTime(startDate, startTime);
+ return new SingleEventDateWithLocation(EventDateType.SINGLE, startDate, startTime, null, null, timeZone, city, address, locationName);
+ }
+
+ public static SingleEventDateWithLocation single(LocalDate startDate, LocalTime startTime, LocalDate endDate, LocalTime endTime, ZoneId timeZone, String city, String address, String locationName) throws EventDateEndDateTimeBeforeStartDateTimeException, EventDateInPastException {
+ validateStartDateTimeAndEndDateTime(startDate, startTime, endDate, endTime);
+ return new SingleEventDateWithLocation(EventDateType.SINGLE, startDate, startTime, endDate, endTime, timeZone, city, address, locationName);
+ }
+
+ public static SingleEventDateWithLocation period(LocalDate startDate, LocalTime startTime, LocalDate endDate, LocalTime endTime, ZoneId timeZone, String city, String address, String locationName) throws EventDateEndDateTimeBeforeStartDateTimeException, EventDateInPastException {
+ validateStartDateTimeAndEndDateTime(startDate, startTime, endDate, endTime);
+ return new SingleEventDateWithLocation(EventDateType.PERIOD, startDate, startTime, endDate, endTime, timeZone, city, address, locationName);
+ }
+
+ public static SingleEventDateWithLocation period(LocalDate startDate, LocalTime startTime, LocalDate endDate, ZoneId timeZone, String city, String address, String locationName) throws EventDateEndDateTimeBeforeStartDateTimeException, EventDateInPastException {
+ validateStartDateTimeAndEndDateTime(startDate, startTime, endDate, startTime);
+ return new SingleEventDateWithLocation(EventDateType.PERIOD, startDate, startTime, endDate, null, timeZone, city, address, locationName);
+ }
+
+ LocalDate startDate() {
+ return eventDateWithLocation.startDate();
+ }
+
+ LocalTime startTime() {
+ return eventDateWithLocation.startTime();
+ }
+
+ String city() {
+ return eventDateWithLocation.city();
+ }
+
+ LocalDate endDate() {
+ return eventDateWithLocation.endDate();
+ }
+
+ LocalTime endTime() {
+ return eventDateWithLocation.endTime();
+ }
+
+
+ boolean isSingleDate() {
+ return EventDateType.SINGLE.equals(this.eventDateType);
+ }
+
+ boolean isPeriodDate() {
+ return EventDateType.PERIOD.equals(this.eventDateType);
+ }
+
+ String address() {
+ return eventDateWithLocation.address();
+ }
+
+ String locationName() {
+ return eventDateWithLocation.locationName();
+ }
+
+ ZoneId timeZone(){
+ return eventDateWithLocation.timeZone();
+ }
+
+ private static void validateStartDateTimeAndEndDateTime(LocalDate startDate, LocalTime startTime, LocalDate endDate, LocalTime endTime) throws EventDateEndDateTimeBeforeStartDateTimeException, EventDateInPastException {
+ final LocalDateTime startLocalDateTime = LocalDateTime.of(startDate, startTime);
+ final LocalDateTime endLocalDateTime = LocalDateTime.of(endDate, endTime);
+ final LocalDateTime now = LocalDateTime.now();
+
+ if (startLocalDateTime.isBefore(now)) {
+ throw new EventDateInPastException(startLocalDateTime);
+ }
+ if (endLocalDateTime.isBefore(now)) {
+ throw new EventDateInPastException(endLocalDateTime);
+ }
+
+ if (endLocalDateTime.isBefore(startLocalDateTime)) {
+ throw new EventDateEndDateTimeBeforeStartDateTimeException(startLocalDateTime, endLocalDateTime);
+ }
+ }
+
+ private static void validateStartDateTime(LocalDate startDate, LocalTime startTime) throws EventDateInPastException {
+ final LocalDateTime startLocalDateTime = LocalDateTime.of(startDate, startTime);
+ final LocalDateTime now = LocalDateTime.now();
+
+ if (startLocalDateTime.isBefore(now)) {
+ throw new EventDateInPastException(startLocalDateTime);
+ }
+
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/exceptions/EventDateEndDateTimeBeforeStartDateTimeException.java b/src/main/java/hub/event/scrapers/core/exceptions/EventDateEndDateTimeBeforeStartDateTimeException.java
new file mode 100644
index 0000000..990ded2
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/exceptions/EventDateEndDateTimeBeforeStartDateTimeException.java
@@ -0,0 +1,9 @@
+package hub.event.scrapers.core.exceptions;
+
+import java.time.LocalDateTime;
+
+public class EventDateEndDateTimeBeforeStartDateTimeException extends Exception {
+ public EventDateEndDateTimeBeforeStartDateTimeException(LocalDateTime startDate, LocalDateTime endDate) {
+ super(String.format("End date = %s is before start date = %s", endDate, startDate));
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/exceptions/EventDateInPastException.java b/src/main/java/hub/event/scrapers/core/exceptions/EventDateInPastException.java
new file mode 100644
index 0000000..cacf1ce
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/exceptions/EventDateInPastException.java
@@ -0,0 +1,14 @@
+package hub.event.scrapers.core.exceptions;
+
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+
+public class EventDateInPastException extends Exception {
+ public EventDateInPastException(LocalDate date) {
+ super(String.format("Past event date = %s is not allowed, only time travelers can come to it ", date));
+ }
+
+ public EventDateInPastException(LocalDateTime incorrectLocalDateTime) {
+ super(String.format("Past event date = %s is not allowed, only time travelers can come to it ", incorrectLocalDateTime));
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/exceptions/ScraperConfigurationByNameNotExists.java b/src/main/java/hub/event/scrapers/core/exceptions/ScraperConfigurationByNameNotExists.java
new file mode 100644
index 0000000..60073f3
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/exceptions/ScraperConfigurationByNameNotExists.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core.exceptions;
+
+public class ScraperConfigurationByNameNotExists extends Exception {
+ public ScraperConfigurationByNameNotExists(String scraperConfigurationName) {
+ super(String.format("Scraper configuration by name %s not found", scraperConfigurationName));
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllErrorLogSpecification.java b/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllErrorLogSpecification.java
new file mode 100644
index 0000000..a5c306b
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllErrorLogSpecification.java
@@ -0,0 +1,60 @@
+package hub.event.scrapers.core.runlog;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.jpa.domain.Specification;
+
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Predicate;
+import javax.persistence.criteria.Root;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
+class EntityFindAllErrorLogSpecification implements Specification {
+
+ private final transient Logger logger = LoggerFactory.getLogger(EntityFindAllErrorLogSpecification.class);
+ private final transient ErrorLogSearchQuery searchQuery;
+
+ EntityFindAllErrorLogSpecification(ErrorLogSearchQuery errorLogSearchQuery) {
+ this.searchQuery = errorLogSearchQuery;
+ }
+
+ @Override
+ public Predicate toPredicate(Root root, CriteriaQuery> query, CriteriaBuilder criteriaBuilder) {
+ final var scraperConfig = root.join("scraperConfig");
+
+ List outputPredicates = new ArrayList<>();
+
+ if (searchQuery.hasConfigurationNames()) {
+ outputPredicates.add(criteriaBuilder.in(scraperConfig.get(EntityScraperRunErrorLogProperties.SCRAPER_CONFIGURATION_NAME)).value(searchQuery.configurationNames()));
+ }
+
+ if (Objects.nonNull(searchQuery.fromDate())) {
+ outputPredicates.add(criteriaBuilder.greaterThanOrEqualTo(root.get(EntityScraperRunErrorLogProperties.TIME), searchQuery.fromDate()));
+ }
+
+ if (Objects.nonNull(searchQuery.toDate())) {
+ outputPredicates.add(criteriaBuilder.lessThanOrEqualTo(root.get(EntityScraperRunErrorLogProperties.TIME), searchQuery.toDate()));
+ }
+
+ if (Objects.nonNull(searchQuery.description())) {
+ outputPredicates.add(criteriaBuilder.equal(root.get(EntityScraperRunErrorLogProperties.DESCRIPTION), searchQuery.description()));
+ }
+
+ if (searchQuery.hasErrorCodes()) {
+ outputPredicates.add(criteriaBuilder.in(root.get(EntityScraperRunErrorLogProperties.ERROR_CODE)).value(searchQuery.errorCodes()));
+ }
+
+ final Predicate outputPredicate = outputPredicates.stream()
+ .reduce(criteriaBuilder::and)
+ .orElse(null);
+
+ Optional.ofNullable(outputPredicate)
+ .ifPresent(predicate -> logger.debug("FindAllErrorLogSpecification output predicate: {}", predicate));
+
+ return outputPredicate;
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllStatusLogSpecification.java b/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllStatusLogSpecification.java
new file mode 100644
index 0000000..b0b8db1
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityFindAllStatusLogSpecification.java
@@ -0,0 +1,83 @@
+package hub.event.scrapers.core.runlog;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.jpa.domain.Specification;
+
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Predicate;
+import javax.persistence.criteria.Root;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
+class EntityFindAllStatusLogSpecification implements Specification {
+
+ private final transient Logger logger = LoggerFactory.getLogger(EntityFindAllStatusLogSpecification.class);
+ private final transient StatusLogSearchQuery searchQuery;
+
+
+ EntityFindAllStatusLogSpecification(StatusLogSearchQuery statusLogSearchQuery) {
+
+ this.searchQuery = statusLogSearchQuery;
+ }
+
+ @Override
+ public Predicate toPredicate(Root root, CriteriaQuery> query, CriteriaBuilder criteriaBuilder) {
+ final var scraperConfig = root.join("scraperConfig");
+
+ List outputPredicates = new ArrayList<>();
+
+ if (searchQuery.hasConfigurationNames()) {
+ outputPredicates.add(criteriaBuilder.in(scraperConfig.get(EntityScraperRunStatusLogProperties.SCRAPER_CONFIGURATION_NAME)).value(searchQuery.configurationNames()));
+ }
+
+ if (Objects.nonNull(searchQuery.startTimeFrom())) {
+ outputPredicates.add(criteriaBuilder.greaterThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.START_TIME), searchQuery.startTimeFrom()));
+ }
+
+ if (Objects.nonNull(searchQuery.startTimeTo())) {
+ outputPredicates.add(criteriaBuilder.lessThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.START_TIME), searchQuery.startTimeTo()));
+ }
+
+ if (Objects.nonNull(searchQuery.finishTimeFrom())) {
+ outputPredicates.add(criteriaBuilder.greaterThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.FINISH_TIME), searchQuery.finishTimeFrom()));
+ }
+
+ if (Objects.nonNull(searchQuery.finishTimeTo())) {
+ outputPredicates.add(criteriaBuilder.lessThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.FINISH_TIME), searchQuery.finishTimeTo()));
+ }
+
+ if (Objects.nonNull(searchQuery.hasScannedEvent())) {
+ final Predicate predicate = Boolean.TRUE.equals(searchQuery.hasScannedEvent())
+ ? criteriaBuilder.greaterThan(root.get(EntityScraperRunStatusLogProperties.SCANNED_EVENT_COUNT), 0)
+ : criteriaBuilder.or(criteriaBuilder.equal(root.get(EntityScraperRunStatusLogProperties.SCANNED_EVENT_COUNT), 0), criteriaBuilder.isNull(root.get(EntityScraperRunStatusLogProperties.SCANNED_EVENT_COUNT)));
+
+ outputPredicates.add(predicate);
+ } else if (Objects.nonNull(searchQuery.scannedEventGreaterThanOrEqualTo())) {
+ outputPredicates.add(criteriaBuilder.greaterThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.SCANNED_EVENT_COUNT), searchQuery.scannedEventGreaterThanOrEqualTo()));
+ }
+
+ if (Objects.nonNull(searchQuery.hasErrors())) {
+ final Predicate predicate = Boolean.TRUE.equals(searchQuery.hasErrors())
+ ? criteriaBuilder.greaterThan(root.get(EntityScraperRunStatusLogProperties.ERROR_COUNT), 0)
+ : criteriaBuilder.or(criteriaBuilder.equal(root.get(EntityScraperRunStatusLogProperties.ERROR_COUNT), 0), criteriaBuilder.isNull(root.get(EntityScraperRunStatusLogProperties.ERROR_COUNT)));
+
+ outputPredicates.add(predicate);
+ } else if (Objects.nonNull(searchQuery.errorCountGreaterThanOrEqualTo())) {
+ outputPredicates.add(criteriaBuilder.greaterThanOrEqualTo(root.get(EntityScraperRunStatusLogProperties.ERROR_COUNT), searchQuery.errorCountGreaterThanOrEqualTo()));
+ }
+
+ final Predicate outputPredicate = outputPredicates.stream()
+ .reduce(criteriaBuilder::and)
+ .orElse(null);
+
+ Optional.ofNullable(outputPredicate)
+ .ifPresent(predicate -> logger.debug("FindAllStatusLogSpecification output predicate: {}", predicate));
+
+ return outputPredicate;
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityScraperConfig.java b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperConfig.java
new file mode 100644
index 0000000..61b2c61
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperConfig.java
@@ -0,0 +1,55 @@
package hub.event.scrapers.core.runlog;

import javax.persistence.*;
import java.io.Serializable;
import java.util.Objects;

// Read-side JPA mapping of a scraper configuration row (table "scraper_config"),
// used here only to join log entries to their configuration name.
@Entity(name = "queryScraperConfig")
@Table(name = "scraper_config")
class EntityScraperConfig implements Serializable {
  @Id
  @GeneratedValue(strategy = GenerationType.IDENTITY)
  private Integer scraperId;
  // human-readable configuration name; unique across all scrapers
  @Column(unique = true)
  private String configurationName;

  // no-arg constructor required by JPA
  EntityScraperConfig() {
  }

  Integer getScraperId() {
    return scraperId;
  }

  void setScraperId(Integer scraperId) {
    this.scraperId = scraperId;
  }

  String getConfigurationName() {
    return configurationName;
  }

  void setConfigurationName(String configurationName) {
    this.configurationName = configurationName;
  }

  // NOTE(review): equality includes the generated id, so two unsaved instances
  // with equal names but null ids compare equal — confirm this is intended.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    EntityScraperConfig that = (EntityScraperConfig) o;
    return Objects.equals(scraperId, that.scraperId) && Objects.equals(configurationName, that.configurationName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(scraperId, configurationName);
  }

  @Override
  public String toString() {
    return "EntityScraperConfig{" +
        "scraperId=" + scraperId +
        ", configurationName='" + configurationName + '\'' +
        '}';
  }
}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLog.java b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLog.java
new file mode 100644
index 0000000..885a202
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLog.java
@@ -0,0 +1,95 @@
+package hub.event.scrapers.core.runlog;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.time.Instant;
+import java.util.Objects;
+
+@Entity(name = "queryScraperErrorLog")
+@Table(name = "scraper_error_log")
+class EntityScraperRunErrorLog implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer logId;
+
+ @Column(nullable = false, name = "error_time")
+ private Instant time;
+
+ @Column(nullable = false)
+ private String errorCode;
+ private String description;
+ @ManyToOne
+ @JoinColumn(name = "scraperId", nullable = false, insertable = false, updatable = false)
+ private EntityScraperConfig scraperConfig;
+
+ EntityScraperRunErrorLog() {
+ }
+
+ Integer getLogId() {
+ return logId;
+ }
+
+ void setLogId(Integer logId) {
+ this.logId = logId;
+ }
+
+ Instant getTime() {
+ return time;
+ }
+
+ void setTime(Instant time) {
+ this.time = time;
+ }
+
+ String getErrorCode() {
+ return errorCode;
+ }
+
+ void setErrorCode(String errorCode) {
+ this.errorCode = errorCode;
+ }
+
+ String getDescription() {
+ return description;
+ }
+
+ void setDescription(String description) {
+ this.description = description;
+ }
+
+ void setScraperConfig(EntityScraperConfig scraperConfig) {
+ this.scraperConfig = scraperConfig;
+ }
+
+ EntityScraperConfig getScraperConfig() {
+ return scraperConfig;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityScraperRunErrorLog that = (EntityScraperRunErrorLog) o;
+ return Objects.equals(logId, that.logId) && Objects.equals(time, that.time) && Objects.equals(errorCode, that.errorCode) && Objects.equals(description, that.description) && Objects.equals(scraperConfig, that.scraperConfig);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(logId, time, errorCode, description, scraperConfig);
+ }
+
+ @Override
+ public String toString() {
+ return "EntityScraperRunErrorLog{" +
+ "logId=" + logId +
+ ", time=" + time +
+ ", errorCode='" + errorCode + '\'' +
+ ", description='" + description + '\'' +
+ ", scraperConfig=" + scraperConfig +
+ '}';
+ }
+
+ String getScraperConfigurationName() {
+ return scraperConfig.getConfigurationName();
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLogProperties.java b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLogProperties.java
new file mode 100644
index 0000000..b44428c
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunErrorLogProperties.java
@@ -0,0 +1,13 @@
+package hub.event.scrapers.core.runlog;
+
+class EntityScraperRunErrorLogProperties {
+ static final String ID = "logId";
+ static final String SCRAPER_CONFIGURATION_NAME_PATH = "scraperConfig.configurationName" ;
+ static final String SCRAPER_CONFIGURATION_NAME = "configurationName";
+ static final String TIME = "time";
+ static final String DESCRIPTION = "description";
+ static final String ERROR_CODE = "errorCode";
+
+ private EntityScraperRunErrorLogProperties() {
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLog.java b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLog.java
new file mode 100644
index 0000000..efa748c
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLog.java
@@ -0,0 +1,107 @@
+package hub.event.scrapers.core.runlog;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.time.Instant;
+import java.util.Objects;
+
+@Entity(name = "queryScraperStatusLog")
+@Table(name = "scraper_status_log")
+class EntityScraperRunStatusLog implements Serializable {
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Integer logId;
+ @Column(nullable = false)
+ private Instant startTime;
+ @Column(nullable = false)
+ private Instant finishTime;
+ @Column(nullable = false)
+ private Integer scannedEventCount;
+
+ @Column(nullable = false)
+ private Integer errorCount;
+
+ @ManyToOne
+ @JoinColumn(name = "scraperId", nullable = false, insertable = false, updatable = false)
+ private EntityScraperConfig scraperConfig;
+
+ EntityScraperRunStatusLog() {
+ }
+
+ Integer getLogId() {
+ return logId;
+ }
+
+ void setLogId(Integer logId) {
+ this.logId = logId;
+ }
+
+ Instant getStartTime() {
+ return startTime;
+ }
+
+ void setStartTime(Instant startTime) {
+ this.startTime = startTime;
+ }
+
+ Instant getFinishTime() {
+ return finishTime;
+ }
+
+ void setFinishTime(Instant finishTime) {
+ this.finishTime = finishTime;
+ }
+
+ Integer getScannedEventCount() {
+ return scannedEventCount;
+ }
+
+ void setScannedEventCount(Integer scannedEventCount) {
+ this.scannedEventCount = scannedEventCount;
+ }
+
+ Integer getErrorCount() {
+ return errorCount;
+ }
+
+ void setErrorCount(Integer errorCount) {
+ this.errorCount = errorCount;
+ }
+
+ void setScraperConfig(EntityScraperConfig scraperConfig) {
+ this.scraperConfig = scraperConfig;
+ }
+
+ EntityScraperConfig getScraperConfig() {
+ return scraperConfig;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityScraperRunStatusLog that = (EntityScraperRunStatusLog) o;
+ return Objects.equals(logId, that.logId) && Objects.equals(startTime, that.startTime) && Objects.equals(finishTime, that.finishTime) && Objects.equals(scannedEventCount, that.scannedEventCount) && Objects.equals(errorCount, that.errorCount) && Objects.equals(scraperConfig, that.scraperConfig);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(logId, startTime, finishTime, scannedEventCount, errorCount, scraperConfig);
+ }
+
+ @Override
+ public String toString() {
+ return "EntityScraperRunStatusLog{" +
+ "logId=" + logId +
+ ", startTime=" + startTime +
+ ", finishTime=" + finishTime +
+ ", scannedEventCount=" + scannedEventCount +
+ ", errorCount=" + errorCount +
+ ", scraperConfig=" + scraperConfig +
+ '}';
+ }
+
+ String getScraperConfigurationName() {
+ return scraperConfig.getConfigurationName();
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLogProperties.java b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLogProperties.java
new file mode 100644
index 0000000..29a9e71
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/EntityScraperRunStatusLogProperties.java
@@ -0,0 +1,14 @@
+package hub.event.scrapers.core.runlog;
+
+class EntityScraperRunStatusLogProperties {
+ static final String ID = "logId";
+ static final String SCRAPER_CONFIGURATION_NAME_PATH = "scraperConfig.configurationName" ;
+ static final String SCRAPER_CONFIGURATION_NAME = "configurationName";
+ static final String START_TIME = "startTime";
+ static final String FINISH_TIME = "finishTime";
+ static final String SCANNED_EVENT_COUNT = "scannedEventCount";
+ static final String ERROR_COUNT = "errorCount";
+
+ private EntityScraperRunStatusLogProperties() {
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/ErrorLogSearchQuery.java b/src/main/java/hub/event/scrapers/core/runlog/ErrorLogSearchQuery.java
new file mode 100644
index 0000000..ea574b4
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/ErrorLogSearchQuery.java
@@ -0,0 +1,120 @@
+package hub.event.scrapers.core.runlog;
+
+import java.time.Instant;
+import java.util.List;
+import java.util.Objects;
+
+public class ErrorLogSearchQuery {
+ private final List configurationNames;
+ private final Instant fromDate;
+ private final Instant toDate;
+ private final List errorCodes;
+ private final String description;
+
+ private final Integer pageSize;
+ private final Integer page;
+
+ private ErrorLogSearchQuery(List configurationNames, Instant fromDate, Instant toDate, List errorCodes, String description, Integer page, Integer pageSize) {
+ this.configurationNames = configurationNames;
+ this.fromDate = fromDate;
+ this.toDate = toDate;
+ this.errorCodes = errorCodes;
+ this.description = description;
+ this.pageSize = pageSize;
+ this.page = page;
+ }
+
+
+ public List configurationNames() {
+ return configurationNames;
+ }
+
+ public Instant fromDate() {
+ return fromDate;
+ }
+
+ public Instant toDate() {
+ return toDate;
+ }
+
+ public List errorCodes() {
+ return errorCodes;
+ }
+
+ public String description() {
+ return description;
+ }
+
+ public Integer pageSize() {
+ return pageSize;
+ }
+
+ public Integer page() {
+ return page;
+ }
+
+ public boolean hasConfigurationNames() {
+ return Objects.nonNull(configurationNames) && !configurationNames.isEmpty();
+ }
+
+ public boolean hasErrorCodes() {
+ return Objects.nonNull(errorCodes) && !errorCodes.isEmpty();
+ }
+
+ public boolean hasPageSetting() {
+ return Objects.nonNull(page) && Objects.nonNull(pageSize);
+ }
+
+ public static ErrorLogSearchQueryBuilder builder() {
+ return new ErrorLogSearchQueryBuilder();
+ }
+
+ public static class ErrorLogSearchQueryBuilder {
+ private List configurationNames;
+ private Instant fromDate;
+ private Instant toDate;
+ private List errorCodes;
+ private String description;
+
+ private Integer pageSize;
+ private Integer page;
+
+ private ErrorLogSearchQueryBuilder() {
+ }
+
+ public ErrorLogSearchQueryBuilder description(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public ErrorLogSearchQueryBuilder errorCodes(List errorCodes) {
+ this.errorCodes = errorCodes;
+ return this;
+ }
+
+ public ErrorLogSearchQueryBuilder fromDate(Instant fromDate) {
+ this.fromDate = fromDate;
+ return this;
+ }
+
+ public ErrorLogSearchQueryBuilder toDate(Instant toDate) {
+ this.toDate = toDate;
+ return this;
+ }
+
+ public ErrorLogSearchQueryBuilder configurationNames(List configurationNames) {
+ this.configurationNames = configurationNames;
+ return this;
+ }
+
+ public ErrorLogSearchQueryBuilder page(int page, int pageSize) {
+ this.page = page;
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public ErrorLogSearchQuery build() {
+ return new ErrorLogSearchQuery(configurationNames, fromDate, toDate, errorCodes, description, page, pageSize);
+ }
+ }
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunErrorQueryRepository.java b/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunErrorQueryRepository.java
new file mode 100644
index 0000000..0cad8a0
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunErrorQueryRepository.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core.runlog;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+
+interface JpaScraperRunErrorQueryRepository extends JpaRepository, JpaSpecificationExecutor {
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunLogQueryRepository.java b/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunLogQueryRepository.java
new file mode 100644
index 0000000..a5c4be1
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/JpaScraperRunLogQueryRepository.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core.runlog;
+
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+
+interface JpaScraperRunLogQueryRepository extends JpaRepository, JpaSpecificationExecutor {
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacade.java b/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacade.java
new file mode 100644
index 0000000..44e9500
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacade.java
@@ -0,0 +1,23 @@
+package hub.event.scrapers.core.runlog;
+
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+
+@Service
+public class ScraperLogQueryFacade {
+ private final ScraperLogQueryRepository scraperLogRepository;
+
+ public ScraperLogQueryFacade(ScraperLogQueryRepository scraperLogQueryRepository) {
+ this.scraperLogRepository = scraperLogQueryRepository;
+ }
+
+ public List findAllErrorLog(ErrorLogSearchQuery errorLogSearchQuery) {
+ return scraperLogRepository.findAllErrorLog(errorLogSearchQuery);
+ }
+
+ public List findAllStatusLog(StatusLogSearchQuery statusLogSearchQuery) {
+ return scraperLogRepository.findAllStatusLog(statusLogSearchQuery);
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryRepository.java b/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryRepository.java
new file mode 100644
index 0000000..5500d67
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/ScraperLogQueryRepository.java
@@ -0,0 +1,100 @@
+package hub.event.scrapers.core.runlog;
+
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.jpa.domain.Specification;
+import org.springframework.stereotype.Repository;
+
+import java.util.List;
+import java.util.Objects;
+
+@Repository
+class ScraperLogQueryRepository {
+ private final JpaScraperRunLogQueryRepository runLogQueryRepository;
+ private final JpaScraperRunErrorQueryRepository runErrorQueryRepository;
+
+ ScraperLogQueryRepository(JpaScraperRunLogQueryRepository runLogQueryRepository, JpaScraperRunErrorQueryRepository runErrorQueryRepository) {
+ this.runLogQueryRepository = runLogQueryRepository;
+ this.runErrorQueryRepository = runErrorQueryRepository;
+ }
+
+ List findAllErrorLog(ErrorLogSearchQuery errorLogSearchQuery) {
+ final Sort sort = Sort.by(
+ Sort.Order.asc(EntityScraperRunErrorLogProperties.SCRAPER_CONFIGURATION_NAME_PATH),
+ Sort.Order.desc(EntityScraperRunErrorLogProperties.TIME),
+ Sort.Order.desc(EntityScraperRunErrorLogProperties.ID)
+ );
+
+ final Specification findAllSpecification = new EntityFindAllErrorLogSpecification(errorLogSearchQuery);
+ final Pageable pageable = extractPageSettings(errorLogSearchQuery, sort);
+
+ return findAllErrorLog(findAllSpecification, pageable , sort)
+ .stream()
+ .map(this::mapToLog)
+ .toList();
+
+ }
+
+ List findAllStatusLog(StatusLogSearchQuery statusLogSearchQuery) {
+ final Sort sort = Sort.by(
+ Sort.Order.asc(EntityScraperRunStatusLogProperties.SCRAPER_CONFIGURATION_NAME_PATH),
+ Sort.Order.desc(EntityScraperRunStatusLogProperties.START_TIME),
+ Sort.Order.desc(EntityScraperRunStatusLogProperties.ID)
+ );
+ final Specification findAllSpecification = new EntityFindAllStatusLogSpecification(statusLogSearchQuery);
+ final Pageable pageable = extractPageSettings(statusLogSearchQuery, sort);
+
+ return findAllStatusLog(findAllSpecification, pageable, sort)
+ .stream()
+ .map(this::mapToLog)
+ .toList();
+ }
+
+ private List findAllStatusLog(Specification findAllSpecification, Pageable pageable, Sort sort) {
+ if (Objects.isNull(pageable)) {
+ return runLogQueryRepository.findAll(findAllSpecification, sort);
+ }
+ return runLogQueryRepository.findAll(findAllSpecification, pageable).toList();
+ }
+
+ private List findAllErrorLog(Specification findAllSpecification, Pageable pageable, Sort sort) {
+ if (Objects.isNull(pageable)) {
+ return runErrorQueryRepository.findAll(findAllSpecification, sort);
+ }
+ return runErrorQueryRepository.findAll(findAllSpecification, pageable).toList();
+ }
+
+ private ScraperRunStatusLog mapToLog(EntityScraperRunStatusLog entityScraperRunStatusLog) {
+
+ return new ScraperRunStatusLog(
+ entityScraperRunStatusLog.getScraperConfigurationName(),
+ entityScraperRunStatusLog.getStartTime(),
+ entityScraperRunStatusLog.getFinishTime(),
+ entityScraperRunStatusLog.getScannedEventCount(),
+ entityScraperRunStatusLog.getErrorCount()
+ );
+ }
+
+ private ScraperRunErrorLog mapToLog(EntityScraperRunErrorLog entityScraperRunErrorLog) {
+
+ return new ScraperRunErrorLog(
+ entityScraperRunErrorLog.getScraperConfigurationName(),
+ entityScraperRunErrorLog.getTime(),
+ entityScraperRunErrorLog.getErrorCode(),
+ entityScraperRunErrorLog.getDescription());
+ }
+
+ private Pageable extractPageSettings(StatusLogSearchQuery statusLogSearchQuery, Sort sort) {
+ return statusLogSearchQuery.hasPageSetting()
+ ? PageRequest.of(statusLogSearchQuery.page(), statusLogSearchQuery.pageSize(),sort )
+ : null;
+ }
+
+ private Pageable extractPageSettings(ErrorLogSearchQuery errorLogSearchQuery, Sort sort) {
+ return errorLogSearchQuery.hasPageSetting()
+ ? PageRequest.of(errorLogSearchQuery.page(), errorLogSearchQuery.pageSize(), sort)
+ : null;
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/ScraperRunErrorLog.java b/src/main/java/hub/event/scrapers/core/runlog/ScraperRunErrorLog.java
new file mode 100644
index 0000000..3ed632b
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/ScraperRunErrorLog.java
@@ -0,0 +1,6 @@
+package hub.event.scrapers.core.runlog;
+
+import java.time.Instant;
+
+public record ScraperRunErrorLog(String configurationName, Instant time, String errorCode, String description) {
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/ScraperRunStatusLog.java b/src/main/java/hub/event/scrapers/core/runlog/ScraperRunStatusLog.java
new file mode 100644
index 0000000..fe5d4ce
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/ScraperRunStatusLog.java
@@ -0,0 +1,7 @@
+package hub.event.scrapers.core.runlog;
+
+import java.time.Instant;
+
+public record ScraperRunStatusLog(String configurationName, Instant startTime, Instant finishTime,
+ Integer scannedEventCount, Integer errorCount) {
+}
diff --git a/src/main/java/hub/event/scrapers/core/runlog/StatusLogSearchQuery.java b/src/main/java/hub/event/scrapers/core/runlog/StatusLogSearchQuery.java
new file mode 100644
index 0000000..111c0c4
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/runlog/StatusLogSearchQuery.java
@@ -0,0 +1,164 @@
+package hub.event.scrapers.core.runlog;
+
+import java.time.Instant;
+import java.util.List;
+import java.util.Objects;
+
+public class StatusLogSearchQuery {
+ private final List configurationNames;
+ private final Instant startTimeTo;
+ private final Instant startTimeFrom;
+ private final Instant finishTimeTo;
+ private final Instant finishTimeFrom;
+ private final Integer scannedEventGreaterThanOrEqualTo;
+ private final Integer errorCountGreaterThanOrEqualTo;
+ private final Boolean hasErrors;
+ private final Boolean hasScannedEvent;
+ private final Integer pageSize;
+ private final Integer page;
+
+ private StatusLogSearchQuery(List configurationNames, Instant startTimeTo, Instant startTimeFrom, Instant finishTimeTo, Instant finishTime, Integer scannedEventGreaterThanOrEqualTo, Integer errorCountGreaterThanOrEqualTo, Boolean hasErrors, Boolean hasScannedEvent, Integer page, Integer pageSize) {
+ this.configurationNames = configurationNames;
+ this.startTimeTo = startTimeTo;
+ this.startTimeFrom = startTimeFrom;
+ this.finishTimeTo = finishTimeTo;
+ this.finishTimeFrom = finishTime;
+ this.scannedEventGreaterThanOrEqualTo = scannedEventGreaterThanOrEqualTo;
+ this.errorCountGreaterThanOrEqualTo = errorCountGreaterThanOrEqualTo;
+ this.hasErrors = hasErrors;
+ this.hasScannedEvent = hasScannedEvent;
+ this.pageSize = pageSize;
+ this.page = page;
+ }
+
+ public static StatusLogSearchCommandBuilder builder() {
+ return new StatusLogSearchCommandBuilder();
+ }
+
+ public List configurationNames() {
+ return configurationNames;
+ }
+
+ public Instant startTimeFrom() {
+ return startTimeFrom;
+ }
+
+ public Instant finishTimeFrom() {
+ return finishTimeFrom;
+ }
+
+ public Instant startTimeTo() {
+ return startTimeTo;
+ }
+
+ public Instant finishTimeTo() {
+ return finishTimeTo;
+ }
+
+ public Integer scannedEventGreaterThanOrEqualTo() {
+ return scannedEventGreaterThanOrEqualTo;
+ }
+
+ public Integer errorCountGreaterThanOrEqualTo() {
+ return errorCountGreaterThanOrEqualTo;
+ }
+
+ public Boolean hasErrors() {
+ return hasErrors;
+ }
+
+ public Boolean hasScannedEvent() {
+ return hasScannedEvent;
+ }
+
+ public Integer pageSize() {
+ return pageSize;
+ }
+
+ public Integer page() {
+ return page;
+ }
+
+ public boolean hasConfigurationNames() {
+ return Objects.nonNull(configurationNames) && !configurationNames.isEmpty();
+ }
+
+ public boolean hasPageSetting() {
+ return Objects.nonNull(page) && Objects.nonNull(pageSize);
+ }
+
+ public static class StatusLogSearchCommandBuilder {
+ private List configurationNames;
+ private Instant startTimeTo;
+ private Instant startTimeFrom;
+ private Instant finishTimeTo;
+ private Instant finishTimeFrom;
+ private Integer scannedEventGreaterThanOrEqualTo;
+ private Integer errorCountGreaterThanOrEqualTo;
+ private Boolean hasErrors;
+
+ private Boolean hasScannedEvent;
+
+ private Integer pageSize;
+ private Integer page;
+
+ private StatusLogSearchCommandBuilder() {
+ }
+
+ public StatusLogSearchCommandBuilder configurationNames(List configurationNames) {
+ this.configurationNames = configurationNames;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder startTimeTo(Instant startTimeTo) {
+ this.startTimeTo = startTimeTo;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder startTimeFrom(Instant startTimeFrom) {
+ this.startTimeFrom = startTimeFrom;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder finishTimeTo(Instant finishTimeTo) {
+ this.finishTimeTo = finishTimeTo;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder finishTimeFrom(Instant finishTimeFrom) {
+ this.finishTimeFrom = finishTimeFrom;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder scannedEventGreaterThanOrEqualTo(int scannedEventCount) {
+ this.scannedEventGreaterThanOrEqualTo = scannedEventCount;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder errorCountGreaterThanOrEqualTo(int errorCount) {
+ this.errorCountGreaterThanOrEqualTo = errorCount;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder hasScannedEvent(Boolean hasScannedEvent) {
+ this.hasScannedEvent = hasScannedEvent;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder hasErrors(Boolean hasErrors) {
+ this.hasErrors = hasErrors;
+ return this;
+ }
+
+ public StatusLogSearchCommandBuilder page(int page, int pageSize) {
+ this.page = page;
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public StatusLogSearchQuery build() {
+ return new StatusLogSearchQuery(configurationNames, startTimeTo, startTimeFrom, finishTimeTo, finishTimeFrom, scannedEventGreaterThanOrEqualTo, errorCountGreaterThanOrEqualTo, hasErrors, hasScannedEvent, page, pageSize);
+ }
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/scraper/LastScrapedEventMarker.java b/src/main/java/hub/event/scrapers/core/scraper/LastScrapedEventMarker.java
new file mode 100644
index 0000000..bcf0ae8
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/scraper/LastScrapedEventMarker.java
@@ -0,0 +1,72 @@
+package hub.event.scrapers.core.scraper;
+
+import java.time.Instant;
+import java.util.Objects;
+
+public class LastScrapedEventMarker {
+ private final String scraperConfigurationName;
+ private final Instant runDateTime;
+ private final String eventTitle;
+ private final String marker;
+ private final boolean complete;
+
+ public LastScrapedEventMarker(String scraperConfigurationName, Instant runDateTime, String eventTitle, String marker) {
+ this(scraperConfigurationName, runDateTime, eventTitle,marker,false);
+ }
+
+ public LastScrapedEventMarker(String scraperConfigurationName, Instant runDateTime, String eventTitle, String marker, boolean complete) {
+ this.scraperConfigurationName = scraperConfigurationName;
+ this.runDateTime = runDateTime;
+ this.eventTitle = eventTitle;
+ this.marker = marker;
+ this.complete = complete;
+ }
+
+ public String scraperConfigurationName() {
+ return scraperConfigurationName;
+ }
+
+ public Instant runDateTime() {
+ return runDateTime;
+ }
+
+ public String eventTitle() {
+ return eventTitle;
+ }
+
+ public String marker() {
+ return marker;
+ }
+
+ public boolean complete() {
+ return complete;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (obj == null || obj.getClass() != this.getClass()) return false;
+ var that = (LastScrapedEventMarker) obj;
+ return Objects.equals(this.scraperConfigurationName, that.scraperConfigurationName) &&
+ Objects.equals(this.runDateTime, that.runDateTime) &&
+ Objects.equals(this.eventTitle, that.eventTitle) &&
+ Objects.equals(this.marker, that.marker) &&
+ this.complete == that.complete;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(scraperConfigurationName, runDateTime, eventTitle, marker, complete);
+ }
+
+ @Override
+ public String toString() {
+ return "LastScrapedEventMarker[" +
+ "scraperConfigurationName=" + scraperConfigurationName + ", " +
+ "runDateTime=" + runDateTime + ", " +
+ "eventTitle=" + eventTitle + ", " +
+ "marker=" + marker + ", " +
+ "complete=" + complete + ']';
+ }
+
+}
diff --git a/src/main/java/hub/event/scrapers/core/scraper/ScraperConfig.java b/src/main/java/hub/event/scrapers/core/scraper/ScraperConfig.java
new file mode 100644
index 0000000..58f1ab0
--- /dev/null
+++ b/src/main/java/hub/event/scrapers/core/scraper/ScraperConfig.java
@@ -0,0 +1,6 @@
+package hub.event.scrapers.core.scraper;
+
+import java.time.ZoneId;
+
+public record ScraperConfig(Integer scraperId, String configurationName, ZoneId timeZone, boolean isActive) {
+}
diff --git a/src/main/java/hub/event/scrapers/ebilet/EbiletScraper.java b/src/main/java/hub/event/scrapers/ebilet/EbiletScraper.java
index fa175cd..c79dd94 100644
--- a/src/main/java/hub/event/scrapers/ebilet/EbiletScraper.java
+++ b/src/main/java/hub/event/scrapers/ebilet/EbiletScraper.java
@@ -1,6 +1,17 @@
package hub.event.scrapers.ebilet;
-import hub.event.scrapers.Scrappable;
+import hub.event.scrapers.core.PageScraperPort;
+import hub.event.scrapers.core.ScrapedEvent;
+import org.springframework.stereotype.Component;
-class EbiletScraper implements Scrappable {
+import java.util.Collection;
+import java.util.Collections;
+
+@Component
+class EbiletScraper extends PageScraperPort {
+
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
}
diff --git a/src/main/java/hub/event/scrapers/empikbilet/EmpikBiletScraper.java b/src/main/java/hub/event/scrapers/empikbilet/EmpikBiletScraper.java
index 3942aab..908ef34 100644
--- a/src/main/java/hub/event/scrapers/empikbilet/EmpikBiletScraper.java
+++ b/src/main/java/hub/event/scrapers/empikbilet/EmpikBiletScraper.java
@@ -1,6 +1,18 @@
package hub.event.scrapers.empikbilet;
-import hub.event.scrapers.Scrappable;
+import hub.event.scrapers.core.PageScraperPort;
+import hub.event.scrapers.core.ScrapedEvent;
+import org.springframework.stereotype.Component;
-class EmpikBiletScraper implements Scrappable {
+import java.util.Collection;
+import java.util.Collections;
+
+@Component
+class EmpikBiletScraper extends PageScraperPort {
+
+
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
}
diff --git a/src/main/java/hub/event/scrapers/goingapp/GoingAppScraper.java b/src/main/java/hub/event/scrapers/goingapp/GoingAppScraper.java
index 724336a..9d7c360 100644
--- a/src/main/java/hub/event/scrapers/goingapp/GoingAppScraper.java
+++ b/src/main/java/hub/event/scrapers/goingapp/GoingAppScraper.java
@@ -1,7 +1,18 @@
package hub.event.scrapers.goingapp;
-import hub.event.scrapers.Scrappable;
+import hub.event.scrapers.core.PageScraperPort;
+import hub.event.scrapers.core.ScrapedEvent;
+import org.springframework.stereotype.Component;
+import java.util.Collection;
+import java.util.Collections;
-class GoingAppScraper implements Scrappable {
+
+@Component
+class GoingAppScraper extends PageScraperPort {
+
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
}
diff --git a/src/main/java/hub/event/scrapers/kupbilecik/KupBilecikScraper.java b/src/main/java/hub/event/scrapers/kupbilecik/KupBilecikScraper.java
index cda11f1..0ea5575 100644
--- a/src/main/java/hub/event/scrapers/kupbilecik/KupBilecikScraper.java
+++ b/src/main/java/hub/event/scrapers/kupbilecik/KupBilecikScraper.java
@@ -1,9 +1,18 @@
package hub.event.scrapers.kupbilecik;
-import hub.event.scrapers.Scrappable;
+import hub.event.scrapers.core.PageScraperPort;
+import hub.event.scrapers.core.ScrapedEvent;
+import org.springframework.stereotype.Component;
+import java.util.Collection;
+import java.util.Collections;
-class KupBilecikScraper implements Scrappable {
+@Component
+class KupBilecikScraper extends PageScraperPort {
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
}
diff --git a/src/main/java/hub/event/scrapers/proanima/ProanimaScraper.java b/src/main/java/hub/event/scrapers/proanima/ProanimaScraper.java
index ca8f821..4b882f2 100644
--- a/src/main/java/hub/event/scrapers/proanima/ProanimaScraper.java
+++ b/src/main/java/hub/event/scrapers/proanima/ProanimaScraper.java
@@ -1,6 +1,16 @@
package hub.event.scrapers.proanima;
-import hub.event.scrapers.Scrappable;
+import hub.event.scrapers.core.PageScraperPort;
+import hub.event.scrapers.core.ScrapedEvent;
+import org.springframework.stereotype.Component;
-class ProanimaScraper implements Scrappable {
+import java.util.Collection;
+import java.util.Collections;
+
+@Component
+class ProanimaScraper extends PageScraperPort {
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
}
diff --git a/src/main/java/hub/event/statistics/StatsService.java b/src/main/java/hub/event/statistics/StatsService.java
index 5e4365d..9302f0e 100644
--- a/src/main/java/hub/event/statistics/StatsService.java
+++ b/src/main/java/hub/event/statistics/StatsService.java
@@ -1,8 +1,4 @@
package hub.event.statistics;
-import hub.event.events.EventService;
-import hub.event.scrapers.ScrapersService;
-import hub.event.users.UserService;
-
public class StatsService {
}
diff --git a/src/main/java/hub/event/users/UserService.java b/src/main/java/hub/event/users/UserService.java
index 12d4b75..191e24a 100644
--- a/src/main/java/hub/event/users/UserService.java
+++ b/src/main/java/hub/event/users/UserService.java
@@ -1,6 +1,4 @@
package hub.event.users;
-import hub.event.auth.AuthService;
-
public class UserService {
}
diff --git a/src/main/java/hub/event/usersapi/UserController.java b/src/main/java/hub/event/usersapi/UserController.java
index 8531ee7..1263f68 100644
--- a/src/main/java/hub/event/usersapi/UserController.java
+++ b/src/main/java/hub/event/usersapi/UserController.java
@@ -1,9 +1,4 @@
package hub.event.usersapi;
-import hub.event.auth.AuthService;
-import hub.event.users.UserService;
-import hub.event.newsletter.NewsLetterService;
-import hub.event.events.EventService;
-
class UserController {
}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 037a898..0eff880 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -2,11 +2,13 @@
spring.profiles.active=prod
spring.liquibase.change-log=classpath:database/changelog-root.xml
spring.jpa.hibernate.show-sql=true
+#scrapers.run.cron.expression=0 0 0 * * *
+scrapers.run.cron.expression=0 * * * * *
#---
spring.config.activate.on-profile=prod
spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect
-spring.jpa.hibernate.ddl-auto=update
+spring.jpa.hibernate.ddl-auto=none
spring.jpa.hibernate.show-sql=true
spring.datasource.url=jdbc:postgresql://localhost:5432/EventScraper
spring.datasource.username=admin
diff --git a/src/main/resources/database/change/change_2.yaml b/src/main/resources/database/change/change_2.yaml
new file mode 100644
index 0000000..4320b4c
--- /dev/null
+++ b/src/main/resources/database/change/change_2.yaml
@@ -0,0 +1,205 @@
+databaseChangeLog:
+ - changeSet:
+ id: scraper_config_table_2022_10_17
+ author: batonikleonardo
+ comment: Configuration for all available scrapers
+ changes:
+ - createTable:
+ preConditions:
+ not:
+ tableExists:
+ tableName: scraper_config
+ tableName: scraper_config
+ remarks: Configuration table for all available scrapers
+ columns:
+ - column:
+ name: scraper_id
+ type: int
+ autoIncrement: true
+ constraints:
+ primaryKey: true
+ - column:
+ name: configuration_name
+ type: varchar
+ constraints:
+ nullable: false
+ unique: true
+ - column:
+ name: is_active
+ type: boolean
+ defaultValueBoolean: false
+ constraints:
+ nullable: false
+ - column:
+ name: time_zone
+ type: varchar
+ constraints:
+ nullable: false
+
+ - changeSet:
+ id: scraper_status_log_table_2022_10_17
+ author: batonikleonardo
+ comment: Create table for scrapers status logs
+ changes:
+ - createTable:
+ preConditions:
+ not:
+ tableExists:
+ tableName: scraper_status_log
+ tableName: scraper_status_log
+ remarks: Scrapers status logs
+ columns:
+ - column:
+ name: log_id
+ type: int
+ autoIncrement: true
+ constraints:
+ primaryKey: true
+ - column:
+ name: error_count
+ type: int
+ - column:
+ name: start_time
+ type: timestamp with time zone
+ constraints:
+ nullable: false
+ - column:
+ name: finish_time
+ type: timestamp with time zone
+ constraints:
+ nullable: false
+ - column:
+ name: scanned_event_count
+ type: int
+ - column:
+ name: scraper_id
+ type: int
+ constraints:
+ nullable: false
+ foreignKeyName: status_log_scraper_fk
+ referencedTableName: scraper_config
+ referencedColumnNames: scraper_id
+
+ - changeSet:
+ id: scraper_error_log_table_2022_10_17
+ author: batonikleonardo
+ comment: Create table for scrapers error logs
+ changes:
+ - createTable:
+ preConditions:
+ not:
+ tableExists:
+ tableName: scraper_error_log
+ tableName: scraper_error_log
+ remarks: Scrapers error logs
+ columns:
+ - column:
+ name: log_id
+ type: int
+ autoIncrement: true
+ constraints:
+ primaryKey: true
+ - column:
+ name: description
+ type: varchar
+ - column:
+ name: error_code
+ type: varchar
+ - column:
+ name: error_time
+ type: timestamp with time zone
+ constraints:
+ nullable: false
+ - column:
+ name: scraper_id
+ type: int
+ constraints:
+ nullable: false
+ foreignKeyName: error_log_scraper_fk
+ referencedTableName: scraper_config
+ referencedColumnNames: scraper_id
+
+ - changeSet:
+ id: scraper_scraped_event_maker_table_2022_10_17
+ author: batonikleonardo
+ comment: Create table for last scraped event marker
+ changes:
+ - createTable:
+ preConditions:
+ not:
+ tableExists:
+ tableName: scraper_scraped_event_maker
+ tableName: scraper_scraped_event_maker
+ remarks: Marker for last scraped event
+ columns:
+ - column:
+ name: marker_id
+ type: int
+ autoIncrement: true
+ constraints:
+ primaryKey: true
+ - column:
+ name: event_title
+ type: varchar
+ - column:
+ name: is_complete
+ type: boolean
+ defaultValueBoolean: false
+ constraints:
+ nullable: false
+ - column:
+ name: marker
+ type: varchar
+ - column:
+ name: run_time
+ type: timestamp with time zone
+ constraints:
+ nullable: false
+ - column:
+ name: scraper_id
+ type: int
+ constraints:
+ nullable: false
+ foreignKeyName: maker_scraper_fk
+ referencedTableName: scraper_config
+ referencedColumnNames: scraper_id
+
+ - changeSet:
+ id: scraper_status_log_index_2022_10_17
+ author: batonikleonardo
+ comment : scrapers status logs scraper id index
+ changes:
+ - createIndex:
+ preConditions:
+ not:
+ indexExists:
+ indexName: status_log_serach_idx
+ tableName: scraper_status_log
+ indexName: status_log_scraper_id_idx
+ columns:
+ - column:
+ name: start_time
+ descending: true
+ - column:
+ name: log_id
+ descending: true
+
+ - changeSet:
+ id: scraper_error_log_index_2022_10_17
+ author: batonikleonardo
+ comment: scrapers error logs scraper id index
+ changes:
+ - createIndex:
+ preConditions:
+ not:
+ indexExists:
+ indexName: error_log_serach_idx
+ tableName: scraper_error_log
+ indexName: error_log_scraper_id_idx
+ columns:
+ - column:
+ name: error_time
+ descending: true
+ - column:
+ name: log_id
+ descending: true
\ No newline at end of file
diff --git a/src/main/resources/log4j2.xml b/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..b131f92
--- /dev/null
+++ b/src/main/resources/log4j2.xml
@@ -0,0 +1,48 @@
+
+
+
+
+
+
+
+
+
+ %d %p %C{1.} [%t] %m%n
+
+
+
+
+
+
+
+
+
+
+ %d %p %C{1.} [%t] %m%n
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/test/java/hub/event/ScraperApplicationTests.java b/src/test/java/hub/event/ScraperApplicationTests.java
index e95cdd3..821946b 100644
--- a/src/test/java/hub/event/ScraperApplicationTests.java
+++ b/src/test/java/hub/event/ScraperApplicationTests.java
@@ -2,8 +2,10 @@
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.ActiveProfiles;
@SpringBootTest
+@ActiveProfiles(profiles = "dev")
class ScraperApplicationTests {
@Test
diff --git a/src/test/java/hub/event/arch/ArchTest.java b/src/test/java/hub/event/arch/ArchTest.java
index ccc71a8..e8af861 100644
--- a/src/test/java/hub/event/arch/ArchTest.java
+++ b/src/test/java/hub/event/arch/ArchTest.java
@@ -39,6 +39,7 @@ class ArchTest {
public static final String JAVAX = "..javax..";
public static final String HTMLUNIT = "..htmlunit..";
public static final String JSOUP = "..jsoup..";
+ private static final String SLF4J = "..org.slf4j..";
@Test
@@ -53,7 +54,7 @@ void givenScrapersModule_thenCheckDependencyOnlyOnEvents() {
SCRAPERS_KUPBILECIK, SCRAPERS_PROANIMA)
.should().onlyDependOnClassesThat()
.resideInAnyPackage(JAVA, JAVAX, SPRING,
- EVENTS, SCRAPERS);
+ EVENTS, SCRAPERS, SLF4J);
archRule.check(javaClasses);
diff --git a/src/test/java/hub/event/scrapers/core/EventFacadeAdapterTest.java b/src/test/java/hub/event/scrapers/core/EventFacadeAdapterTest.java
new file mode 100644
index 0000000..c5f22a2
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/EventFacadeAdapterTest.java
@@ -0,0 +1,128 @@
+package hub.event.scrapers.core;
+
+import hub.event.events.EventFacade;
+import hub.event.events.event.Event;
+import hub.event.events.type.Type;
+import hub.event.scrapers.core.datewithlocation.MultipleEventDateWithLocations;
+import hub.event.scrapers.core.datewithlocation.SingleEventDateWithLocation;
+import hub.event.scrapers.core.exceptions.EventDateEndDateTimeBeforeStartDateTimeException;
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.time.*;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.tuple;
+import static org.mockito.Mockito.verify;
+
+@ExtendWith(MockitoExtension.class)
+class EventFacadeAdapterTest {
+
+ @Mock
+ private EventFacade eventFacade;
+ @InjectMocks
+ private EventFacadeAdapter eventFacadeAdapter;
+
+ @Captor
+ ArgumentCaptor> eventListCaptor;
+
+ @Test
+ void saveAllTest() throws EventDateInPastException, EventDateEndDateTimeBeforeStartDateTimeException {
+ //given
+ final LocalDate date1 = LocalDate.now().plusDays(2);
+ final LocalTime time1 = LocalTime.of(10, 20);
+ final String city1 = "Thessia";
+ final LocalDate date2 = LocalDate.now().plusDays(4);
+ final LocalTime time2 = LocalTime.of(13, 0);
+ final String city2 = "Eden Prime";
+ final LocalDate date3 = LocalDate.now().plusDays(2).plusDays(8);
+ final LocalTime time3 = LocalTime.of(18, 30);
+ final String city3 = "Rannoch";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalDate endDate = LocalDate.now().plusDays(2);
+ final LocalTime startTime = LocalTime.of(10, 20);
+ final LocalTime endTime = LocalTime.of(20, 40);
+ final String city = "Thessia";
+ final String address1 = "Nightmare Street 102/34";
+ final String locationName1 = "Black hole mirror club";
+ final ZoneId timeZone1 = ZoneId.systemDefault();
+
+ final MultipleEventDateWithLocations multipleDate = MultipleEventDateWithLocations.create(date1, time1, timeZone, city1, address, locationName)
+ .add(date2, time2, timeZone, city2, address, locationName)
+ .add(date3, time3, timeZone, city3, address, locationName);
+ final MultipleEventDateWithLocations multipleDate2 = MultipleEventDateWithLocations.create(date2, time3, timeZone, city1, address, locationName);
+ final SingleEventDateWithLocation singleDate1 = SingleEventDateWithLocation.single(startDate, startTime, timeZone1, city, address1, locationName1);
+ final SingleEventDateWithLocation singleDate2 = SingleEventDateWithLocation.single(startDate, startTime, endDate, endTime, timeZone1, city, address1, locationName1);
+
+ final ScrapedEvent scrapedEvent1 = ScrapedEvent.builder(multipleDate)
+ .title("title1")
+ .description("description1")
+ .type("type1")
+ .type("type2")
+ .type("type3")
+ .sourceLink("http://eventhub.com/event1")
+ .build();
+
+ final ScrapedEvent scrapedEvent2 = ScrapedEvent.builder(multipleDate2)
+ .title("title2")
+ .description("description2")
+ .type("type56")
+ .sourceLink("http://eventhub.com/event2")
+ .build();
+
+ final ScrapedEvent scrapedEvent3 = ScrapedEvent.builder(singleDate1)
+ .title("title3")
+ .description("description3")
+ .type("type56")
+ .sourceLink("http://eventhub.com/event3")
+ .build();
+
+ final ScrapedEvent scrapedEvent4 = ScrapedEvent.builder(singleDate2)
+ .title("title4")
+ .description("description4")
+ .type("type564")
+ .sourceLink("http://eventhub.com/event4")
+ .build();
+
+ final List scrapedEventList = List.of(scrapedEvent1, scrapedEvent2, scrapedEvent3, scrapedEvent4);
+ //then
+ eventFacadeAdapter.saveAll(scrapedEventList);
+
+ //verify
+ verify(eventFacade).saveEvents(eventListCaptor.capture());
+
+ Iterable facadeInputEventIterableList = eventListCaptor.getValue();
+
+ assertThat(facadeInputEventIterableList)
+ .extracting(
+ Event::getTitle,
+ Event::getDescription,
+ event -> event.getCity().getName(),
+ event -> event.getPlace().getName(),
+ Event::getStartDate,
+ Event::getEndDate,
+ event -> event.getTypes().stream().map(Type::getType).sorted().collect(Collectors.joining(","))
+ )
+ .contains(
+ tuple("title1", "description1", city1, address1, ZonedDateTime.of(LocalDateTime.of(date1, time1), timeZone), null, "type1,type2,type3"),
+ tuple("title1", "description1", city2, address, ZonedDateTime.of(LocalDateTime.of(date2, time2), timeZone), null, "type1,type2,type3"),
+ tuple("title1", "description1", city3, address, ZonedDateTime.of(LocalDateTime.of(date3, time3), timeZone), null, "type1,type2,type3"),
+ tuple("title2", "description2", city1, address, ZonedDateTime.of(LocalDateTime.of(date2, time3), timeZone), null, "type56"),
+ tuple("title3", "description3", city1, address, ZonedDateTime.of(LocalDateTime.of(startDate, startTime), timeZone), null, "type56"),
+ tuple("title4", "description4", city1, address, ZonedDateTime.of(LocalDateTime.of(startDate, startTime), timeZone), ZonedDateTime.of(LocalDateTime.of(endDate, endTime), timeZone), "type564")
+ );
+
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/LastScrapedEventMarkerRepositoryTest.java b/src/test/java/hub/event/scrapers/core/LastScrapedEventMarkerRepositoryTest.java
new file mode 100644
index 0000000..eef7b61
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/LastScrapedEventMarkerRepositoryTest.java
@@ -0,0 +1,118 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.LastScrapedEventMarker;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.util.Optional;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.tuple;
+import static org.mockito.Mockito.*;
+
+@ExtendWith(MockitoExtension.class)
+class LastScrapedEventMarkerRepositoryTest {
+
+ @Mock
+ private JpaLastScrapedEventMarkerRepository lastScrapedEventMarkerEntityRepository;
+ @Mock
+ private ScraperIdNameCache scraperIdNameCache;
+ @InjectMocks
+ private LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository;
+
+ @Captor
+ ArgumentCaptor lastScrapedEventMarkerEntityCaptor;
+
+ @Test
+ void saveTest() {
+ //given
+ LastScrapedEventMarker eventMarker1 = new LastScrapedEventMarker("config1", LocalDateTime.now().toInstant(ZoneOffset.UTC), "title1", "marker1");
+ LastScrapedEventMarker eventMarker2 = new LastScrapedEventMarker("config2", LocalDateTime.now().toInstant(ZoneOffset.UTC), "title2", "marker2");
+
+ //when
+ when(scraperIdNameCache.getIdByScraperName("config1")).thenReturn(20);
+ when(scraperIdNameCache.getIdByScraperName("config2")).thenReturn(40);
+
+ //then
+ lastScrapedEventMarkerRepository.store(eventMarker1);
+ lastScrapedEventMarkerRepository.store(eventMarker2);
+
+ verify(lastScrapedEventMarkerEntityRepository, times(2)).save(lastScrapedEventMarkerEntityCaptor.capture());
+
+ assertThat(lastScrapedEventMarkerEntityCaptor.getAllValues())
+ .extracting(
+ EntityLastScrapedEventMarker::getScraperId,
+ EntityLastScrapedEventMarker::getRunTime,
+ EntityLastScrapedEventMarker::getEventTitle,
+ EntityLastScrapedEventMarker::getMarker
+ ).contains(
+ tuple(20, eventMarker1.runDateTime(), eventMarker1.eventTitle(), eventMarker1.marker()),
+ tuple(40, eventMarker2.runDateTime(), eventMarker2.eventTitle(), eventMarker2.marker())
+ );
+
+ }
+
+ @Nested
+ class FindByScraperConfigurationNameTest {
+
+ @Test
+ void whenNotExistsThenReturnEmptyMarker() {
+ //given
+ final int scraperId = 4560;
+
+ //when
+ when(lastScrapedEventMarkerEntityRepository.findByScraperId(scraperId))
+ .thenReturn(Optional.empty());
+
+ //then
+ final Optional lastScrapedEventMarker = lastScrapedEventMarkerRepository.findLastCompletedByScraperConfigurationId(scraperId);
+
+ assertThat(lastScrapedEventMarker).isEmpty();
+ verify(lastScrapedEventMarkerEntityRepository).findByScraperId(scraperId);
+ }
+
+ @Test
+ void whenExistsThenReturnMarker() {
+ //given
+ final String scraperConfigurationName = "exists_scraper";
+ final Instant localDateTime = LocalDateTime.now().toInstant(ZoneOffset.UTC);
+ final int scraperId = 4570;
+
+ final EntityLastScrapedEventMarker entityLastScrapedEventMarker = new EntityLastScrapedEventMarker();
+ entityLastScrapedEventMarker.setMarker("maker1");
+ entityLastScrapedEventMarker.setRunTime(localDateTime);
+ entityLastScrapedEventMarker.setEventTitle("title1");
+ entityLastScrapedEventMarker.setScraperId(scraperId);
+ entityLastScrapedEventMarker.setComplete(true);
+
+ //when
+ when(scraperIdNameCache.getScraperNameById(scraperId)).thenReturn(scraperConfigurationName);
+ when(lastScrapedEventMarkerEntityRepository.findByScraperId(scraperId))
+ .thenReturn(Optional.of(entityLastScrapedEventMarker));
+
+ //then
+ final Optional lastScrapedEventMarker = lastScrapedEventMarkerRepository.findLastCompletedByScraperConfigurationId(scraperId);
+
+ assertThat(lastScrapedEventMarker).isNotEmpty()
+ .get()
+ .extracting(
+ LastScrapedEventMarker::scraperConfigurationName,
+ LastScrapedEventMarker::runDateTime,
+ LastScrapedEventMarker::eventTitle,
+ LastScrapedEventMarker::marker
+ )
+ .contains(scraperConfigurationName, localDateTime, "title1", "maker1");
+
+ verify(lastScrapedEventMarkerEntityRepository).findByScraperId(scraperId);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/PageScraperPortTest.java b/src/test/java/hub/event/scrapers/core/PageScraperPortTest.java
new file mode 100644
index 0000000..4358547
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/PageScraperPortTest.java
@@ -0,0 +1,170 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.runlog.ScraperRunErrorLog;
+import hub.event.scrapers.core.runlog.ScraperRunStatusLog;
+import hub.event.scrapers.core.scraper.LastScrapedEventMarker;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Optional;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+class PageScraperPortTest {
+ @Mock
+ private ScraperLogRepository scraperLogRepository;
+
+ @Mock
+ private LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository;
+ @Mock
+ private ScraperIdNameCache scraperIdNameCache;
+
+ @InjectMocks
+ private final PageScraperPort pageScraperPort = new PageScraperPort() {
+ @Override
+ protected Collection scrap() {
+ return Collections.emptyList();
+ }
+ };
+
+ @Captor
+ ArgumentCaptor lastScrapedEventMarkerArgumentCaptor;
+ @Captor
+ private ArgumentCaptor scraperRunStatusLogArgumentCaptor;
+ @Captor
+ private ArgumentCaptor scraperRunErrorLogArgumentCaptor;
+
+
+ @Nested
+ class LogSaveTest {
+
+ @Test
+ void logError() {
+ //given
+ final String configurationName = pageScraperPort.configurationName();
+ final Instant time = Instant.now();
+ final String errorCode = "ER1";
+ final String description = "Error 1 server is down";
+
+ //then
+ pageScraperPort.logError(time, errorCode, description);
+
+ verify(scraperLogRepository).save(scraperRunErrorLogArgumentCaptor.capture());
+
+ final ScraperRunErrorLog scraperRunErrorLog = scraperRunErrorLogArgumentCaptor.getValue();
+
+ assertNotNull(scraperRunErrorLog);
+ assertEquals(configurationName, scraperRunErrorLog.configurationName());
+ assertEquals(time, scraperRunErrorLog.time());
+ assertEquals(errorCode, scraperRunErrorLog.errorCode());
+ assertEquals(description, scraperRunErrorLog.description());
+
+ }
+
+ @Test
+ void logStatus() {
+ //given
+ final String configurationName = pageScraperPort.configurationName();
+ final Instant startTime = Instant.now();
+ final Instant finishTime = LocalDateTime.now().plusMinutes(10).toInstant(ZoneOffset.UTC);
+ final Integer scannedEventCount = 23;
+ final Integer errorCount = 1;
+
+
+ //then
+ pageScraperPort.logStatus(startTime, finishTime, scannedEventCount, errorCount);
+
+ verify(scraperLogRepository).save(scraperRunStatusLogArgumentCaptor.capture());
+
+ final ScraperRunStatusLog scraperRunStatusLog = scraperRunStatusLogArgumentCaptor.getValue();
+
+ assertNotNull(scraperRunStatusLog);
+ assertEquals(configurationName, scraperRunStatusLog.configurationName());
+ assertEquals(startTime, scraperRunStatusLog.startTime());
+ assertEquals(finishTime, scraperRunStatusLog.finishTime());
+ assertEquals(scannedEventCount, scraperRunStatusLog.scannedEventCount());
+ assertEquals(errorCount, scraperRunStatusLog.errorCount());
+
+ }
+ }
+
+ @Nested
+ class LastScrapedEventMarkerTest {
+ @Test
+ void whenFoundLastScrapedEventMarkerThenReturnNotEmptyOptional() {
+ //given
+ final String scraperName = pageScraperPort.configurationName();
+ final int scraperId = 102;
+ final LastScrapedEventMarker lastScrapedEventMarker = new LastScrapedEventMarker(scraperName, LocalDateTime.now().minusDays(2).toInstant(ZoneOffset.UTC), "Event Title", "Marker", false);
+ //when
+
+ when(scraperIdNameCache.getIdByScraperName(pageScraperPort.configurationName())).thenReturn(scraperId);
+ when(lastScrapedEventMarkerRepository.findLastCompletedByScraperConfigurationId(scraperId))
+ .thenReturn(Optional.of(lastScrapedEventMarker));
+ //then
+
+ assertDoesNotThrow(() -> {
+ final Optional lastScrapedEventMarkerResult = pageScraperPort.lastScrapedEventMarkerByConfigurationName();
+ assertThat(lastScrapedEventMarkerResult)
+ .isNotEmpty()
+ .get()
+ .isEqualTo(lastScrapedEventMarker);
+ });
+
+ verify(lastScrapedEventMarkerRepository).findLastCompletedByScraperConfigurationId(scraperId);
+ }
+
+ @Test
+ void whenNotFoundLastScrapedEventMarkerThenReturnOptionalEmpty() {
+ //given
+ final int scraperId = 103;
+ //when
+ when(scraperIdNameCache.getIdByScraperName(pageScraperPort.configurationName())).thenReturn(scraperId);
+ when(lastScrapedEventMarkerRepository.findLastCompletedByScraperConfigurationId(scraperId))
+ .thenReturn(Optional.empty());
+ //then
+
+ assertDoesNotThrow(() -> {
+ final Optional lastScrapedEventMarkerResult = pageScraperPort.lastScrapedEventMarkerByConfigurationName();
+ assertThat(lastScrapedEventMarkerResult)
+ .isEmpty();
+ });
+
+ verify(lastScrapedEventMarkerRepository).findLastCompletedByScraperConfigurationId(scraperId);
+ }
+ }
+
+ @Test
+ void saveLastScrapedEventMarker() {
+ //given
+ final Instant runDateTime = ZonedDateTime.of(LocalDateTime.now(), pageScraperPort.timeZone()).toInstant();
+ final String eventTitle = "Test Event Title 1";
+ final String marker = "Example marker value for test";
+
+ //then
+ assertDoesNotThrow(() -> pageScraperPort.saveLastScrapedEventMarker(runDateTime, eventTitle, marker));
+
+ verify(lastScrapedEventMarkerRepository).store(lastScrapedEventMarkerArgumentCaptor.capture());
+ final LastScrapedEventMarker capturedLastScrapedEventMarker = lastScrapedEventMarkerArgumentCaptor.getValue();
+
+ assertEquals(runDateTime, capturedLastScrapedEventMarker.runDateTime());
+ assertEquals(eventTitle, capturedLastScrapedEventMarker.eventTitle());
+ assertEquals(marker, capturedLastScrapedEventMarker.marker());
+ }
+}
diff --git a/src/test/java/hub/event/scrapers/core/ScrapedEventBuilderTest.java b/src/test/java/hub/event/scrapers/core/ScrapedEventBuilderTest.java
new file mode 100644
index 0000000..a7c0d70
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/ScrapedEventBuilderTest.java
@@ -0,0 +1,77 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.datewithlocation.MultipleEventDateWithLocations;
+import hub.event.scrapers.core.datewithlocation.SingleEventDateWithLocation;
+import hub.event.scrapers.core.exceptions.EventDateEndDateTimeBeforeStartDateTimeException;
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class ScrapedEventBuilderTest {
+
+ @Test
+ void whenUseBuilderThenEventIsBuiltCorrectly() throws EventDateInPastException, EventDateEndDateTimeBeforeStartDateTimeException {
+ //given
+ final String title = "Long party on Normandy";
+ final String sourceLink = "citadel://eden.news@human.colonies.gh/evens/123edkfke344";
+ final String city = "Eden Prime";
+ final String address = "Dark Rose 12";
+ final String location = "Blue Bar";
+ final String description = "example description";
+ final Map metadata = Map.of("MetaKey1", "MetaValue1", "MetaKey2", "MetaValue2", "MetaKey3", "MetaValue3");
+
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalTime startTime = LocalTime.of(14, 0);
+ final LocalDate endDate = LocalDate.now().plusDays(4);
+ final LocalTime endTime = LocalTime.of(22, 30);
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ final SingleEventDateWithLocation singleEventDateWithLocation = SingleEventDateWithLocation.period(startDate, startTime, endDate, endTime, timeZone, city, address, location);
+
+ final MultipleEventDateWithLocations multipleEventDateWithLocations = MultipleEventDateWithLocations.create(startDate, startTime, timeZone, city, address, location);
+
+ //then
+ ScrapedEvent scrapedEvent1 = ScrapedEvent.builder(singleEventDateWithLocation)
+ .title(title)
+ .description(description)
+ .sourceLink(sourceLink)
+ .metadata("MetaKey1", "MetaValue1")
+ .metadata("MetaKey2", "MetaValue2")
+ .metadata("MetaKey3", "MetaValue3")
+ .type("Inline Skating")
+ .type("Skating Workshops")
+ .build();
+
+ ScrapedEvent scrapedEvent2 = ScrapedEvent.builder(multipleEventDateWithLocations)
+ .title(title)
+ .description(description)
+ .sourceLink(sourceLink)
+ .build();
+
+ assertNotNull(scrapedEvent1);
+ assertNotNull(scrapedEvent2);
+
+ assertEquals(title, scrapedEvent1.title());
+ assertEquals(description, scrapedEvent1.description());
+ assertEquals(sourceLink, scrapedEvent1.sourceLink());
+ assertEquals(metadata, scrapedEvent1.metadata());
+ assertEquals(2, scrapedEvent1.types().size());
+ assertTrue(scrapedEvent1.types().contains("Inline Skating"));
+ assertTrue(scrapedEvent1.types().contains("Skating Workshops"));
+ assertEquals(singleEventDateWithLocation, scrapedEvent1.singleEventDateWithLocation());
+ assertNull(scrapedEvent1.multipleEventDateWithLocations());
+ assertFalse(scrapedEvent1.hasMultipleDateAndLocations());
+
+ assertEquals(multipleEventDateWithLocations, scrapedEvent2.multipleEventDateWithLocations());
+ assertNull(scrapedEvent2.singleEventDateWithLocation());
+ assertTrue(scrapedEvent2.hasMultipleDateAndLocations());
+
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/ScraperConfigRepositoryTest.java b/src/test/java/hub/event/scrapers/core/ScraperConfigRepositoryTest.java
new file mode 100644
index 0000000..8a62d1b
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/ScraperConfigRepositoryTest.java
@@ -0,0 +1,78 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.scraper.ScraperConfig;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.ActiveProfiles;
+
+import java.time.ZoneId;
+
+import static org.assertj.core.api.Assertions.*;
+
+@SpringBootTest
+@ActiveProfiles(profiles = "dev")
+class ScraperConfigRepositoryTest {
+ private final ScraperConfigRepository scraperConfigRepository;
+
+ @Autowired
+ ScraperConfigRepositoryTest(ScraperConfigRepository scraperConfigRepository) {
+ this.scraperConfigRepository = scraperConfigRepository;
+ }
+
+ @Test
+ @Order(1)
+ void repositoryTest() {
+ //given
+ final String scraper1Name = "SC1";
+ final String scraper2Name = "SC2";
+ final String scraper3Name = "SC3";
+ final String scraper4Name = "SC4";
+ final ZoneId zoneId = ZoneId.systemDefault();
+
+ assertThat(scraperConfigRepository).isNotNull();
+
+ final ScraperConfig createdScraperConfig1 = scraperConfigRepository.create(scraper1Name, zoneId, false);
+ final ScraperConfig createdScraperConfig2 = scraperConfigRepository.create(scraper2Name, zoneId, true);
+ final ScraperConfig createdScraperConfig3 = scraperConfigRepository.create(scraper3Name, zoneId, false);
+ final ScraperConfig createdScraperConfig4 = scraperConfigRepository.create(scraper4Name, zoneId, true);
+
+ assertThatException().isThrownBy(() -> scraperConfigRepository.create(scraper4Name, zoneId, true));
+
+ assertThat(createdScraperConfig1.scraperId()).isPositive();
+ assertThat(createdScraperConfig2.scraperId()).isPositive();
+ assertThat(createdScraperConfig3.scraperId()).isPositive();
+ assertThat(createdScraperConfig4.scraperId()).isPositive();
+
+ assertThat(scraperConfigRepository.exists(createdScraperConfig2.scraperId())).isTrue();
+ assertThat(scraperConfigRepository.exists(createdScraperConfig4.scraperId())).isTrue();
+ assertThat(scraperConfigRepository.exists(createdScraperConfig4.scraperId() + 1000)).isFalse();
+
+ assertThat(scraperConfigRepository.allScraperConfigs())
+ .extracting(ScraperConfig::configurationName, ScraperConfig::timeZone, ScraperConfig::isActive)
+ .contains(
+ tuple(scraper1Name, zoneId, false),
+ tuple(scraper2Name, zoneId, true),
+ tuple(scraper3Name, zoneId, false),
+ tuple(scraper4Name, zoneId, true)
+ );
+
+ scraperConfigRepository.activate(createdScraperConfig3.scraperId());
+ scraperConfigRepository.deactivate(createdScraperConfig2.scraperId());
+
+ assertThat(scraperConfigRepository.allScraperConfigs())
+ .extracting(ScraperConfig::configurationName, ScraperConfig::timeZone, ScraperConfig::isActive)
+ .contains(
+ tuple(scraper1Name, zoneId, false),
+ tuple(scraper2Name, zoneId, false),
+ tuple(scraper3Name, zoneId, true),
+ tuple(scraper4Name, zoneId, true)
+ ).doesNotContain(
+ tuple(scraper2Name, zoneId, true),
+ tuple(scraper3Name, zoneId, false)
+ );
+
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/ScraperFacadeTest.java b/src/test/java/hub/event/scrapers/core/ScraperFacadeTest.java
new file mode 100644
index 0000000..f77a815
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/ScraperFacadeTest.java
@@ -0,0 +1,93 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.exceptions.ScraperConfigurationByNameNotExists;
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.time.ZoneId;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.Mockito.*;
+
+@ExtendWith(MockitoExtension.class)
+class ScraperFacadeTest {
+ @Mock
+ private ScraperConfigRepository scraperConfigRepository;
+ @Mock
+ private ScraperIdNameCache scraperIdNameCache;
+ @InjectMocks
+ private ScraperFacade scraperFacade;
+
+ @Nested
+ class ScraperConfigTest {
+ @Test
+ void whenActivateScraperThatNotExistThenThrowException() {
+ //given
+ final String scraperName = "sc1";
+ //when
+ int scraperId = 100;
+ when(scraperIdNameCache.getIdByScraperName(scraperName)).thenReturn(scraperId);
+ when(scraperConfigRepository.exists(scraperId)).thenReturn(false);
+ //then
+ Assertions.assertThatExceptionOfType(ScraperConfigurationByNameNotExists.class)
+ .isThrownBy(() -> scraperFacade.activateScraperByConfigurationName(scraperName));
+
+ verify(scraperConfigRepository).exists(scraperId);
+ verify(scraperConfigRepository, never()).activate(scraperId);
+ }
+
+ @Test
+ void whenActivateScraperThatExistThenCallActive() {
+ //given
+ final String scraperName = "sc4";
+ //when
+ int scraperId = 4;
+ when(scraperIdNameCache.getIdByScraperName(scraperName)).thenReturn(scraperId);
+ when(scraperConfigRepository.exists(scraperId)).thenReturn(true);
+ //then
+ Assertions.assertThatCode(() -> scraperFacade.activateScraperByConfigurationName(scraperName))
+ .doesNotThrowAnyException();
+
+ verify(scraperConfigRepository).exists(scraperId);
+ verify(scraperConfigRepository, never()).create(scraperName, ZoneId.systemDefault(), true);
+ verify(scraperConfigRepository).activate(scraperId);
+ }
+
+ @Test
+ void whenDeactivateNotExistsScraperThanThrowConfigNotExistsException() {
+ //given
+ final String scraperName = "sc2";
+ //when
+ int scraperId = 2;
+ when(scraperIdNameCache.getIdByScraperName(scraperName)).thenReturn(scraperId);
+ when(scraperConfigRepository.exists(scraperId)).thenReturn(false);
+ //then
+ assertThrows(ScraperConfigurationByNameNotExists.class, () -> scraperFacade.deactivateScraperByConfigurationName(scraperName));
+
+ verify(scraperConfigRepository).exists(scraperId);
+ verify(scraperConfigRepository, never()).deactivate(scraperId);
+ }
+
+ @Test
+ void whenDeactivateExistsScraperThenCallDeactivate() {
+ //given
+ final String scraperName = "sc3";
+ //when
+ int scraperId = 3;
+ when(scraperIdNameCache.getIdByScraperName(scraperName)).thenReturn(scraperId);
+ when(scraperConfigRepository.exists(scraperId)).thenReturn(true);
+ //then
+ assertDoesNotThrow(() -> scraperFacade.deactivateScraperByConfigurationName(scraperName));
+
+ verify(scraperConfigRepository).exists(scraperId);
+ verify(scraperConfigRepository).deactivate(scraperId);
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/ScraperRunServiceTest.java b/src/test/java/hub/event/scrapers/core/ScraperRunServiceTest.java
new file mode 100644
index 0000000..b5be512
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/ScraperRunServiceTest.java
@@ -0,0 +1,243 @@
+package hub.event.scrapers.core;
+
+import hub.event.scrapers.core.datewithlocation.SingleEventDateWithLocation;
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+import hub.event.scrapers.core.scraper.ScraperConfig;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.*;
+
+@ExtendWith(MockitoExtension.class)
+class ScraperRunServiceTest {
+
+ @Mock
+ private ScraperConfigRepository scraperConfigRepository;
+ @Mock
+ private EventFacadeAdapter eventFacadeAdapter;
+ @Mock
+ private LastScrapedEventMarkerRepository lastScrapedEventMarkerRepository;
+
+ @Nested
+ class ScrapersStartTest {
+
+ @Test
+ void whenListContainsInactiveScraperThenSkipRunIt() {
+ //given
+ final PageScraperPort activeScraper1 = mock(PageScraperPort.class);
+ final PageScraperPort activeScraper2 = mock(PageScraperPort.class);
+ final PageScraperPort inactiveScraper = mock(PageScraperPort.class);
+
+ final String activeScraperName1 = "active1";
+ final String activeScraperName2 = "active2";
+ final String inactiveScraperName = "inactive2";
+
+ final ScraperConfig activeScraper1Config = new ScraperConfig(1, activeScraperName1, ZoneId.systemDefault(), true);
+ final ScraperConfig activeScraper2Config = new ScraperConfig(2, activeScraperName2, ZoneId.systemDefault(), true);
+ final ScraperConfig inactiveScraperConfig = new ScraperConfig(3, inactiveScraperName, ZoneId.systemDefault(), false);
+
+ final List<PageScraperPort> pageScrapers = Arrays.asList(activeScraper1, activeScraper2, inactiveScraper);
+ final List<ScraperConfig> scraperConfigs = new ArrayList<>(Arrays.asList(activeScraper1Config, activeScraper2Config, inactiveScraperConfig));
+
+ final ScraperIdNameCache scraperIdNameCache = new ScraperIdNameCache();
+ scraperIdNameCache.add(scraperConfigs);
+ final ScraperRunService scraperRunService = new ScraperRunService(scraperConfigRepository, eventFacadeAdapter, lastScrapedEventMarkerRepository, pageScrapers, scraperIdNameCache);
+
+ //when
+ when(activeScraper1.configurationName()).thenReturn(activeScraperName1);
+ when(activeScraper2.configurationName()).thenReturn(activeScraperName2);
+ when(inactiveScraper.configurationName()).thenReturn(inactiveScraperName);
+
+ // NOTE(review): removed three stubbing lines that repeated the
+ // configurationName() stubbings directly above verbatim.
+
+
+ when(scraperConfigRepository.allScraperConfigs()).thenReturn(scraperConfigs);
+
+ //then
+ scraperRunService.start();
+
+ verify(scraperConfigRepository, never()).create(anyString(), any(ZoneId.class), anyBoolean());
+ verify(activeScraper1).scrap();
+ verify(activeScraper2).scrap();
+ verify(inactiveScraper, never()).scrap();
+ }
+
+ @Test
+ void whenScrapedEventExistsThenSaveAsEvent() throws EventDateInPastException {
+ //given
+ final PageScraperPort activeScraper1 = mock(PageScraperPort.class);
+ final PageScraperPort activeScraper2 = mock(PageScraperPort.class);
+
+ final String activeScraperName1 = "active1";
+ final String activeScraperName2 = "active2";
+
+ final ScraperConfig activeScraper1Config = new ScraperConfig(10, activeScraperName1, ZoneId.systemDefault(), true);
+ final ScraperConfig activeScraper2Config = new ScraperConfig(11, activeScraperName2, ZoneId.systemDefault(), true);
+ final List<ScraperConfig> scraperConfigs = new ArrayList<>(Arrays.asList(activeScraper1Config, activeScraper2Config));
+
+ final ScrapedEvent scrapedEvent1 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Palaven", "Addres 1", "location 2"))
+ .title("Title1")
+ .description("Description1")
+ .build();
+ final ScrapedEvent scrapedEvent2 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Thessia", "Addres 2", "location 22"))
+ .title("Title2")
+ .description("Description2")
+ .build();
+ final ScrapedEvent scrapedEvent3 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Eden Prime", "Addres 134", "location 166"))
+ .title("Title1")
+ .description("Description1")
+ .build();
+ final List<ScrapedEvent> scrapedEventList = Arrays.asList(scrapedEvent1, scrapedEvent2, scrapedEvent3);
+
+ final List<PageScraperPort> pageScrapers = Arrays.asList(activeScraper1, activeScraper2);
+ final ScraperIdNameCache scraperIdNameCache = new ScraperIdNameCache();
+ scraperIdNameCache.add(scraperConfigs);
+ final ScraperRunService scraperRunService = new ScraperRunService(scraperConfigRepository, eventFacadeAdapter, lastScrapedEventMarkerRepository, pageScrapers, scraperIdNameCache);
+
+ //when
+ when(activeScraper1.configurationName()).thenReturn(activeScraperName1);
+ when(activeScraper2.configurationName()).thenReturn(activeScraperName2);
+
+ when(activeScraper1.scrap()).thenReturn(Arrays.asList(scrapedEvent1));
+ when(activeScraper2.scrap()).thenReturn(Arrays.asList(scrapedEvent2, scrapedEvent3));
+
+ when(scraperConfigRepository.allScraperConfigs()).thenReturn(scraperConfigs);
+
+ //then
+ scraperRunService.start();
+
+ verify(activeScraper1).scrap();
+ verify(activeScraper2).scrap();
+ verify(eventFacadeAdapter).saveAll(scrapedEventList);
+ }
+
+ @Test
+ void whenScrapedEventSavedThenMakeLastScrapedEventMarkerDraftActive() throws EventDateInPastException {
+ //given
+ final PageScraperPort activeScraper1 = mock(PageScraperPort.class);
+ final PageScraperPort activeScraper2 = mock(PageScraperPort.class);
+
+ final String activeScraperName1 = "active1";
+ final String activeScraperName2 = "active2";
+
+ final ScraperConfig activeScraper1Config = new ScraperConfig(20, activeScraperName1, ZoneId.systemDefault(), true);
+ final ScraperConfig activeScraper2Config = new ScraperConfig(21, activeScraperName2, ZoneId.systemDefault(), true);
+ final List<ScraperConfig> scraperConfigs = new ArrayList<>(Arrays.asList(activeScraper1Config, activeScraper2Config));
+
+ final ScrapedEvent scrapedEvent1 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Palaven", "Addres 1", "location 2"))
+ .title("Title1")
+ .description("Description1")
+ .build();
+ final ScrapedEvent scrapedEvent2 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Thessia", "Addres 2", "location 22"))
+ .title("Title2")
+ .description("Description2")
+ .build();
+ final ScrapedEvent scrapedEvent3 = ScrapedEvent.builder(SingleEventDateWithLocation.single(LocalDate.now().plusDays(1), LocalTime.now().plusHours(2), ZoneId.systemDefault(), "Eden Prime", "Addres 134", "location 166"))
+ .title("Title1")
+ .description("Description1")
+ .build();
+
+ final List<PageScraperPort> pageScrapers = Arrays.asList(activeScraper1, activeScraper2);
+ final ScraperIdNameCache scraperIdNameCache = new ScraperIdNameCache();
+ scraperIdNameCache.add(scraperConfigs);
+ final ScraperRunService scraperRunService = new ScraperRunService(scraperConfigRepository, eventFacadeAdapter, lastScrapedEventMarkerRepository, pageScrapers, scraperIdNameCache);
+
+ //when
+ when(activeScraper1.configurationName()).thenReturn(activeScraperName1);
+ when(activeScraper2.configurationName()).thenReturn(activeScraperName2);
+
+ when(activeScraper1.scrap()).thenReturn(Arrays.asList(scrapedEvent1));
+ when(activeScraper2.scrap()).thenReturn(Arrays.asList(scrapedEvent2, scrapedEvent3));
+
+ when(scraperConfigRepository.allScraperConfigs()).thenReturn(scraperConfigs);
+
+ //then
+ scraperRunService.start();
+
+ verify(lastScrapedEventMarkerRepository).setAllAsCompleteByConfigurationsIds(List.of(activeScraper1Config.scraperId(), activeScraper2Config.scraperId()));
+ }
+ }
+
+ @Nested
+ class CreateConfigsIfMissingAndCacheBuildTest {
+ @Test
+ void whenFoundScrapersWithoutConfigThenCreateDefault() {
+ //given
+ final PageScraperPort scraper1 = mock(PageScraperPort.class);
+ final PageScraperPort scraper2 = mock(PageScraperPort.class);
+ final List<PageScraperPort> pageScrapers = Arrays.asList(scraper1, scraper2);
+
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ final ScraperConfig scraperConfig1 = new ScraperConfig(1, "Scraper1", timeZone, true);
+ final ScraperConfig scraperConfig2 = new ScraperConfig(2, "Scraper2", timeZone, true);
+
+ final ScraperRunService scraperRunService = new ScraperRunService(scraperConfigRepository, eventFacadeAdapter, lastScrapedEventMarkerRepository, pageScrapers, new ScraperIdNameCache());
+ //when
+ when(scraper2.timeZone()).thenReturn(timeZone);
+ when(scraper1.configurationName()).thenReturn("Scraper1");
+ when(scraper2.configurationName()).thenReturn("Scraper2");
+ when(scraperConfigRepository.allScraperConfigs()).thenReturn(List.of(scraperConfig1));
+ when(scraperConfigRepository.create("Scraper2", timeZone, true)).thenReturn(scraperConfig2);
+
+ //then
+ scraperRunService.createScrapersConfigsIfMissingAndFillIdNameCache();
+
+ verify(scraperConfigRepository).create("Scraper2", timeZone, true);
+ }
+
+ @Test
+ void cacheContainsFoundAndCreatedScrapersConfig() {
+ //given
+ final PageScraperPort scraper1 = mock(PageScraperPort.class);
+ final PageScraperPort scraper2 = mock(PageScraperPort.class);
+ final List<PageScraperPort> pageScrapers = Arrays.asList(scraper1, scraper2);
+
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ final ScraperConfig scraperConfig1 = new ScraperConfig(1, "Scraper1", timeZone, true);
+ final ScraperConfig scraperConfig2 = new ScraperConfig(2, "Scraper2", timeZone, true);
+ final ScraperConfig scraperConfig3 = new ScraperConfig(3, "Scraper3", ZoneId.systemDefault(), true);
+ final ScraperConfig scraperConfig4 = new ScraperConfig(4, "Scraper4", ZoneId.systemDefault(), false);
+ final ScraperConfig scraperConfig5 = new ScraperConfig(5, "Scraper5", ZoneId.systemDefault(), true);
+ final List<ScraperConfig> scraperConfigList = List.of(scraperConfig1, scraperConfig3, scraperConfig4, scraperConfig5);
+
+ final ScraperIdNameCache scraperIdNameCache = new ScraperIdNameCache();
+ final ScraperRunService scraperRunService = new ScraperRunService(scraperConfigRepository, eventFacadeAdapter, lastScrapedEventMarkerRepository, pageScrapers, scraperIdNameCache);
+
+ //when
+ when(scraper2.timeZone()).thenReturn(timeZone);
+ when(scraper1.configurationName()).thenReturn("Scraper1");
+ when(scraper2.configurationName()).thenReturn("Scraper2");
+ when(scraperConfigRepository.allScraperConfigs()).thenReturn(scraperConfigList);
+ when(scraperConfigRepository.create("Scraper2", timeZone, true)).thenReturn(scraperConfig2);
+
+ //then
+ scraperRunService.createScrapersConfigsIfMissingAndFillIdNameCache();
+
+ assertEquals(scraperConfig1.scraperId(), scraperIdNameCache.getIdByScraperName(scraperConfig1.configurationName()));
+ assertEquals(scraperConfig2.scraperId(), scraperIdNameCache.getIdByScraperName(scraperConfig2.configurationName()));
+ assertEquals(scraperConfig3.scraperId(), scraperIdNameCache.getIdByScraperName(scraperConfig3.configurationName()));
+ assertEquals(scraperConfig4.scraperId(), scraperIdNameCache.getIdByScraperName(scraperConfig4.configurationName()));
+ assertEquals(scraperConfig5.scraperId(), scraperIdNameCache.getIdByScraperName(scraperConfig5.configurationName()));
+
+ assertEquals(scraperConfig1.configurationName(), scraperIdNameCache.getScraperNameById(scraperConfig1.scraperId()));
+ assertEquals(scraperConfig2.configurationName(), scraperIdNameCache.getScraperNameById(scraperConfig2.scraperId()));
+ assertEquals(scraperConfig3.configurationName(), scraperIdNameCache.getScraperNameById(scraperConfig3.scraperId()));
+ assertEquals(scraperConfig4.configurationName(), scraperIdNameCache.getScraperNameById(scraperConfig4.scraperId()));
+ assertEquals(scraperConfig5.configurationName(), scraperIdNameCache.getScraperNameById(scraperConfig5.scraperId()));
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocationsTest.java b/src/test/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocationsTest.java
new file mode 100644
index 0000000..887b31a
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/datewithlocation/MultipleEventDateWithLocationsTest.java
@@ -0,0 +1,66 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+
+import static org.assertj.core.api.Assertions.*;
+
+class MultipleEventDateWithLocationsTest {
+
+ @Test
+ void whenBuildWithIncorrectInputThenThrows() throws EventDateInPastException {
+ final LocalDate date = LocalDate.now().plusDays(10);
+ final LocalDate dateInPast = LocalDate.of(2022, 7, 12);
+ final LocalTime time = LocalTime.of(14, 20);
+ final String city = "Thessia";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+ final MultipleEventDateWithLocations multipleEventDateWithLocations = MultipleEventDateWithLocations.create(date, time, timeZone, city, address, locationName);
+
+ assertThatExceptionOfType(EventDateInPastException.class)
+ .isThrownBy(() -> MultipleEventDateWithLocations.create(dateInPast, time, timeZone, city, address, locationName));
+
+ assertThatExceptionOfType(EventDateInPastException.class)
+ .isThrownBy(() -> multipleEventDateWithLocations.add(dateInPast, time, timeZone, city, address, locationName));
+ }
+
+ @Test
+ void whenCorrectInputThenBuildCorrectly() {
+ final LocalDate date1 = LocalDate.now().plusDays(2);
+ final LocalTime time1 = LocalTime.of(10, 20);
+ final String city1 = "Thessia";
+ final LocalDate date2 = LocalDate.now().plusDays(4);
+ final LocalTime time2 = LocalTime.of(13, 0);
+ final String city2 = "Eden Prime";
+ final LocalDate date3 = LocalDate.now().plusDays(2).plusDays(8);
+ final LocalTime time3 = LocalTime.of(18, 30);
+ final String city3 = "Rannoch";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ Assertions.assertThatNoException().isThrownBy(() -> {
+ MultipleEventDateWithLocations multipleDate = MultipleEventDateWithLocations.create(date1, time1, timeZone, city1, address, locationName)
+ .add(date2, time2, timeZone, city2, address, locationName)
+ .add(date3, time3, timeZone, city3, address, locationName);
+
+ assertThat(multipleDate).isNotNull();
+
+ assertThat(multipleDate.eventDateWithLocations()).isNotNull()
+ .hasSize(3)
+ .extracting(EventDateWithLocation::startDate, EventDateWithLocation::startTime, EventDateWithLocation::city, EventDateWithLocation::address, EventDateWithLocation::locationName, EventDateWithLocation::timeZone)
+ .contains(tuple(date1, time1, city1, address, locationName, timeZone),
+ tuple(date2, time2, city2, address, locationName, timeZone),
+ tuple(date3, time3, city3, address, locationName, timeZone)
+ );
+ });
+
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocationTest.java b/src/test/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocationTest.java
new file mode 100644
index 0000000..6055903
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/datewithlocation/SingleEventDateWithLocationTest.java
@@ -0,0 +1,161 @@
+package hub.event.scrapers.core.datewithlocation;
+
+import hub.event.scrapers.core.exceptions.EventDateEndDateTimeBeforeStartDateTimeException;
+import hub.event.scrapers.core.exceptions.EventDateInPastException;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class SingleEventDateWithLocationTest {
+
+ @Nested
+ class SingleTypedEventDateTest {
+ @Test
+ void whenBuildWithIncorrectInputThenThrows() {
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalDate startDateInPast = LocalDate.of(2022, 1, 12);
+ final LocalTime startTime = LocalTime.of(14, 20);
+ final LocalDate incorrectEndDate = LocalDate.now().plusDays(1);
+ final LocalDate correctEndDate = LocalDate.now().plusDays(2);
+ final LocalDate endDateInPast = LocalDate.of(2022, 7, 12);
+ final LocalTime incorrectEndTime = LocalTime.of(10, 10);
+ final LocalTime correctEndTime = LocalTime.of(20, 10);
+ final String city = "Thessia";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ assertThrows(EventDateEndDateTimeBeforeStartDateTimeException.class, () -> SingleEventDateWithLocation.single(startDate, startTime, incorrectEndDate, correctEndTime, timeZone, city, address, locationName));
+ assertThrows(EventDateEndDateTimeBeforeStartDateTimeException.class, () -> SingleEventDateWithLocation.single(startDate, startTime, correctEndDate, incorrectEndTime, timeZone, city, address, locationName));
+
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.single(startDateInPast, startTime, timeZone, city, address, locationName));
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.single(startDateInPast, startTime, correctEndDate, correctEndTime, timeZone, city, address, locationName));
+
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.single(startDate, startTime, endDateInPast, correctEndTime, timeZone, city, address, locationName));
+ }
+
+ @Test
+ void whenCorrectInputThenBuildCorrectly() {
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalTime startTime = LocalTime.of(10, 20);
+ final LocalDate endDate = LocalDate.now().plusDays(2);
+ final LocalTime endTime = LocalTime.of(20, 10);
+ final String city = "Thessia";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ assertDoesNotThrow(() -> {
+ SingleEventDateWithLocation singleDateContainsStartDateAndTime = SingleEventDateWithLocation.single(startDate, startTime, timeZone, city, address, locationName);
+ assertNotNull(singleDateContainsStartDateAndTime);
+
+ assertEquals(startDate, singleDateContainsStartDateAndTime.startDate());
+ assertEquals(startTime, singleDateContainsStartDateAndTime.startTime());
+ assertEquals(city, singleDateContainsStartDateAndTime.city());
+ assertEquals(address, singleDateContainsStartDateAndTime.address());
+ assertEquals(locationName, singleDateContainsStartDateAndTime.locationName());
+ assertEquals(timeZone, singleDateContainsStartDateAndTime.timeZone());
+ assertNull(singleDateContainsStartDateAndTime.endDate());
+ assertNull(singleDateContainsStartDateAndTime.endTime());
+ assertTrue(singleDateContainsStartDateAndTime.isSingleDate());
+ assertFalse(singleDateContainsStartDateAndTime.isPeriodDate());
+
+ });
+
+ assertDoesNotThrow(() -> {
+ SingleEventDateWithLocation fullEventDate = SingleEventDateWithLocation.single(startDate, startTime, endDate, endTime, timeZone, city, address, locationName);
+ assertNotNull(fullEventDate);
+
+ assertEquals(startDate, fullEventDate.startDate());
+ assertEquals(startTime, fullEventDate.startTime());
+ assertEquals(city, fullEventDate.city());
+ assertEquals(endDate, fullEventDate.endDate());
+ assertEquals(endTime, fullEventDate.endTime());
+ assertEquals(address, fullEventDate.address());
+ assertEquals(locationName, fullEventDate.locationName());
+ assertEquals(timeZone, fullEventDate.timeZone());
+ assertTrue(fullEventDate.isSingleDate());
+ assertFalse(fullEventDate.isPeriodDate());
+ });
+
+ }
+ }
+
+ @Nested
+ class PeriodTypedDateTest {
+ @Test
+ void whenBuildWithIncorrectInputThenThrows() {
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalDate startDateInPast = LocalDate.of(2022, 1, 12);
+ final LocalTime startTime = LocalTime.of(14, 20);
+ final LocalDate incorrectEndDate = LocalDate.now().plusDays(1);
+ final LocalDate correctEndDate = LocalDate.now().plusDays(2);
+ final LocalDate endDateInPast = LocalDate.of(2022, 2, 12);
+ final LocalTime incorrectEndTime = LocalTime.of(10, 10);
+ final LocalTime correctEndTime = LocalTime.of(20, 10);
+ final String city = "Thessia";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ assertThrows(EventDateEndDateTimeBeforeStartDateTimeException.class, () -> SingleEventDateWithLocation.period(startDate, startTime, incorrectEndDate, correctEndTime, timeZone, city, address, locationName));
+ assertThrows(EventDateEndDateTimeBeforeStartDateTimeException.class, () -> SingleEventDateWithLocation.period(startDate, startTime, correctEndDate, incorrectEndTime, timeZone, city, address, locationName));
+
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.period(startDateInPast, startTime, correctEndDate, timeZone, city, address, locationName));
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.period(startDateInPast, startTime, correctEndDate, correctEndTime, timeZone, city, address, locationName));
+ assertThrows(EventDateInPastException.class, () -> SingleEventDateWithLocation.period(startDate, startTime, endDateInPast, correctEndTime, timeZone, city, address, locationName));
+ }
+
+
+ @Test
+ void whenCorrectInputThenBuildCorrectly() {
+ final LocalDate startDate = LocalDate.now().plusDays(2);
+ final LocalTime startTime = LocalTime.of(14, 0);
+ final LocalDate endDate = LocalDate.now().plusDays(10);
+ final LocalTime endTime = LocalTime.of(16, 30);
+ final String city = "Thessia";
+ final String address = "Nightmare Street 102/34";
+ final String locationName = "Black hole mirror club";
+ final ZoneId timeZone = ZoneId.systemDefault();
+
+ assertDoesNotThrow(() -> {
+ SingleEventDateWithLocation periodDateContainsStartDateAndTime = SingleEventDateWithLocation.period(startDate, startTime, endDate, timeZone, city, address, locationName);
+
+ assertNotNull(periodDateContainsStartDateAndTime);
+
+ assertEquals(startDate, periodDateContainsStartDateAndTime.startDate());
+ assertEquals(startTime, periodDateContainsStartDateAndTime.startTime());
+ assertEquals(city, periodDateContainsStartDateAndTime.city());
+ assertEquals(endDate, periodDateContainsStartDateAndTime.endDate());
+ assertEquals(address, periodDateContainsStartDateAndTime.address());
+ assertEquals(locationName, periodDateContainsStartDateAndTime.locationName());
+ assertEquals(timeZone, periodDateContainsStartDateAndTime.timeZone());
+ assertNull(periodDateContainsStartDateAndTime.endTime());
+ assertTrue(periodDateContainsStartDateAndTime.isPeriodDate());
+ assertFalse(periodDateContainsStartDateAndTime.isSingleDate());
+ });
+
+ assertDoesNotThrow(() -> {
+ SingleEventDateWithLocation fullEventDate = SingleEventDateWithLocation.period(startDate, startTime, endDate, endTime, timeZone, city, address, locationName);
+
+ assertNotNull(fullEventDate);
+
+ assertEquals(startDate, fullEventDate.startDate());
+ assertEquals(startTime, fullEventDate.startTime());
+ assertEquals(city, fullEventDate.city());
+ assertEquals(endDate, fullEventDate.endDate());
+ assertEquals(endTime, fullEventDate.endTime());
+ assertEquals(address, fullEventDate.address());
+ assertEquals(locationName, fullEventDate.locationName());
+ assertEquals(timeZone, fullEventDate.timeZone());
+ assertTrue(fullEventDate.isPeriodDate());
+ assertFalse(fullEventDate.isSingleDate());
+ });
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacadeTest.java b/src/test/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacadeTest.java
new file mode 100644
index 0000000..2c964b5
--- /dev/null
+++ b/src/test/java/hub/event/scrapers/core/runlog/ScraperLogQueryFacadeTest.java
@@ -0,0 +1,478 @@
+package hub.event.scrapers.core.runlog;
+
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.jdbc.Sql;
+
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.groups.Tuple.tuple;
+
+@SpringBootTest
+@ActiveProfiles(profiles = "dev")
+@Sql(executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD, scripts = {"/database/scrapers/core/test_data_init.sql"})
+class ScraperLogQueryFacadeTest {
+ private static final String ZONE_ID = "Europe/Warsaw";
+ @Autowired
+ private ScraperLogQueryFacade scraperLogQueryFacade;
+ @Autowired
+ private ScraperLogQueryRepository scraperLogQueryRepository;
+
+ private final String configurationName1 = "Scraper3";
+ private final String configurationName2 = "Scraper2";
+ private final String configurationName3 = "Scraper1";
+
+
+ @Nested
+ class ErrorLogQueryTest {
+
+ @Test
+ void testSearchByEmptyQuery() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder().build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(9)
+ .extracting(
+ ScraperRunErrorLog::configurationName,
+ ScraperRunErrorLog::description,
+ ScraperRunErrorLog::errorCode,
+ scraperRunErrorLog -> scraperRunErrorLog.time().toString())
+ .containsExactly(
+ tuple(configurationName3, "Error 20", "ERR_20", "2022-10-19T21:23:00.020Z"),
+ tuple(configurationName3, "Error 1", "ERR_1", "2022-10-19T21:22:00.015Z"),
+ tuple(configurationName2, "Error 10", "ERR_10", "2022-10-30T22:21:00.011Z"),
+ tuple(configurationName2, "Error 1", "ERR_1", "2022-10-19T21:20:00.019Z"),
+ tuple(configurationName1, "Error 1", "ERR_1", "2022-10-24T21:19:00.020Z"),
+ tuple(configurationName1, "Error 1", "ERR_1", "2022-10-23T21:18:00.022Z"),
+ tuple(configurationName1, "Error 0", "ERR_0", "2022-10-21T21:17:00.013Z"),
+ tuple(configurationName1, "Error 0", "ERR_0", "2022-10-20T21:16:00.011Z"),
+ tuple(configurationName1, "Error 0", "ERR_0", "2022-10-19T21:15:00.015Z")
+ );
+ }
+
+ @Test
+ void testSearchByDescription() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .description("Error 0")
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(3)
+ .extracting(
+ ScraperRunErrorLog::configurationName, ScraperRunErrorLog::description, ScraperRunErrorLog::errorCode)
+ .contains(
+ tuple(configurationName1, "Error 0", "ERR_0"),
+ tuple(configurationName1, "Error 0", "ERR_0"),
+ tuple(configurationName1, "Error 0", "ERR_0")
+ );
+ }
+
+ @Test
+ void testSearchByErrorsCodes() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .errorCodes(List.of("ERR_10", "ERR_20"))
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(2)
+ .extracting(
+ ScraperRunErrorLog::configurationName, ScraperRunErrorLog::description, ScraperRunErrorLog::errorCode)
+ .contains(
+ tuple(configurationName2, "Error 10", "ERR_10"),
+ tuple(configurationName3, "Error 20", "ERR_20")
+ );
+ }
+
+ @Test
+ void testSearchByDateRange() {
+ final ZonedDateTime fromDate = ZonedDateTime.of(LocalDateTime.of(2022, 10, 20, 12, 0), ZoneId.of(ZONE_ID));
+ final ZonedDateTime toDate = ZonedDateTime.of(LocalDateTime.of(2022, 10, 24, 23, 50), ZoneId.of(ZONE_ID));
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .fromDate(fromDate.toInstant())
+ .toDate(toDate.toInstant())
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(4)
+ .extracting(
+ ScraperRunErrorLog::configurationName,
+ ScraperRunErrorLog::description,
+ ScraperRunErrorLog::errorCode,
+ scraperRunErrorLog -> scraperRunErrorLog.time().toString())
+ .contains(
+ tuple(configurationName1, "Error 0", "ERR_0", "2022-10-20T21:16:00.011Z"),
+ tuple(configurationName1, "Error 0", "ERR_0", "2022-10-21T21:17:00.013Z"),
+ tuple(configurationName1, "Error 1", "ERR_1", "2022-10-23T21:18:00.022Z"),
+ tuple(configurationName1, "Error 1", "ERR_1", "2022-10-24T21:19:00.020Z")
+ );
+ }
+
+ @Test
+ void testSearchByConfigurationNames() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .configurationNames(List.of(configurationName2, configurationName3))
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(4)
+ .extracting(
+ ScraperRunErrorLog::configurationName, ScraperRunErrorLog::description, ScraperRunErrorLog::errorCode)
+ .contains(
+ tuple(configurationName2, "Error 1", "ERR_1"),
+ tuple(configurationName2, "Error 10", "ERR_10"),
+ tuple(configurationName3, "Error 1", "ERR_1"),
+ tuple(configurationName3, "Error 20", "ERR_20")
+ );
+ }
+
+ @Test
+ void testSearchWithPagination() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .page(1, 2)
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(2)
+ .extracting(
+ ScraperRunErrorLog::configurationName,
+ ScraperRunErrorLog::description,
+ ScraperRunErrorLog::errorCode,
+ scraperRunErrorLog -> scraperRunErrorLog.time().toString())
+ .containsExactly(
+ tuple(configurationName2, "Error 10", "ERR_10", "2022-10-30T22:21:00.011Z"),
+ tuple(configurationName2, "Error 1", "ERR_1", "2022-10-19T21:20:00.019Z")
+ );
+ }
+
+ @Test
+ void testSearchByMultipleConditionQuery() {
+ ErrorLogSearchQuery errorLogSearchQuery = ErrorLogSearchQuery.builder()
+ .errorCodes(List.of("ERR_0", "ERR_1"))
+ .configurationNames(List.of(configurationName3, configurationName2, configurationName1))
+ .fromDate(ZonedDateTime.of(LocalDateTime.of(2022, 1, 1, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .toDate(ZonedDateTime.of(LocalDateTime.of(2022, 12, 30, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .build();
+ assertThat(scraperLogQueryFacade.findAllErrorLog(errorLogSearchQuery))
+ .hasSize(7)
+ .extracting(
+ ScraperRunErrorLog::configurationName, ScraperRunErrorLog::description, ScraperRunErrorLog::errorCode)
+ .contains(
+ tuple(configurationName1, "Error 0", "ERR_0"),
+ tuple(configurationName1, "Error 0", "ERR_0"),
+ tuple(configurationName1, "Error 0", "ERR_0"),
+ tuple(configurationName1, "Error 1", "ERR_1"),
+ tuple(configurationName1, "Error 1", "ERR_1"),
+ tuple(configurationName2, "Error 1", "ERR_1"),
+ tuple(configurationName3, "Error 1", "ERR_1")
+ );
+ }
+ }
+
+ @Nested
+ class StatusLogQueryTest {
+ @Test
+ void testSearchByEmptyQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(12)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .containsExactly(
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 80),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 310, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 3, null),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 2, 0),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 0),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 30, 0),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByStartDateQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .startTimeFrom(ZonedDateTime.of(LocalDateTime.of(2022, 10, 1, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .startTimeTo(ZonedDateTime.of(LocalDateTime.of(2022, 10, 5, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(1)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByFinishDateQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .finishTimeFrom(ZonedDateTime.of(LocalDateTime.of(2022, 10, 1, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .finishTimeTo(ZonedDateTime.of(LocalDateTime.of(2022, 10, 5, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(1)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByNamesQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .configurationNames(List.of(configurationName1, configurationName3))
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(8)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 30, 0),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 0),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByMultipleConditionQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .startTimeFrom(ZonedDateTime.of(LocalDateTime.of(2022, 10, 1, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .startTimeTo(ZonedDateTime.of(LocalDateTime.of(2022, 10, 5, 0, 0), ZoneId.of(ZONE_ID)).toInstant())
+ .configurationNames(List.of(configurationName1, configurationName3))
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(1)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100)
+ );
+ }
+
+
+ @Test
+ void testSearchWithPaginationQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .page(2, 3)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(3)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 310, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 3, null),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 2, 0)
+ );
+ }
+ }
+
+ @Nested
+ class StatusLogErrorQueryTest {
+ @Test
+ void testSearchByErrorCountQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .errorCountGreaterThanOrEqualTo(75)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(7)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 80),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByHasErrorQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .hasErrors(true)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(7)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 80),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByNotHasErrorsQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .hasErrors(false)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(5)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 30, 0),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 2, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 3, null),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 310, 0)
+ );
+ }
+ }
+
+ @Nested
+ class StatusLogScannedEventQueryTest {
+ @Test
+ void testSearchByScannedEventCountQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .scannedEventGreaterThanOrEqualTo(30)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(8)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 30, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 310, 0),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByHasScannedEventQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .hasScannedEvent(true)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(10)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100),
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 30, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 2, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 3, null),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 310, 0),
+ tuple("Scraper1", "2022-10-04T18:43:44.735Z", "2022-10-04T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-10-05T18:43:44.735Z", "2022-10-05T18:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-01T19:43:44.735Z", "2022-11-01T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-02T19:43:44.735Z", "2022-11-02T19:43:58.794Z", 230, 100),
+ tuple("Scraper1", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 230, 100)
+ );
+ }
+
+ @Test
+ void testSearchByNotHasScannedEventQuery() {
+ StatusLogSearchQuery statusLogSearchQuery = StatusLogSearchQuery.builder()
+ .hasScannedEvent(false)
+ .build();
+
+ assertThat(scraperLogQueryFacade.findAllStatusLog(statusLogSearchQuery))
+ .hasSize(2)
+ .extracting(
+ ScraperRunStatusLog::configurationName,
+ scraperRunStatusLog -> scraperRunStatusLog.startTime().toString(),
+ scraperRunStatusLog -> scraperRunStatusLog.finishTime().toString(),
+ ScraperRunStatusLog::scannedEventCount,
+ ScraperRunStatusLog::errorCount
+ )
+ .contains(
+ tuple("Scraper3", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 0),
+ tuple("Scraper2", "2022-11-03T19:43:44.735Z", "2022-11-03T19:43:58.794Z", 0, 80)
+ );
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/src/test/resources/database/scrapers/core/test_data_init.sql b/src/test/resources/database/scrapers/core/test_data_init.sql
new file mode 100644
index 0000000..964d2d1
--- /dev/null
+++ b/src/test/resources/database/scrapers/core/test_data_init.sql
@@ -0,0 +1,32 @@
+DELETE FROM scraper_error_log;
+DELETE FROM scraper_status_log;
+DELETE FROM scraper_config;
+
+INSERT INTO scraper_config (scraper_id, configuration_name, is_active, time_zone) VALUES(100000, 'Scraper3', true, 'Europe/Warsaw');
+INSERT INTO scraper_config (scraper_id, configuration_name, is_active, time_zone) VALUES(200000, 'Scraper2', true, 'Europe/Warsaw');
+INSERT INTO scraper_config (scraper_id, configuration_name, is_active, time_zone) VALUES(300000, 'Scraper1', true, 'Europe/Warsaw');
+INSERT INTO scraper_config (scraper_id, configuration_name, is_active, time_zone) VALUES(400000, 'Scraper4', true, 'Europe/Warsaw');
+INSERT INTO scraper_config (scraper_id, configuration_name, is_active, time_zone) VALUES(500000, 'Scraper5', true, 'Europe/Warsaw');
+
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(101, 'Error 0', 'ERR_0', '2022-10-19 23:15:00.015', 100000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(102, 'Error 0', 'ERR_0', '2022-10-20 23:16:00.011', 100000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(103, 'Error 0', 'ERR_0', '2022-10-21 23:17:00.013', 100000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(104, 'Error 1', 'ERR_1', '2022-10-23 23:18:00.022', 100000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(105, 'Error 1', 'ERR_1', '2022-10-24 23:19:00.020', 100000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(106, 'Error 1', 'ERR_1', '2022-10-19 23:20:00.019', 200000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(107, 'Error 10', 'ERR_10', '2022-10-30 23:21:00.011', 200000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(108, 'Error 1', 'ERR_1', '2022-10-19 23:22:00.015', 300000);
+INSERT INTO scraper_error_log (log_id, description, error_code, error_time, scraper_id) VALUES(109, 'Error 20', 'ERR_20', '2022-10-19 23:23:00.020', 300000);
+
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(201, 100, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 230, 100000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(202, 0, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 30, 100000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(203, 0, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 0, 100000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(204, 0, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 2, 200000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(205, null, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 3, 200000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(206, 0, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 310, 200000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(207, 80, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 0, 200000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(208, 100, '2022-10-04 20:43:44.735', '2022-10-04 20:43:58.794', 230, 300000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(209, 100, '2022-10-05 20:43:44.735', '2022-10-05 20:43:58.794', 230, 300000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(210, 100, '2022-11-01 20:43:44.735', '2022-11-01 20:43:58.794', 230, 300000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(211, 100, '2022-11-02 20:43:44.735', '2022-11-02 20:43:58.794', 230, 300000);
+INSERT INTO scraper_status_log (log_id, error_count, start_time, finish_time, scanned_event_count, scraper_id) VALUES(212, 100, '2022-11-03 20:43:44.735', '2022-11-03 20:43:58.794', 230, 300000);