refactor(exception): modernize exception handling and method signatures across services and controllers (#1670)
* refactor(exception): modernize exception handling and method signatures across services and controllers
  - Remove unnecessary checked exceptions from method signatures
  - Replace IOException with UncheckedIOException where appropriate
  - Update test methods to match new signatures and remove throws declarations
  - Improve converter classes with type safety, logging, and null handling
  - Simplify collection assignment logic in metadata and book services
  - Enhance author and category tokenization in HardcoverParser
  - Add equals/hashCode to ShelfEntity for proper set behavior
  - Add comprehensive unit tests for converter and service classes

  Signed-off-by: Balázs Szücs <bszucs1209@gmail.com>

* refactor(entity): update equality and hashCode for JPA entities to use ID; add comprehensive tests and adjust test data to set IDs

  Signed-off-by: Balázs Szücs <bszucs1209@gmail.com>

---------

Signed-off-by: Balázs Szücs <bszucs1209@gmail.com>
@@ -90,7 +90,7 @@ public class SecurityConfig {
    @Bean
    @Order(2)
-    public SecurityFilterChain koreaderSecurityChain(HttpSecurity http, KoreaderAuthFilter koreaderAuthFilter) throws Exception {
+    public SecurityFilterChain koreaderSecurityChain(HttpSecurity http, KoreaderAuthFilter koreaderAuthFilter) {
        http
                .securityMatcher("/api/koreader/**")
                .csrf(AbstractHttpConfigurer::disable)
@@ -64,7 +64,7 @@ public class AdditionalFileController {
    @PreAuthorize("@securityUtil.canDeleteBook() or @securityUtil.isAdmin()")
    public ResponseEntity<Void> deleteAdditionalFile(
            @PathVariable Long bookId,
-            @PathVariable Long fileId) throws IOException {
+            @PathVariable Long fileId) {
        additionalFileService.deleteAdditionalFile(fileId);
        return ResponseEntity.noContent().build();
    }

@@ -33,7 +33,7 @@ public class FileUploadController {
    public ResponseEntity<?> uploadFile(
            @Parameter(description = "File to upload") @RequestParam("file") MultipartFile file,
            @Parameter(description = "Library ID") @RequestParam("libraryId") long libraryId,
-            @Parameter(description = "Path ID") @RequestParam("pathId") long pathId) throws IOException {
+            @Parameter(description = "Path ID") @RequestParam("pathId") long pathId) {
        if (file.isEmpty()) {
            throw new IllegalArgumentException("Uploaded file is missing.");
        }

@@ -165,7 +165,7 @@ public class KoboController {
    @ApiResponse(responseCode = "200", description = "Book downloaded successfully")
    @GetMapping("/v1/books/{bookId}/download")
    public void downloadBook(
-            @Parameter(description = "Book ID") @PathVariable String bookId, HttpServletResponse response) throws IOException {
+            @Parameter(description = "Book ID") @PathVariable String bookId, HttpServletResponse response) {
        if (StringUtils.isNumeric(bookId)) {
            bookDownloadService.downloadKoboBook(Long.parseLong(bookId), response);
        } else {
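Dropping `throws IOException` from the controller signatures assumes any I/O failure now surfaces as an unchecked exception and is mapped to an HTTP response elsewhere. A minimal sketch of how that mapping could look; the `GlobalExceptionHandler` class name and the 500 mapping are assumptions for illustration, not taken from this commit:

import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

import java.io.UncheckedIOException;

// Hypothetical advice: maps UncheckedIOException thrown by services to a 500 response
// so controllers no longer need `throws IOException` in their signatures.
@RestControllerAdvice
public class GlobalExceptionHandler {

    @ExceptionHandler(UncheckedIOException.class)
    public ResponseEntity<String> handleUncheckedIo(UncheckedIOException e) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                .body("File operation failed: " + e.getMessage());
    }
}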
@@ -7,14 +7,17 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import jakarta.persistence.AttributeConverter;
import jakarta.persistence.Converter;
import lombok.extern.slf4j.Slf4j;

import java.util.List;
import java.util.Set;

@Converter
@Slf4j
public class BookRecommendationIdsListConverter implements AttributeConverter<Set<BookRecommendationLite>, String> {

    private static final ObjectMapper objectMapper = new ObjectMapper();
    private static final TypeReference<Set<BookRecommendationLite>> SET_TYPE_REF = new TypeReference<>() {};

    static {
        objectMapper.registerModule(new JavaTimeModule());

@@ -22,9 +25,13 @@ public class BookRecommendationIdsListConverter implements AttributeConverter<Se
    @Override
    public String convertToDatabaseColumn(Set<BookRecommendationLite> recommendations) {
        if (recommendations == null) {
            return null;
        }
        try {
            return objectMapper.writeValueAsString(recommendations);
        } catch (JsonProcessingException e) {
            log.error("Failed to convert BookRecommendation set to JSON string: {}", recommendations, e);
            throw new RuntimeException("Error converting BookRecommendation list to JSON", e);
        }
    }

@@ -35,9 +42,9 @@ public class BookRecommendationIdsListConverter implements AttributeConverter<Se
            return Set.of();
        }
        try {
-            return objectMapper.readValue(json, new TypeReference<Set<BookRecommendationLite>>() {
-            });
+            return objectMapper.readValue(json, SET_TYPE_REF);
        } catch (Exception e) {
            log.error("Failed to convert JSON string to BookRecommendation set: {}", json, e);
            throw new RuntimeException("Error converting JSON to BookRecommendation list", e);
        }
    }
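The converter changes follow a common Jackson pattern: a `TypeReference` carries the generic type at runtime and is immutable, so it can be created once in a static field and reused instead of allocating an anonymous subclass on every read. A minimal standalone sketch of the same pattern, with illustrative class and field names that are not from the codebase:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Set;

public class TypeReferenceReuseExample {

    private static final ObjectMapper MAPPER = new ObjectMapper();
    // Created once: TypeReference is stateless, so sharing it across calls is safe.
    private static final TypeReference<Set<Long>> ID_SET_TYPE = new TypeReference<>() {};

    public static Set<Long> parseIds(String json) throws Exception {
        // Equivalent to new TypeReference<Set<Long>>() {} on every call, minus the per-call allocation.
        return MAPPER.readValue(json, ID_SET_TYPE);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(parseIds("[1,2,3]")); // [1, 2, 3]
    }
}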
@@ -1,6 +1,7 @@
package com.adityachandel.booklore.convertor;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.persistence.AttributeConverter;
import jakarta.persistence.Converter;

@@ -10,12 +11,13 @@ import java.util.Map;
@Converter
@Slf4j
-public class JpaJsonConverter implements AttributeConverter<Map, String> {
+public class JpaJsonConverter implements AttributeConverter<Map<String, Object>, String> {

    private static final ObjectMapper objectMapper = new ObjectMapper();
    private static final TypeReference<Map<String, Object>> MAP_TYPE_REF = new TypeReference<>() {};

    @Override
-    public String convertToDatabaseColumn(Map attribute) {
+    public String convertToDatabaseColumn(Map<String, Object> attribute) {
        if (attribute == null) {
            return null;
        }

@@ -28,12 +30,12 @@ public class JpaJsonConverter implements AttributeConverter<Map, String> {
    }

    @Override
-    public Map convertToEntityAttribute(String dbData) {
+    public Map<String, Object> convertToEntityAttribute(String dbData) {
        if (dbData == null || dbData.isEmpty()) {
            return null;
        }
        try {
-            return objectMapper.readValue(dbData, Map.class);
+            return objectMapper.readValue(dbData, MAP_TYPE_REF);
        } catch (JsonProcessingException e) {
            log.error("Error converting JSON to map", e);
            return null;
@@ -1,31 +1,43 @@
package com.adityachandel.booklore.convertor;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.persistence.AttributeConverter;
import jakarta.persistence.Converter;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.util.Map;

@Converter(autoApply = true)
-public class MapToStringConverter implements AttributeConverter<Map, String> {
@Slf4j
+public class MapToStringConverter implements AttributeConverter<Map<String, Object>, String> {

    private static final ObjectMapper objectMapper = new ObjectMapper();
    private static final TypeReference<Map<String, Object>> MAP_TYPE_REF = new TypeReference<>() {};

    @Override
-    public String convertToDatabaseColumn(Map attribute) {
+    public String convertToDatabaseColumn(Map<String, Object> attribute) {
        if (attribute == null) {
            return null;
        }
        try {
            return objectMapper.writeValueAsString(attribute);
        } catch (IOException e) {
            log.error("Failed to convert map to JSON string: {}", attribute, e);
            throw new IllegalArgumentException("Error converting Map to String", e);
        }
    }

    @Override
-    public Map convertToEntityAttribute(String dbData) {
+    public Map<String, Object> convertToEntityAttribute(String dbData) {
        if (dbData == null || dbData.trim().isEmpty()) {
            return null;
        }
        try {
-            return objectMapper.readValue(dbData, Map.class);
+            return objectMapper.readValue(dbData, MAP_TYPE_REF);
        } catch (IOException e) {
            log.error("Failed to convert JSON string to map: {}", dbData, e);
            throw new IllegalArgumentException("Error converting String to Map", e);
        }
    }
@@ -28,13 +28,12 @@ public class AuthorEntity {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        AuthorEntity authorEntity = (AuthorEntity) o;
-        return Objects.equals(name, authorEntity.name);
+        if (!(o instanceof AuthorEntity that)) return false;
+        return getId() != null && Objects.equals(getId(), that.getId());
    }

    @Override
    public int hashCode() {
-        return name != null ? name.hashCode() : 0;
+        return getClass().hashCode();
    }
}

@@ -3,6 +3,7 @@ package com.adityachandel.booklore.model.entity;
import jakarta.persistence.Embeddable;
import lombok.*;

import java.io.Serial;
import java.io.Serializable;
import java.util.Objects;

@@ -13,6 +14,9 @@ import java.util.Objects;
@AllArgsConstructor
public class BookMetadataAuthorKey implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    private Long bookId;
    private Long authorId;

@@ -3,6 +3,7 @@ package com.adityachandel.booklore.model.entity;
import jakarta.persistence.Embeddable;
import lombok.*;

import java.io.Serial;
import java.io.Serializable;
import java.util.Objects;

@@ -13,6 +14,9 @@ import java.util.Objects;
@AllArgsConstructor
public class BookMetadataCategoryKey implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    private Long bookId;
    private Long categoryId;

@@ -3,6 +3,7 @@ package com.adityachandel.booklore.model.entity;
import jakarta.persistence.Embeddable;
import lombok.*;

import java.io.Serial;
import java.io.Serializable;
import java.util.Objects;

@@ -13,6 +14,9 @@ import java.util.Objects;
@AllArgsConstructor
public class BookShelfKey implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    private Long bookId;
    private Long shelfId;

@@ -4,6 +4,7 @@ import jakarta.persistence.*;
import lombok.*;

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

@Entity

@@ -30,12 +31,12 @@ public class CategoryEntity {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof CategoryEntity that)) return false;
-        return name != null && name.equalsIgnoreCase(that.name);
+        return getId() != null && Objects.equals(getId(), that.getId());
    }

    @Override
    public int hashCode() {
-        return name != null ? name.toLowerCase().hashCode() : 0;
+        return getClass().hashCode();
    }
}

@@ -4,6 +4,7 @@ import jakarta.persistence.*;
import lombok.*;

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

@Entity

@@ -30,12 +31,12 @@ public class MoodEntity {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof MoodEntity that)) return false;
-        return name != null && name.equalsIgnoreCase(that.name);
+        return getId() != null && Objects.equals(getId(), that.getId());
    }

    @Override
    public int hashCode() {
-        return name != null ? name.toLowerCase().hashCode() : 0;
+        return getClass().hashCode();
    }
}

@@ -13,6 +13,7 @@ import java.util.Set;
@Builder
@AllArgsConstructor
@NoArgsConstructor
@EqualsAndHashCode(of = "id")
@Entity
@Table(name = "shelf")
public class ShelfEntity {

@@ -4,6 +4,7 @@ import jakarta.persistence.*;
import lombok.*;

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

@Entity

@@ -30,11 +31,11 @@ public class TagEntity {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof TagEntity that)) return false;
-        return name != null && name.equalsIgnoreCase(that.name);
+        return getId() != null && Objects.equals(getId(), that.getId());
    }

    @Override
    public int hashCode() {
-        return name != null ? name.toLowerCase().hashCode() : 0;
+        return getClass().hashCode();
    }
}
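The entity changes standardize on identifier-based equality: two entities are equal only when both have the same non-null database ID, and hashCode() is constant per class, so the hash does not change when an ID is assigned at persist time. A minimal standalone sketch of the same pattern, written without Lombok for clarity; the class and field names are illustrative only:

import java.util.Objects;

// Illustrative JPA-style entity using ID-based equals/hashCode, mirroring the pattern above.
public class SampleEntity {

    private Long id;
    private String name;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }
    public void setName(String name) { this.name = name; }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof SampleEntity that)) return false;
        // Unsaved entities (null ID) are never equal to anything but themselves.
        return getId() != null && Objects.equals(getId(), that.getId());
    }

    @Override
    public int hashCode() {
        // Constant per class: the hash stays stable even if the ID is assigned later,
        // so an entity added to a HashSet before persisting can still be found afterwards.
        return getClass().hashCode();
    }
}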
@@ -86,7 +86,8 @@ public class AppSettingService {
        builder.remoteAuthEnabled(appProperties.getRemoteAuth().isEnabled());

        builder.defaultMetadataRefreshOptions(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.QUICK_BOOK_MATCH, MetadataRefreshOptions.class, settingPersistenceHelper.getDefaultMetadataRefreshOptions(), true));
-        builder.libraryMetadataRefreshOptions(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.LIBRARY_METADATA_REFRESH_OPTIONS, new TypeReference<List<MetadataRefreshOptions>>() {}, List.of(), true));
+        builder.libraryMetadataRefreshOptions(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.LIBRARY_METADATA_REFRESH_OPTIONS, new TypeReference<>() {
+        }, List.of(), true));
        builder.oidcProviderDetails(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.OIDC_PROVIDER_DETAILS, OidcProviderDetails.class, null, false));
        builder.oidcAutoProvisionDetails(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.OIDC_AUTO_PROVISION_DETAILS, OidcAutoProvisionDetails.class, null, false));
        builder.metadataProviderSettings(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.METADATA_PROVIDER_SETTINGS, MetadataProviderSettings.class, settingPersistenceHelper.getDefaultMetadataProviderSettings(), true));
@@ -23,6 +23,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

@@ -124,10 +125,12 @@ public class BookDownloadService {
        response.setHeader(HttpHeaders.CONTENT_DISPOSITION, contentDisposition);
    }

-    private void streamFileToResponse(File file, HttpServletResponse response) throws IOException {
+    private void streamFileToResponse(File file, HttpServletResponse response) {
        try (InputStream in = Files.newInputStream(file.toPath())) {
            in.transferTo(response.getOutputStream());
            response.getOutputStream().flush();
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to stream file to response", e);
        }
    }
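The pattern above is applied throughout the commit: the checked IOException is caught at the point where the I/O happens and rethrown as java.io.UncheckedIOException, so callers further up the stack no longer need throws clauses. A minimal standalone sketch of the same idea; the method and file names are illustrative:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class UncheckedIoExample {

    // Reads a file without forcing callers to declare `throws IOException`.
    static String readConfig(Path path) {
        try {
            return Files.readString(path);
        } catch (IOException e) {
            // Preserve the original cause; callers can still unwrap it via getCause().
            throw new UncheckedIOException("Failed to read " + path, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(readConfig(Path.of("example.properties")));
    }
}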
@@ -500,11 +500,7 @@ public class BookService {
        List<ShelfEntity> shelvesToAssign = shelfRepository.findAllById(shelfIdsToAssign);
        for (BookEntity bookEntity : bookEntities) {
            bookEntity.getShelves().removeIf(shelf -> shelfIdsToUnassign.contains(shelf.getId()));
-            for (ShelfEntity shelf : shelvesToAssign) {
-                if (!bookEntity.getShelves().contains(shelf)) {
-                    bookEntity.getShelves().add(shelf);
-                }
-            }
+            bookEntity.getShelves().addAll(shelvesToAssign);
        }
        bookRepository.saveAll(bookEntities);

@@ -604,7 +600,7 @@ public class BookService {
            : ResponseEntity.status(HttpStatus.MULTI_STATUS).body(response);
    }

-    public void deleteEmptyParentDirsUpToLibraryFolders(Path currentDir, Set<Path> libraryRoots) throws IOException {
+    public void deleteEmptyParentDirsUpToLibraryFolders(Path currentDir, Set<Path> libraryRoots) {
        Path dir = currentDir;
        Set<String> ignoredFilenames = Set.of(".DS_Store", "Thumbs.db");
        dir = dir.toAbsolutePath().normalize();
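Replacing the contains-then-add loop with a plain addAll stays duplicate-free only because shelves is a Set and ShelfEntity now has ID-based equals/hashCode (the @EqualsAndHashCode(of = "id") added above). A small self-contained sketch of the behaviour this relies on; the Shelf record here is an illustrative stand-in, not the real entity:

import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

public class SetAddAllExample {

    // Minimal ID-keyed value type standing in for ShelfEntity (illustrative only).
    record Shelf(Long id, String name) {
        @Override
        public boolean equals(Object o) {
            return o instanceof Shelf that && id != null && Objects.equals(id, that.id);
        }
        @Override
        public int hashCode() {
            return Shelf.class.hashCode();
        }
    }

    public static void main(String[] args) {
        Set<Shelf> shelves = new HashSet<>();
        shelves.add(new Shelf(1L, "Favorites"));

        // addAll skips entries whose ID is already present, so no contains() check is needed.
        shelves.addAll(List.of(new Shelf(1L, "Favorites (renamed)"), new Shelf(2L, "To Read")));
        System.out.println(shelves.size()); // 2
    }
}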
@@ -34,7 +34,7 @@ import com.adityachandel.booklore.util.FileUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.core.io.PathResource;
+import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

@@ -97,7 +97,7 @@ public class BookDropService {
        String coverPath = Paths.get(appProperties.getPathConfig(), "bookdrop_temp", bookdropId + ".jpg").toString();
        File coverFile = new File(coverPath);
        if (coverFile.exists() && coverFile.isFile()) {
-            return new PathResource(coverFile.toPath());
+            return new FileSystemResource(coverFile.toPath());
        } else {
            return null;
        }

@@ -442,7 +442,7 @@ public class BookDropService {
            File targetFile,
            LibraryEntity library,
            LibraryPathEntity path,
-            BookMetadata metadata) throws Exception {
+            BookMetadata metadata) {
        FileProcessResult fileProcessResult = processFileInLibrary(targetFile.getName(), library, path, targetFile,
                BookFileExtension.fromFileName(bookdropFile.getFileName())
                        .orElseThrow(() -> ApiError.INVALID_FILE_FORMAT.createException("Unsupported file extension"))
@@ -106,7 +106,7 @@ public class FileMoveHelper {
        return Paths.get(path, newRelativePathStr);
    }

-    public void deleteEmptyParentDirsUpToLibraryFolders(Path currentDir, Set<Path> libraryRoots) throws IOException {
+    public void deleteEmptyParentDirsUpToLibraryFolders(Path currentDir, Set<Path> libraryRoots) {
        Path dir = currentDir;
        Set<String> ignoredFilenames = Set.of(".DS_Store", "Thumbs.db");
        dir = dir.toAbsolutePath().normalize();
@@ -19,6 +19,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;

@@ -39,13 +40,19 @@ public class LibraryProcessingService {
    private final EntityManager entityManager;

    @Transactional
-    public void processLibrary(long libraryId) throws IOException {
+    public void processLibrary(long libraryId) {
        LibraryEntity libraryEntity = libraryRepository.findById(libraryId).orElseThrow(() -> ApiError.LIBRARY_NOT_FOUND.createException(libraryId));
        notificationService.sendMessage(Topic.LOG, LogNotification.info("Started processing library: " + libraryEntity.getName()));
        LibraryFileProcessor processor = fileProcessorRegistry.getProcessor(libraryEntity);
-        List<LibraryFile> libraryFiles = libraryFileHelper.getLibraryFiles(libraryEntity, processor);
-        processor.processLibraryFiles(libraryFiles, libraryEntity);
-        notificationService.sendMessage(Topic.LOG, LogNotification.info("Finished processing library: " + libraryEntity.getName()));
+        try {
+            List<LibraryFile> libraryFiles = libraryFileHelper.getLibraryFiles(libraryEntity, processor);
+            processor.processLibraryFiles(libraryFiles, libraryEntity);
+            notificationService.sendMessage(Topic.LOG, LogNotification.info("Finished processing library: " + libraryEntity.getName()));
+        } catch (IOException e) {
+            log.error("Failed to process library {}: {}", libraryEntity.getName(), e.getMessage(), e);
+            notificationService.sendMessage(Topic.LOG, LogNotification.error("Failed to process library: " + libraryEntity.getName() + " - " + e.getMessage()));
+            throw new UncheckedIOException("Library processing failed", e);
+        }
    }

    @Transactional
@@ -130,8 +130,6 @@ public class LibraryService {
                libraryProcessingService.processLibrary(libraryId);
            } catch (InvalidDataAccessApiUsageException e) {
                log.debug("InvalidDataAccessApiUsageException - Library id: {}", libraryId);
-            } catch (IOException e) {
-                log.error("Error while parsing library books", e);
            }
            log.info("Parsing task completed!");
        });

@@ -171,8 +169,6 @@ public class LibraryService {
                libraryProcessingService.processLibrary(libraryId);
            } catch (InvalidDataAccessApiUsageException e) {
                log.debug("InvalidDataAccessApiUsageException - Library id: {}", libraryId);
-            } catch (IOException e) {
-                log.error("Error while parsing library books", e);
            }
            log.info("Parsing task completed!");
        });
@@ -111,11 +111,7 @@ public class MetadataManagementService {

        for (BookMetadataEntity metadata : booksWithOldAuthor) {
            metadata.getAuthors().remove(oldAuthor);
-            for (AuthorEntity targetAuthor : targetAuthors) {
-                if (!metadata.getAuthors().contains(targetAuthor)) {
-                    metadata.getAuthors().add(targetAuthor);
-                }
-            }
+            metadata.getAuthors().addAll(targetAuthors);
        }

        bookMetadataRepository.saveAll(booksWithOldAuthor);

@@ -156,11 +152,7 @@ public class MetadataManagementService {

        for (BookMetadataEntity metadata : booksWithOldCategory) {
            metadata.getCategories().remove(oldCategory);
-            for (CategoryEntity targetCategory : targetCategories) {
-                if (!metadata.getCategories().contains(targetCategory)) {
-                    metadata.getCategories().add(targetCategory);
-                }
-            }
+            metadata.getCategories().addAll(targetCategories);
        }

        bookMetadataRepository.saveAll(booksWithOldCategory);

@@ -201,11 +193,7 @@ public class MetadataManagementService {

        for (BookMetadataEntity metadata : booksWithOldMood) {
            metadata.getMoods().remove(oldMood);
-            for (MoodEntity targetMood : targetMoods) {
-                if (!metadata.getMoods().contains(targetMood)) {
-                    metadata.getMoods().add(targetMood);
-                }
-            }
+            metadata.getMoods().addAll(targetMoods);
        }

        bookMetadataRepository.saveAll(booksWithOldMood);

@@ -246,11 +234,7 @@ public class MetadataManagementService {

        for (BookMetadataEntity metadata : booksWithOldTag) {
            metadata.getTags().remove(oldTag);
-            for (TagEntity targetTag : targetTags) {
-                if (!metadata.getTags().contains(targetTag)) {
-                    metadata.getTags().add(targetTag);
-                }
-            }
+            metadata.getTags().addAll(targetTags);
        }

        bookMetadataRepository.saveAll(booksWithOldTag);
@@ -672,7 +672,7 @@ public class CbxMetadataExtractor implements FileMetadataExtractor {
    }

    // ==== 7z (.cb7) helpers ====
-    private SevenZArchiveEntry findSevenZComicInfoEntry(SevenZFile sevenZ) throws IOException {
+    private SevenZArchiveEntry findSevenZComicInfoEntry(SevenZFile sevenZ) {
        for (SevenZArchiveEntry e : sevenZ.getEntries()) {
            if (e == null || e.isDirectory()) continue;
            String name = e.getName();

@@ -683,7 +683,7 @@ public class CbxMetadataExtractor implements FileMetadataExtractor {
        return null;
    }

-    private SevenZArchiveEntry findSevenZEntryByName(SevenZFile sevenZ, String imageName) throws IOException {
+    private SevenZArchiveEntry findSevenZEntryByName(SevenZFile sevenZ, String imageName) {
        if (imageName == null) return null;
        for (SevenZArchiveEntry e : sevenZ.getEntries()) {
            if (e == null || e.isDirectory()) continue;

@@ -696,7 +696,7 @@ public class CbxMetadataExtractor implements FileMetadataExtractor {
        return null;
    }

-    private SevenZArchiveEntry findSevenZImageEntryByIndex(SevenZFile sevenZ, int index) throws IOException {
+    private SevenZArchiveEntry findSevenZImageEntryByIndex(SevenZFile sevenZ, int index) {
        int count = 0;
        for (SevenZArchiveEntry e : sevenZ.getEntries()) {
            if (!e.isDirectory() && isImageEntry(e.getName())) {

@@ -707,7 +707,7 @@ public class CbxMetadataExtractor implements FileMetadataExtractor {
        return null;
    }

-    private SevenZArchiveEntry findFirstAlphabeticalSevenZImageEntry(SevenZFile sevenZ) throws IOException {
+    private SevenZArchiveEntry findFirstAlphabeticalSevenZImageEntry(SevenZFile sevenZ) {
        List<SevenZArchiveEntry> images = new ArrayList<>();
        for (SevenZArchiveEntry e : sevenZ.getEntries()) {
            if (!e.isDirectory() && isImageEntry(e.getName())) {

@@ -741,7 +741,7 @@ public class CbxMetadataExtractor implements FileMetadataExtractor {
        return images;
    }

-    private java.util.List<SevenZArchiveEntry> listSevenZImageEntries(SevenZFile sevenZ) throws IOException {
+    private java.util.List<SevenZArchiveEntry> listSevenZImageEntries(SevenZFile sevenZ) {
        java.util.List<SevenZArchiveEntry> images = new java.util.ArrayList<>();
        for (SevenZArchiveEntry e : sevenZ.getEntries()) {
            if (!e.isDirectory() && isImageEntry(e.getName())) images.add(e);
@@ -137,14 +137,14 @@ public class EpubMetadataExtractor implements FileMetadataExtractor {

        if ("calibre:pages".equals(name) || "pagecount".equals(name) || "schema:pagecount".equals(prop) || "media:pagecount".equals(prop) || "booklore:page_count".equals(prop)) {
            safeParseInt(content, builderMeta::pageCount);
-        } else if (name.equals("calibre:user_metadata:#pagecount")) {
+        } else if ("calibre:user_metadata:#pagecount".equals(name)) {
            try {
                JSONObject jsonroot = new JSONObject(content);
                Object value = jsonroot.opt("#value#");
                safeParseInt(String.valueOf(value), builderMeta::pageCount);
            } catch (JSONException ignored) {
            }
-        } else if (prop.equals("calibre:user_metadata")) {
+        } else if ("calibre:user_metadata".equals(prop)) {
            try {
                JSONObject jsonroot = new JSONObject(content);
                JSONObject pages = jsonroot.getJSONObject("#pagecount");
@@ -236,7 +236,6 @@ public class DoubanBookParser implements BookParser {
                }
            } catch (Exception e) {
                log.warn("Error parsing search result item: {}", e.getMessage());
-                continue;
            }
        }
@@ -20,6 +20,7 @@ import java.util.Locale;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Slf4j
@Service

@@ -59,7 +60,8 @@ public class HardcoverParser implements BookParser {
        if (doc.getAuthorNames() == null || doc.getAuthorNames().isEmpty()) return false;

        List<String> actualAuthorTokens = doc.getAuthorNames().stream()
-                .flatMap(name -> List.of(WHITESPACE_PATTERN.split(name.toLowerCase())).stream())
+                .map(String::toLowerCase)
+                .flatMap(WHITESPACE_PATTERN::splitAsStream)
                .toList();
        List<String> searchAuthorTokens = List.of(WHITESPACE_PATTERN.split(searchAuthor.toLowerCase()));
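The tokenization change swaps the intermediate List.of(split(...)).stream() for Pattern.splitAsStream, which streams tokens directly without building a temporary list. A minimal standalone sketch of the two forms side by side; the pattern name mirrors the one in the parser, the sample data is illustrative:

import java.util.List;
import java.util.regex.Pattern;

public class TokenizeExample {

    private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+");

    public static void main(String[] args) {
        List<String> names = List.of("Ursula K. Le Guin", "Terry PRATCHETT");

        // Old style: split to an array, wrap it in a list, then stream it.
        List<String> viaList = names.stream()
                .flatMap(name -> List.of(WHITESPACE_PATTERN.split(name.toLowerCase())).stream())
                .toList();

        // New style: lowercase first, then stream tokens straight from the pattern.
        List<String> viaSplitAsStream = names.stream()
                .map(String::toLowerCase)
                .flatMap(WHITESPACE_PATTERN::splitAsStream)
                .toList();

        System.out.println(viaList.equals(viaSplitAsStream)); // true
    }
}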
@@ -46,7 +46,6 @@ public class MonitoringTask {
                log.warn("WatchKey is no longer valid: {}", directory);
                // Clean up but DO NOT break
                eventPublisher.publishEvent(new WatchKeyInvalidatedEvent(this, directory));
-                continue;
            }
        }
    } catch (InterruptedException e) {
@@ -86,7 +86,7 @@ public class FileUploadService {
    }

    @Transactional
-    public AdditionalFile uploadAdditionalFile(Long bookId, MultipartFile file, AdditionalFileType additionalFileType, String description) throws IOException {
+    public AdditionalFile uploadAdditionalFile(Long bookId, MultipartFile file, AdditionalFileType additionalFileType, String description) {
        final BookEntity book = findBookById(bookId);
        final String originalFileName = getValidatedFileName(file);

@@ -225,7 +225,7 @@ public class FileUploadService {
        }
    }

-    private BookMetadata extractMetadata(BookFileExtension fileExt, File file) throws IOException {
+    private BookMetadata extractMetadata(BookFileExtension fileExt, File file) {
        return metadataExtractorFactory.extractMetadata(fileExt, file);
    }
@@ -9,6 +9,7 @@ import org.junit.jupiter.api.Test;
import java.time.LocalDate;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

import static org.assertj.core.api.Assertions.assertThat;

@@ -33,8 +34,10 @@ class PathPatternResolverTest {
        if (authors == null) {
            when(metadata.getAuthors()).thenReturn(null);
        } else {
            AtomicLong idCounter = new AtomicLong(1);
            LinkedHashSet<AuthorEntity> authorEntities = authors.stream().map(name -> {
                AuthorEntity a = new AuthorEntity();
                a.setId(idCounter.getAndIncrement());
                a.setName(name);
                return a;
            }).collect(Collectors.toCollection(LinkedHashSet::new));
@@ -0,0 +1,90 @@
package com.adityachandel.booklore.convertor;

import com.adityachandel.booklore.model.dto.BookRecommendationLite;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Set;

import static org.junit.jupiter.api.Assertions.*;

class BookRecommendationIdsListConverterTest {

    private BookRecommendationIdsListConverter converter;

    @BeforeEach
    void setUp() {
        converter = new BookRecommendationIdsListConverter();
    }

    @Test
    void convertToDatabaseColumn_shouldSerializeSetToJsonString() {
        BookRecommendationLite rec1 = new BookRecommendationLite(1L, 0.95);
        BookRecommendationLite rec2 = new BookRecommendationLite(2L, 0.87);

        Set<BookRecommendationLite> input = Set.of(rec1, rec2);

        String result = converter.convertToDatabaseColumn(input);

        assertNotNull(result);
        assertTrue(result.contains("\"b\":1"));
        assertTrue(result.contains("\"s\":0.95"));
        assertTrue(result.contains("\"b\":2"));
        assertTrue(result.contains("\"s\":0.87"));
    }

    @Test
    void convertToDatabaseColumn_withNull_shouldReturnNull() {
        String result = converter.convertToDatabaseColumn(null);

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_shouldDeserializeJsonStringToSet() {
        String json = "[{\"b\":1,\"s\":0.95},{\"b\":2,\"s\":0.87}]";

        Set<BookRecommendationLite> result = converter.convertToEntityAttribute(json);

        assertNotNull(result);
        assertEquals(2, result.size());

        BookRecommendationLite book1 = result.stream()
                .filter(b -> b.getB() == 1L)
                .findFirst()
                .orElse(null);
        assertNotNull(book1);
        assertEquals(0.95, book1.getS(), 0.001);

        BookRecommendationLite book2 = result.stream()
                .filter(b -> b.getB() == 2L)
                .findFirst()
                .orElse(null);
        assertNotNull(book2);
        assertEquals(0.87, book2.getS(), 0.001);
    }

    @Test
    void convertToEntityAttribute_withNull_shouldReturnEmptySet() {
        Set<BookRecommendationLite> result = converter.convertToEntityAttribute(null);

        assertNotNull(result);
        assertTrue(result.isEmpty());
    }

    @Test
    void convertToEntityAttribute_withEmptyString_shouldReturnEmptySet() {
        Set<BookRecommendationLite> result = converter.convertToEntityAttribute("");

        assertNotNull(result);
        assertTrue(result.isEmpty());
    }

    @Test
    void convertToEntityAttribute_withBlankString_shouldReturnEmptySet() {
        Set<BookRecommendationLite> result = converter.convertToEntityAttribute(" ");

        assertNotNull(result);
        assertTrue(result.isEmpty());
    }
}
@@ -0,0 +1,78 @@
package com.adityachandel.booklore.convertor;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Map;

import static org.junit.jupiter.api.Assertions.*;

class JpaJsonConverterTest {

    private JpaJsonConverter converter;

    @BeforeEach
    void setUp() {
        converter = new JpaJsonConverter();
    }

    @Test
    void convertToDatabaseColumn_shouldSerializeMapToJsonString() {
        Map<String, Object> input = Map.of(
                "key1", "value1",
                "key2", 42,
                "key3", true
        );

        String result = converter.convertToDatabaseColumn(input);

        assertNotNull(result);
        assertTrue(result.contains("\"key1\":\"value1\""));
        assertTrue(result.contains("\"key2\":42"));
        assertTrue(result.contains("\"key3\":true"));
    }

    @Test
    void convertToDatabaseColumn_withNull_shouldReturnNull() {
        String result = converter.convertToDatabaseColumn(null);

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_shouldDeserializeJsonStringToMap() {
        String json = "{\"key1\":\"value1\",\"key2\":42,\"key3\":true}";
        Map<String, Object> expected = Map.of(
                "key1", "value1",
                "key2", 42,
                "key3", true
        );

        Map<String, Object> result = converter.convertToEntityAttribute(json);

        assertNotNull(result);
        assertEquals(expected, result);
    }

    @Test
    void convertToEntityAttribute_withNull_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute(null);

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_withEmptyString_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute("");

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_withBlankString_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute(" ");

        assertNull(result);
    }
}
@@ -0,0 +1,76 @@
package com.adityachandel.booklore.convertor;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Map;

import static org.junit.jupiter.api.Assertions.*;

class MapToStringConverterTest {

    private MapToStringConverter converter;

    @BeforeEach
    void setUp() {
        converter = new MapToStringConverter();
    }

    @Test
    void convertToDatabaseColumn_shouldSerializeMapToJsonString() {
        Map<String, Object> input = Map.of(
                "title", "Test Book",
                "author", "Test Author",
                "year", 2023
        );

        String result = converter.convertToDatabaseColumn(input);
        assertNotNull(result);
        assertTrue(result.contains("\"title\":\"Test Book\""));
        assertTrue(result.contains("\"author\":\"Test Author\""));
        assertTrue(result.contains("\"year\":2023"));
    }

    @Test
    void convertToDatabaseColumn_withNull_shouldReturnNull() {
        String result = converter.convertToDatabaseColumn(null);

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_shouldDeserializeJsonStringToMap() {
        String json = "{\"title\":\"Test Book\",\"author\":\"Test Author\",\"year\":2023}";
        Map<String, Object> expected = Map.of(
                "title", "Test Book",
                "author", "Test Author",
                "year", 2023
        );

        Map<String, Object> result = converter.convertToEntityAttribute(json);

        assertNotNull(result);
        assertEquals(expected, result);
    }

    @Test
    void convertToEntityAttribute_withNull_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute(null);

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_withEmptyString_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute("");

        assertNull(result);
    }

    @Test
    void convertToEntityAttribute_withBlankString_shouldReturnNull() {
        Map<String, Object> result = converter.convertToEntityAttribute(" ");

        assertNull(result);
    }
}
@@ -0,0 +1,100 @@
package com.adityachandel.booklore.model.entity;

import org.junit.jupiter.api.Test;
import java.util.HashSet;
import java.util.Set;

import static org.junit.jupiter.api.Assertions.*;

class EntityEqualityTest {

    @Test
    void authorEntity_shouldBeEqual_whenIdsAreSame() {
        // 1. Same ID, Different Names -> Should be EQUAL
        AuthorEntity a1 = AuthorEntity.builder().id(1L).name("Author A").build();
        AuthorEntity a2 = AuthorEntity.builder().id(1L).name("Author B").build();

        assertEquals(a1, a2, "Entities with same ID should be equal");
        assertEquals(a1.hashCode(), a2.hashCode(), "HashCodes must match for equal objects");
    }

    @Test
    void authorEntity_shouldNotBeEqual_whenIdsAreDifferent() {
        // 2. Different ID, Same Name -> Should be DIFFERENT
        AuthorEntity a1 = AuthorEntity.builder().id(1L).name("Author A").build();
        AuthorEntity a2 = AuthorEntity.builder().id(2L).name("Author A").build();

        assertNotEquals(a1, a2, "Entities with different IDs should not be equal");
    }

    @Test
    void set_shouldDeduplicate_basedOnId() {
        // 3. Set behavior test (The most important practical test)
        Set<AuthorEntity> set = new HashSet<>();

        AuthorEntity a1 = AuthorEntity.builder().id(100L).name("John").build();
        AuthorEntity a2 = AuthorEntity.builder().id(100L).name("John Updated").build(); // Same ID
        AuthorEntity a3 = AuthorEntity.builder().id(200L).name("Jane").build();

        set.add(a1);
        set.add(a2); // Should replace a1 or be ignored depending on Set impl, but size should stay 1
        set.add(a3);

        assertEquals(2, set.size(), "Set should contain only 2 unique entities based on ID");
        assertTrue(set.contains(a1));
        assertTrue(set.contains(a3));
    }

    @Test
    void categoryEntity_shouldBeEqual_whenIdsAreSame() {
        CategoryEntity c1 = CategoryEntity.builder().id(1L).name("Fiction").build();
        CategoryEntity c2 = CategoryEntity.builder().id(1L).name("Non-Fiction").build();

        assertEquals(c1, c2);
        assertEquals(c1.hashCode(), c2.hashCode());
    }

    @Test
    void moodEntity_shouldBeEqual_whenIdsAreSame() {
        MoodEntity m1 = MoodEntity.builder().id(1L).name("Happy").build();
        MoodEntity m2 = MoodEntity.builder().id(1L).name("Sad").build();

        assertEquals(m1, m2);
        assertEquals(m1.hashCode(), m2.hashCode());
    }

    @Test
    void tagEntity_shouldBeEqual_whenIdsAreSame() {
        TagEntity t1 = TagEntity.builder().id(1L).name("Adventure").build();
        TagEntity t2 = TagEntity.builder().id(1L).name("Mystery").build();

        assertEquals(t1, t2);
        assertEquals(t1.hashCode(), t2.hashCode());
    }

    @Test
    void unsavedEntities_withNullIds_shouldNotBeEqual() {
        AuthorEntity a1 = AuthorEntity.builder().name("John").build();
        AuthorEntity a2 = AuthorEntity.builder().name("Jane").build();

        assertNotEquals(a1, a2, "Unsaved entities with null IDs should not be equal");
    }

    @Test
    void sameInstance_shouldBeEqual() {
        AuthorEntity a1 = AuthorEntity.builder().id(1L).name("John").build();

        assertEquals(a1, a1, "Same instance should be equal to itself");
    }

    @Test
    void testEntityFactory_createsEntitiesWithUniqueIds() {
        AuthorEntity a1 = TestEntityFactory.createAuthor("Author 1");
        AuthorEntity a2 = TestEntityFactory.createAuthor("Author 2");

        assertNotNull(a1.getId(), "Factory should assign an ID");
        assertNotNull(a2.getId(), "Factory should assign an ID");
        assertNotEquals(a1.getId(), a2.getId(), "Factory should assign unique IDs");
        assertNotEquals(a1, a2, "Entities with different IDs should not be equal");
    }
}
@@ -0,0 +1,36 @@
package com.adityachandel.booklore.model.entity;

import java.util.concurrent.atomic.AtomicLong;

public class TestEntityFactory {

    private static final AtomicLong idCounter = new AtomicLong(1);

    public static AuthorEntity createAuthor(String name) {
        return AuthorEntity.builder()
                .id(idCounter.getAndIncrement())
                .name(name)
                .build();
    }

    public static CategoryEntity createCategory(String name) {
        return CategoryEntity.builder()
                .id(idCounter.getAndIncrement())
                .name(name)
                .build();
    }

    public static MoodEntity createMood(String name) {
        return MoodEntity.builder()
                .id(idCounter.getAndIncrement())
                .name(name)
                .build();
    }

    public static TagEntity createTag(String name) {
        return TagEntity.builder()
                .id(idCounter.getAndIncrement())
                .name(name)
                .build();
    }
}
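Because equality is now ID-based, test fixtures that reuse the same ID would collapse into a single element in a Set, while fixtures with null IDs are never equal to anything else; the factory above sidesteps both by handing out unique IDs from an AtomicLong. A short illustrative usage sketch (the class name and printed value are assumptions for the example, not part of the commit):

import java.util.HashSet;
import java.util.Set;

class TestEntityFactoryUsageExample {
    public static void main(String[] args) {
        Set<AuthorEntity> authors = new HashSet<>();
        authors.add(TestEntityFactory.createAuthor("Author 1"));
        authors.add(TestEntityFactory.createAuthor("Author 2"));

        // Distinct IDs -> distinct set elements, even though both are unsaved test objects.
        System.out.println(authors.size()); // 2
    }
}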
@@ -162,7 +162,7 @@ class AdditionalFileServiceTest {
    }

    @Test
-    void deleteAdditionalFile_WhenFileExists_ShouldDeleteSuccessfully() throws IOException {
+    void deleteAdditionalFile_WhenFileExists_ShouldDeleteSuccessfully() {
        Long fileId = 1L;
        Path parentPath = fileEntity.getFullFilePath().getParent();

@@ -181,7 +181,7 @@ class AdditionalFileServiceTest {
    }

    @Test
-    void deleteAdditionalFile_WhenIOExceptionOccurs_ShouldStillDeleteFromRepository() throws IOException {
+    void deleteAdditionalFile_WhenIOExceptionOccurs_ShouldStillDeleteFromRepository() {
        Long fileId = 1L;
        Path parentPath = fileEntity.getFullFilePath().getParent();

@@ -255,7 +255,7 @@ class AdditionalFileServiceTest {
    }

    @Test
-    void downloadAdditionalFile_WhenFileExists_ShouldReturnFileResource() throws IOException {
+    void downloadAdditionalFile_WhenFileExists_ShouldReturnFileResource() throws Exception {
        Long fileId = 1L;
        when(additionalFileRepository.findById(fileId)).thenReturn(Optional.of(fileEntity));

@@ -276,7 +276,7 @@ class AdditionalFileServiceTest {
    }

    @Test
-    void downloadAdditionalFile_WhenEntityRelationshipsMissing_ShouldThrowIllegalStateException() throws IOException {
+    void downloadAdditionalFile_WhenEntityRelationshipsMissing_ShouldThrowIllegalStateException() {
        Long fileId = 1L;
        BookAdditionalFileEntity invalidEntity = new BookAdditionalFileEntity();
        invalidEntity.setId(fileId);
@@ -0,0 +1,51 @@
package com.adityachandel.booklore.service;

import com.adityachandel.booklore.model.entity.BookEntity;
import com.adityachandel.booklore.model.entity.ShelfEntity;
import org.junit.jupiter.api.Test;

import java.util.HashSet;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

class BookServiceTest {

    @Test
    void bookEntityShelves_shouldPreventDuplicateAssignments() {
        BookEntity book = new BookEntity();
        book.setShelves(new HashSet<>());

        ShelfEntity shelf1 = new ShelfEntity();
        shelf1.setId(1L);
        shelf1.setName("Test Shelf");

        book.getShelves().add(shelf1);
        book.getShelves().add(shelf1);
        book.getShelves().add(shelf1);

        assertEquals(1, book.getShelves().size());
        assertTrue(book.getShelves().contains(shelf1));
    }

    @Test
    void bookEntityShelves_shouldAllowMultipleDifferentShelves() {
        BookEntity book = new BookEntity();
        book.setShelves(new HashSet<>());

        ShelfEntity shelf1 = new ShelfEntity();
        shelf1.setId(1L);
        shelf1.setName("Fiction");

        ShelfEntity shelf2 = new ShelfEntity();
        shelf2.setId(2L);
        shelf2.setName("Science Fiction");

        book.getShelves().add(shelf1);
        book.getShelves().add(shelf2);

        assertEquals(2, book.getShelves().size());
        assertTrue(book.getShelves().contains(shelf1));
        assertTrue(book.getShelves().contains(shelf2));
    }
}
@@ -404,7 +404,7 @@ class KoboReadingStateServiceTest {
        assertNotNull(result.getReadingStates());
        assertEquals(1, result.getReadingStates().size());

-        KoboReadingState state = result.getReadingStates().get(0);
+        KoboReadingState state = result.getReadingStates().getFirst();
        assertEquals(entitlementId, state.getEntitlementId());
        assertNotNull(state.getCurrentBookmark());
        assertEquals(75, state.getCurrentBookmark().getProgressPercent());

@@ -467,7 +467,7 @@ class KoboReadingStateServiceTest {

        assertNotNull(result);
        assertEquals(1, result.getReadingStates().size());
-        assertEquals(entitlementId, result.getReadingStates().get(0).getEntitlementId());
+        assertEquals(entitlementId, result.getReadingStates().getFirst().getEntitlementId());
        verify(progressRepository, never()).findByUserIdAndBookId(anyLong(), anyLong());
    }
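getFirst() is the SequencedCollection accessor introduced in Java 21; on a List it returns the same element as get(0) but reads as intent rather than an index, and it throws NoSuchElementException instead of IndexOutOfBoundsException when the list is empty. A tiny standalone sketch, assuming a Java 21+ toolchain (which this change implies):

import java.util.List;

public class GetFirstExample {
    public static void main(String[] args) {
        List<String> states = List.of("reading", "finished");

        // Same element, two spellings; getFirst() needs Java 21+.
        System.out.println(states.get(0));      // reading
        System.out.println(states.getFirst());  // reading
    }
}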
@@ -337,7 +337,7 @@ class BookDropServiceTest {
    }

    @Test
-    void discardSelectedFiles_WhenSelectAllFalse_ShouldDeleteOnlySelected() throws IOException {
+    void discardSelectedFiles_WhenSelectAllFalse_ShouldDeleteOnlySelected() {
        List<Long> selectedIds = List.of(1L);
        when(bookdropFileRepository.findAllById(selectedIds)).thenReturn(List.of(bookdropFileEntity));
        when(appProperties.getBookdropFolder()).thenReturn(tempDir.toString());
@@ -85,7 +85,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldProcessAllBooks_whenLibraryHasBooks() throws Exception {
+    void handleRescanOptions_shouldProcessAllBooks_whenLibraryHasBooks() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        BookEntity book2 = createBookEntity(2L, "book2.pdf", BookFileType.PDF);
        library.getBookEntities().add(book1);

@@ -110,7 +110,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldSkipDeletedBooks() throws Exception {
+    void handleRescanOptions_shouldSkipDeletedBooks() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        BookEntity book2 = createBookEntity(2L, "book2.pdf", BookFileType.PDF);
        book2.setDeleted(true);

@@ -132,7 +132,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldSkipNullBooks() throws Exception {
+    void handleRescanOptions_shouldSkipNullBooks() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        library.getBookEntities().add(book1);
        library.getBookEntities().add(null);

@@ -149,7 +149,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldContinue_whenMetadataExtractionReturnsNull() throws Exception {
+    void handleRescanOptions_shouldContinue_whenMetadataExtractionReturnsNull() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        BookEntity book2 = createBookEntity(2L, "book2.pdf", BookFileType.PDF);
        library.getBookEntities().add(book1);

@@ -169,7 +169,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldContinue_whenMetadataUpdateThrowsException() throws Exception {
+    void handleRescanOptions_shouldContinue_whenMetadataUpdateThrowsException() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        BookEntity book2 = createBookEntity(2L, "book2.pdf", BookFileType.PDF);
        library.getBookEntities().add(book1);

@@ -192,7 +192,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldCancel_whenTaskCancellationRequested() throws Exception {
+    void handleRescanOptions_shouldCancel_whenTaskCancellationRequested() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        BookEntity book2 = createBookEntity(2L, "book2.pdf", BookFileType.PDF);
        library.getBookEntities().add(book1);

@@ -211,7 +211,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldSendProgressNotifications() throws Exception {
+    void handleRescanOptions_shouldSendProgressNotifications() {
        BookEntity book1 = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        library.getBookEntities().add(book1);

@@ -250,7 +250,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldSetCorrectMetadataUpdateContext() throws Exception {
+    void handleRescanOptions_shouldSetCorrectMetadataUpdateContext() {
        BookEntity book = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        library.getBookEntities().add(book);

@@ -274,7 +274,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldHandleNullTaskId() throws Exception {
+    void handleRescanOptions_shouldHandleNullTaskId() {
        BookEntity book = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        library.getBookEntities().add(book);

@@ -292,7 +292,7 @@ class LibraryRescanHelperTest {
    }

    @Test
-    void handleRescanOptions_shouldContinue_whenNotificationFails() throws Exception {
+    void handleRescanOptions_shouldContinue_whenNotificationFails() {
        BookEntity book = createBookEntity(1L, "book1.epub", BookFileType.EPUB);
        library.getBookEntities().add(book);
@@ -63,11 +63,13 @@ class MetadataManagementServiceTest {
        String oldName = "Old Author";

        AuthorEntity oldAuthor = new AuthorEntity();
        oldAuthor.setId(1L);
        oldAuthor.setName(oldName);

        when(authorRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.empty());
        when(authorRepository.save(any(AuthorEntity.class))).thenAnswer(invocation -> {
            AuthorEntity a = invocation.getArgument(0);
            a.setId(2L);
            a.setName(a.getName());
            return a;
        });

@@ -99,10 +101,15 @@ class MetadataManagementServiceTest {
        String oldName = "Old Category";

        CategoryEntity oldCategory = new CategoryEntity();
        oldCategory.setId(1L);
        oldCategory.setName(oldName);

        when(categoryRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.empty());
-        when(categoryRepository.save(any(CategoryEntity.class))).thenAnswer(i -> i.getArgument(0));
+        when(categoryRepository.save(any(CategoryEntity.class))).thenAnswer(invocation -> {
+            CategoryEntity c = invocation.getArgument(0);
+            c.setId(2L);
+            return c;
+        });

        when(categoryRepository.findByNameIgnoreCase(oldName)).thenReturn(Optional.of(oldCategory));

@@ -149,10 +156,15 @@ class MetadataManagementServiceTest {
        String oldName = "Old Tag";

        TagEntity oldTag = new TagEntity();
        oldTag.setId(1L);
        oldTag.setName(oldName);

        when(tagRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.empty());
-        when(tagRepository.save(any(TagEntity.class))).thenAnswer(i -> i.getArgument(0));
+        when(tagRepository.save(any(TagEntity.class))).thenAnswer(invocation -> {
+            TagEntity t = invocation.getArgument(0);
+            t.setId(2L);
+            return t;
+        });
        when(tagRepository.findByNameIgnoreCase(oldName)).thenReturn(Optional.of(oldTag));

        BookMetadataEntity metadata = mock(BookMetadataEntity.class);

@@ -198,10 +210,15 @@ class MetadataManagementServiceTest {
        String oldName = "Old Mood";

        MoodEntity oldMood = new MoodEntity();
        oldMood.setId(1L);
        oldMood.setName(oldName);

        when(moodRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.empty());
-        when(moodRepository.save(any(MoodEntity.class))).thenAnswer(i -> i.getArgument(0));
+        when(moodRepository.save(any(MoodEntity.class))).thenAnswer(invocation -> {
+            MoodEntity m = invocation.getArgument(0);
+            m.setId(2L);
+            return m;
+        });
        when(moodRepository.findByNameIgnoreCase(oldName)).thenReturn(Optional.of(oldMood));

        BookMetadataEntity metadata = mock(BookMetadataEntity.class);

@@ -284,13 +301,16 @@ class MetadataManagementServiceTest {
        String old2 = "OldTag2";

        TagEntity oldTag1 = new TagEntity();
        oldTag1.setId(1L);
        oldTag1.setName(old1);
        TagEntity oldTag2 = new TagEntity();
        oldTag2.setId(2L);
        oldTag2.setName(old2);

        when(tagRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.empty());
-        when(tagRepository.save(any(TagEntity.class))).thenAnswer(i -> {
-            TagEntity t = i.getArgument(0);
+        when(tagRepository.save(any(TagEntity.class))).thenAnswer(invocation -> {
+            TagEntity t = invocation.getArgument(0);
            t.setId(3L);
            return t;
        });
        when(tagRepository.findByNameIgnoreCase(old1)).thenReturn(Optional.of(oldTag1));

@@ -320,8 +340,10 @@ class MetadataManagementServiceTest {
        String oldName = "OldCat";

        CategoryEntity target = new CategoryEntity();
        target.setId(1L);
        target.setName(targetName);
        CategoryEntity old = new CategoryEntity();
        old.setId(2L);
        old.setName(oldName);

        when(categoryRepository.findByNameIgnoreCase(targetName)).thenReturn(Optional.of(target));
@@ -75,16 +75,20 @@ class CbxMetadataWriterTest {

        Set<AuthorEntity> authors = new HashSet<>();
        AuthorEntity aliceAuthor = new AuthorEntity();
        aliceAuthor.setId(1L);
        aliceAuthor.setName("Alice");
        AuthorEntity bobAuthor = new AuthorEntity();
        bobAuthor.setId(2L);
        bobAuthor.setName("Bob");
        authors.add(aliceAuthor);
        authors.add(bobAuthor);
        meta.setAuthors(authors);
        Set<CategoryEntity> cats = new HashSet<>();
        CategoryEntity actionCat = new CategoryEntity();
        actionCat.setId(1L);
        actionCat.setName("action");
        CategoryEntity adventureCat = new CategoryEntity();
        adventureCat.setId(2L);
        adventureCat.setName("adventure");
        cats.add(actionCat);
        cats.add(adventureCat);
@@ -37,7 +37,7 @@ class MonitoringServiceTest {
    }

    @AfterEach
-    void teardown() throws Exception {
+    void teardown() {
        try {
            service.stopMonitoring();
        } catch (Exception ignored) {}
@@ -196,7 +196,7 @@ class FileUploadServiceTest {
    }

    @Test
-    void uploadFile_succeeds_and_processes() throws IOException {
+    void uploadFile_succeeds_and_processes() {
        byte[] data = "content".getBytes();
        MockMultipartFile file = new MockMultipartFile("file", "book.cbz", "application/octet-stream", data);

@@ -216,7 +216,7 @@ class FileUploadServiceTest {
    }

    @Test
-    void uploadAdditionalFile_successful_and_saves_entity() throws Exception {
+    void uploadAdditionalFile_successful_and_saves_entity() {
        long bookId = 5L;
        MockMultipartFile file = new MockMultipartFile("file", "add.pdf", "application/pdf", "payload".getBytes());
@@ -795,7 +795,7 @@ class FileServiceTest {
    }

    @Test
-    void mixedExistingAndNonExisting_deletesExisting() throws IOException {
+    void mixedExistingAndNonExisting_deletesExisting() throws Exception {
        BufferedImage image = createTestImage(100, 100);
        fileService.saveCoverImages(image, 20L);
@@ -78,7 +78,7 @@ class BookloreSyncTokenGeneratorTest {
    }

    @Test
-    void testFromBase64_withDot() throws Exception {
+    void testFromBase64_withDot() {
        String rawToken = "some.raw.token";

        BookloreSyncToken result = generator.fromBase64(rawToken);